Compare commits


1 Commit

Author: vikrantgupta25
SHA1: d4b2958e3a
Message: fix(apikey): remove last seen sql update for api-key middleware
Date: 2025-11-28 16:47:11 +05:30
90 changed files with 598 additions and 6215 deletions

View File

@@ -129,12 +129,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
 	return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
 }
 
-func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
-	return &authtypes.AuthNProviderInfo{
-		RelayStatePath: nil,
-	}
-}
-
 func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
 	if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
 		ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)

View File

@@ -99,14 +99,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*aut
 	return authtypes.NewCallbackIdentity("", email, authDomain.StorableAuthDomain().OrgID, state), nil
 }
 
-func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
-	state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()
-	return &authtypes.AuthNProviderInfo{
-		RelayStatePath: &state,
-	}
-}
-
 func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
 	certStore, err := a.getCertificateStore(authDomain)
 	if err != nil {

View File

@@ -1,5 +1,5 @@
 module.exports = {
-	ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
+	ignorePatterns: ['src/parser/*.ts'],
 	env: {
 		browser: true,
 		es2021: true,

View File

@@ -14,7 +14,7 @@
 		"jest": "jest",
 		"jest:coverage": "jest --coverage",
 		"jest:watch": "jest --watch",
-		"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
+		"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
 		"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
 		"commitlint": "commitlint --edit $1",
 		"test": "jest",

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>

Before: deleted image, 3.0 KiB

File diff suppressed because one or more lines are too long

Before: deleted image, 20 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>

Before: deleted image, 1.3 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>

Before: deleted image, 1.7 KiB

File diff suppressed because one or more lines are too long

Before: deleted image, 19 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>PydanticAI</title><path fill="#e72564" d="M13.223 22.86c-.605.83-1.844.83-2.448 0L5.74 15.944a1.514 1.514 0 0 1 .73-2.322l5.035-1.738c.32-.11.668-.11.988 0l5.035 1.738c.962.332 1.329 1.5.73 2.322zm-1.224-1.259 4.688-6.439-4.688-1.618-4.688 1.618L12 21.602z"/><path fill="#e723a0" d="M23.71 13.463c.604.832.221 2.01-.756 2.328l-8.133 2.652a1.514 1.514 0 0 1-1.983-1.412l-.097-5.326c-.006-.338.101-.67.305-.94l3.209-4.25a1.514 1.514 0 0 1 2.434.022l5.022 6.926zm-1.574.775L17.46 7.79l-2.988 3.958.09 4.959z"/><path fill="#e520e9" d="M18.016.591a1.514 1.514 0 0 1 1.98 1.44l.009 8.554a1.514 1.514 0 0 1-1.956 1.45l-5.095-1.554a1.5 1.5 0 0 1-.8-.58l-3.05-4.366a1.514 1.514 0 0 1 .774-2.308zm.25 1.738L10.69 4.783l2.841 4.065 4.744 1.446-.008-7.965z"/><path fill="#e520e9" d="M5.99.595a1.514 1.514 0 0 0-1.98 1.44L4 10.588a1.514 1.514 0 0 0 1.956 1.45l5.095-1.554c.323-.098.605-.303.799-.58l3.052-4.366a1.514 1.514 0 0 0-.775-2.308zm-.25 1.738 7.577 2.454-2.842 4.065-4.743 1.446.007-7.965z"/><path fill="#e723a0" d="M.29 13.461a1.514 1.514 0 0 0 .756 2.329l8.133 2.651a1.514 1.514 0 0 0 1.983-1.412l.097-5.325a1.5 1.5 0 0 0-.305-.94L7.745 6.513a1.514 1.514 0 0 0-2.434.023L.289 13.461zm1.574.776L6.54 7.788l2.988 3.959-.09 4.958z"/><path fill="#ff96d1" d="m16.942 17.751 1.316-1.806q.178-.248.245-.523l-2.63.858-1.627 2.235a1.5 1.5 0 0 0 .575-.072zm-4.196-5.78.033 1.842 1.742.602-.034-1.843-1.741-.6zm7.257-3.622-1.314-1.812a1.5 1.5 0 0 0-.419-.393l.003 2.767 1.624 2.24q.107-.261.108-.566zm-5.038 2.746-1.762-.537 1.11-1.471 1.762.537zm-2.961-1.41 1.056-1.51-1.056-1.51-1.056 1.51zM9.368 3.509c.145-.122.316-.219.51-.282l2.12-.686 2.13.69c.191.062.36.157.503.276l-2.634.853zm1.433 7.053L9.691 9.09l-1.762.537 1.11 1.47 1.762-.537zm-6.696.584L5.733 8.9l.003-2.763c-.16.1-.305.232-.425.398L4.003 8.339l-.002 2.25q.002.299.104.557m7.149.824-1.741.601-.034 1.843 1.742-.601zM9.75 18.513l-1.628-2.237-2.629-.857q.068.276.247.525l1.313 1.804 2.126.693c.192.062.385.085.571.072"/></svg>

Before: deleted image, 2.1 KiB

View File

@@ -1,50 +0,0 @@
/* eslint-disable @typescript-eslint/no-var-requires, import/no-dynamic-require, simple-import-sort/imports, simple-import-sort/exports */
const fs = require('fs');
const path = require('path');

// 1. Define paths
const packageJsonPath = path.resolve(__dirname, '../package.json');
const registryPath = path.resolve(
	__dirname,
	'../src/auto-import-registry.d.ts',
);

// 2. Read package.json
const packageJson = require(packageJsonPath);

// 3. Combine dependencies and devDependencies
const allDeps = {
	...packageJson.dependencies,
	...packageJson.devDependencies,
};

// 4. Filter for @signozhq packages
const signozPackages = Object.keys(allDeps).filter((dep) =>
	dep.startsWith('@signozhq/'),
);

// 5. Generate file content
const fileContent = `// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------
${signozPackages.map((pkg) => `import '${pkg}';`).join('\n')}
`;

// 6. Write the file
try {
	fs.writeFileSync(registryPath, fileContent);
	console.log(
		`✅ Auto-import registry updated with ${signozPackages.length} @signozhq packages.`,
	);
} catch (err) {
	console.error('❌ Failed to update auto-import registry:', err);
}

View File

@@ -1,23 +0,0 @@
// -------------------------------------------------------------------------
// AUTO-GENERATED FILE
// -------------------------------------------------------------------------
// This file is generated by scripts/update-registry.js automatically
// whenever you run 'yarn install' or 'npm install'.
//
// It forces VS Code to index these specific packages to fix auto-import
// performance issues in TypeScript 4.x.
//
// PR for reference: https://github.com/SigNoz/signoz/pull/9694
// -------------------------------------------------------------------------
import '@signozhq/badge';
import '@signozhq/button';
import '@signozhq/calendar';
import '@signozhq/callout';
import '@signozhq/design-tokens';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/table';
import '@signozhq/tooltip';

View File

@@ -1,6 +1,5 @@
 /* eslint-disable sonarjs/no-duplicate-string */
-import { PrecisionOptionsEnum } from '../types';
-import { getYAxisFormattedValue } from '../yAxisConfig';
+import { getYAxisFormattedValue, PrecisionOptionsEnum } from '../yAxisConfig';
 
 const testFullPrecisionGetYAxisFormattedValue = (
 	value: string,

View File

@@ -78,18 +78,3 @@ export interface ITimeRange {
 	minTime: number | null;
 	maxTime: number | null;
 }
-
-export const DEFAULT_SIGNIFICANT_DIGITS = 15;
-// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
-export const MAX_DECIMALS = 15;
-
-export enum PrecisionOptionsEnum {
-	ZERO = 0,
-	ONE = 1,
-	TWO = 2,
-	THREE = 3,
-	FOUR = 4,
-	FULL = 'full',
-}
-
-export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
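
A note on the 15-place cap being removed here (and re-added in yAxisConfig.ts further down): it lines up with IEEE-754 doubles, which carry only about 15-17 significant decimal digits, so anything past that is floating-point noise. A quick TypeScript illustration, not part of this diff:

// illustration only: decimals past ~15 places are floating-point noise
const sum = 0.1 + 0.2; // actually stored as 0.30000000000000004...
console.log(sum.toFixed(20)); // "0.30000000000000004441"
console.log(Number(sum.toFixed(15))); // 0.3 (capping at 15 places rounds the noise away)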

View File

@@ -16,12 +16,8 @@ import {
 } from './Plugin/IntersectionCursor';
 import {
 	CustomChartOptions,
-	DEFAULT_SIGNIFICANT_DIGITS,
 	GraphOnClickHandler,
 	IAxisTimeConfig,
-	MAX_DECIMALS,
-	PrecisionOption,
-	PrecisionOptionsEnum,
 	StaticLineProps,
 } from './types';
 import { getToolTipValue, getYAxisFormattedValue } from './yAxisConfig';
@@ -246,68 +242,3 @@ declare module 'chart.js' {
 		custom: TooltipPositionerFunction<ChartType>;
 	}
 }
-
-/**
- * Formats a number for display, preserving leading zeros after the decimal point
- * and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
- * It avoids scientific notation and removes unnecessary trailing zeros.
- *
- * @example
- * formatDecimalWithLeadingZeros(1.2345); // "1.2345"
- * formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
- * formatDecimalWithLeadingZeros(5.0); // "5"
- *
- * @param value The number to format.
- * @returns The formatted string.
- */
-export const formatDecimalWithLeadingZeros = (
-	value: number,
-	precision: PrecisionOption,
-): string => {
-	if (value === 0) {
-		return '0';
-	}
-
-	// Use toLocaleString to get a full decimal representation without scientific notation.
-	const numStr = value.toLocaleString('en-US', {
-		useGrouping: false,
-		maximumFractionDigits: 20,
-	});
-
-	const [integerPart, decimalPart = ''] = numStr.split('.');
-
-	// If there's no decimal part, the integer part is the result.
-	if (!decimalPart) {
-		return integerPart;
-	}
-
-	// Find the index of the first non-zero digit in the decimal part.
-	const firstNonZeroIndex = decimalPart.search(/[^0]/);
-
-	// If the decimal part consists only of zeros, return just the integer part.
-	if (firstNonZeroIndex === -1) {
-		return integerPart;
-	}
-
-	// Determine the number of decimals to keep: leading zeros + up to N significant digits.
-	const significantDigits =
-		precision === PrecisionOptionsEnum.FULL
-			? DEFAULT_SIGNIFICANT_DIGITS
-			: precision;
-	const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
-
-	// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
-	const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
-	const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
-
-	// If precision is 0, we drop the decimal part entirely.
-	if (precision === 0) {
-		return integerPart;
-	}
-
-	// Remove any trailing zeros from the result to keep it clean.
-	const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
-
-	// Return the integer part, or the integer and decimal parts combined.
-	return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
-};

View File

@@ -1,17 +1,86 @@
 /* eslint-disable sonarjs/cognitive-complexity */
 import { formattedValueToString, getValueFormat } from '@grafana/data';
 import * as Sentry from '@sentry/react';
-import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
-import { isUniversalUnit } from 'components/YAxisUnitSelector/utils';
 import { isNaN } from 'lodash-es';
 
-import { formatUniversalUnit } from '../YAxisUnitSelector/formatter';
-import {
-	DEFAULT_SIGNIFICANT_DIGITS,
-	PrecisionOption,
-	PrecisionOptionsEnum,
-} from './types';
-import { formatDecimalWithLeadingZeros } from './utils';
+const DEFAULT_SIGNIFICANT_DIGITS = 15;
+// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
+const MAX_DECIMALS = 15;
+
+export enum PrecisionOptionsEnum {
+	ZERO = 0,
+	ONE = 1,
+	TWO = 2,
+	THREE = 3,
+	FOUR = 4,
+	FULL = 'full',
+}
+
+export type PrecisionOption = 0 | 1 | 2 | 3 | 4 | PrecisionOptionsEnum.FULL;
+
+/**
+ * Formats a number for display, preserving leading zeros after the decimal point
+ * and showing up to DEFAULT_SIGNIFICANT_DIGITS digits after the first non-zero decimal digit.
+ * It avoids scientific notation and removes unnecessary trailing zeros.
+ *
+ * @example
+ * formatDecimalWithLeadingZeros(1.2345); // "1.2345"
+ * formatDecimalWithLeadingZeros(0.0012345); // "0.0012345"
+ * formatDecimalWithLeadingZeros(5.0); // "5"
+ *
+ * @param value The number to format.
+ * @returns The formatted string.
+ */
+const formatDecimalWithLeadingZeros = (
+	value: number,
+	precision: PrecisionOption,
+): string => {
+	if (value === 0) {
+		return '0';
+	}
+
+	// Use toLocaleString to get a full decimal representation without scientific notation.
+	const numStr = value.toLocaleString('en-US', {
+		useGrouping: false,
+		maximumFractionDigits: 20,
+	});
+
+	const [integerPart, decimalPart = ''] = numStr.split('.');
+
+	// If there's no decimal part, the integer part is the result.
+	if (!decimalPart) {
+		return integerPart;
+	}
+
+	// Find the index of the first non-zero digit in the decimal part.
+	const firstNonZeroIndex = decimalPart.search(/[^0]/);
+
+	// If the decimal part consists only of zeros, return just the integer part.
+	if (firstNonZeroIndex === -1) {
+		return integerPart;
+	}
+
+	// Determine the number of decimals to keep: leading zeros + up to N significant digits.
+	const significantDigits =
+		precision === PrecisionOptionsEnum.FULL
+			? DEFAULT_SIGNIFICANT_DIGITS
+			: precision;
+	const decimalsToKeep = firstNonZeroIndex + (significantDigits || 0);
+
+	// max decimals to keep should not exceed 15 decimal places to avoid floating point precision issues
+	const finalDecimalsToKeep = Math.min(decimalsToKeep, MAX_DECIMALS);
+	const trimmedDecimalPart = decimalPart.substring(0, finalDecimalsToKeep);
+
+	// If precision is 0, we drop the decimal part entirely.
+	if (precision === 0) {
+		return integerPart;
+	}
+
+	// Remove any trailing zeros from the result to keep it clean.
+	const finalDecimalPart = trimmedDecimalPart.replace(/0+$/, '');
+
+	// Return the integer part, or the integer and decimal parts combined.
+	return finalDecimalPart ? `${integerPart}.${finalDecimalPart}` : integerPart;
+};
 
 /**
  * Formats a Y-axis value based on a given format string.
@@ -57,17 +126,6 @@ export const getYAxisFormattedValue = (
 		return formatDecimalWithLeadingZeros(numValue, precision);
 	}
 
-	// Separate logic for universal units
-	if (format && isUniversalUnit(format)) {
-		const decimals = computeDecimals();
-		return formatUniversalUnit(
-			numValue,
-			format as UniversalYAxisUnit,
-			precision,
-			decimals,
-		);
-	}
 	const formatter = getValueFormat(format);
 	const formattedValue = formatter(numValue, computeDecimals(), undefined);
 	if (formattedValue.text && formattedValue.text.includes('.')) {
@@ -76,7 +134,6 @@
 				precision,
 			);
 		}
-
 		return formattedValueToString(formattedValue);
 	} catch (error) {
 		Sentry.captureEvent({

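The constants and the helper re-homed above are module-private in their new location (only the enum, the type alias, and getYAxisFormattedValue are exported), so the intended behaviour is easiest to see through expected outputs. An illustrative TypeScript sketch derived from the JSDoc and the logic above, not part of this diff:

// expected outputs of the private formatDecimalWithLeadingZeros helper
formatDecimalWithLeadingZeros(0.0012345, PrecisionOptionsEnum.FULL); // "0.0012345" (leading zeros preserved)
formatDecimalWithLeadingZeros(0.0012345, 2); // "0.0012" (2 significant digits after the leading zeros)
formatDecimalWithLeadingZeros(5.0, PrecisionOptionsEnum.FULL); // "5" (no decimal part survives)
formatDecimalWithLeadingZeros(1.2345, 0); // "1" (precision 0 drops the decimal part)
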
View File

@@ -3,9 +3,9 @@ import './styles.scss';
 import { Select } from 'antd';
 import { DefaultOptionType } from 'antd/es/select';
 
-import { UniversalYAxisUnitMappings } from './constants';
+import { UniversalYAxisUnitMappings, Y_AXIS_CATEGORIES } from './constants';
 import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
-import { getYAxisCategories, mapMetricUnitToUniversalUnit } from './utils';
+import { mapMetricUnitToUniversalUnit } from './utils';
 
 function YAxisUnitSelector({
 	value,
@@ -13,7 +13,6 @@ function YAxisUnitSelector({
 	placeholder = 'Please select a unit',
 	loading = false,
 	'data-testid': dataTestId,
-	source,
 }: YAxisUnitSelectorProps): JSX.Element {
 
 	const universalUnit = mapMetricUnitToUniversalUnit(value);
@@ -38,8 +37,6 @@ function YAxisUnitSelector({
 		return aliases.some((alias) => alias.toLowerCase().includes(search));
 	};
 
-	const categories = getYAxisCategories(source);
-
 	return (
 		<div className="y-axis-unit-selector-component">
 			<Select
@@ -51,7 +48,7 @@ function YAxisUnitSelector({
 				loading={loading}
 				data-testid={dataTestId}
 			>
-				{categories.map((category) => (
+				{Y_AXIS_CATEGORIES.map((category) => (
 					<Select.OptGroup key={category.name} label={category.name}>
 						{category.units.map((unit) => (
 							<Select.Option key={unit.id} value={unit.id}>

View File

@@ -1,6 +1,5 @@
 import { fireEvent, render, screen } from '@testing-library/react';
 
-import { YAxisSource } from '../types';
 import YAxisUnitSelector from '../YAxisUnitSelector';
 
 describe('YAxisUnitSelector', () => {
@@ -11,13 +10,7 @@ describe('YAxisUnitSelector', () => {
 	});
 
 	it('renders with default placeholder', () => {
-		render(
-			<YAxisUnitSelector
-				value=""
-				onChange={mockOnChange}
-				source={YAxisSource.ALERTS}
-			/>,
-		);
+		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
 
 		expect(screen.getByText('Please select a unit')).toBeInTheDocument();
 	});
@@ -27,20 +20,13 @@
 				value=""
 				onChange={mockOnChange}
 				placeholder="Custom placeholder"
-				source={YAxisSource.ALERTS}
 			/>,
 		);
 
 		expect(screen.queryByText('Custom placeholder')).toBeInTheDocument();
 	});
 
 	it('calls onChange when a value is selected', () => {
-		render(
-			<YAxisUnitSelector
-				value=""
-				onChange={mockOnChange}
-				source={YAxisSource.ALERTS}
-			/>,
-		);
+		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
 
 		const select = screen.getByRole('combobox');
 		fireEvent.mouseDown(select);
@@ -55,30 +41,18 @@
 	});
 
 	it('filters options based on search input', () => {
-		render(
-			<YAxisUnitSelector
-				value=""
-				onChange={mockOnChange}
-				source={YAxisSource.ALERTS}
-			/>,
-		);
+		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
 
 		const select = screen.getByRole('combobox');
 		fireEvent.mouseDown(select);
 
 		const input = screen.getByRole('combobox');
-		fireEvent.change(input, { target: { value: 'bytes/sec' } });
+		fireEvent.change(input, { target: { value: 'byte' } });
 
 		expect(screen.getByText('Bytes/sec')).toBeInTheDocument();
 	});
 
 	it('shows all categories and their units', () => {
-		render(
-			<YAxisUnitSelector
-				value=""
-				onChange={mockOnChange}
-				source={YAxisSource.ALERTS}
-			/>,
-		);
+		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
 
 		const select = screen.getByRole('combobox');
 		fireEvent.mouseDown(select);

View File

@@ -1,951 +0,0 @@
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
import {
AdditionalLabelsMappingForGrafanaUnits,
UniversalUnitToGrafanaUnit,
} from '../constants';
import { formatUniversalUnit } from '../formatter';
describe('formatUniversalUnit', () => {
describe('Time', () => {
test.each([
// Days
[31, UniversalYAxisUnit.DAYS, '4.43 weeks'],
[7, UniversalYAxisUnit.DAYS, '1 week'],
[6, UniversalYAxisUnit.DAYS, '6 days'],
[1, UniversalYAxisUnit.DAYS, '1 day'],
// Hours
[25, UniversalYAxisUnit.HOURS, '1.04 days'],
[23, UniversalYAxisUnit.HOURS, '23 hour'],
[1, UniversalYAxisUnit.HOURS, '1 hour'],
// Minutes
[61, UniversalYAxisUnit.MINUTES, '1.02 hours'],
[60, UniversalYAxisUnit.MINUTES, '1 hour'],
[45, UniversalYAxisUnit.MINUTES, '45 min'],
[1, UniversalYAxisUnit.MINUTES, '1 min'],
// Seconds
[100000, UniversalYAxisUnit.SECONDS, '1.16 days'],
[10065, UniversalYAxisUnit.SECONDS, '2.8 hours'],
[61, UniversalYAxisUnit.SECONDS, '1.02 mins'],
[60, UniversalYAxisUnit.SECONDS, '1 min'],
[12, UniversalYAxisUnit.SECONDS, '12 s'],
[1, UniversalYAxisUnit.SECONDS, '1 s'],
// Milliseconds
[1006, UniversalYAxisUnit.MILLISECONDS, '1.01 s'],
[10000000, UniversalYAxisUnit.MILLISECONDS, '2.78 hours'],
[100006, UniversalYAxisUnit.MICROSECONDS, '100 ms'],
[1, UniversalYAxisUnit.MICROSECONDS, '1 µs'],
[12, UniversalYAxisUnit.MICROSECONDS, '12 µs'],
// Nanoseconds
[10000000000, UniversalYAxisUnit.NANOSECONDS, '10 s'],
[10000006, UniversalYAxisUnit.NANOSECONDS, '10 ms'],
[1006, UniversalYAxisUnit.NANOSECONDS, '1.01 µs'],
[1, UniversalYAxisUnit.NANOSECONDS, '1 ns'],
[12, UniversalYAxisUnit.NANOSECONDS, '12 ns'],
])('formats time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Data', () => {
test.each([
// Bytes
[864, UniversalYAxisUnit.BYTES, '864 B'],
[1000, UniversalYAxisUnit.BYTES, '1 kB'],
[1020, UniversalYAxisUnit.BYTES, '1.02 kB'],
// Kilobytes
[512, UniversalYAxisUnit.KILOBYTES, '512 kB'],
[1000, UniversalYAxisUnit.KILOBYTES, '1 MB'],
[1023, UniversalYAxisUnit.KILOBYTES, '1.02 MB'],
// Megabytes
[777, UniversalYAxisUnit.MEGABYTES, '777 MB'],
[1000, UniversalYAxisUnit.MEGABYTES, '1 GB'],
[1023, UniversalYAxisUnit.MEGABYTES, '1.02 GB'],
// Gigabytes
[432, UniversalYAxisUnit.GIGABYTES, '432 GB'],
[1000, UniversalYAxisUnit.GIGABYTES, '1 TB'],
[1023, UniversalYAxisUnit.GIGABYTES, '1.02 TB'],
// Terabytes
[678, UniversalYAxisUnit.TERABYTES, '678 TB'],
[1000, UniversalYAxisUnit.TERABYTES, '1 PB'],
[1023, UniversalYAxisUnit.TERABYTES, '1.02 PB'],
// Petabytes
[845, UniversalYAxisUnit.PETABYTES, '845 PB'],
[1000, UniversalYAxisUnit.PETABYTES, '1 EB'],
[1023, UniversalYAxisUnit.PETABYTES, '1.02 EB'],
// Exabytes
[921, UniversalYAxisUnit.EXABYTES, '921 EB'],
[1000, UniversalYAxisUnit.EXABYTES, '1 ZB'],
[1023, UniversalYAxisUnit.EXABYTES, '1.02 ZB'],
// Zettabytes
[921, UniversalYAxisUnit.ZETTABYTES, '921 ZB'],
[1000, UniversalYAxisUnit.ZETTABYTES, '1 YB'],
[1023, UniversalYAxisUnit.ZETTABYTES, '1.02 YB'],
// Yottabytes
[921, UniversalYAxisUnit.YOTTABYTES, '921 YB'],
[1000, UniversalYAxisUnit.YOTTABYTES, '1000 YB'],
[1023, UniversalYAxisUnit.YOTTABYTES, '1023 YB'],
])('formats data value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Data rate', () => {
test.each([
// Bytes/second
[864, UniversalYAxisUnit.BYTES_SECOND, '864 B/s'],
[1000, UniversalYAxisUnit.BYTES_SECOND, '1 kB/s'],
[1020, UniversalYAxisUnit.BYTES_SECOND, '1.02 kB/s'],
// Kilobytes/second
[512, UniversalYAxisUnit.KILOBYTES_SECOND, '512 kB/s'],
[1000, UniversalYAxisUnit.KILOBYTES_SECOND, '1 MB/s'],
[1023, UniversalYAxisUnit.KILOBYTES_SECOND, '1.02 MB/s'],
// Megabytes/second
[777, UniversalYAxisUnit.MEGABYTES_SECOND, '777 MB/s'],
[1000, UniversalYAxisUnit.MEGABYTES_SECOND, '1 GB/s'],
[1023, UniversalYAxisUnit.MEGABYTES_SECOND, '1.02 GB/s'],
// Gigabytes/second
[432, UniversalYAxisUnit.GIGABYTES_SECOND, '432 GB/s'],
[1000, UniversalYAxisUnit.GIGABYTES_SECOND, '1 TB/s'],
[1023, UniversalYAxisUnit.GIGABYTES_SECOND, '1.02 TB/s'],
// Terabytes/second
[678, UniversalYAxisUnit.TERABYTES_SECOND, '678 TB/s'],
[1000, UniversalYAxisUnit.TERABYTES_SECOND, '1 PB/s'],
[1023, UniversalYAxisUnit.TERABYTES_SECOND, '1.02 PB/s'],
// Petabytes/second
[845, UniversalYAxisUnit.PETABYTES_SECOND, '845 PB/s'],
[1000, UniversalYAxisUnit.PETABYTES_SECOND, '1 EB/s'],
[1023, UniversalYAxisUnit.PETABYTES_SECOND, '1.02 EB/s'],
// Exabytes/second
[921, UniversalYAxisUnit.EXABYTES_SECOND, '921 EB/s'],
[1000, UniversalYAxisUnit.EXABYTES_SECOND, '1 ZB/s'],
[1023, UniversalYAxisUnit.EXABYTES_SECOND, '1.02 ZB/s'],
// Zettabytes/second
[921, UniversalYAxisUnit.ZETTABYTES_SECOND, '921 ZB/s'],
[1000, UniversalYAxisUnit.ZETTABYTES_SECOND, '1 YB/s'],
[1023, UniversalYAxisUnit.ZETTABYTES_SECOND, '1.02 YB/s'],
// Yottabytes/second
[921, UniversalYAxisUnit.YOTTABYTES_SECOND, '921 YB/s'],
[1000, UniversalYAxisUnit.YOTTABYTES_SECOND, '1000 YB/s'],
[1023, UniversalYAxisUnit.YOTTABYTES_SECOND, '1023 YB/s'],
])('formats data value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Bit', () => {
test.each([
// Bits
[1, UniversalYAxisUnit.BITS, '1 b'],
[250, UniversalYAxisUnit.BITS, '250 b'],
[1000, UniversalYAxisUnit.BITS, '1 kb'],
[1023, UniversalYAxisUnit.BITS, '1.02 kb'],
// Kilobits
[0.5, UniversalYAxisUnit.KILOBITS, '500 b'],
[375, UniversalYAxisUnit.KILOBITS, '375 kb'],
[1000, UniversalYAxisUnit.KILOBITS, '1 Mb'],
[1023, UniversalYAxisUnit.KILOBITS, '1.02 Mb'],
// Megabits
[0.5, UniversalYAxisUnit.MEGABITS, '500 kb'],
[640, UniversalYAxisUnit.MEGABITS, '640 Mb'],
[1000, UniversalYAxisUnit.MEGABITS, '1 Gb'],
[1023, UniversalYAxisUnit.MEGABITS, '1.02 Gb'],
// Gigabits
[0.5, UniversalYAxisUnit.GIGABITS, '500 Mb'],
[875, UniversalYAxisUnit.GIGABITS, '875 Gb'],
[1000, UniversalYAxisUnit.GIGABITS, '1 Tb'],
[1023, UniversalYAxisUnit.GIGABITS, '1.02 Tb'],
// Terabits
[0.5, UniversalYAxisUnit.TERABITS, '500 Gb'],
[430, UniversalYAxisUnit.TERABITS, '430 Tb'],
[1000, UniversalYAxisUnit.TERABITS, '1 Pb'],
[1023, UniversalYAxisUnit.TERABITS, '1.02 Pb'],
// Petabits
[0.5, UniversalYAxisUnit.PETABITS, '500 Tb'],
[590, UniversalYAxisUnit.PETABITS, '590 Pb'],
[1000, UniversalYAxisUnit.PETABITS, '1 Eb'],
[1023, UniversalYAxisUnit.PETABITS, '1.02 Eb'],
// Exabits
[0.5, UniversalYAxisUnit.EXABITS, '500 Pb'],
[715, UniversalYAxisUnit.EXABITS, '715 Eb'],
[1000, UniversalYAxisUnit.EXABITS, '1 Zb'],
[1023, UniversalYAxisUnit.EXABITS, '1.02 Zb'],
// Zettabits
[0.5, UniversalYAxisUnit.ZETTABITS, '500 Eb'],
[840, UniversalYAxisUnit.ZETTABITS, '840 Zb'],
[1000, UniversalYAxisUnit.ZETTABITS, '1 Yb'],
[1023, UniversalYAxisUnit.ZETTABITS, '1.02 Yb'],
// Yottabits
[0.5, UniversalYAxisUnit.YOTTABITS, '500 Zb'],
[965, UniversalYAxisUnit.YOTTABITS, '965 Yb'],
[1000, UniversalYAxisUnit.YOTTABITS, '1000 Yb'],
[1023, UniversalYAxisUnit.YOTTABITS, '1023 Yb'],
])('formats bit value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Bit rate', () => {
test.each([
// Bits/second
[512, UniversalYAxisUnit.BITS_SECOND, '512 b/s'],
[1000, UniversalYAxisUnit.BITS_SECOND, '1 kb/s'],
[1023, UniversalYAxisUnit.BITS_SECOND, '1.02 kb/s'],
// Kilobits/second
[0.5, UniversalYAxisUnit.KILOBITS_SECOND, '500 b/s'],
[512, UniversalYAxisUnit.KILOBITS_SECOND, '512 kb/s'],
[1000, UniversalYAxisUnit.KILOBITS_SECOND, '1 Mb/s'],
[1023, UniversalYAxisUnit.KILOBITS_SECOND, '1.02 Mb/s'],
// Megabits/second
[0.5, UniversalYAxisUnit.MEGABITS_SECOND, '500 kb/s'],
[512, UniversalYAxisUnit.MEGABITS_SECOND, '512 Mb/s'],
[1000, UniversalYAxisUnit.MEGABITS_SECOND, '1 Gb/s'],
[1023, UniversalYAxisUnit.MEGABITS_SECOND, '1.02 Gb/s'],
// Gigabits/second
[0.5, UniversalYAxisUnit.GIGABITS_SECOND, '500 Mb/s'],
[512, UniversalYAxisUnit.GIGABITS_SECOND, '512 Gb/s'],
[1000, UniversalYAxisUnit.GIGABITS_SECOND, '1 Tb/s'],
[1023, UniversalYAxisUnit.GIGABITS_SECOND, '1.02 Tb/s'],
// Terabits/second
[0.5, UniversalYAxisUnit.TERABITS_SECOND, '500 Gb/s'],
[512, UniversalYAxisUnit.TERABITS_SECOND, '512 Tb/s'],
[1000, UniversalYAxisUnit.TERABITS_SECOND, '1 Pb/s'],
[1023, UniversalYAxisUnit.TERABITS_SECOND, '1.02 Pb/s'],
// Petabits/second
[0.5, UniversalYAxisUnit.PETABITS_SECOND, '500 Tb/s'],
[512, UniversalYAxisUnit.PETABITS_SECOND, '512 Pb/s'],
[1000, UniversalYAxisUnit.PETABITS_SECOND, '1 Eb/s'],
[1023, UniversalYAxisUnit.PETABITS_SECOND, '1.02 Eb/s'],
// Exabits/second
[512, UniversalYAxisUnit.EXABITS_SECOND, '512 Eb/s'],
[1000, UniversalYAxisUnit.EXABITS_SECOND, '1 Zb/s'],
[1023, UniversalYAxisUnit.EXABITS_SECOND, '1.02 Zb/s'],
// Zettabits/second
[0.5, UniversalYAxisUnit.ZETTABITS_SECOND, '500 Eb/s'],
[512, UniversalYAxisUnit.ZETTABITS_SECOND, '512 Zb/s'],
[1000, UniversalYAxisUnit.ZETTABITS_SECOND, '1 Yb/s'],
[1023, UniversalYAxisUnit.ZETTABITS_SECOND, '1.02 Yb/s'],
// Yottabits/second
[0.5, UniversalYAxisUnit.YOTTABITS_SECOND, '500 Zb/s'],
[512, UniversalYAxisUnit.YOTTABITS_SECOND, '512 Yb/s'],
[1000, UniversalYAxisUnit.YOTTABITS_SECOND, '1000 Yb/s'],
[1023, UniversalYAxisUnit.YOTTABITS_SECOND, '1023 Yb/s'],
])('formats bit rate value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Count', () => {
test.each([
[100, UniversalYAxisUnit.COUNT, '100'],
[875, UniversalYAxisUnit.COUNT, '875'],
[1000, UniversalYAxisUnit.COUNT, '1 K'],
[2500, UniversalYAxisUnit.COUNT, '2.5 K'],
[10000, UniversalYAxisUnit.COUNT, '10 K'],
[25000, UniversalYAxisUnit.COUNT, '25 K'],
[100000, UniversalYAxisUnit.COUNT, '100 K'],
[1000000, UniversalYAxisUnit.COUNT, '1 Mil'],
[10000000, UniversalYAxisUnit.COUNT, '10 Mil'],
[100000000, UniversalYAxisUnit.COUNT, '100 Mil'],
[1000000000, UniversalYAxisUnit.COUNT, '1 Bil'],
[10000000000, UniversalYAxisUnit.COUNT, '10 Bil'],
[100000000000, UniversalYAxisUnit.COUNT, '100 Bil'],
[1000000000000, UniversalYAxisUnit.COUNT, '1 Tri'],
[10000000000000, UniversalYAxisUnit.COUNT, '10 Tri'],
])('formats count value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
test.each([
[100, UniversalYAxisUnit.COUNT_SECOND, '100 c/s'],
[875, UniversalYAxisUnit.COUNT_SECOND, '875 c/s'],
[1000, UniversalYAxisUnit.COUNT_SECOND, '1K c/s'],
[2500, UniversalYAxisUnit.COUNT_SECOND, '2.5K c/s'],
[10000, UniversalYAxisUnit.COUNT_SECOND, '10K c/s'],
[25000, UniversalYAxisUnit.COUNT_SECOND, '25K c/s'],
])('formats count per time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
test.each([
[100, UniversalYAxisUnit.COUNT_MINUTE, '100 c/m'],
[875, UniversalYAxisUnit.COUNT_MINUTE, '875 c/m'],
[1000, UniversalYAxisUnit.COUNT_MINUTE, '1K c/m'],
[2500, UniversalYAxisUnit.COUNT_MINUTE, '2.5K c/m'],
[10000, UniversalYAxisUnit.COUNT_MINUTE, '10K c/m'],
[25000, UniversalYAxisUnit.COUNT_MINUTE, '25K c/m'],
])('formats count per time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Operations units', () => {
test.each([
[780, UniversalYAxisUnit.OPS_SECOND, '780 ops/s'],
[1000, UniversalYAxisUnit.OPS_SECOND, '1K ops/s'],
[520, UniversalYAxisUnit.OPS_MINUTE, '520 ops/m'],
[1000, UniversalYAxisUnit.OPS_MINUTE, '1K ops/m'],
[2500, UniversalYAxisUnit.OPS_MINUTE, '2.5K ops/m'],
[10000, UniversalYAxisUnit.OPS_MINUTE, '10K ops/m'],
[25000, UniversalYAxisUnit.OPS_MINUTE, '25K ops/m'],
])(
'formats operations per time value %s %s as %s',
(value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
},
);
});
describe('Request units', () => {
test.each([
[615, UniversalYAxisUnit.REQUESTS_SECOND, '615 req/s'],
[1000, UniversalYAxisUnit.REQUESTS_SECOND, '1K req/s'],
[480, UniversalYAxisUnit.REQUESTS_MINUTE, '480 req/m'],
[1000, UniversalYAxisUnit.REQUESTS_MINUTE, '1K req/m'],
[2500, UniversalYAxisUnit.REQUESTS_MINUTE, '2.5K req/m'],
[10000, UniversalYAxisUnit.REQUESTS_MINUTE, '10K req/m'],
[25000, UniversalYAxisUnit.REQUESTS_MINUTE, '25K req/m'],
])('formats requests per time value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Read/Write units', () => {
test.each([
[505, UniversalYAxisUnit.READS_SECOND, '505 rd/s'],
[1000, UniversalYAxisUnit.READS_SECOND, '1K rd/s'],
[610, UniversalYAxisUnit.WRITES_SECOND, '610 wr/s'],
[1000, UniversalYAxisUnit.WRITES_SECOND, '1K wr/s'],
[715, UniversalYAxisUnit.READS_MINUTE, '715 rd/m'],
[1000, UniversalYAxisUnit.READS_MINUTE, '1K rd/m'],
[2500, UniversalYAxisUnit.READS_MINUTE, '2.5K rd/m'],
[10000, UniversalYAxisUnit.READS_MINUTE, '10K rd/m'],
[25000, UniversalYAxisUnit.READS_MINUTE, '25K rd/m'],
[830, UniversalYAxisUnit.WRITES_MINUTE, '830 wr/m'],
[1000, UniversalYAxisUnit.WRITES_MINUTE, '1K wr/m'],
[2500, UniversalYAxisUnit.WRITES_MINUTE, '2.5K wr/m'],
[10000, UniversalYAxisUnit.WRITES_MINUTE, '10K wr/m'],
[25000, UniversalYAxisUnit.WRITES_MINUTE, '25K wr/m'],
])(
'formats reads and writes per time value %s %s as %s',
(value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
},
);
});
describe('IO Operations units', () => {
test.each([
[777, UniversalYAxisUnit.IOOPS_SECOND, '777 io/s'],
[1000, UniversalYAxisUnit.IOOPS_SECOND, '1K io/s'],
[2500, UniversalYAxisUnit.IOOPS_SECOND, '2.5K io/s'],
[10000, UniversalYAxisUnit.IOOPS_SECOND, '10K io/s'],
[25000, UniversalYAxisUnit.IOOPS_SECOND, '25K io/s'],
])('formats IOPS value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Percent units', () => {
it('formats percent as-is', () => {
expect(formatUniversalUnit(456, UniversalYAxisUnit.PERCENT)).toBe('456%');
});
it('multiplies percent_unit by 100', () => {
expect(formatUniversalUnit(9, UniversalYAxisUnit.PERCENT_UNIT)).toBe('900%');
});
});
describe('None unit', () => {
it('formats as plain number', () => {
expect(formatUniversalUnit(742, UniversalYAxisUnit.NONE)).toBe('742');
});
});
describe('Time (additional)', () => {
test.each([
[900, UniversalYAxisUnit.DURATION_MS, '900 milliseconds'],
[1000, UniversalYAxisUnit.DURATION_MS, '1 second'],
[1, UniversalYAxisUnit.DURATION_MS, '1 millisecond'],
[900, UniversalYAxisUnit.DURATION_S, '15 minutes'],
[1, UniversalYAxisUnit.DURATION_HMS, '00:00:01'],
[90005, UniversalYAxisUnit.DURATION_HMS, '25:00:05'],
[90005, UniversalYAxisUnit.DURATION_DHMS, '1 d 01:00:05'],
[900, UniversalYAxisUnit.TIMETICKS, '9 s'],
[1, UniversalYAxisUnit.TIMETICKS, '10 ms'],
[900, UniversalYAxisUnit.CLOCK_MS, '900ms'],
[1, UniversalYAxisUnit.CLOCK_MS, '001ms'],
[1, UniversalYAxisUnit.CLOCK_S, '01s:000ms'],
[900, UniversalYAxisUnit.CLOCK_S, '15m:00s:000ms'],
[900, UniversalYAxisUnit.TIME_HERTZ, '900 Hz'],
[1000, UniversalYAxisUnit.TIME_HERTZ, '1 kHz'],
[1000000, UniversalYAxisUnit.TIME_HERTZ, '1 MHz'],
[1000000000, UniversalYAxisUnit.TIME_HERTZ, '1 GHz'],
[1008, UniversalYAxisUnit.TIME_HERTZ, '1.01 kHz'],
])('formats duration value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Data (IEC/Binary)', () => {
test.each([
// Bytes
[900, UniversalYAxisUnit.BYTES_IEC, '900 B'],
[1024, UniversalYAxisUnit.BYTES_IEC, '1 KiB'],
[1080, UniversalYAxisUnit.BYTES_IEC, '1.05 KiB'],
// Kibibytes
[900, UniversalYAxisUnit.KIBIBYTES, '900 KiB'],
[1024, UniversalYAxisUnit.KIBIBYTES, '1 MiB'],
[1080, UniversalYAxisUnit.KIBIBYTES, '1.05 MiB'],
// Mebibytes
[900, UniversalYAxisUnit.MEBIBYTES, '900 MiB'],
[1024, UniversalYAxisUnit.MEBIBYTES, '1 GiB'],
[1080, UniversalYAxisUnit.MEBIBYTES, '1.05 GiB'],
// Gibibytes
[900, UniversalYAxisUnit.GIBIBYTES, '900 GiB'],
[1024, UniversalYAxisUnit.GIBIBYTES, '1 TiB'],
[1080, UniversalYAxisUnit.GIBIBYTES, '1.05 TiB'],
// Tebibytes
[900, UniversalYAxisUnit.TEBIBYTES, '900 TiB'],
[1024, UniversalYAxisUnit.TEBIBYTES, '1 PiB'],
[1080, UniversalYAxisUnit.TEBIBYTES, '1.05 PiB'],
// Pebibytes
[900, UniversalYAxisUnit.PEBIBYTES, '900 PiB'],
[1024, UniversalYAxisUnit.PEBIBYTES, '1 EiB'],
[1080, UniversalYAxisUnit.PEBIBYTES, '1.05 EiB'],
// Exbibytes
[900, UniversalYAxisUnit.EXBIBYTES, '900 EiB'],
[1024, UniversalYAxisUnit.EXBIBYTES, '1 ZiB'],
[1080, UniversalYAxisUnit.EXBIBYTES, '1.05 ZiB'],
// Zebibytes
[900, UniversalYAxisUnit.ZEBIBYTES, '900 ZiB'],
[1024, UniversalYAxisUnit.ZEBIBYTES, '1 YiB'],
[1080, UniversalYAxisUnit.ZEBIBYTES, '1.05 YiB'],
// Yobibytes
[900, UniversalYAxisUnit.YOBIBYTES, '900 YiB'],
[1024, UniversalYAxisUnit.YOBIBYTES, '1024 YiB'],
])('formats IEC bytes value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Data Rate (IEC/Binary)', () => {
test.each([
// Kibibytes/second
[900, UniversalYAxisUnit.KIBIBYTES_SECOND, '900 KiB/s'],
[1024, UniversalYAxisUnit.KIBIBYTES_SECOND, '1 MiB/s'],
[1080, UniversalYAxisUnit.KIBIBYTES_SECOND, '1.05 MiB/s'],
// Mebibytes/second
[900, UniversalYAxisUnit.MEBIBYTES_SECOND, '900 MiB/s'],
[1024, UniversalYAxisUnit.MEBIBYTES_SECOND, '1 GiB/s'],
[1080, UniversalYAxisUnit.MEBIBYTES_SECOND, '1.05 GiB/s'],
// Gibibytes/second
[900, UniversalYAxisUnit.GIBIBYTES_SECOND, '900 GiB/s'],
[1024, UniversalYAxisUnit.GIBIBYTES_SECOND, '1 TiB/s'],
[1080, UniversalYAxisUnit.GIBIBYTES_SECOND, '1.05 TiB/s'],
// Tebibytes/second
[900, UniversalYAxisUnit.TEBIBYTES_SECOND, '900 TiB/s'],
[1024, UniversalYAxisUnit.TEBIBYTES_SECOND, '1 PiB/s'],
[1080, UniversalYAxisUnit.TEBIBYTES_SECOND, '1.05 PiB/s'],
// Pebibytes/second
[900, UniversalYAxisUnit.PEBIBYTES_SECOND, '900 PiB/s'],
[1024, UniversalYAxisUnit.PEBIBYTES_SECOND, '1 EiB/s'],
[1080, UniversalYAxisUnit.PEBIBYTES_SECOND, '1.05 EiB/s'],
// Exbibytes/second
[900, UniversalYAxisUnit.EXBIBYTES_SECOND, '900 EiB/s'],
[1024, UniversalYAxisUnit.EXBIBYTES_SECOND, '1 ZiB/s'],
[1080, UniversalYAxisUnit.EXBIBYTES_SECOND, '1.05 ZiB/s'],
// Zebibytes/second
[900, UniversalYAxisUnit.ZEBIBYTES_SECOND, '900 ZiB/s'],
[1024, UniversalYAxisUnit.ZEBIBYTES_SECOND, '1 YiB/s'],
[1080, UniversalYAxisUnit.ZEBIBYTES_SECOND, '1.05 YiB/s'],
// Yobibytes/second
[900, UniversalYAxisUnit.YOBIBYTES_SECOND, '900 YiB/s'],
[1024, UniversalYAxisUnit.YOBIBYTES_SECOND, '1024 YiB/s'],
[1080, UniversalYAxisUnit.YOBIBYTES_SECOND, '1080 YiB/s'],
// Packets/second
[900, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '900 p/s'],
[1000, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '1 kp/s'],
[1080, UniversalYAxisUnit.DATA_RATE_PACKETS_PER_SECOND, '1.08 kp/s'],
])('formats IEC byte rates value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Bits (IEC)', () => {
test.each([
[900, UniversalYAxisUnit.BITS_IEC, '900 b'],
[1024, UniversalYAxisUnit.BITS_IEC, '1 Kib'],
[1080, UniversalYAxisUnit.BITS_IEC, '1.05 Kib'],
])('formats IEC bits value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Hash Rate', () => {
test.each([
// Hashes/second
[412, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '412 H/s'],
[1000, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1 kH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_HASHES_PER_SECOND, '1.02 kH/s'],
// Kilohashes/second
[412, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '412 kH/s'],
[1000, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '1 MH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_KILOHASHES_PER_SECOND, '1.02 MH/s'],
// Megahashes/second
[412, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '412 MH/s'],
[1000, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '1 GH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_MEGAHASHES_PER_SECOND, '1.02 GH/s'],
// Gigahashes/second
[412, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '412 GH/s'],
[1000, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '1 TH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_GIGAHASHES_PER_SECOND, '1.02 TH/s'],
// Terahashes/second
[412, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '412 TH/s'],
[1000, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '1 PH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_TERAHASHES_PER_SECOND, '1.02 PH/s'],
// Petahashes/second
[412, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '412 PH/s'],
[1000, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '1 EH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_PETAHASHES_PER_SECOND, '1.02 EH/s'],
// Exahashes/second
[412, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '412 EH/s'],
[1000, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '1 ZH/s'],
[1023, UniversalYAxisUnit.HASH_RATE_EXAHASHES_PER_SECOND, '1.02 ZH/s'],
])('formats hash rate value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Miscellaneous', () => {
test.each([
[742, UniversalYAxisUnit.MISC_STRING, '742'],
[688, UniversalYAxisUnit.MISC_SHORT, '688'],
[555, UniversalYAxisUnit.MISC_HUMIDITY, '555 %H'],
[812, UniversalYAxisUnit.MISC_DECIBEL, '812 dB'],
[1024, UniversalYAxisUnit.MISC_HEXADECIMAL, '400'],
[1024, UniversalYAxisUnit.MISC_HEXADECIMAL_0X, '0x400'],
[900, UniversalYAxisUnit.MISC_SCIENTIFIC_NOTATION, '9e+2'],
[678, UniversalYAxisUnit.MISC_LOCALE_FORMAT, '678'],
[444, UniversalYAxisUnit.MISC_PIXELS, '444 px'],
])('formats miscellaneous value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Acceleration', () => {
test.each([
[
875,
UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED,
'875 m/sec²',
],
[640, UniversalYAxisUnit.ACCELERATION_FEET_PER_SECOND_SQUARED, '640 f/sec²'],
[512, UniversalYAxisUnit.ACCELERATION_G_UNIT, '512 g'],
[
2500,
UniversalYAxisUnit.ACCELERATION_METERS_PER_SECOND_SQUARED,
'2500 m/sec²',
],
])('formats acceleration value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Angular', () => {
test.each([
[415, UniversalYAxisUnit.ANGULAR_DEGREE, '415 °'],
[732, UniversalYAxisUnit.ANGULAR_RADIAN, '732 rad'],
[128, UniversalYAxisUnit.ANGULAR_GRADIAN, '128 grad'],
[560, UniversalYAxisUnit.ANGULAR_ARC_MINUTE, '560 arcmin'],
[945, UniversalYAxisUnit.ANGULAR_ARC_SECOND, '945 arcsec'],
])('formats angular value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Area', () => {
test.each([
[210, UniversalYAxisUnit.AREA_SQUARE_METERS, '210 m²'],
[152, UniversalYAxisUnit.AREA_SQUARE_FEET, '152 ft²'],
[64, UniversalYAxisUnit.AREA_SQUARE_MILES, '64 mi²'],
])('formats area value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('FLOPs', () => {
test.each([
// FLOPS
[150, UniversalYAxisUnit.FLOPS_FLOPS, '150 FLOPS'],
[1000, UniversalYAxisUnit.FLOPS_FLOPS, '1 kFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_FLOPS, '1.08 kFLOPS'],
// MFLOPS
[275, UniversalYAxisUnit.FLOPS_MFLOPS, '275 MFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_MFLOPS, '1 GFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_MFLOPS, '1.08 GFLOPS'],
// GFLOPS
[640, UniversalYAxisUnit.FLOPS_GFLOPS, '640 GFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_GFLOPS, '1 TFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_GFLOPS, '1.08 TFLOPS'],
// TFLOPS
[875, UniversalYAxisUnit.FLOPS_TFLOPS, '875 TFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_TFLOPS, '1 PFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_TFLOPS, '1.08 PFLOPS'],
// PFLOPS
[430, UniversalYAxisUnit.FLOPS_PFLOPS, '430 PFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_PFLOPS, '1 EFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_PFLOPS, '1.08 EFLOPS'],
// EFLOPS
[590, UniversalYAxisUnit.FLOPS_EFLOPS, '590 EFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_EFLOPS, '1 ZFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_EFLOPS, '1.08 ZFLOPS'],
// ZFLOPS
[715, UniversalYAxisUnit.FLOPS_ZFLOPS, '715 ZFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_ZFLOPS, '1 YFLOPS'],
[1080, UniversalYAxisUnit.FLOPS_ZFLOPS, '1.08 YFLOPS'],
// YFLOPS
[840, UniversalYAxisUnit.FLOPS_YFLOPS, '840 YFLOPS'],
[1000, UniversalYAxisUnit.FLOPS_YFLOPS, '1000 YFLOPS'],
])('formats FLOPs value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Concentration', () => {
test.each([
[415, UniversalYAxisUnit.CONCENTRATION_PPM, '415 ppm'],
[1000, UniversalYAxisUnit.CONCENTRATION_PPM, '1000 ppm'],
[732, UniversalYAxisUnit.CONCENTRATION_PPB, '732 ppb'],
[1000, UniversalYAxisUnit.CONCENTRATION_PPB, '1000 ppb'],
[128, UniversalYAxisUnit.CONCENTRATION_NG_M3, '128 ng/m³'],
[1000, UniversalYAxisUnit.CONCENTRATION_NG_M3, '1000 ng/m³'],
[560, UniversalYAxisUnit.CONCENTRATION_NG_NORMAL_CUBIC_METER, '560 ng/Nm³'],
[
1000,
UniversalYAxisUnit.CONCENTRATION_NG_NORMAL_CUBIC_METER,
'1000 ng/Nm³',
],
[945, UniversalYAxisUnit.CONCENTRATION_UG_M3, '945 μg/m³'],
[1000, UniversalYAxisUnit.CONCENTRATION_UG_M3, '1000 μg/m³'],
[210, UniversalYAxisUnit.CONCENTRATION_UG_NORMAL_CUBIC_METER, '210 μg/Nm³'],
[
1000,
UniversalYAxisUnit.CONCENTRATION_UG_NORMAL_CUBIC_METER,
'1000 μg/Nm³',
],
[152, UniversalYAxisUnit.CONCENTRATION_MG_M3, '152 mg/m³'],
[64, UniversalYAxisUnit.CONCENTRATION_MG_NORMAL_CUBIC_METER, '64 mg/Nm³'],
[508, UniversalYAxisUnit.CONCENTRATION_G_M3, '508 g/m³'],
[1000, UniversalYAxisUnit.CONCENTRATION_G_M3, '1000 g/m³'],
[377, UniversalYAxisUnit.CONCENTRATION_G_NORMAL_CUBIC_METER, '377 g/Nm³'],
[1000, UniversalYAxisUnit.CONCENTRATION_G_NORMAL_CUBIC_METER, '1000 g/Nm³'],
[286, UniversalYAxisUnit.CONCENTRATION_MG_PER_DL, '286 mg/dL'],
[1000, UniversalYAxisUnit.CONCENTRATION_MG_PER_DL, '1000 mg/dL'],
[675, UniversalYAxisUnit.CONCENTRATION_MMOL_PER_L, '675 mmol/L'],
[1000, UniversalYAxisUnit.CONCENTRATION_MMOL_PER_L, '1000 mmol/L'],
])('formats concentration value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Currency', () => {
test.each([
[812, UniversalYAxisUnit.CURRENCY_USD, '$812'],
[645, UniversalYAxisUnit.CURRENCY_GBP, '£645'],
[731, UniversalYAxisUnit.CURRENCY_EUR, '€731'],
[508, UniversalYAxisUnit.CURRENCY_JPY, '¥508'],
[963, UniversalYAxisUnit.CURRENCY_RUB, '₽963'],
[447, UniversalYAxisUnit.CURRENCY_UAH, '₴447'],
[592, UniversalYAxisUnit.CURRENCY_BRL, 'R$592'],
[375, UniversalYAxisUnit.CURRENCY_DKK, '375kr'],
[418, UniversalYAxisUnit.CURRENCY_ISK, '418kr'],
[536, UniversalYAxisUnit.CURRENCY_NOK, '536kr'],
[689, UniversalYAxisUnit.CURRENCY_SEK, '689kr'],
[724, UniversalYAxisUnit.CURRENCY_CZK, 'czk724'],
[381, UniversalYAxisUnit.CURRENCY_CHF, 'CHF381'],
[267, UniversalYAxisUnit.CURRENCY_PLN, 'PLN267'],
[154, UniversalYAxisUnit.CURRENCY_BTC, '฿154'],
[999, UniversalYAxisUnit.CURRENCY_MBTC, 'mBTC999'],
[423, UniversalYAxisUnit.CURRENCY_UBTC, 'μBTC423'],
[611, UniversalYAxisUnit.CURRENCY_ZAR, 'R611'],
[782, UniversalYAxisUnit.CURRENCY_INR, '₹782'],
[834, UniversalYAxisUnit.CURRENCY_KRW, '₩834'],
[455, UniversalYAxisUnit.CURRENCY_IDR, 'Rp455'],
[978, UniversalYAxisUnit.CURRENCY_PHP, 'PHP978'],
[366, UniversalYAxisUnit.CURRENCY_VND, '366đ'],
])('formats currency value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Datetime', () => {
it('formats datetime units', () => {
expect(formatUniversalUnit(900, UniversalYAxisUnit.DATETIME_FROM_NOW)).toBe(
'56 years ago',
);
});
});
describe('Power/Electrical', () => {
test.each([
[715, UniversalYAxisUnit.POWER_WATT, '715 W'],
[1000, UniversalYAxisUnit.POWER_WATT, '1 kW'],
[1080, UniversalYAxisUnit.POWER_WATT, '1.08 kW'],
[438, UniversalYAxisUnit.POWER_KILOWATT, '438 kW'],
[1000, UniversalYAxisUnit.POWER_KILOWATT, '1 MW'],
[1080, UniversalYAxisUnit.POWER_KILOWATT, '1.08 MW'],
[582, UniversalYAxisUnit.POWER_MEGAWATT, '582 MW'],
[1000, UniversalYAxisUnit.POWER_MEGAWATT, '1 GW'],
[1080, UniversalYAxisUnit.POWER_MEGAWATT, '1.08 GW'],
[267, UniversalYAxisUnit.POWER_GIGAWATT, '267 GW'],
[853, UniversalYAxisUnit.POWER_MILLIWATT, '853 mW'],
[693, UniversalYAxisUnit.POWER_WATT_PER_SQUARE_METER, '693 W/m²'],
[544, UniversalYAxisUnit.POWER_VOLT_AMPERE, '544 VA'],
[812, UniversalYAxisUnit.POWER_KILOVOLT_AMPERE, '812 kVA'],
[478, UniversalYAxisUnit.POWER_VOLT_AMPERE_REACTIVE, '478 VAr'],
[365, UniversalYAxisUnit.POWER_KILOVOLT_AMPERE_REACTIVE, '365 kVAr'],
[629, UniversalYAxisUnit.POWER_WATT_HOUR, '629 Wh'],
[471, UniversalYAxisUnit.POWER_WATT_HOUR_PER_KG, '471 Wh/kg'],
[557, UniversalYAxisUnit.POWER_KILOWATT_HOUR, '557 kWh'],
[389, UniversalYAxisUnit.POWER_KILOWATT_MINUTE, '389 kW-Min'],
[642, UniversalYAxisUnit.POWER_AMPERE_HOUR, '642 Ah'],
[731, UniversalYAxisUnit.POWER_KILOAMPERE_HOUR, '731 kAh'],
[815, UniversalYAxisUnit.POWER_MILLIAMPERE_HOUR, '815 mAh'],
[963, UniversalYAxisUnit.POWER_JOULE, '963 J'],
[506, UniversalYAxisUnit.POWER_ELECTRON_VOLT, '506 eV'],
[298, UniversalYAxisUnit.POWER_AMPERE, '298 A'],
[654, UniversalYAxisUnit.POWER_KILOAMPERE, '654 kA'],
[187, UniversalYAxisUnit.POWER_MILLIAMPERE, '187 mA'],
[472, UniversalYAxisUnit.POWER_VOLT, '472 V'],
[538, UniversalYAxisUnit.POWER_KILOVOLT, '538 kV'],
[226, UniversalYAxisUnit.POWER_MILLIVOLT, '226 mV'],
[592, UniversalYAxisUnit.POWER_DECIBEL_MILLIWATT, '592 dBm'],
[333, UniversalYAxisUnit.POWER_OHM, '333 Ω'],
[447, UniversalYAxisUnit.POWER_KILOOHM, '447 kΩ'],
[781, UniversalYAxisUnit.POWER_MEGAOHM, '781 MΩ'],
[650, UniversalYAxisUnit.POWER_FARAD, '650 F'],
[512, UniversalYAxisUnit.POWER_MICROFARAD, '512 µF'],
[478, UniversalYAxisUnit.POWER_NANOFARAD, '478 nF'],
[341, UniversalYAxisUnit.POWER_PICOFARAD, '341 pF'],
[129, UniversalYAxisUnit.POWER_FEMTOFARAD, '129 fF'],
[904, UniversalYAxisUnit.POWER_HENRY, '904 H'],
[1000, UniversalYAxisUnit.POWER_HENRY, '1 kH'],
[275, UniversalYAxisUnit.POWER_MILLIHENRY, '275 mH'],
[618, UniversalYAxisUnit.POWER_MICROHENRY, '618 µH'],
[1000, UniversalYAxisUnit.POWER_MICROHENRY, '1 mH'],
[1080, UniversalYAxisUnit.POWER_MICROHENRY, '1.08 mH'],
[459, UniversalYAxisUnit.POWER_LUMENS, '459 Lm'],
[1000, UniversalYAxisUnit.POWER_LUMENS, '1 kLm'],
[1080, UniversalYAxisUnit.POWER_LUMENS, '1.08 kLm'],
])('formats power value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Flow', () => {
test.each([
[512, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '512 gpm'],
[1000, UniversalYAxisUnit.FLOW_GALLONS_PER_MINUTE, '1000 gpm'],
[678, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '678 cms'],
[1000, UniversalYAxisUnit.FLOW_CUBIC_METERS_PER_SECOND, '1000 cms'],
[245, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_SECOND, '245 cfs'],
[389, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_MINUTE, '389 cfm'],
[1000, UniversalYAxisUnit.FLOW_CUBIC_FEET_PER_MINUTE, '1000 cfm'],
[731, UniversalYAxisUnit.FLOW_LITERS_PER_HOUR, '731 L/h'],
[1000, UniversalYAxisUnit.FLOW_LITERS_PER_HOUR, '1000 L/h'],
[864, UniversalYAxisUnit.FLOW_LITERS_PER_MINUTE, '864 L/min'],
[1000, UniversalYAxisUnit.FLOW_LITERS_PER_MINUTE, '1000 L/min'],
[150, UniversalYAxisUnit.FLOW_MILLILITERS_PER_MINUTE, '150 mL/min'],
[1000, UniversalYAxisUnit.FLOW_MILLILITERS_PER_MINUTE, '1000 mL/min'],
[947, UniversalYAxisUnit.FLOW_LUX, '947 lux'],
[1000, UniversalYAxisUnit.FLOW_LUX, '1000 lux'],
])('formats flow value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Force', () => {
test.each([
[845, UniversalYAxisUnit.FORCE_NEWTON_METERS, '845 Nm'],
[1000, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1 kNm'],
[1080, UniversalYAxisUnit.FORCE_NEWTON_METERS, '1.08 kNm'],
[268, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '268 kNm'],
[1000, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '1 MNm'],
[1080, UniversalYAxisUnit.FORCE_KILONEWTON_METERS, '1.08 MNm'],
[593, UniversalYAxisUnit.FORCE_NEWTONS, '593 N'],
[1000, UniversalYAxisUnit.FORCE_KILONEWTONS, '1 MN'],
[1080, UniversalYAxisUnit.FORCE_KILONEWTONS, '1.08 MN'],
])('formats force value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Mass', () => {
test.each([
[120, UniversalYAxisUnit.MASS_MILLIGRAM, '120 mg'],
[120000, UniversalYAxisUnit.MASS_MILLIGRAM, '120 g'],
[987, UniversalYAxisUnit.MASS_GRAM, '987 g'],
[1020, UniversalYAxisUnit.MASS_GRAM, '1.02 kg'],
[456, UniversalYAxisUnit.MASS_POUND, '456 lb'],
[321, UniversalYAxisUnit.MASS_KILOGRAM, '321 kg'],
[654, UniversalYAxisUnit.MASS_METRIC_TON, '654 t'],
])('formats mass value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Length', () => {
test.each([
[88, UniversalYAxisUnit.LENGTH_MILLIMETER, '88 mm'],
[100, UniversalYAxisUnit.LENGTH_MILLIMETER, '100 mm'],
[1000, UniversalYAxisUnit.LENGTH_MILLIMETER, '1 m'],
[177, UniversalYAxisUnit.LENGTH_INCH, '177 in'],
[266, UniversalYAxisUnit.LENGTH_FOOT, '266 ft'],
[355, UniversalYAxisUnit.LENGTH_METER, '355 m'],
[355000, UniversalYAxisUnit.LENGTH_METER, '355 km'],
[444, UniversalYAxisUnit.LENGTH_KILOMETER, '444 km'],
[533, UniversalYAxisUnit.LENGTH_MILE, '533 mi'],
])('formats length value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Pressure', () => {
test.each([
[45, UniversalYAxisUnit.PRESSURE_MILLIBAR, '45 mbar'],
[1013, UniversalYAxisUnit.PRESSURE_MILLIBAR, '1.01 bar'],
[27, UniversalYAxisUnit.PRESSURE_BAR, '27 bar'],
[62, UniversalYAxisUnit.PRESSURE_KILOBAR, '62 kbar'],
[845, UniversalYAxisUnit.PRESSURE_PASCAL, '845 Pa'],
[540, UniversalYAxisUnit.PRESSURE_HECTOPASCAL, '540 hPa'],
[378, UniversalYAxisUnit.PRESSURE_KILOPASCAL, '378 kPa'],
[29, UniversalYAxisUnit.PRESSURE_INCHES_HG, '29 "Hg'],
[65, UniversalYAxisUnit.PRESSURE_PSI, '65psi'],
])('formats pressure value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Radiation', () => {
test.each([
[452, UniversalYAxisUnit.RADIATION_BECQUEREL, '452 Bq'],
[37, UniversalYAxisUnit.RADIATION_CURIE, '37 Ci'],
[128, UniversalYAxisUnit.RADIATION_GRAY, '128 Gy'],
[512, UniversalYAxisUnit.RADIATION_RAD, '512 rad'],
[256, UniversalYAxisUnit.RADIATION_SIEVERT, '256 Sv'],
[640, UniversalYAxisUnit.RADIATION_MILLISIEVERT, '640 mSv'],
[875, UniversalYAxisUnit.RADIATION_MICROSIEVERT, '875 µSv'],
[875000, UniversalYAxisUnit.RADIATION_MICROSIEVERT, '875 mSv'],
[92, UniversalYAxisUnit.RADIATION_REM, '92 rem'],
[715, UniversalYAxisUnit.RADIATION_EXPOSURE_C_PER_KG, '715 C/kg'],
[833, UniversalYAxisUnit.RADIATION_ROENTGEN, '833 R'],
[468, UniversalYAxisUnit.RADIATION_SIEVERT_PER_HOUR, '468 Sv/h'],
[590, UniversalYAxisUnit.RADIATION_MILLISIEVERT_PER_HOUR, '590 mSv/h'],
[712, UniversalYAxisUnit.RADIATION_MICROSIEVERT_PER_HOUR, '712 µSv/h'],
])('formats radiation value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Rotation Speed', () => {
test.each([
[345, UniversalYAxisUnit.ROTATION_SPEED_REVOLUTIONS_PER_MINUTE, '345 rpm'],
[789, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 Hz'],
[789000, UniversalYAxisUnit.ROTATION_SPEED_HERTZ, '789 kHz'],
[213, UniversalYAxisUnit.ROTATION_SPEED_RADIANS_PER_SECOND, '213 rad/s'],
[654, UniversalYAxisUnit.ROTATION_SPEED_DEGREES_PER_SECOND, '654 °/s'],
])('formats rotation speed value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Temperature', () => {
test.each([
[37, UniversalYAxisUnit.TEMPERATURE_CELSIUS, '37 °C'],
[451, UniversalYAxisUnit.TEMPERATURE_FAHRENHEIT, '451 °F'],
[310, UniversalYAxisUnit.TEMPERATURE_KELVIN, '310 K'],
])('formats temperature value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Velocity', () => {
test.each([
[900, UniversalYAxisUnit.VELOCITY_METERS_PER_SECOND, '900 m/s'],
[456, UniversalYAxisUnit.VELOCITY_KILOMETERS_PER_HOUR, '456 km/h'],
[789, UniversalYAxisUnit.VELOCITY_MILES_PER_HOUR, '789 mph'],
[222, UniversalYAxisUnit.VELOCITY_KNOT, '222 kn'],
])('formats velocity value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Volume', () => {
test.each([
[1200, UniversalYAxisUnit.VOLUME_MILLILITER, '1.2 L'],
[9000000, UniversalYAxisUnit.VOLUME_MILLILITER, '9 kL'],
[9, UniversalYAxisUnit.VOLUME_LITER, '9 L'],
[9000, UniversalYAxisUnit.VOLUME_LITER, '9 kL'],
[9000000, UniversalYAxisUnit.VOLUME_LITER, '9 ML'],
[9000000000, UniversalYAxisUnit.VOLUME_LITER, '9 GL'],
[9000000000000, UniversalYAxisUnit.VOLUME_LITER, '9 TL'],
[9000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9 PL'],
[9010000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.01 EL'],
[9020000000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.02 ZL'],
[9030000000000000000000000, UniversalYAxisUnit.VOLUME_LITER, '9.03 YL'],
[900, UniversalYAxisUnit.VOLUME_CUBIC_METER, '900 m³'],
[
9000000000000000000000000000000,
UniversalYAxisUnit.VOLUME_CUBIC_METER,
'9e+30 m³',
],
[900, UniversalYAxisUnit.VOLUME_NORMAL_CUBIC_METER, '900 Nm³'],
[
9000000000000000000000000000000,
UniversalYAxisUnit.VOLUME_NORMAL_CUBIC_METER,
'9e+30 Nm³',
],
[900, UniversalYAxisUnit.VOLUME_CUBIC_DECIMETER, '900 dm³'],
[
9000000000000000000000000000000,
UniversalYAxisUnit.VOLUME_CUBIC_DECIMETER,
'9e+30 dm³',
],
[900, UniversalYAxisUnit.VOLUME_GALLON, '900 gal'],
[
9000000000000000000000000000000,
UniversalYAxisUnit.VOLUME_GALLON,
'9e+30 gal',
],
])('formats volume value %s %s as %s', (value, unit, expected) => {
expect(formatUniversalUnit(value, unit)).toBe(expected);
});
});
describe('Boolean', () => {
it('formats boolean units', () => {
expect(formatUniversalUnit(1, UniversalYAxisUnit.TRUE_FALSE)).toBe('True');
expect(formatUniversalUnit(1, UniversalYAxisUnit.YES_NO)).toBe('Yes');
expect(formatUniversalUnit(1, UniversalYAxisUnit.ON_OFF)).toBe('On');
});
});
});
describe('Mapping Validator', () => {
it('validates that all units have a mapping', () => {
// Every universal unit must either map 1:1 to a Grafana unit in UniversalUnitToGrafanaUnit or have an entry in AdditionalLabelsMappingForGrafanaUnits
const units = Object.values(UniversalYAxisUnit);
expect(
units.every((unit) => {
const hasBaseMapping = unit in UniversalUnitToGrafanaUnit;
const hasAdditionalMapping = unit in AdditionalLabelsMappingForGrafanaUnits;
const hasMapping = hasBaseMapping || hasAdditionalMapping;
if (!hasMapping) {
throw new Error(`Unit ${unit} does not have a mapping`);
}
return hasMapping;
}),
).toBe(true);
});
});
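For orientation, a minimal sketch of the two lookup tables this validator checks; the entries below are illustrative, not the real constants from components/YAxisUnitSelector/constants:

// Hypothetical entries; the real tables are much larger
const UniversalUnitToGrafanaUnit = {
[UniversalYAxisUnit.SECONDS]: 's', // 1:1 Grafana format id, handled by getValueFormat
};
const AdditionalLabelsMappingForGrafanaUnits = {
[UniversalYAxisUnit.POWER_LUMENS]: 'Lm', // label appended by the custom scaler
};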

View File

@@ -1,8 +1,6 @@
import { UniversalYAxisUnit } from '../types';
import {
getUniversalNameFromMetricUnit,
mapMetricUnitToUniversalUnit,
mergeCategories,
} from '../utils';
describe('YAxisUnitSelector utils', () => {
@@ -38,43 +36,4 @@ describe('YAxisUnitSelector utils', () => {
expect(getUniversalNameFromMetricUnit('s')).toBe('Seconds (s)');
});
});
describe('mergeCategories', () => {
it('merges categories correctly', () => {
const categories1 = [
{
name: 'Data',
units: [
{ name: 'bytes', id: UniversalYAxisUnit.BYTES },
{ name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
],
},
];
const categories2 = [
{
name: 'Data',
units: [{ name: 'bits', id: UniversalYAxisUnit.BITS }],
},
{
name: 'Time',
units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
},
];
const mergedCategories = mergeCategories(categories1, categories2);
expect(mergedCategories).toEqual([
{
name: 'Data',
units: [
{ name: 'bytes', id: UniversalYAxisUnit.BYTES },
{ name: 'kilobytes', id: UniversalYAxisUnit.KILOBYTES },
{ name: 'bits', id: UniversalYAxisUnit.BITS },
],
},
{
name: 'Time',
units: [{ name: 'seconds', id: UniversalYAxisUnit.SECONDS }],
},
]);
});
});
});

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -1,90 +0,0 @@
import { formattedValueToString, getValueFormat } from '@grafana/data';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import { formatDecimalWithLeadingZeros } from 'components/Graph/utils';
import {
AdditionalLabelsMappingForGrafanaUnits,
CUSTOM_SCALING_FAMILIES,
UniversalUnitToGrafanaUnit,
} from 'components/YAxisUnitSelector/constants';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';
function scaleValue(
value: number,
unit: UniversalYAxisUnit,
family: UniversalYAxisUnit[],
factor: number,
): { value: number; label: string } {
let idx = family.indexOf(unit);
// If the unit is not in the family, return the value unscaled with the unit's additional label (if any)
if (idx === -1) {
return { value, label: AdditionalLabelsMappingForGrafanaUnits[unit] || '' };
}
// Scale the value up or down to the nearest unit in the family
let scaled = value;
// Scale up
while (scaled >= factor && idx < family.length - 1) {
scaled /= factor;
idx += 1;
}
// Scale down
while (scaled < 1 && idx > 0) {
scaled *= factor;
idx -= 1;
}
// Return the scaled value and the label of the nearest unit in the family
return {
value: scaled,
label: AdditionalLabelsMappingForGrafanaUnits[family[idx]] || '',
};
}
export function formatUniversalUnit(
value: number,
unit: UniversalYAxisUnit,
precision: PrecisionOption = PrecisionOptionsEnum.FULL,
decimals: number | undefined = undefined,
): string {
// Check if this unit belongs to a family that needs custom scaling
const family = CUSTOM_SCALING_FAMILIES.find((family) =>
family.units.includes(unit),
);
if (family) {
const scaled = scaleValue(value, unit, family.units, family.scaleFactor);
const formatter = getValueFormat(scaled.label);
const formatted = formatter(scaled.value, decimals);
if (formatted.text && formatted.text.includes('.')) {
formatted.text = formatDecimalWithLeadingZeros(
parseFloat(formatted.text),
precision,
);
}
return `${formatted.text} ${scaled.label}`;
}
// Use Grafana formatting with custom label mappings
const grafanaFormat = UniversalUnitToGrafanaUnit[unit];
if (grafanaFormat) {
const formatter = getValueFormat(grafanaFormat);
const formatted = formatter(value, decimals);
if (formatted.text && formatted.text.includes('.')) {
formatted.text = formatDecimalWithLeadingZeros(
parseFloat(formatted.text),
precision,
);
}
return formattedValueToString(formatted);
}
// Fallback to short format for other units
const formatter = getValueFormat('short');
const formatted = formatter(value, decimals);
if (formatted.text && formatted.text.includes('.')) {
formatted.text = formatDecimalWithLeadingZeros(
parseFloat(formatted.text),
precision,
);
}
return `${formatted.text} ${unit}`;
}
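A quick usage sketch of this (now deleted) formatter; the expected outputs are taken from the tests earlier in this diff:

// Custom-scaled family: microhenries climb to millihenries past the scale factor
formatUniversalUnit(1080, UniversalYAxisUnit.POWER_MICROHENRY); // '1.08 mH'
// 1:1 Grafana mapping: the value is handed to getValueFormat and stringified
formatUniversalUnit(451, UniversalYAxisUnit.TEMPERATURE_FAHRENHEIT); // '451 °F'
// Boolean units render as labels rather than numbers
formatUniversalUnit(1, UniversalYAxisUnit.TRUE_FALSE); // 'True'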

View File

@@ -5,11 +5,11 @@ export interface YAxisUnitSelectorProps {
loading?: boolean;
disabled?: boolean;
'data-testid'?: string;
source: YAxisSource;
}
export enum UniversalYAxisUnit {
// Time
WEEKS = 'wk',
DAYS = 'd',
HOURS = 'h',
MINUTES = 'min',
@@ -17,14 +17,6 @@ export enum UniversalYAxisUnit {
MICROSECONDS = 'us',
MILLISECONDS = 'ms',
NANOSECONDS = 'ns',
DURATION_MS = 'dtdurationms',
DURATION_S = 'dtdurations',
DURATION_HMS = 'dthms',
DURATION_DHMS = 'dtdhms',
TIMETICKS = 'timeticks',
CLOCK_MS = 'clockms',
CLOCK_S = 'clocks',
TIME_HERTZ = 'hertz',
// Data
BYTES = 'By',
@@ -37,17 +29,6 @@ export enum UniversalYAxisUnit {
ZETTABYTES = 'ZBy',
YOTTABYTES = 'YBy',
// Binary (IEC) Data
BYTES_IEC = 'bytes',
KIBIBYTES = 'KiBy',
MEBIBYTES = 'MiBy',
GIBIBYTES = 'GiBy',
TEBIBYTES = 'TiBy',
PEBIBYTES = 'PiBy',
EXBIBYTES = 'EiBy',
ZEBIBYTES = 'ZiBy',
YOBIBYTES = 'YiBy',
// Data Rate
BYTES_SECOND = 'By/s',
KILOBYTES_SECOND = 'kBy/s',
@@ -58,21 +39,9 @@ export enum UniversalYAxisUnit {
EXABYTES_SECOND = 'EBy/s',
ZETTABYTES_SECOND = 'ZBy/s',
YOTTABYTES_SECOND = 'YBy/s',
DATA_RATE_PACKETS_PER_SECOND = 'pps',
// Binary (IEC) Data Rate
KIBIBYTES_SECOND = 'KiBy/s',
MEBIBYTES_SECOND = 'MiBy/s',
GIBIBYTES_SECOND = 'GiBy/s',
TEBIBYTES_SECOND = 'TiBy/s',
PEBIBYTES_SECOND = 'PiBy/s',
EXBIBYTES_SECOND = 'EiBy/s',
ZEBIBYTES_SECOND = 'ZiBy/s',
YOBIBYTES_SECOND = 'YiBy/s',
// Bits
BITS = 'bit',
BITS_IEC = 'bits',
KILOBITS = 'kbit',
MEGABITS = 'Mbit',
GIGABITS = 'Gbit',
@@ -93,16 +62,6 @@ export enum UniversalYAxisUnit {
ZETTABITS_SECOND = 'Zbit/s',
YOTTABITS_SECOND = 'Ybit/s',
// Binary (IEC) Bit Rate
KIBIBITS_SECOND = 'Kibit/s',
MEBIBITS_SECOND = 'Mibit/s',
GIBIBITS_SECOND = 'Gibit/s',
TEBIBITS_SECOND = 'Tibit/s',
PEBIBITS_SECOND = 'Pibit/s',
EXBIBITS_SECOND = 'Eibit/s',
ZEBIBITS_SECOND = 'Zibit/s',
YOBIBITS_SECOND = 'Yibit/s',
// Count
COUNT = '{count}',
COUNT_SECOND = '{count}/s',
@@ -128,231 +87,7 @@ export enum UniversalYAxisUnit {
// Percent
PERCENT = '%',
PERCENT_UNIT = 'percentunit',
// Boolean
TRUE_FALSE = '{bool}',
YES_NO = '{bool_yn}',
ON_OFF = 'bool_on_off',
// None
NONE = '1',
// Hash rate
HASH_RATE_HASHES_PER_SECOND = 'Hs',
HASH_RATE_KILOHASHES_PER_SECOND = 'KHs',
HASH_RATE_MEGAHASHES_PER_SECOND = 'MHs',
HASH_RATE_GIGAHASHES_PER_SECOND = 'GHs',
HASH_RATE_TERAHASHES_PER_SECOND = 'THs',
HASH_RATE_PETAHASHES_PER_SECOND = 'PHs',
HASH_RATE_EXAHASHES_PER_SECOND = 'EHs',
// Miscellaneous
MISC_STRING = 'string',
MISC_SHORT = 'short',
MISC_HUMIDITY = 'humidity',
MISC_DECIBEL = 'dB',
MISC_HEXADECIMAL = 'hex',
MISC_HEXADECIMAL_0X = 'hex0x',
MISC_SCIENTIFIC_NOTATION = 'sci',
MISC_LOCALE_FORMAT = 'locale',
MISC_PIXELS = 'pixel',
// Acceleration
ACCELERATION_METERS_PER_SECOND_SQUARED = 'accMS2',
ACCELERATION_FEET_PER_SECOND_SQUARED = 'accFS2',
ACCELERATION_G_UNIT = 'accG',
// Angular
ANGULAR_DEGREE = 'degree',
ANGULAR_RADIAN = 'radian',
ANGULAR_GRADIAN = 'grad',
ANGULAR_ARC_MINUTE = 'arcmin',
ANGULAR_ARC_SECOND = 'arcsec',
// Area
AREA_SQUARE_METERS = 'areaM2',
AREA_SQUARE_FEET = 'areaF2',
AREA_SQUARE_MILES = 'areaMI2',
// FLOPs
FLOPS_FLOPS = 'flops',
FLOPS_MFLOPS = 'mflops',
FLOPS_GFLOPS = 'gflops',
FLOPS_TFLOPS = 'tflops',
FLOPS_PFLOPS = 'pflops',
FLOPS_EFLOPS = 'eflops',
FLOPS_ZFLOPS = 'zflops',
FLOPS_YFLOPS = 'yflops',
// Concentration
CONCENTRATION_PPM = 'ppm',
CONCENTRATION_PPB = 'conppb',
CONCENTRATION_NG_M3 = 'conngm3',
CONCENTRATION_NG_NORMAL_CUBIC_METER = 'conngNm3',
CONCENTRATION_UG_M3 = 'conμgm3',
CONCENTRATION_UG_NORMAL_CUBIC_METER = 'conμgNm3',
CONCENTRATION_MG_M3 = 'conmgm3',
CONCENTRATION_MG_NORMAL_CUBIC_METER = 'conmgNm3',
CONCENTRATION_G_M3 = 'congm3',
CONCENTRATION_G_NORMAL_CUBIC_METER = 'congNm3',
CONCENTRATION_MG_PER_DL = 'conmgdL',
CONCENTRATION_MMOL_PER_L = 'conmmolL',
// Currency
CURRENCY_USD = 'currencyUSD',
CURRENCY_GBP = 'currencyGBP',
CURRENCY_EUR = 'currencyEUR',
CURRENCY_JPY = 'currencyJPY',
CURRENCY_RUB = 'currencyRUB',
CURRENCY_UAH = 'currencyUAH',
CURRENCY_BRL = 'currencyBRL',
CURRENCY_DKK = 'currencyDKK',
CURRENCY_ISK = 'currencyISK',
CURRENCY_NOK = 'currencyNOK',
CURRENCY_SEK = 'currencySEK',
CURRENCY_CZK = 'currencyCZK',
CURRENCY_CHF = 'currencyCHF',
CURRENCY_PLN = 'currencyPLN',
CURRENCY_BTC = 'currencyBTC',
CURRENCY_MBTC = 'currencymBTC',
CURRENCY_UBTC = 'currencyμBTC',
CURRENCY_ZAR = 'currencyZAR',
CURRENCY_INR = 'currencyINR',
CURRENCY_KRW = 'currencyKRW',
CURRENCY_IDR = 'currencyIDR',
CURRENCY_PHP = 'currencyPHP',
CURRENCY_VND = 'currencyVND',
// Datetime
DATETIME_ISO = 'dateTimeAsIso',
DATETIME_ISO_NO_DATE_IF_TODAY = 'dateTimeAsIsoNoDateIfToday',
DATETIME_US = 'dateTimeAsUS',
DATETIME_US_NO_DATE_IF_TODAY = 'dateTimeAsUSNoDateIfToday',
DATETIME_LOCAL = 'dateTimeAsLocal',
DATETIME_LOCAL_NO_DATE_IF_TODAY = 'dateTimeAsLocalNoDateIfToday',
DATETIME_SYSTEM = 'dateTimeAsSystem',
DATETIME_FROM_NOW = 'dateTimeFromNow',
// Power/Electrical
POWER_WATT = 'watt',
POWER_KILOWATT = 'kwatt',
POWER_MEGAWATT = 'megwatt',
POWER_GIGAWATT = 'gwatt',
POWER_MILLIWATT = 'mwatt',
POWER_WATT_PER_SQUARE_METER = 'Wm2',
POWER_VOLT_AMPERE = 'voltamp',
POWER_KILOVOLT_AMPERE = 'kvoltamp',
POWER_VOLT_AMPERE_REACTIVE = 'voltampreact',
POWER_KILOVOLT_AMPERE_REACTIVE = 'kvoltampreact',
POWER_WATT_HOUR = 'watth',
POWER_WATT_HOUR_PER_KG = 'watthperkg',
POWER_KILOWATT_HOUR = 'kwatth',
POWER_KILOWATT_MINUTE = 'kwattm',
POWER_AMPERE_HOUR = 'amph',
POWER_KILOAMPERE_HOUR = 'kamph',
POWER_MILLIAMPERE_HOUR = 'mamph',
POWER_JOULE = 'joule',
POWER_ELECTRON_VOLT = 'ev',
POWER_AMPERE = 'amp',
POWER_KILOAMPERE = 'kamp',
POWER_MILLIAMPERE = 'mamp',
POWER_VOLT = 'volt',
POWER_KILOVOLT = 'kvolt',
POWER_MILLIVOLT = 'mvolt',
POWER_DECIBEL_MILLIWATT = 'dBm',
POWER_OHM = 'ohm',
POWER_KILOOHM = 'kohm',
POWER_MEGAOHM = 'Mohm',
POWER_FARAD = 'farad',
POWER_MICROFARAD = 'µfarad',
POWER_NANOFARAD = 'nfarad',
POWER_PICOFARAD = 'pfarad',
POWER_FEMTOFARAD = 'ffarad',
POWER_HENRY = 'henry',
POWER_MILLIHENRY = 'mhenry',
POWER_MICROHENRY = 'µhenry',
POWER_LUMENS = 'lumens',
// Flow
FLOW_GALLONS_PER_MINUTE = 'flowgpm',
FLOW_CUBIC_METERS_PER_SECOND = 'flowcms',
FLOW_CUBIC_FEET_PER_SECOND = 'flowcfs',
FLOW_CUBIC_FEET_PER_MINUTE = 'flowcfm',
FLOW_LITERS_PER_HOUR = 'litreh',
FLOW_LITERS_PER_MINUTE = 'flowlpm',
FLOW_MILLILITERS_PER_MINUTE = 'flowmlpm',
FLOW_LUX = 'lux',
// Force
FORCE_NEWTON_METERS = 'forceNm',
FORCE_KILONEWTON_METERS = 'forcekNm',
FORCE_NEWTONS = 'forceN',
FORCE_KILONEWTONS = 'forcekN',
// Mass
MASS_MILLIGRAM = 'massmg',
MASS_GRAM = 'massg',
MASS_POUND = 'masslb',
MASS_KILOGRAM = 'masskg',
MASS_METRIC_TON = 'masst',
// Length
LENGTH_MILLIMETER = 'lengthmm',
LENGTH_INCH = 'lengthin',
LENGTH_FOOT = 'lengthft',
LENGTH_METER = 'lengthm',
LENGTH_KILOMETER = 'lengthkm',
LENGTH_MILE = 'lengthmi',
// Pressure
PRESSURE_MILLIBAR = 'pressurembar',
PRESSURE_BAR = 'pressurebar',
PRESSURE_KILOBAR = 'pressurekbar',
PRESSURE_PASCAL = 'pressurepa',
PRESSURE_HECTOPASCAL = 'pressurehpa',
PRESSURE_KILOPASCAL = 'pressurekpa',
PRESSURE_INCHES_HG = 'pressurehg',
PRESSURE_PSI = 'pressurepsi',
// Radiation
RADIATION_BECQUEREL = 'radbq',
RADIATION_CURIE = 'radci',
RADIATION_GRAY = 'radgy',
RADIATION_RAD = 'radrad',
RADIATION_SIEVERT = 'radsv',
RADIATION_MILLISIEVERT = 'radmsv',
RADIATION_MICROSIEVERT = 'radusv',
RADIATION_REM = 'radrem',
RADIATION_EXPOSURE_C_PER_KG = 'radexpckg',
RADIATION_ROENTGEN = 'radr',
RADIATION_SIEVERT_PER_HOUR = 'radsvh',
RADIATION_MILLISIEVERT_PER_HOUR = 'radmsvh',
RADIATION_MICROSIEVERT_PER_HOUR = 'radusvh',
// Rotation speed
ROTATION_SPEED_REVOLUTIONS_PER_MINUTE = 'rotrpm',
ROTATION_SPEED_HERTZ = 'rothz',
ROTATION_SPEED_RADIANS_PER_SECOND = 'rotrads',
ROTATION_SPEED_DEGREES_PER_SECOND = 'rotdegs',
// Temperature
TEMPERATURE_CELSIUS = 'celsius',
TEMPERATURE_FAHRENHEIT = 'fahrenheit',
TEMPERATURE_KELVIN = 'kelvin',
// Velocity
VELOCITY_METERS_PER_SECOND = 'velocityms',
VELOCITY_KILOMETERS_PER_HOUR = 'velocitykmh',
VELOCITY_MILES_PER_HOUR = 'velocitymph',
VELOCITY_KNOT = 'velocityknot',
// Volume
VOLUME_MILLILITER = 'mlitre',
VOLUME_LITER = 'litre',
VOLUME_CUBIC_METER = 'm3',
VOLUME_NORMAL_CUBIC_METER = 'Nm3',
VOLUME_CUBIC_DECIMETER = 'dm3',
VOLUME_GALLON = 'gallons',
}
export enum YAxisUnit {
@@ -558,15 +293,6 @@ export enum YAxisUnit {
UCUM_PEBIBYTES = 'PiBy',
OPEN_METRICS_PEBIBYTES = 'pebibytes',
UCUM_EXBIBYTES = 'EiBy',
OPEN_METRICS_EXBIBYTES = 'exbibytes',
UCUM_ZEBIBYTES = 'ZiBy',
OPEN_METRICS_ZEBIBYTES = 'zebibytes',
UCUM_YOBIBYTES = 'YiBy',
OPEN_METRICS_YOBIBYTES = 'yobibytes',
UCUM_KIBIBYTES_SECOND = 'KiBy/s',
OPEN_METRICS_KIBIBYTES_SECOND = 'kibibytes_per_second',
@@ -597,24 +323,6 @@ export enum YAxisUnit {
UCUM_PEBIBITS_SECOND = 'Pibit/s',
OPEN_METRICS_PEBIBITS_SECOND = 'pebibits_per_second',
UCUM_EXBIBYTES_SECOND = 'EiBy/s',
OPEN_METRICS_EXBIBYTES_SECOND = 'exbibytes_per_second',
UCUM_EXBIBITS_SECOND = 'Eibit/s',
OPEN_METRICS_EXBIBITS_SECOND = 'exbibits_per_second',
UCUM_ZEBIBYTES_SECOND = 'ZiBy/s',
OPEN_METRICS_ZEBIBYTES_SECOND = 'zebibytes_per_second',
UCUM_ZEBIBITS_SECOND = 'Zibit/s',
OPEN_METRICS_ZEBIBITS_SECOND = 'zebibits_per_second',
UCUM_YOBIBYTES_SECOND = 'YiBy/s',
OPEN_METRICS_YOBIBYTES_SECOND = 'yobibytes_per_second',
UCUM_YOBIBITS_SECOND = 'Yibit/s',
OPEN_METRICS_YOBIBITS_SECOND = 'yobibits_per_second',
UCUM_TRUE_FALSE = '{bool}',
OPEN_METRICS_TRUE_FALSE = 'boolean_true_false',
@@ -656,27 +364,3 @@ export enum YAxisUnit {
OPEN_METRICS_PERCENT_UNIT = 'percentunit',
}
export interface ScaledValue {
value: number;
label: string;
}
export interface UnitFamilyConfig {
units: UniversalYAxisUnit[];
scaleFactor: number;
}
export interface YAxisCategory {
name: string;
units: {
name: string;
id: UniversalYAxisUnit;
}[];
}
export enum YAxisSource {
ALERTS = 'alerts',
DASHBOARDS = 'dashboards',
EXPLORER = 'explorer',
}

View File

@@ -1,11 +1,5 @@
import { UniversalYAxisUnitMappings, Y_AXIS_UNIT_NAMES } from './constants';
import { ADDITIONAL_Y_AXIS_CATEGORIES, BASE_Y_AXIS_CATEGORIES } from './data';
import {
UniversalYAxisUnit,
YAxisCategory,
YAxisSource,
YAxisUnit,
} from './types';
import { UniversalYAxisUnit, YAxisUnit } from './types';
export const mapMetricUnitToUniversalUnit = (
unit: string | undefined,
@@ -15,7 +9,7 @@ export const mapMetricUnitToUniversalUnit = (
}
const universalUnit = Object.values(UniversalYAxisUnit).find(
(u) => UniversalYAxisUnitMappings[u]?.has(unit as YAxisUnit) || unit === u,
(u) => UniversalYAxisUnitMappings[u].has(unit as YAxisUnit) || unit === u,
);
return universalUnit || (unit as UniversalYAxisUnit) || null;
@@ -37,44 +31,3 @@ export const getUniversalNameFromMetricUnit = (
return universalName || unit || '-';
};
export function isUniversalUnit(format: string): boolean {
return Object.values(UniversalYAxisUnit).includes(
format as UniversalYAxisUnit,
);
}
export function mergeCategories(
categories1: YAxisCategory[],
categories2: YAxisCategory[],
): YAxisCategory[] {
const mapOfCategories = new Map<string, YAxisCategory>();
categories1.forEach((category) => {
mapOfCategories.set(category.name, category);
});
categories2.forEach((category) => {
if (mapOfCategories.has(category.name)) {
mapOfCategories.set(category.name, {
name: category.name,
units: [
...(mapOfCategories.get(category.name)?.units ?? []),
...category.units,
],
});
} else {
mapOfCategories.set(category.name, category);
}
});
return Array.from(mapOfCategories.values());
}
export function getYAxisCategories(source: YAxisSource): YAxisCategory[] {
if (source !== YAxisSource.DASHBOARDS) {
return BASE_Y_AXIS_CATEGORIES;
}
return mergeCategories(BASE_Y_AXIS_CATEGORIES, ADDITIONAL_Y_AXIS_CATEGORIES);
}
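A short sketch of how the removed helpers compose, reusing the shapes from the deleted mergeCategories test above:

const merged = mergeCategories(
[{ name: 'Data', units: [{ name: 'bytes', id: UniversalYAxisUnit.BYTES }] }],
[{ name: 'Data', units: [{ name: 'bits', id: UniversalYAxisUnit.BITS }] }],
);
// merged: a single 'Data' category whose units are bytes followed by bits
getYAxisCategories(YAxisSource.DASHBOARDS); // BASE_Y_AXIS_CATEGORIES merged with ADDITIONAL_Y_AXIS_CATEGORIES
getYAxisCategories(YAxisSource.ALERTS); // BASE_Y_AXIS_CATEGORIES only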

View File

@@ -1,8 +1,7 @@
import { Button, Flex, Switch, Typography } from 'antd';
import { BaseOptionType, DefaultOptionType, SelectProps } from 'antd/es/select';
import { getInvolvedQueriesInTraceOperator } from 'components/QueryBuilderV2/QueryV2/TraceOperator/utils/utils';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getYAxisCategories } from 'components/YAxisUnitSelector/utils';
import { Y_AXIS_CATEGORIES } from 'components/YAxisUnitSelector/constants';
import ROUTES from 'constants/routes';
import {
AlertThresholdMatchType,
@@ -40,8 +39,7 @@ export function getQueryNames(currentQuery: Query): BaseOptionType[] {
}
export function getCategoryByOptionId(id: string): string | undefined {
const categories = getYAxisCategories(YAxisSource.ALERTS);
return categories.find((category) =>
return Y_AXIS_CATEGORIES.find((category) =>
category.units.some((unit) => unit.id === id),
)?.name;
}
@@ -49,15 +47,14 @@ export function getCategoryByOptionId(id: string): string | undefined {
export function getCategorySelectOptionByName(
name: string,
): DefaultOptionType[] {
const categories = getYAxisCategories(YAxisSource.ALERTS);
return (
categories
.find((category) => category.name === name)
?.units.map((unit) => ({
Y_AXIS_CATEGORIES.find((category) => category.name === name)?.units.map(
(unit) => ({
label: unit.name,
value: unit.id,
'data-testid': `threshold-unit-select-option-${unit.id}`,
})) || []
}),
) || []
);
}

View File

@@ -1,5 +1,4 @@
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useCreateAlertState } from 'container/CreateAlertV2/context';
import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview';
@@ -38,7 +37,6 @@ function ChartPreview({ alertDef }: ChartPreviewProps): JSX.Element {
onChange={(value): void => {
setAlertState({ type: 'SET_Y_AXIS_UNIT', payload: value });
}}
source={YAxisSource.ALERTS}
/>
</div>
);

View File

@@ -1,5 +1,5 @@
import { TableProps } from 'antd';
import { PrecisionOption } from 'components/Graph/types';
import { PrecisionOption } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { LogsExplorerTableProps } from 'container/LogsExplorerTable/LogsExplorerTable.interfaces';
import {

View File

@@ -175,18 +175,7 @@ function LiveLogsContainer(): JSX.Element {
if (isConnectionError && reconnectDueToError) {
// Small delay to prevent immediate reconnection attempts
const reconnectTimer = setTimeout(() => {
const fallbackFilterExpression =
prevFilterExpressionRef.current ||
currentQuery?.builder.queryData[0]?.filter?.expression?.trim() ||
null;
const validationResult = validateQuery(fallbackFilterExpression || '');
if (validationResult.isValid) {
handleStartNewConnection(fallbackFilterExpression);
} else {
handleStartNewConnection(null);
}
handleStartNewConnection();
}, 1000);
return (): void => clearTimeout(reconnectTimer);
@@ -197,7 +186,6 @@ function LiveLogsContainer(): JSX.Element {
reconnectDueToError,
compositeQuery,
handleStartNewConnection,
currentQuery,
]);
// clean up the connection when the component unmounts
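Net effect of this hunk: on a connection error, the reconnect timer now calls handleStartNewConnection() with no arguments instead of re-validating and replaying the previous filter expression, and currentQuery drops out of the effect's dependency list.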

View File

@@ -124,7 +124,7 @@
.builder-units-filter-label {
margin-bottom: 0px !important;
font-size: 12px;
font-size: 13px;
}
}
}

View File

@@ -6,7 +6,6 @@ import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { UpdateMetricMetadataProps } from 'api/metricsExplorer/updateMetricMetadata';
import { ResizeTable } from 'components/ResizeTable';
import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { YAxisSource } from 'components/YAxisUnitSelector/types';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import FieldRenderer from 'container/LogDetailedView/FieldRenderer';
import { DataType } from 'container/LogDetailedView/TableView';
@@ -121,7 +120,6 @@ function Metadata({
setMetricMetadata((prev) => ({ ...prev, unit: value }));
}}
data-testid="unit-select"
source={YAxisSource.EXPLORER}
/>
);
}

View File

@@ -12,7 +12,10 @@ import {
Switch,
Typography,
} from 'antd';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import {
PrecisionOption,
PrecisionOptionsEnum,
} from 'components/Graph/yAxisConfig';
import TimePreference from 'components/TimePreferenceDropDown';
import { PANEL_TYPES, PanelDisplay } from 'constants/queryBuilder';
import GraphTypes, {

View File

@@ -4,7 +4,10 @@ import './NewWidget.styles.scss';
import { WarningOutlined } from '@ant-design/icons';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import { PrecisionOption, PrecisionOptionsEnum } from 'components/Graph/types';
import {
PrecisionOption,
PrecisionOptionsEnum,
} from 'components/Graph/yAxisConfig';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { adjustQueryForV5 } from 'components/QueryBuilderV2/utils';
import { QueryParams } from 'constants/query';

View File

@@ -1,6 +1,6 @@
import { DefaultOptionType } from 'antd/es/select';
import { omitIdFromQuery } from 'components/ExplorerCard/utils';
import { PrecisionOptionsEnum } from 'components/Graph/types';
import { PrecisionOptionsEnum } from 'components/Graph/yAxisConfig';
import {
initialQueryBuilderFormValuesMap,
PANEL_TYPES,

View File

@@ -1237,9 +1237,9 @@
},
{
"dataSource": "opentelemetry-cloudflare",
"label": "Cloudflare - Tracing",
"label": "Cloudflare",
"imgUrl": "/Logos/cloudflare.svg",
"tags": ["apm/traces"],
"tags": ["apm/traces", "logs"],
"module": "apm",
"relatedSearchKeywords": [
"apm",
@@ -1260,30 +1260,6 @@
"id": "opentelemetry-cloudflare",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-cloudflare/"
},
{
"dataSource": "opentelemetry-cloudflare-logs",
"label": "Cloudflare Logs",
"imgUrl": "/Logos/cloudflare.svg",
"tags": ["logs"],
"module": "logs",
"relatedSearchKeywords": [
"logs",
"cloudflare",
"cloudflare workers",
"cloudflare monitoring",
"cloudflare logging",
"cloudflare observability",
"opentelemetry cloudflare",
"otel cloudflare",
"cloudflare instrumentation",
"monitor cloudflare workers",
"cloudflare logs",
"edge computing monitoring",
"cloudflare to signoz"
],
"id": "opentelemetry-cloudflare-logs",
"link": "https://signoz.io/docs/logs-management/send-logs/cloudflare-logs/"
},
{
"dataSource": "kubernetes-pod-logs",
"label": "Kubernetes Pod Logs",
@@ -2845,133 +2821,6 @@
],
"link": "https://signoz.io/docs/vercel-ai-sdk-monitoring/"
},
{
"dataSource": "amazon-bedrock",
"label": "Amazon Bedrock",
"imgUrl": "/Logos/amazon-bedrock.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"amazon bedrock monitoring",
"amazon bedrock observability",
"amazon bedrock performance tracking",
"amazon bedrock latency tracing",
"amazon bedrock metrics",
"otel amazon bedrock integration",
"amazon bedrock response time",
"amazon bedrock logs",
"amazon bedrock error tracking",
"amazon bedrock debugging",
"traces"
],
"link": "https://signoz.io/docs/amazon-bedrock-monitoring/"
},
{
"dataSource": "autogen",
"label": "AutoGen",
"imgUrl": "/Logos/autogen.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"autogen monitoring",
"autogen observability",
"autogen performance tracking",
"autogen latency tracing",
"autogen metrics",
"otel autogen integration",
"autogen response time",
"autogen logs",
"autogen error tracking",
"autogen debugging",
"traces"
],
"link": "https://signoz.io/docs/autogen-observability/"
},
{
"dataSource": "azure-openai",
"label": "Azure OpenAI",
"imgUrl": "/Logos/azure-openai.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"azure open ai monitoring",
"azure open ai observability",
"azure open ai performance tracking",
"azure open ai latency tracing",
"azure open ai metrics",
"otel azure open ai integration",
"azure open ai response time",
"azure open ai logs",
"azure open ai error tracking",
"azure open ai debugging",
"traces"
],
"link": "https://signoz.io/docs/azure-openai-monitoring/"
},
{
"dataSource": "crew-ai",
"label": "Crew AI",
"imgUrl": "/Logos/crew-ai.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"crew ai monitoring",
"crew ai observability",
"crew ai performance tracking",
"crew ai latency tracing",
"crew ai metrics",
"otel crew ai integration",
"crew ai response time",
"crew ai logs",
"crew ai error tracking",
"crew ai debugging",
"traces"
],
"link": "https://signoz.io/docs/crewai-observability/"
},
{
"dataSource": "litellm",
"label": "LiteLLM",
"imgUrl": "/Logos/litellm.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"litellm monitoring",
"litellm observability",
"litellm performance tracking",
"litellm latency tracing",
"litellm metrics",
"otel litellm integration",
"litellm response time",
"litellm logs",
"litellm error tracking",
"litellm debugging",
"traces"
],
"link": "https://signoz.io/docs/litellm-observability/"
},
{
"dataSource": "pydantic-ai",
"label": "Pydantic AI",
"imgUrl": "/Logos/pydantic-ai.svg",
"tags": ["LLM Monitoring"],
"module": "apm",
"relatedSearchKeywords": [
"pydantic ai monitoring",
"pydantic ai observability",
"pydantic ai performance tracking",
"pydantic ai latency tracing",
"pydantic ai metrics",
"otel pydantic ai integration",
"pydantic ai response time",
"pydantic ai logs",
"pydantic ai error tracking",
"pydantic ai debugging",
"traces"
],
"link": "https://signoz.io/docs/pydantic-ai-observability/"
},
{
"dataSource": "mastra-monitoring",
"label": "Mastra",

View File

@@ -6,7 +6,6 @@ import { ColumnsType } from 'antd/lib/table';
import deleteDomain from 'api/v1/domains/id/delete';
import listAllDomain from 'api/v1/domains/list';
import ErrorContent from 'components/ErrorModal/components/ErrorContent';
import CopyToClipboard from 'periscope/components/CopyToClipboard';
import { useErrorModal } from 'providers/ErrorModalProvider';
import { useState } from 'react';
import { useQuery } from 'react-query';
@@ -33,23 +32,6 @@ const columns: ColumnsType<GettableAuthDomain> = [
<Toggle isDefaultChecked={value} record={record} />
),
},
{
title: 'IDP Initiated SSO URL',
dataIndex: 'relayState',
key: 'relayState',
width: 80,
render: (_, record: GettableAuthDomain): JSX.Element => {
const relayPath = record.authNProviderInfo.relayStatePath;
if (!relayPath) {
return (
<Typography.Text style={{ paddingLeft: '6px' }}>N/A</Typography.Text>
);
}
const href = `${window.location.origin}/${relayPath}`;
return <CopyToClipboard textToCopy={href} />;
},
},
{
title: 'Action',
dataIndex: 'action',

View File

@@ -116,11 +116,12 @@
flex: 1 0 0;
border-radius: 2px;
background: var(--bg-cherry-500);
border: none;
border-color: none;
}
.cancel-run:hover {
background-color: #ff7875 !important;
color: var(--bg-vanilla-100) !important;
border: none;
}
}

View File

@@ -1,10 +1,10 @@
import { Select, SelectProps, Space, Typography } from 'antd';
import { Select, SelectProps, Space } from 'antd';
import { getCategorySelectOptionByName } from 'container/NewWidget/RightContainer/alertFomatCategories';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { popupContainer } from 'utils/selectPopupContainer';
import { categoryToSupport } from './config';
import { selectStyles } from './styles';
import { DefaultLabel, selectStyles } from './styles';
import { IBuilderUnitsFilterProps } from './types';
import { filterOption } from './utils';
@@ -31,9 +31,9 @@ function BuilderUnitsFilter({
return (
<Space className="builder-units-filter">
<Typography.Text className="builder-units-filter-label">
<DefaultLabel className="builder-units-filter-label">
Y-axis unit
</Typography.Text>
</DefaultLabel>
<Select
getPopupContainer={popupContainer}
style={selectStyles}

View File

@@ -8,13 +8,4 @@
min-height: 350px;
padding: 0px 12px;
}
.time-series-view-container {
.time-series-view-container-header {
display: flex;
justify-content: flex-start;
align-items: center;
padding: 12px 0;
}
}
}

View File

@@ -11,7 +11,6 @@ import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import EmptyMetricsSearch from 'container/MetricsExplorer/Explorer/EmptyMetricsSearch';
import { MetricsLoading } from 'container/MetricsExplorer/MetricsLoading/MetricsLoading';
import NoLogs from 'container/NoLogs/NoLogs';
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config';
import { TracesLoading } from 'container/TracesExplorer/TraceLoading/TraceLoading';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
@@ -82,14 +81,6 @@ function TimeSeriesView({
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
const [graphVisibility, setGraphVisibility] = useState<boolean[]>([]);
const [yAxisUnitInternal, setYAxisUnitInternal] = useState<string>(
yAxisUnit || '',
);
const onUnitChangeHandler = (value: string): void => {
setYAxisUnitInternal(value);
};
const legendScrollPositionRef = useRef<{
scrollTop: number;
scrollLeft: number;
@@ -198,7 +189,7 @@ function TimeSeriesView({
const chartOptions = getUPlotChartOptions({
id: 'time-series-explorer',
onDragSelect,
yAxisUnit: yAxisUnitInternal || '',
yAxisUnit: yAxisUnit || '',
apiResponse: data?.payload,
dimensions: {
width: containerDimensions.width,
@@ -270,17 +261,7 @@ function TimeSeriesView({
!isError &&
chartData &&
!isEmpty(chartData?.[0]) &&
chartOptions && (
<div className="time-series-view-container">
<div className="time-series-view-container-header">
<BuilderUnitsFilter
onChange={onUnitChangeHandler}
yAxisUnit={yAxisUnitInternal}
/>
</div>
<Uplot data={chartData} options={chartOptions} />
</div>
)}
chartOptions && <Uplot data={chartData} options={chartOptions} />}
</div>
</div>
);

View File

@@ -1,5 +1,4 @@
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { getToolTipValue, PrecisionOption } from 'components/Graph/yAxisConfig';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { themeColors } from 'constants/theme';
import dayjs from 'dayjs';

View File

@@ -1,7 +1,6 @@
/* eslint-disable @typescript-eslint/ban-ts-comment */
// @ts-nocheck
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { getToolTipValue, PrecisionOption } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { uPlotXAxisValuesFormat } from './constants';

View File

@@ -1,4 +1,4 @@
import { PrecisionOption } from 'components/Graph/types';
import { PrecisionOption } from 'components/Graph/yAxisConfig';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';

View File

@@ -11,7 +11,6 @@ export interface GettableAuthDomain {
orgId: string;
ssoEnabled: boolean;
ssoType: string;
authNProviderInfo: AuthNProviderInfo;
samlConfig?: SAMLConfig;
googleAuthConfig?: GoogleAuthConfig;
oidcConfig?: OIDCConfig;
@@ -43,7 +42,3 @@ export interface OIDCConfig {
export interface ClaimMapping {
email: string;
}
export interface AuthNProviderInfo {
relayStatePath: string;
}

View File

@@ -22,7 +22,4 @@ type CallbackAuthN interface {
// Handle the callback from the provider.
HandleCallback(context.Context, url.Values) (*authtypes.CallbackIdentity, error)
// Get provider info such as `relay state`
ProviderInfo(context.Context, *authtypes.AuthDomain) *authtypes.AuthNProviderInfo
}

View File

@@ -117,12 +117,6 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
}
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
return &authtypes.AuthNProviderInfo{
RelayStatePath: nil,
}
}
func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain, provider *oidc.Provider) *oauth2.Config {
return &oauth2.Config{
ClientID: authDomain.AuthDomainConfig().Google.ClientID,

View File

@@ -105,16 +105,7 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler {
comment.Set("user_id", claims.UserID)
comment.Set("org_id", claims.OrgID)
r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment))
next.ServeHTTP(w, r)
apiKey.LastUsed = time.Now()
_, err = a.store.BunDB().NewUpdate().Model(&apiKey).Column("last_used").Where("token = ?", apiKeyToken).Where("revoked = false").Exec(r.Context())
if err != nil {
a.logger.ErrorContext(r.Context(), "failed to update last used of api key", "error", err)
}
next.ServeHTTP(w, r.WithContext(ctxtypes.NewContextWithComment(ctx, comment)))
})
}
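Net effect of this hunk: the middleware no longer writes the key's last_used timestamp back to the store on every authenticated request; it only attaches the SQL query comment to the request context before invoking the next handler.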

View File

@@ -29,9 +29,6 @@ type Module interface {
// Delete an existing auth domain by id.
Delete(context.Context, valuer.UUID, valuer.UUID) error
// Get the IDP info of the domain provided.
GetAuthNProviderInfo(context.Context, *authtypes.AuthDomain) (*authtypes.AuthNProviderInfo)
}
type Handler interface {

View File

@@ -95,7 +95,7 @@ func (handler *handler) List(rw http.ResponseWriter, r *http.Request) {
authDomains := make([]*authtypes.GettableAuthDomain, len(domains))
for i, domain := range domains {
authDomains[i] = authtypes.NewGettableAuthDomainFromAuthDomain(domain, handler.module.GetAuthNProviderInfo(ctx, domain))
authDomains[i] = authtypes.NewGettableAuthDomainFromAuthDomain(domain)
}
render.Success(rw, http.StatusOK, authDomains)

View File

@@ -3,19 +3,17 @@ package implauthdomain
import (
"context"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type module struct {
store authtypes.AuthDomainStore
authNs map[authtypes.AuthNProvider]authn.AuthN
store authtypes.AuthDomainStore
}
func NewModule(store authtypes.AuthDomainStore, authNs map[authtypes.AuthNProvider]authn.AuthN) authdomain.Module {
return &module{store: store, authNs: authNs}
func NewModule(store authtypes.AuthDomainStore) authdomain.Module {
return &module{store: store}
}
func (module *module) Create(ctx context.Context, domain *authtypes.AuthDomain) error {
@@ -26,13 +24,6 @@ func (module *module) Get(ctx context.Context, id valuer.UUID) (*authtypes.AuthD
return module.store.Get(ctx, id)
}
func (module *module) GetAuthNProviderInfo(ctx context.Context, domain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
if callbackAuthN, ok := module.authNs[domain.AuthDomainConfig().AuthNProvider].(authn.CallbackAuthN); ok {
return callbackAuthN.ProviderInfo(ctx, domain)
}
return &authtypes.AuthNProviderInfo{}
}
func (module *module) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.AuthDomain, error) {
return module.store.GetByOrgIDAndID(ctx, orgID, id)
}

View File

@@ -1,139 +0,0 @@
package implmetricsexplorer
import (
"net/http"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
)
type handler struct {
module metricsexplorer.Module
}
// NewHandler returns a metricsexplorer.Handler implementation.
func NewHandler(m metricsexplorer.Module) metricsexplorer.Handler {
return &handler{
module: m,
}
}
func (h *handler) GetStats(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
var in metricsexplorertypes.StatsRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetStats(req.Context(), orgID, &in)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
func (h *handler) GetTreemap(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
var in metricsexplorertypes.TreemapRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetTreemap(req.Context(), orgID, &in)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
func (h *handler) UpdateMetricMetadata(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
// Extract metric_name from URL path
vars := mux.Vars(req)
metricName := vars["metric_name"]
if metricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metric_name is required in URL path"))
return
}
var in metricsexplorertypes.UpdateMetricMetadataRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}
// Set metric name from URL path
in.MetricName = metricName
orgID := valuer.MustNewUUID(claims.OrgID)
err = h.module.UpdateMetricMetadata(req.Context(), orgID, &in)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, nil)
}
func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
if metricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
metadataMap, err := h.module.GetMetricMetadataMulti(req.Context(), orgID, []string{metricName})
if err != nil {
render.Error(rw, err)
return
}
metadata, ok := metadataMap[metricName]
if !ok || metadata == nil {
render.Error(rw, errors.NewNotFoundf(errors.CodeNotFound, "metadata not found for metric %q", metricName))
return
}
render.Success(rw, http.StatusOK, metadata)
}

View File

@@ -1,73 +0,0 @@
package implmetricsexplorer
import (
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// SQL column names that orderBy keys are mapped to
const (
sqlColumnTimeSeries = "timeseries"
sqlColumnSamples = "samples"
)
func generateMetricMetadataCacheKey(metricName string) string {
return fmt.Sprintf("metrics::metadata::%s", metricName)
}
func getStatsOrderByColumn(order *qbtypes.OrderBy) (string, string, error) {
if order == nil {
return sqlColumnTimeSeries, qbtypes.OrderDirectionDesc.StringValue(), nil
}
var columnName string
switch strings.ToLower(order.Key.Name) {
case metricsexplorertypes.OrderByTimeSeries.StringValue():
columnName = sqlColumnTimeSeries
case metricsexplorertypes.OrderBySamples.StringValue():
columnName = sqlColumnSamples
default:
return "", "", errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unsupported order column %q: supported columns are %q or %q",
order.Key.Name,
metricsexplorertypes.OrderByTimeSeries,
metricsexplorertypes.OrderBySamples,
)
}
// Extract the direction and validate it against OrderDirectionMap
var direction qbtypes.OrderDirection
var ok bool
if direction, ok = qbtypes.OrderDirectionMap[strings.ToLower(order.Direction.StringValue())]; !ok {
return "", "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported order direction %q, should be one of %s, %s", order.Direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc)
}
return columnName, direction.StringValue(), nil
}
func extractMissingMetricNamesInMap(metricNames []string, metricMetadataMap map[string]*metricsexplorertypes.MetricMetadata) []string {
misses := make([]string, 0)
for _, name := range metricNames {
if _, ok := metricMetadataMap[name]; !ok {
misses = append(misses, name)
}
}
return misses
}
// enrichStatsWithMetadata enriches metric stats with metadata from the provided metadata map.
func enrichStatsWithMetadata(metricStats []metricsexplorertypes.Stat, metadata map[string]*metricsexplorertypes.MetricMetadata) {
for i := range metricStats {
if meta, ok := metadata[metricStats[i].MetricName]; ok {
metricStats[i].Description = meta.Description
metricStats[i].MetricType = meta.MetricType
metricStats[i].MetricUnit = meta.MetricUnit
}
}
}

View File

@@ -1,773 +0,0 @@
package implmetricsexplorer
import (
"context"
"fmt"
"log/slog"
"strings"
"time"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
sqlbuilder "github.com/huandu/go-sqlbuilder"
)
type module struct {
telemetryStore telemetrystore.TelemetryStore
telemetryMetadataStore telemetrytypes.MetadataStore
fieldMapper qbtypes.FieldMapper
condBuilder qbtypes.ConditionBuilder
logger *slog.Logger
cache cache.Cache
}
// NewModule constructs the metrics module with the provided dependencies.
func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings) metricsexplorer.Module {
fieldMapper := telemetrymetrics.NewFieldMapper()
condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
return &module{
telemetryStore: ts,
fieldMapper: fieldMapper,
condBuilder: condBuilder,
logger: providerSettings.Logger,
telemetryMetadataStore: telemetryMetadataStore,
cache: cache,
}
}
func (m *module) GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error) {
if err := req.Validate(); err != nil {
return nil, err
}
filterWhereClause, err := m.buildFilterClause(ctx, req.Filter, req.Start, req.End)
if err != nil {
return nil, err
}
// Single query to get stats with sample and time-series counts, in the required sort order
metricStats, total, err := m.fetchMetricsStatsWithSamples(
ctx,
req,
filterWhereClause,
false,
req.OrderBy,
)
if err != nil {
return nil, err
}
if len(metricStats) == 0 {
return &metricsexplorertypes.StatsResponse{
Metrics: []metricsexplorertypes.Stat{},
Total: 0,
}, nil
}
// Get metadata for all metrics
metricNames := make([]string, len(metricStats))
for i := range metricStats {
metricNames[i] = metricStats[i].MetricName
}
metadata, err := m.GetMetricMetadataMulti(ctx, orgID, metricNames)
if err != nil {
return nil, err
}
// Enrich stats with metadata
enrichStatsWithMetadata(metricStats, metadata)
return &metricsexplorertypes.StatsResponse{
Metrics: metricStats,
Total: total,
}, nil
}
// GetTreemap returns metrics treemap information for the requested mode (samples or time series).
func (m *module) GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error) {
if err := req.Validate(); err != nil {
return nil, err
}
filterWhereClause, err := m.buildFilterClause(ctx, req.Filter, req.Start, req.End)
if err != nil {
return nil, err
}
resp := &metricsexplorertypes.TreemapResponse{}
switch req.Treemap {
case metricsexplorertypes.TreemapModeSamples:
entries, err := m.computeSamplesTreemap(ctx, req, filterWhereClause)
if err != nil {
return nil, err
}
resp.Samples = entries
default: // TreemapModeTimeSeries
entries, err := m.computeTimeseriesTreemap(ctx, req, filterWhereClause)
if err != nil {
return nil, err
}
resp.TimeSeries = entries
}
return resp, nil
}
func (m *module) GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error) {
if len(metricNames) == 0 {
return map[string]*metricsexplorertypes.MetricMetadata{}, nil
}
metadata := make(map[string]*metricsexplorertypes.MetricMetadata)
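// Tier 1: serve whatever is already cached; anything missing falls through.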
cacheHits, cacheMisses := m.fetchMetadataFromCache(ctx, orgID, metricNames)
for name, meta := range cacheHits {
metadata[name] = meta
}
if len(cacheMisses) == 0 {
return metadata, nil
}
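// Tier 2: look up user-updated metadata from the append-only updated-metadata table.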
updatedMetadata, err := m.fetchUpdatedMetadata(ctx, orgID, cacheMisses)
if err != nil {
return nil, err
}
for name, meta := range updatedMetadata {
metadata[name] = meta
}
remainingMisses := extractMissingMetricNamesInMap(cacheMisses, updatedMetadata)
if len(remainingMisses) == 0 {
return metadata, nil
}
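// Tier 3: fall back to the metadata recorded on the raw time series (timeseries v4 table).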
timeseriesMetadata, err := m.fetchTimeseriesMetadata(ctx, orgID, remainingMisses)
if err != nil {
return nil, err
}
for name, meta := range timeseriesMetadata {
metadata[name] = meta
}
return metadata, nil
}
func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
if req == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
}
if req.MetricName == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric name is required")
}
// Validate and normalize metric type and temporality
if err := m.validateAndNormalizeMetricType(req); err != nil {
return err
}
// Validate labels for histogram and summary types
if err := m.validateMetricLabels(ctx, req); err != nil {
return err
}
// Insert new metadata (keeping history of all updates)
if err := m.insertMetricsMetadata(ctx, orgID, req); err != nil {
return err
}
return nil
}
func (m *module) fetchMetadataFromCache(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, []string) {
hits := make(map[string]*metricsexplorertypes.MetricMetadata)
misses := make([]string, 0)
for _, metricName := range metricNames {
cacheKey := generateMetricMetadataCacheKey(metricName)
var cachedMetadata metricsexplorertypes.MetricMetadata
if err := m.cache.Get(ctx, orgID, cacheKey, &cachedMetadata); err == nil {
hits[metricName] = &cachedMetadata
} else {
m.logger.WarnContext(ctx, "cache miss for metric metadata", "metric_name", metricName, "error", err)
misses = append(misses, metricName)
}
}
return hits, misses
}
func (m *module) fetchUpdatedMetadata(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error) {
if len(metricNames) == 0 {
return map[string]*metricsexplorertypes.MetricMetadata{}, nil
}
args := make([]any, len(metricNames))
for i := range metricNames {
args[i] = metricNames[i]
}
sb := sqlbuilder.NewSelectBuilder()
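// argMax(_, created_at) picks the most recently written value per metric, since metadata updates are append-only (see insertMetricsMetadata).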
sb.Select(
"metric_name",
"argMax(description, created_at) AS description",
"argMax(type, created_at) AS type",
"argMax(unit, created_at) AS unit",
"argMax(temporality, created_at) AS temporality",
"argMax(is_monotonic, created_at) AS is_monotonic",
)
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.UpdatedMetadataTableName))
sb.Where(sb.In("metric_name", args...))
sb.GroupBy("metric_name")
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch updated metrics metadata")
}
defer rows.Close()
result := make(map[string]*metricsexplorertypes.MetricMetadata)
for rows.Next() {
var (
metricMetadata metricsexplorertypes.MetricMetadata
metricName string
)
if err := rows.Scan(&metricName, &metricMetadata.Description, &metricMetadata.MetricType, &metricMetadata.MetricUnit, &metricMetadata.Temporality, &metricMetadata.IsMonotonic); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan updated metrics metadata")
}
result[metricName] = &metricMetadata
cacheKey := generateMetricMetadataCacheKey(metricName)
if err := m.cache.Set(ctx, orgID, cacheKey, &metricMetadata, 0); err != nil {
m.logger.WarnContext(ctx, "failed to set metric metadata in cache", "metric_name", metricName, "error", err)
}
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating updated metrics metadata rows")
}
return result, nil
}
func (m *module) fetchTimeseriesMetadata(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error) {
if len(metricNames) == 0 {
return map[string]*metricsexplorertypes.MetricMetadata{}, nil
}
args := make([]any, len(metricNames))
for i := range metricNames {
args[i] = metricNames[i]
}
sb := sqlbuilder.NewSelectBuilder()
sb.Select(
"metric_name",
"ANY_VALUE(description) AS description",
"ANY_VALUE(type) AS metric_type",
"ANY_VALUE(unit) AS metric_unit",
"ANY_VALUE(temporality) AS temporality",
"ANY_VALUE(is_monotonic) AS is_monotonic",
)
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.TimeseriesV4TableName))
sb.Where(sb.In("metric_name", args...))
sb.GroupBy("metric_name")
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to fetch metrics metadata from timeseries table")
}
defer rows.Close()
result := make(map[string]*metricsexplorertypes.MetricMetadata)
for rows.Next() {
var (
metricMetadata metricsexplorertypes.MetricMetadata
metricName string
)
if err := rows.Scan(&metricName, &metricMetadata.Description, &metricMetadata.MetricType, &metricMetadata.MetricUnit, &metricMetadata.Temporality, &metricMetadata.IsMonotonic); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan timeseries metadata")
}
result[metricName] = &metricMetadata
cacheKey := generateMetricMetadataCacheKey(metricName)
if err := m.cache.Set(ctx, orgID, cacheKey, &metricMetadata, 0); err != nil {
m.logger.WarnContext(ctx, "failed to set metric metadata in cache", "metric_name", metricName, "error", err)
}
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating timeseries metadata rows")
}
return result, nil
}
func (m *module) validateAndNormalizeMetricType(req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
switch req.Type {
case metrictypes.SumType:
if req.Temporality.IsZero() {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "temporality is required when metric type is Sum")
}
if req.Temporality != metrictypes.Delta && req.Temporality != metrictypes.Cumulative {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid value for temporality")
}
// Special case: if Sum is not monotonic and cumulative, convert to Gauge
if !req.IsMonotonic && req.Temporality == metrictypes.Cumulative {
req.Type = metrictypes.GaugeType
req.Temporality = metrictypes.Unspecified
}
case metrictypes.HistogramType:
if req.Temporality.IsZero() {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "temporality is required when metric type is Histogram")
}
if req.Temporality != metrictypes.Delta && req.Temporality != metrictypes.Cumulative {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid value for temporality")
}
case metrictypes.ExpHistogramType:
if req.Temporality.IsZero() {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "temporality is required when metric type is exponential histogram")
}
if req.Temporality != metrictypes.Delta && req.Temporality != metrictypes.Cumulative {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid value for temporality")
}
case metrictypes.GaugeType:
// Gauge always has unspecified temporality
req.Temporality = metrictypes.Unspecified
case metrictypes.SummaryType:
// Summary always has cumulative temporality
req.Temporality = metrictypes.Cumulative
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid metric type")
}
return nil
}
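// Illustrative example (not part of the module): per the normalization above, a
// non-monotonic cumulative Sum is persisted as a Gauge. The metric name below is
// hypothetical.
//
//	req := &metricsexplorertypes.UpdateMetricMetadataRequest{
//		MetricName:  "queue_depth",
//		Type:        metrictypes.SumType,
//		Temporality: metrictypes.Cumulative,
//		IsMonotonic: false,
//	}
//	_ = m.validateAndNormalizeMetricType(req)
//	// req.Type == metrictypes.GaugeType, req.Temporality == metrictypes.Unspecified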
func (m *module) validateMetricLabels(ctx context.Context, req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
if req.Type == metrictypes.HistogramType {
hasLabel, err := m.checkForLabelInMetric(ctx, req.MetricName, "le")
if err != nil {
return err
}
if !hasLabel {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as histogram type", req.MetricName)
}
}
if req.Type == metrictypes.SummaryType {
hasLabel, err := m.checkForLabelInMetric(ctx, req.MetricName, "quantile")
if err != nil {
return err
}
if !hasLabel {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "metric '%s' cannot be set as summary type: required 'quantile' label not found", req.MetricName)
}
}
return nil
}
func (m *module) checkForLabelInMetric(ctx context.Context, metricName string, label string) (bool, error) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("count(*) > 0 AS has_label")
sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
sb.Where(sb.E("metric_name", metricName))
sb.Where(sb.E("attr_name", label))
sb.Limit(1)
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
var hasLabel bool
db := m.telemetryStore.ClickhouseDB()
err := db.QueryRow(ctx, query, args...).Scan(&hasLabel)
if err != nil {
return false, errors.WrapInternalf(err, errors.CodeInternal, "error checking metric label %q", label)
}
return hasLabel, nil
}
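// The label probe above compiles to roughly the following (bound args elided):
//
//	SELECT count(*) > 0 AS has_label
//	FROM <DBName>.<AttributesMetadataTableName>
//	WHERE metric_name = ? AND attr_name = ?
//	LIMIT 1
//
// validateMetricLabels uses it to require an "le" label for histograms and a
// "quantile" label for summaries.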
func (m *module) insertMetricsMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error {
createdAt := time.Now().UnixMilli()
ib := sqlbuilder.NewInsertBuilder()
ib.InsertInto(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.UpdatedMetadataTableName))
ib.Cols("metric_name", "temporality", "is_monotonic", "type", "description", "unit", "created_at")
ib.Values(
req.MetricName,
req.Temporality,
req.IsMonotonic,
req.Type,
req.Description,
req.Unit,
createdAt,
)
query, args := ib.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
if err := db.Exec(ctx, query, args...); err != nil {
return errors.WrapInternalf(err, errors.CodeInternal, "failed to insert metrics metadata")
}
// Set in cache after successful DB insert
metricMetadata := &metricsexplorertypes.MetricMetadata{
Description: req.Description,
MetricType: req.Type,
MetricUnit: req.Unit,
Temporality: req.Temporality,
IsMonotonic: req.IsMonotonic,
}
cacheKey := generateMetricMetadataCacheKey(req.MetricName)
if err := m.cache.Set(ctx, orgID, cacheKey, metricMetadata, 0); err != nil {
m.logger.WarnContext(ctx, "failed to set metric metadata in cache after insert", "metric_name", req.MetricName, "error", err)
}
return nil
}
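// Write-path note: the cache update after a successful insert is best-effort; a
// cache failure is only logged, so a subsequent read may briefly serve stale
// metadata until the key is repopulated.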
func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter, startMillis, endMillis int64) (*sqlbuilder.WhereClause, error) {
expression := ""
if filter != nil {
expression = strings.TrimSpace(filter.Expression)
}
if expression == "" {
return sqlbuilder.NewWhereClause(), nil
}
// TODO(nikhilmantri0902, srikanthccv): confirm whether this is the right way of dealing with whereClauseSelectors
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(expression)
for idx := range whereClauseSelectors {
whereClauseSelectors[idx].Signal = telemetrytypes.SignalMetrics
whereClauseSelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
// whereClauseSelectors[idx].MetricContext = &telemetrytypes.MetricContext{
// MetricName: query.Aggregations[0].MetricName,
// }
// whereClauseSelectors[idx].Source = query.Source
}
keys, _, err := m.telemetryMetadataStore.GetKeysMulti(ctx, whereClauseSelectors)
if err != nil {
return nil, err
}
opts := querybuilder.FilterExprVisitorOpts{
Logger: m.logger,
FieldMapper: m.fieldMapper,
ConditionBuilder: m.condBuilder,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
Name: "labels"},
FieldKeys: keys,
}
startNs := uint64(startMillis * 1_000_000)
endNs := uint64(endMillis * 1_000_000)
whereClause, err := querybuilder.PrepareWhereClause(expression, opts, startNs, endNs)
if err != nil {
return nil, err
}
if whereClause == nil || whereClause.WhereClause == nil {
return sqlbuilder.NewWhereClause(), nil
}
return whereClause.WhereClause, nil
}
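// Illustrative call (hypothetical filter expression): the returned clause can be
// attached to the builders below via AddWhereClause.
//
//	where, err := m.buildFilterClause(ctx,
//		&qbtypes.Filter{Expression: "service.name = 'cartservice'"},
//		req.Start, req.End)
//	if err != nil {
//		return nil, err
//	}
//	tsSB.AddWhereClause(where)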
func (m *module) fetchMetricsStatsWithSamples(
ctx context.Context,
req *metricsexplorertypes.StatsRequest,
filterWhereClause *sqlbuilder.WhereClause,
normalized bool,
orderBy *qbtypes.OrderBy,
) ([]metricsexplorertypes.Stat, uint64, error) {
start, end, distributedTsTable, localTsTable := telemetrymetrics.WhichTSTableToUse(uint64(req.Start), uint64(req.End), nil)
samplesTable := telemetrymetrics.WhichSamplesTableToUse(uint64(req.Start), uint64(req.End), metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil)
countExp := telemetrymetrics.CountExpressionForSamplesTable(samplesTable)
// Timeseries counts per metric
tsSB := sqlbuilder.NewSelectBuilder()
tsSB.Select(
"metric_name",
"uniq(fingerprint) AS timeseries",
)
tsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
tsSB.Where(tsSB.Between("unix_milli", start, end))
tsSB.Where("NOT startsWith(metric_name, 'signoz')")
tsSB.Where(tsSB.E("__normalized", normalized))
if filterWhereClause != nil {
tsSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
}
tsSB.GroupBy("metric_name")
// Samples counts per metric
samplesSB := sqlbuilder.NewSelectBuilder()
samplesSB.Select(
"metric_name",
fmt.Sprintf("%s AS samples", countExp),
)
samplesSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
samplesSB.Where(samplesSB.Between("unix_milli", req.Start, req.End))
samplesSB.Where("NOT startsWith(metric_name, 'signoz')")
ctes := []*sqlbuilder.CTEQueryBuilder{
sqlbuilder.CTEQuery("__time_series_counts").As(tsSB),
}
if filterWhereClause != nil {
fingerprintSB := sqlbuilder.NewSelectBuilder()
fingerprintSB.Select("fingerprint")
fingerprintSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, localTsTable))
fingerprintSB.Where(fingerprintSB.Between("unix_milli", start, end))
fingerprintSB.Where("NOT startsWith(metric_name, 'signoz')")
fingerprintSB.Where(fingerprintSB.E("__normalized", normalized))
fingerprintSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
fingerprintSB.GroupBy("fingerprint")
ctes = append(ctes, sqlbuilder.CTEQuery("__filtered_fingerprints").As(fingerprintSB))
samplesSB.Where("fingerprint IN (SELECT fingerprint FROM __filtered_fingerprints)")
}
samplesSB.GroupBy("metric_name")
ctes = append(ctes, sqlbuilder.CTEQuery("__sample_counts").As(samplesSB))
cteBuilder := sqlbuilder.With(ctes...)
finalSB := cteBuilder.Select(
"COALESCE(ts.metric_name, s.metric_name) AS metric_name",
"COALESCE(ts.timeseries, 0) AS timeseries",
"COALESCE(s.samples, 0) AS samples",
"COUNT(*) OVER() AS total",
)
finalSB.From("__time_series_counts ts")
finalSB.JoinWithOption(sqlbuilder.FullOuterJoin, "__sample_counts s", "ts.metric_name = s.metric_name")
finalSB.Where("(COALESCE(ts.timeseries, 0) > 0 OR COALESCE(s.samples, 0) > 0)")
orderByColumn, orderDirection, err := getStatsOrderByColumn(orderBy)
if err != nil {
return nil, 0, err
}
finalSB.OrderBy(
fmt.Sprintf("%s %s", orderByColumn, strings.ToUpper(orderDirection)),
"metric_name ASC",
)
finalSB.Limit(req.Limit)
finalSB.Offset(req.Offset)
query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute metrics stats with samples query")
}
defer rows.Close()
metricStats := make([]metricsexplorertypes.Stat, 0)
var total uint64
for rows.Next() {
var (
metricStat metricsexplorertypes.Stat
rowTotal uint64
)
if err := rows.Scan(&metricStat.MetricName, &metricStat.TimeSeries, &metricStat.Samples, &rowTotal); err != nil {
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metrics stats row")
}
metricStats = append(metricStats, metricStat)
total = rowTotal
}
if err := rows.Err(); err != nil {
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metrics stats rows")
}
return metricStats, total, nil
}
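// Shape of the statement assembled above (simplified; __filtered_fingerprints is
// emitted only when a filter expression is present):
//
//	WITH __time_series_counts AS (SELECT metric_name, uniq(fingerprint) ...),
//	     __filtered_fingerprints AS (SELECT fingerprint ...),
//	     __sample_counts AS (SELECT metric_name, <count expr> AS samples ...)
//	SELECT COALESCE(ts.metric_name, s.metric_name) AS metric_name,
//	       COALESCE(ts.timeseries, 0) AS timeseries,
//	       COALESCE(s.samples, 0) AS samples,
//	       COUNT(*) OVER () AS total
//	FROM __time_series_counts ts
//	FULL OUTER JOIN __sample_counts s ON ts.metric_name = s.metric_name
//	WHERE ... ORDER BY ... LIMIT ? OFFSET ?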
func (m *module) computeTimeseriesTreemap(ctx context.Context, req *metricsexplorertypes.TreemapRequest, filterWhereClause *sqlbuilder.WhereClause) ([]metricsexplorertypes.TreemapEntry, error) {
start, end, distributedTsTable, _ := telemetrymetrics.WhichTSTableToUse(uint64(req.Start), uint64(req.End), nil)
totalTSBuilder := sqlbuilder.NewSelectBuilder()
totalTSBuilder.Select("uniq(fingerprint) AS total_time_series")
totalTSBuilder.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
totalTSBuilder.Where(totalTSBuilder.Between("unix_milli", start, end))
totalTSBuilder.Where(totalTSBuilder.E("__normalized", false))
metricsSB := sqlbuilder.NewSelectBuilder()
metricsSB.Select(
"metric_name",
"uniq(fingerprint) AS total_value",
)
metricsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
metricsSB.Where(metricsSB.Between("unix_milli", start, end))
metricsSB.Where("NOT startsWith(metric_name, 'signoz')")
metricsSB.Where(metricsSB.E("__normalized", false))
if filterWhereClause != nil {
metricsSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
}
metricsSB.GroupBy("metric_name")
cteBuilder := sqlbuilder.With(
sqlbuilder.CTEQuery("__total_time_series").As(totalTSBuilder),
sqlbuilder.CTEQuery("__metric_totals").As(metricsSB),
)
finalSB := cteBuilder.Select(
"mt.metric_name",
"mt.total_value",
"CASE WHEN tts.total_time_series = 0 THEN 0 ELSE (mt.total_value * 100.0 / tts.total_time_series) END AS percentage",
)
finalSB.From("__metric_totals mt")
finalSB.Join("__total_time_series tts", "1=1")
finalSB.OrderBy("percentage").Desc()
finalSB.Limit(req.Limit)
query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute timeseries treemap query")
}
defer rows.Close()
entries := make([]metricsexplorertypes.TreemapEntry, 0)
for rows.Next() {
var treemapEntry metricsexplorertypes.TreemapEntry
if err := rows.Scan(&treemapEntry.MetricName, &treemapEntry.TotalValue, &treemapEntry.Percentage); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan timeseries treemap row")
}
entries = append(entries, treemapEntry)
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating timeseries treemap rows")
}
return entries, nil
}
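// Each entry's share is computed in SQL as
//
//	percentage = total_value * 100.0 / total_time_series
//
// with the CASE guard returning 0 when the window holds no series at all,
// avoiding a division by zero.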
func (m *module) computeSamplesTreemap(ctx context.Context, req *metricsexplorertypes.TreemapRequest, filterWhereClause *sqlbuilder.WhereClause) ([]metricsexplorertypes.TreemapEntry, error) {
start, end, distributedTsTable, localTsTable := telemetrymetrics.WhichTSTableToUse(uint64(req.Start), uint64(req.End), nil)
samplesTable := telemetrymetrics.WhichSamplesTableToUse(uint64(req.Start), uint64(req.End), metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil)
countExp := telemetrymetrics.CountExpressionForSamplesTable(samplesTable)
candidateLimit := req.Limit + 50
metricCandidatesSB := sqlbuilder.NewSelectBuilder()
metricCandidatesSB.Select("metric_name")
metricCandidatesSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
metricCandidatesSB.Where("NOT startsWith(metric_name, 'signoz')")
metricCandidatesSB.Where(metricCandidatesSB.E("__normalized", false))
metricCandidatesSB.Where(metricCandidatesSB.Between("unix_milli", start, end))
if filterWhereClause != nil {
metricCandidatesSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
}
metricCandidatesSB.GroupBy("metric_name")
metricCandidatesSB.OrderBy("uniq(fingerprint) DESC")
metricCandidatesSB.Limit(candidateLimit)
cteQueries := []*sqlbuilder.CTEQueryBuilder{
sqlbuilder.CTEQuery("__metric_candidates").As(metricCandidatesSB),
}
totalSamplesSB := sqlbuilder.NewSelectBuilder()
totalSamplesSB.Select(fmt.Sprintf("%s AS total_samples", countExp))
totalSamplesSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
totalSamplesSB.Where(totalSamplesSB.Between("unix_milli", req.Start, req.End))
sampleCountsSB := sqlbuilder.NewSelectBuilder()
sampleCountsSB.Select(
"metric_name",
fmt.Sprintf("%s AS samples", countExp),
)
sampleCountsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTable))
sampleCountsSB.Where(sampleCountsSB.Between("unix_milli", req.Start, req.End))
sampleCountsSB.Where("metric_name IN (SELECT metric_name FROM __metric_candidates)")
if filterWhereClause != nil {
fingerprintSB := sqlbuilder.NewSelectBuilder()
fingerprintSB.Select("fingerprint")
fingerprintSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, localTsTable))
fingerprintSB.Where(fingerprintSB.Between("unix_milli", start, end))
fingerprintSB.Where("NOT startsWith(metric_name, 'signoz')")
fingerprintSB.Where(fingerprintSB.E("__normalized", false))
fingerprintSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterWhereClause))
fingerprintSB.Where("metric_name IN (SELECT metric_name FROM __metric_candidates)")
fingerprintSB.GroupBy("fingerprint")
sampleCountsSB.Where("fingerprint IN (SELECT fingerprint FROM __filtered_fingerprints)")
cteQueries = append(cteQueries, sqlbuilder.CTEQuery("__filtered_fingerprints").As(fingerprintSB))
}
sampleCountsSB.GroupBy("metric_name")
cteQueries = append(cteQueries,
sqlbuilder.CTEQuery("__sample_counts").As(sampleCountsSB),
sqlbuilder.CTEQuery("__total_samples").As(totalSamplesSB),
)
cteBuilder := sqlbuilder.With(cteQueries...)
finalSB := cteBuilder.Select(
"mc.metric_name",
"COALESCE(sc.samples, 0) AS samples",
"CASE WHEN ts.total_samples = 0 THEN 0 ELSE (COALESCE(sc.samples, 0) * 100.0 / ts.total_samples) END AS percentage",
)
finalSB.From("__metric_candidates mc")
finalSB.JoinWithOption(sqlbuilder.LeftJoin, "__sample_counts sc", "mc.metric_name = sc.metric_name")
finalSB.Join("__total_samples ts", "1=1")
finalSB.OrderBy("percentage DESC")
finalSB.Limit(req.Limit)
query, args := finalSB.BuildWithFlavor(sqlbuilder.ClickHouse)
db := m.telemetryStore.ClickhouseDB()
rows, err := db.Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute samples treemap query")
}
defer rows.Close()
entries := make([]metricsexplorertypes.TreemapEntry, 0)
for rows.Next() {
var treemapEntry metricsexplorertypes.TreemapEntry
if err := rows.Scan(&treemapEntry.MetricName, &treemapEntry.TotalValue, &treemapEntry.Percentage); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan samples treemap row")
}
entries = append(entries, treemapEntry)
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating samples treemap rows")
}
return entries, nil
}
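// The samples treemap mirrors the timeseries one, but first narrows the work to
// a candidate set: the top (req.Limit + 50) metrics by series count. The extra
// 50 appears to be an over-fetch margin, since the top-N by series count is not
// necessarily the top-N by sample count; candidates are then re-ranked by their
// share of __total_samples.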

View File

@@ -1,25 +0,0 @@
package metricsexplorer
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// Handler exposes HTTP handlers for the metrics module.
type Handler interface {
GetStats(http.ResponseWriter, *http.Request)
GetTreemap(http.ResponseWriter, *http.Request)
GetMetricMetadata(http.ResponseWriter, *http.Request)
UpdateMetricMetadata(http.ResponseWriter, *http.Request)
}
// Module represents the metrics module interface.
type Module interface {
GetStats(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.StatsRequest) (*metricsexplorertypes.StatsResponse, error)
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
}

View File

@@ -19,7 +19,7 @@ type inMemoryQueryProgressTracker struct {
func (tracker *inMemoryQueryProgressTracker) ReportQueryStarted(
queryId string,
) (postQueryCleanup func(), apiErr *model.ApiError) {
) (postQueryCleanup func(), err *model.ApiError) {
tracker.lock.Lock()
defer tracker.lock.Unlock()

View File

@@ -9,7 +9,7 @@ type QueryProgressTracker interface {
// Tells the tracker that the query with id `queryId` has started.
// Progress can only be reported and tracked for a query that is in progress.
// Returns a cleanup function that must be called after the query finishes.
ReportQueryStarted(queryId string) (postQueryCleanup func(), apiErr *model.ApiError)
ReportQueryStarted(queryId string) (postQueryCleanup func(), err *model.ApiError)
// Report progress stats received from clickhouse for `queryId`
ReportQueryProgress(queryId string, chProgress *clickhouse.Progress) *model.ApiError
@@ -18,7 +18,7 @@ type QueryProgressTracker interface {
// The returned channel will produce `QueryProgress` instances representing
// the latest state of query progress stats. Also returns a function that
// can be called to unsubscribe before the query finishes, if needed.
SubscribeToQueryProgress(queryId string) (ch <-chan model.QueryProgress, unsubscribe func(), apiErr *model.ApiError)
SubscribeToQueryProgress(queryId string) (ch <-chan model.QueryProgress, unsubscribe func(), err *model.ApiError)
}
func NewQueryProgressTracker() QueryProgressTracker {

View File

@@ -1297,8 +1297,8 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
// check if there are existing things to be done
for _, tableName := range tableNameArray {
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
if apiErr != nil {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
}
if statusItem.Status == constants.StatusPending {
@@ -1378,8 +1378,8 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
if err != nil {
zap.L().Error("error in setting cold storage", zap.Error(err))
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
if apiErr == nil {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err == nil {
_, dbErr := r.
sqlDB.
BunDB().
@@ -1455,8 +1455,8 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
// check if there are existing things to be done
for _, tableName := range tableNames {
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
if apiErr != nil {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
}
if statusItem.Status == constants.StatusPending {
@@ -1523,8 +1523,8 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
if err != nil {
zap.L().Error("Error in setting cold storage", zap.Error(err))
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
if apiErr == nil {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err == nil {
_, dbErr := r.
sqlDB.
BunDB().
@@ -1669,8 +1669,8 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
}
for _, tableName := range tableNames {
statusItem, apiErr := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
if apiErr != nil {
statusItem, err := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
if err != nil {
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error in processing custom_retention_ttl_status check sql query")
}
if statusItem.Status == constants.StatusPending {
@@ -1974,8 +1974,8 @@ func (r *ClickHouseReader) checkCustomRetentionTTLStatusItem(ctx context.Context
}
func (r *ClickHouseReader) updateCustomRetentionTTLStatus(ctx context.Context, orgID, tableName, status string) {
statusItem, apiErr := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
if apiErr == nil && statusItem != nil {
statusItem, err := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
if err == nil && statusItem != nil {
_, dbErr := r.sqlDB.BunDB().NewUpdate().
Model(new(types.TTLSetting)).
Set("updated_at = ?", time.Now()).
@@ -2126,8 +2126,8 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
signozMetricDBName + "." + signozTSLocalTableNameV41Week,
}
for _, tableName := range tableNames {
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
if apiErr != nil {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err != nil {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
}
if statusItem.Status == constants.StatusPending {
@@ -2176,8 +2176,8 @@ func (r *ClickHouseReader) setTTLMetrics(ctx context.Context, orgID string, para
err := r.setColdStorage(context.Background(), tableName, params.ColdStorageVolume)
if err != nil {
zap.L().Error("Error in setting cold storage", zap.Error(err))
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
if apiErr == nil {
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
if err == nil {
_, dbErr := r.
sqlDB.
BunDB().
@@ -2285,17 +2285,17 @@ func (r *ClickHouseReader) checkTTLStatusItem(ctx context.Context, orgID string,
return ttl, nil
}
// getTTLQueryStatus fetches ttl_status table status from DB
func (r *ClickHouseReader) getTTLQueryStatus(ctx context.Context, orgID string, tableNameArray []string) (string, *model.ApiError) {
// setTTLQueryStatus fetches ttl_status table status from DB
func (r *ClickHouseReader) setTTLQueryStatus(ctx context.Context, orgID string, tableNameArray []string) (string, *model.ApiError) {
failFlag := false
status := constants.StatusSuccess
for _, tableName := range tableNameArray {
statusItem, apiErr := r.checkTTLStatusItem(ctx, orgID, tableName)
statusItem, err := r.checkTTLStatusItem(ctx, orgID, tableName)
emptyStatusStruct := new(types.TTLSetting)
if statusItem == emptyStatusStruct {
return "", nil
}
if apiErr != nil {
if err != nil {
return "", &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing ttl_status check sql query")}
}
if statusItem.Status == constants.StatusPending && statusItem.UpdatedAt.Unix()-time.Now().Unix() < 3600 {
@@ -2439,26 +2439,20 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
switch ttlParams.Type {
case constants.TraceTTL:
tableNameArray := []string{
r.TraceDB + "." + r.traceTableName,
r.TraceDB + "." + r.traceResourceTableV3,
r.TraceDB + "." + signozErrorIndexTable,
r.TraceDB + "." + signozUsageExplorerTable,
r.TraceDB + "." + defaultDependencyGraphTable,
r.TraceDB + "." + r.traceSummaryTable,
}
tableNameArray := []string{signozTraceDBName + "." + signozTraceTableName, signozTraceDBName + "." + signozDurationMVTable, signozTraceDBName + "." + signozSpansTable, signozTraceDBName + "." + signozErrorIndexTable, signozTraceDBName + "." + signozUsageExplorerTable, signozTraceDBName + "." + defaultDependencyGraphTable}
tableNameArray = getLocalTableNameArray(tableNameArray)
status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
if apiErr != nil {
return nil, apiErr
status, err := r.setTTLQueryStatus(ctx, orgID, tableNameArray)
if err != nil {
return nil, err
}
dbResp, apiErr := getTracesTTL()
if apiErr != nil {
return nil, apiErr
dbResp, err := getTracesTTL()
if err != nil {
return nil, err
}
ttlQuery, apiErr := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
if apiErr != nil {
return nil, apiErr
ttlQuery, err := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
if err != nil {
return nil, err
}
ttlQuery.TTL = ttlQuery.TTL / 3600 // convert to hours
if ttlQuery.ColdStorageTTL != -1 {
@@ -2471,17 +2465,17 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
case constants.MetricsTTL:
tableNameArray := []string{signozMetricDBName + "." + signozSampleTableName}
tableNameArray = getLocalTableNameArray(tableNameArray)
status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
if apiErr != nil {
return nil, apiErr
status, err := r.setTTLQueryStatus(ctx, orgID, tableNameArray)
if err != nil {
return nil, err
}
dbResp, apiErr := getMetricsTTL()
if apiErr != nil {
return nil, apiErr
dbResp, err := getMetricsTTL()
if err != nil {
return nil, err
}
ttlQuery, apiErr := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
if apiErr != nil {
return nil, apiErr
ttlQuery, err := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
if err != nil {
return nil, err
}
ttlQuery.TTL = ttlQuery.TTL / 3600 // convert to hours
if ttlQuery.ColdStorageTTL != -1 {
@@ -2494,17 +2488,17 @@ func (r *ClickHouseReader) GetTTL(ctx context.Context, orgID string, ttlParams *
case constants.LogsTTL:
tableNameArray := []string{r.logsDB + "." + r.logsTableName}
tableNameArray = getLocalTableNameArray(tableNameArray)
status, apiErr := r.getTTLQueryStatus(ctx, orgID, tableNameArray)
if apiErr != nil {
return nil, apiErr
status, err := r.setTTLQueryStatus(ctx, orgID, tableNameArray)
if err != nil {
return nil, err
}
dbResp, apiErr := getLogsTTL()
if apiErr != nil {
return nil, apiErr
dbResp, err := getLogsTTL()
if err != nil {
return nil, err
}
ttlQuery, apiErr := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
if apiErr != nil {
return nil, apiErr
ttlQuery, err := r.checkTTLStatusItem(ctx, orgID, tableNameArray[0])
if err != nil {
return nil, err
}
ttlQuery.TTL = ttlQuery.TTL / 3600 // convert to hours
if ttlQuery.ColdStorageTTL != -1 {
@@ -2687,19 +2681,19 @@ func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams
zap.L().Error("errorId missing from params")
return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("ErrorID missing from params")}
}
var apiErr *model.ApiError
var err *model.ApiError
getNextPrevErrorIDsResponse := model.NextPrevErrorIDs{
GroupID: queryParams.GroupID,
}
getNextPrevErrorIDsResponse.NextErrorID, getNextPrevErrorIDsResponse.NextTimestamp, apiErr = r.getNextErrorID(ctx, queryParams)
if apiErr != nil {
zap.L().Error("Unable to get next error ID due to err: ", zap.Error(apiErr))
return nil, apiErr
getNextPrevErrorIDsResponse.NextErrorID, getNextPrevErrorIDsResponse.NextTimestamp, err = r.getNextErrorID(ctx, queryParams)
if err != nil {
zap.L().Error("Unable to get next error ID due to err: ", zap.Error(err))
return nil, err
}
getNextPrevErrorIDsResponse.PrevErrorID, getNextPrevErrorIDsResponse.PrevTimestamp, apiErr = r.getPrevErrorID(ctx, queryParams)
if apiErr != nil {
zap.L().Error("Unable to get prev error ID due to err: ", zap.Error(apiErr))
return nil, apiErr
getNextPrevErrorIDsResponse.PrevErrorID, getNextPrevErrorIDsResponse.PrevTimestamp, err = r.getPrevErrorID(ctx, queryParams)
if err != nil {
zap.L().Error("Unable to get prev error ID due to err: ", zap.Error(err))
return nil, err
}
return &getNextPrevErrorIDsResponse, nil

View File

@@ -659,11 +659,6 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
router.HandleFunc("/api/v1/metrics/{metric_name}/metadata",
am.ViewAccess(ah.UpdateMetricsMetadata)).
Methods(http.MethodPost)
// v2 endpoints
router.HandleFunc("/api/v2/metrics/stats", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetStats)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/treemap", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetTreemap)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.GetMetricMetadata)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.ViewAccess(ah.Signoz.Handlers.Metrics.UpdateMetricMetadata)).Methods(http.MethodPost)
}
func Intersection(a, b []int) (c []int) {
@@ -985,14 +980,14 @@ func (aH *APIHandler) metaForLinks(ctx context.Context, rule *ruletypes.Gettable
keys := make(map[string]v3.AttributeKey)
if rule.AlertType == ruletypes.AlertTypeLogs {
logFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
if apiErr == nil {
logFields, err := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
if err == nil {
params := &v3.QueryRangeParamsV3{
CompositeQuery: rule.RuleCondition.CompositeQuery,
}
keys = model.GetLogFieldsV3(ctx, params, logFields)
} else {
zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(apiErr))
zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(err))
}
} else if rule.AlertType == ruletypes.AlertTypeTraces {
traceFields, err := aH.reader.GetSpanAttributeKeysByNames(ctx, logsv3.GetFieldNames(rule.PostableRule.RuleCondition.CompositeQuery))
@@ -4295,9 +4290,9 @@ func (aH *APIHandler) getQueryBuilderSuggestions(w http.ResponseWriter, r *http.
return
}
response, apiErr := aH.reader.GetQBFilterSuggestionsForLogs(r.Context(), req)
if apiErr != nil {
RespondError(w, apiErr, nil)
response, err := aH.reader.GetQBFilterSuggestionsForLogs(r.Context(), req)
if err != nil {
RespondError(w, err, nil)
return
}
@@ -4453,9 +4448,10 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
}
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, errQuriesByName)
logsFields, err := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
return
}
// get the fields if any logs query is present
@@ -4492,12 +4488,12 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
// Hook up query progress tracking if requested
queryIdHeader := r.Header.Get("X-SIGNOZ-QUERY-ID")
if len(queryIdHeader) > 0 {
onQueryFinished, apiErr := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)
onQueryFinished, err := aH.reader.ReportQueryStartForProgressTracking(queryIdHeader)
if apiErr != nil {
if err != nil {
zap.L().Error(
"couldn't report query start for progress tracking",
zap.String("queryId", queryIdHeader), zap.Error(apiErr),
zap.String("queryId", queryIdHeader), zap.Error(err),
)
} else {
@@ -4808,9 +4804,10 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
// get the fields if any logs query is present
logsFields, apiErr := aH.reader.GetLogFieldsFromNames(r.Context(), logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if apiErr != nil {
RespondError(w, apiErr, nil)
logsFields, err := aH.reader.GetLogFieldsFromNames(r.Context(), logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
if err != nil {
apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
RespondError(w, apiErrObj, nil)
return
}
fields := model.GetLogFieldsV3(r.Context(), queryRangeParams, logsFields)

View File

@@ -293,7 +293,7 @@ func (m *Manager) dashboardUuid(integrationId string, dashboardId string) string
}
func (m *Manager) parseDashboardUuid(dashboardUuid string) (
integrationId string, dashboardId string, apiErr *model.ApiError,
integrationId string, dashboardId string, err *model.ApiError,
) {
parts := strings.SplitN(dashboardUuid, "--", 3)
if len(parts) != 3 || parts[0] != "integration" {

View File

@@ -62,23 +62,23 @@ func (receiver *SummaryService) FilterValues(ctx context.Context, orgID valuer.U
response.FilterValues = filterValues
return &response, nil
case "metric_unit":
attributes, apiErr := receiver.reader.GetAllMetricFilterUnits(ctx, params)
if apiErr != nil {
return nil, apiErr
attributes, err := receiver.reader.GetAllMetricFilterUnits(ctx, params)
if err != nil {
return nil, err
}
response.FilterValues = attributes
return &response, nil
case "metric_type":
attributes, apiErr := receiver.reader.GetAllMetricFilterTypes(ctx, params)
if apiErr != nil {
return nil, apiErr
attributes, err := receiver.reader.GetAllMetricFilterTypes(ctx, params)
if err != nil {
return nil, err
}
response.FilterValues = attributes
return &response, nil
default:
attributes, apiErr := receiver.reader.GetAllMetricFilterAttributeValues(ctx, params)
if apiErr != nil {
return nil, apiErr
attributes, err := receiver.reader.GetAllMetricFilterAttributeValues(ctx, params)
if err != nil {
return nil, err
}
response.FilterValues = attributes
return &response, nil
@@ -108,45 +108,45 @@ func (receiver *SummaryService) GetMetricsSummary(ctx context.Context, orgID val
})
g.Go(func() error {
dataPoints, apiErr := receiver.reader.GetMetricsDataPoints(ctx, metricName)
if apiErr != nil {
return apiErr.ToError()
dataPoints, err := receiver.reader.GetMetricsDataPoints(ctx, metricName)
if err != nil {
return err
}
metricDetailsDTO.Samples = dataPoints
return nil
})
g.Go(func() error {
lastReceived, apiErr := receiver.reader.GetMetricsLastReceived(ctx, metricName)
if apiErr != nil {
return apiErr.ToError()
lastReceived, err := receiver.reader.GetMetricsLastReceived(ctx, metricName)
if err != nil {
return err
}
metricDetailsDTO.LastReceived = lastReceived
return nil
})
g.Go(func() error {
totalSeries, apiErr := receiver.reader.GetTotalTimeSeriesForMetricName(ctx, metricName)
if apiErr != nil {
return apiErr.ToError()
totalSeries, err := receiver.reader.GetTotalTimeSeriesForMetricName(ctx, metricName)
if err != nil {
return err
}
metricDetailsDTO.TimeSeriesTotal = totalSeries
return nil
})
g.Go(func() error {
activeSeries, apiErr := receiver.reader.GetActiveTimeSeriesForMetricName(ctx, metricName, 120*time.Minute)
if apiErr != nil {
return apiErr.ToError()
activeSeries, err := receiver.reader.GetActiveTimeSeriesForMetricName(ctx, metricName, 120*time.Minute)
if err != nil {
return err
}
metricDetailsDTO.TimeSeriesActive = activeSeries
return nil
})
g.Go(func() error {
attributes, apiErr := receiver.reader.GetAttributesForMetricName(ctx, metricName, nil, nil, nil)
if apiErr != nil {
return apiErr.ToError()
attributes, err := receiver.reader.GetAttributesForMetricName(ctx, metricName, nil, nil, nil)
if err != nil {
return err
}
if attributes != nil {
metricDetailsDTO.Attributes = *attributes

View File

@@ -53,14 +53,9 @@ func getClickHouseTracesColumnDataType(columnDataType v3.AttributeKeyDataType) s
func getColumnName(key v3.AttributeKey, replaceAlias bool) string {
if replaceAlias {
// It should be in DeprecatedStaticFieldsTraces
if _, ok := constants.DeprecatedStaticFieldsTraces[key.Key]; ok {
// It should not be in NewStaticFieldsTraces
if _, ok := constants.NewStaticFieldsTraces[key.Key]; !ok {
// It should have a mapping in OldToNewTraceFieldsMap
if _, ok := constants.OldToNewTraceFieldsMap[key.Key]; ok {
key = constants.NewStaticFieldsTraces[constants.OldToNewTraceFieldsMap[key.Key]]
}
key = constants.NewStaticFieldsTraces[constants.OldToNewTraceFieldsMap[key.Key]]
}
}
}

View File

@@ -200,7 +200,7 @@ func Test_getSelectLabels(t *testing.T) {
want: " name as `name`, `resource_string_service_name` as `service_name`",
},
{
name: "deprecated Columns",
name: "depricated Columns",
args: args{
groupBy: []v3.AttributeKey{
{Key: "spanKind", DataType: v3.AttributeKeyDataTypeString},
@@ -209,20 +209,19 @@ func Test_getSelectLabels(t *testing.T) {
{Key: "spanID", DataType: v3.AttributeKeyDataTypeString},
{Key: "serviceName", DataType: v3.AttributeKeyDataTypeString},
{Key: "httpRoute", DataType: v3.AttributeKeyDataTypeString},
{Key: "kind", DataType: v3.AttributeKeyDataTypeInt64},
},
},
want: " kind_string as `spanKind`, status_message as `statusMessage`, trace_id as `traceID`, span_id as `spanID`, resource_string_service$$name as `serviceName`, attribute_string_http$$route as `httpRoute`, kind as `kind`",
want: " kind_string as `spanKind`, status_message as `statusMessage`, trace_id as `traceID`, span_id as `spanID`, resource_string_service$$name as `serviceName`, attribute_string_http$$route as `httpRoute`",
},
{
name: "non depricated Columns",
args: args{
groupBy: []v3.AttributeKey{
{Key: "name", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
{Key: "kind_string", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
{Key: "kind", DataType: v3.AttributeKeyDataTypeString, Type: v3.AttributeKeyTypeTag},
},
},
want: " name as `name`, kind_string as `kind_string`",
want: " name as `name`, kind as `kind`",
},
}
for _, tt := range tests {

View File

@@ -362,6 +362,11 @@ var NewStaticFieldsTraces = map[string]v3.AttributeKey{
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"kind": {
Key: "kind",
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"kind_string": {
Key: "kind_string",
DataType: v3.AttributeKeyDataTypeString,
@@ -516,7 +521,7 @@ var DeprecatedStaticFieldsTraces = map[string]v3.AttributeKey{
},
"kind": {
Key: "kind",
DataType: v3.AttributeKeyDataTypeInt64,
DataType: v3.AttributeKeyDataTypeString,
IsColumn: true,
},
"spanKind": {

View File

@@ -95,7 +95,7 @@ type Reader interface {
GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error)
// Query Progress tracking helpers.
ReportQueryStartForProgressTracking(queryId string) (reportQueryFinished func(), apiErr *model.ApiError)
ReportQueryStartForProgressTracking(queryId string) (reportQueryFinished func(), err *model.ApiError)
SubscribeToQueryProgress(queryId string) (<-chan model.QueryProgress, func(), *model.ApiError)
GetCountOfThings(ctx context.Context, query string) (uint64, error)

View File

@@ -404,9 +404,9 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
if hasLogsQuery {
// check if any enrichment is required for logs if yes then enrich them
if logsv3.EnrichmentRequired(params) {
logsFields, apiErr := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if apiErr != nil {
return nil, apiErr.ToError()
logsFields, err := r.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(params.CompositeQuery))
if err != nil {
return nil, err
}
logsKeys := model.GetLogFieldsV3(ctx, params, logsFields)
r.logsKeys = logsKeys

View File

@@ -94,7 +94,6 @@ type PreparedWhereClause struct {
// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64, endNs uint64) (*PreparedWhereClause, error) {
// Set up the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := grammar.NewFilterQueryLexer(input)

View File

@@ -9,8 +9,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -48,7 +46,6 @@ type Handlers struct {
Session session.Handler
SpanPercentile spanpercentile.Handler
Services services.Handler
Metrics metricsexplorer.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing) Handlers {
@@ -65,7 +62,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
AuthDomain: implauthdomain.NewHandler(modules.AuthDomain),
Session: implsession.NewHandler(modules.Session),
Services: implservices.NewHandler(modules.Services),
Metrics: implmetricsexplorer.NewHandler(modules.Metrics),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
}
}

View File

@@ -35,8 +35,7 @@ func TestNewHandlers(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
require.NoError(t, err)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil)

View File

@@ -5,7 +5,6 @@ import (
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/apdex"
@@ -14,8 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
@@ -43,7 +40,6 @@ import (
"github.com/SigNoz/signoz/pkg/tokenizer"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
type Modules struct {
@@ -62,7 +58,6 @@ type Modules struct {
Session session.Module
Services services.Module
SpanPercentile spanpercentile.Module
Metrics metricsexplorer.Module
}
func NewModules(
@@ -75,10 +70,8 @@ func NewModules(
analytics analytics.Analytics,
querier querier.Querier,
telemetryStore telemetrystore.TelemetryStore,
telemetryMetadataStore telemetrytypes.MetadataStore,
authNs map[authtypes.AuthNProvider]authn.AuthN,
authz authz.AuthZ,
cache cache.Cache,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
@@ -97,10 +90,9 @@ func NewModules(
QuickFilter: quickfilter,
TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
RawDataExport: implrawdataexport.NewModule(querier),
AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore)), tokenizer, orgGetter),
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
Services: implservices.NewModule(querier, telemetryStore),
Metrics: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings),
}
}

View File

@@ -35,8 +35,7 @@ func TestNewModules(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
require.NoError(t, err)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil)
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -26,15 +26,9 @@ import (
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
pkgtokenizer "github.com/SigNoz/signoz/pkg/tokenizer"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
@@ -43,25 +37,24 @@ import (
type SigNoz struct {
*factory.Registry
Instrumentation instrumentation.Instrumentation
Analytics analytics.Analytics
Cache cache.Cache
Web web.Web
SQLStore sqlstore.SQLStore
TelemetryStore telemetrystore.TelemetryStore
TelemetryMetadataStore telemetrytypes.MetadataStore
Prometheus prometheus.Prometheus
Alertmanager alertmanager.Alertmanager
Querier querier.Querier
Zeus zeus.Zeus
Licensing licensing.Licensing
Emailing emailing.Emailing
Sharder sharder.Sharder
StatsReporter statsreporter.StatsReporter
Tokenizer pkgtokenizer.Tokenizer
Authz authz.AuthZ
Modules Modules
Handlers Handlers
Instrumentation instrumentation.Instrumentation
Analytics analytics.Analytics
Cache cache.Cache
Web web.Web
SQLStore sqlstore.SQLStore
TelemetryStore telemetrystore.TelemetryStore
Prometheus prometheus.Prometheus
Alertmanager alertmanager.Alertmanager
Querier querier.Querier
Zeus zeus.Zeus
Licensing licensing.Licensing
Emailing emailing.Emailing
Sharder sharder.Sharder
StatsReporter statsreporter.StatsReporter
Tokenizer pkgtokenizer.Tokenizer
Authz authz.AuthZ
Modules Modules
Handlers Handlers
}
func New(
@@ -316,30 +309,8 @@ func New(
return nil, err
}
// Initialize telemetry metadata store
// TODO: consolidate other telemetrymetadata.NewTelemetryMetaStore initializations to reuse this instance instead.
telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore(
providerSettings,
telemetrystore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
telemetrymeter.DBName,
telemetrymeter.SamplesAgg1dTableName,
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, authNs, authz)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing)
@@ -383,24 +354,23 @@ func New(
}
return &SigNoz{
Registry: registry,
Analytics: analytics,
Instrumentation: instrumentation,
Cache: cache,
Web: web,
SQLStore: sqlstore,
TelemetryStore: telemetrystore,
TelemetryMetadataStore: telemetryMetadataStore,
Prometheus: prometheus,
Alertmanager: alertmanager,
Querier: querier,
Zeus: zeus,
Licensing: licensing,
Emailing: emailing,
Sharder: sharder,
Tokenizer: tokenizer,
Authz: authz,
Modules: modules,
Handlers: handlers,
Registry: registry,
Analytics: analytics,
Instrumentation: instrumentation,
Cache: cache,
Web: web,
SQLStore: sqlstore,
TelemetryStore: telemetrystore,
Prometheus: prometheus,
Alertmanager: alertmanager,
Querier: querier,
Zeus: zeus,
Licensing: licensing,
Emailing: emailing,
Sharder: sharder,
Tokenizer: tokenizer,
Authz: authz,
Modules: modules,
Handlers: handlers,
}, nil
}

View File

@@ -66,13 +66,9 @@ func (c *conditionBuilder) conditionFor(
case qbtypes.FilterOperatorNotLike:
return sb.NotILike(tblFieldName, value), nil
case qbtypes.FilterOperatorRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`match(LOWER(%s), LOWER(%s))`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`match(LOWER(%s), LOWER(%s))`, tblFieldName, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`NOT match(LOWER(%s), LOWER(%s))`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`NOT match(LOWER(%s), LOWER(%s))`, tblFieldName, sb.Var(value)), nil
}
}
@@ -108,13 +104,9 @@ func (c *conditionBuilder) conditionFor(
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`match(%s, %s)`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`NOT match(%s, %s)`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`NOT match(%s, %s)`, tblFieldName, sb.Var(value)), nil
// between and not between
case qbtypes.FilterOperatorBetween:
values, ok := value.([]any)
@@ -226,8 +218,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {

View File

@@ -257,108 +257,6 @@ func TestConditionFor(t *testing.T) {
expectedSQL: "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NULL",
expectedError: nil,
},
{
name: "Regexp operator - body field",
key: telemetrytypes.TelemetryFieldKey{
Name: "body",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorRegexp,
value: "error.*failed",
expectedSQL: "match(LOWER(body), LOWER(?))",
expectedArgs: []any{"error.*failed"},
expectedError: nil,
},
{
name: "Not Regexp operator - body field",
key: telemetrytypes.TelemetryFieldKey{
Name: "body",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "debug|trace",
expectedSQL: "NOT match(LOWER(body), LOWER(?))",
expectedArgs: []any{"debug|trace"},
expectedError: nil,
},
{
name: "Regexp operator - string attribute",
key: telemetrytypes.TelemetryFieldKey{
Name: "http.url",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorRegexp,
value: "^https://.*\\.example\\.com.*$",
expectedSQL: "(match(attributes_string['http.url'], ?) AND mapContains(attributes_string, 'http.url') = ?)",
expectedArgs: []any{"^https://.*\\.example\\.com.*$", true},
expectedError: nil,
},
{
name: "Not Regexp operator - string attribute",
key: telemetrytypes.TelemetryFieldKey{
Name: "http.url",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "^http://localhost.*",
expectedSQL: "WHERE NOT match(attributes_string['http.url'], ?)",
expectedArgs: []any{"^http://localhost.*"},
expectedError: nil,
},
{
name: "Regexp operator - resource attribute",
key: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
operator: qbtypes.FilterOperatorRegexp,
value: "frontend-.*",
expectedSQL: "(match(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL), ?) AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL) IS NOT NULL)",
expectedArgs: []any{"frontend-.*"},
expectedError: nil,
},
{
name: "Not Regexp operator - resource attribute",
key: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "test-.*",
expectedSQL: "WHERE NOT match(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL), ?)",
expectedArgs: []any{"test-.*"},
expectedError: nil,
},
{
name: "Regexp operator - severity_text",
key: telemetrytypes.TelemetryFieldKey{
Name: "severity_text",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorRegexp,
value: "ERROR|FATAL|CRITICAL",
expectedSQL: "match(severity_text, ?)",
expectedArgs: []any{"ERROR|FATAL|CRITICAL"},
expectedError: nil,
},
{
name: "Not Regexp operator - severity_text",
key: telemetrytypes.TelemetryFieldKey{
Name: "severity_text",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "DEBUG|TRACE",
expectedSQL: "NOT match(severity_text, ?)",
expectedArgs: []any{"DEBUG|TRACE"},
expectedError: nil,
},
{
name: "Non-existent column",
key: telemetrytypes.TelemetryFieldKey{
@@ -378,7 +276,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {
@@ -433,7 +331,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
var err error
for _, key := range tc.keys {
cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if err != nil {
t.Fatalf("Error getting condition for key %s: %v", key.Name, err)
@@ -622,46 +520,6 @@ func TestConditionForJSONBodySearch(t *testing.T) {
expectedSQL: `(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ? OR JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ? OR JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ?)`,
expectedError: nil,
},
{
name: "Regexp operator - json body string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.method",
},
operator: qbtypes.FilterOperatorRegexp,
value: "GET|POST|PUT",
expectedSQL: `match(JSON_VALUE(body, '$."http"."method"'), ?)`,
expectedError: nil,
},
{
name: "Not Regexp operator - json body string",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.http.method",
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "DELETE|PATCH",
expectedSQL: `NOT match(JSON_VALUE(body, '$."http"."method"'), ?)`,
expectedError: nil,
},
{
name: "Regexp operator - json body with dots in path",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.user.email",
},
operator: qbtypes.FilterOperatorRegexp,
value: "^.*@example\\.com$",
expectedSQL: `match(JSON_VALUE(body, '$."user"."email"'), ?)`,
expectedError: nil,
},
{
name: "Not Regexp operator - json body nested path",
key: telemetrytypes.TelemetryFieldKey{
Name: "body.response.headers.content-type",
},
operator: qbtypes.FilterOperatorNotRegexp,
value: "^text/.*",
expectedSQL: `NOT match(JSON_VALUE(body, '$."response"."headers"."content-type"'), ?)`,
expectedError: nil,
},
}
fm := NewFieldMapper()
@@ -670,7 +528,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {


@@ -2387,7 +2387,7 @@ func TestFilterExprLogs(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
if err != nil {
@@ -2506,7 +2506,7 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
if err != nil {


@@ -172,28 +172,6 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
},
{
name: "Time series with materialised column using or with regex operator",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "materialized.key.name REGEXP 'redis.*' OR materialized.key.name = 'memcached'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
}
fm := NewFieldMapper()
@@ -287,34 +265,6 @@ func TestStatementBuilderListQuery(t *testing.T) {
},
expectedErr: nil,
},
{
name: "list query with mat col using or and regex operator",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{
Expression: "materialized.key.name REGEXP 'redis.*' OR materialized.key.name = 'memcached'",
},
Limit: 10,
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "materialized.key.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY `attribute_string_materialized$$key$$name` AS `materialized.key.name` desc LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "redis.*", true, "memcached", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
}
fm := NewFieldMapper()


@@ -74,13 +74,10 @@ func (c *conditionBuilder) conditionFor(
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`match(%s, %s)`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`NOT match(%s, %s)`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`NOT match(%s, %s)`, tblFieldName, sb.Var(value)), nil
// between and not between
case qbtypes.FilterOperatorBetween:
values, ok := value.([]any)
@@ -139,8 +136,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {
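For context on the escaping the deleted comments mention: a minimal sketch, assuming the huandu/go-sqlbuilder package, of why `$$` in materialized column names had to be doubled before being spliced in with fmt.Sprintf. The column name and pattern are illustrative.

```go
package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("count()").From("logs")

	// Materialized column names contain "$$"; Escape doubles every "$" so the
	// builder's interpolator emits the name literally instead of eating the signs.
	col := "`attribute_string_materialized$$key$$name`"
	sb.Where(fmt.Sprintf("match(%s, %s)", sqlbuilder.Escape(col), sb.Var("redis.*")))

	sql, args := sb.Build()
	fmt.Println(sql)  // the column appears with its "$$" intact
	fmt.Println(args) // [redis.*]
}
```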


@@ -348,13 +348,13 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
}
}
start, end, _, tbl := WhichTSTableToUse(start, end, query.Aggregations[0].TableHints)
start, end, tbl := WhichTSTableToUse(start, end, query.Aggregations[0].TableHints)
sb.From(fmt.Sprintf("%s.%s", DBName, tbl))
sb.Select("fingerprint")


@@ -54,39 +54,6 @@ func TestStatementBuilder(t *testing.T) {
},
expectedErr: nil,
},
{
name: "test_cumulative_rate_sum_with_mat_column",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "signoz_calls_total",
Type: metrictypes.SumType,
Temporality: metrictypes.Cumulative,
TimeAggregation: metrictypes.TimeAggregationRate,
SpaceAggregation: metrictypes.SpaceAggregationSum,
},
},
Filter: &qbtypes.Filter{
Expression: "materialized.key.name REGEXP 'cartservice' OR service.name = 'cartservice'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
},
{
name: "test_delta_rate_sum",
requestType: qbtypes.RequestTypeTimeSeries,
@@ -224,13 +191,6 @@ func TestStatementBuilder(t *testing.T) {
t.Fatalf("failed to load field keys: %v", err)
}
mockMetadataStore.KeysMap = keys
// NOTE: LoadFieldKeysFromJSON doesn't set Materialized field
// for keys, so we have to set it manually here for testing
if _, ok := mockMetadataStore.KeysMap["materialized.key.name"]; ok {
if len(mockMetadataStore.KeysMap["materialized.key.name"]) > 0 {
mockMetadataStore.KeysMap["materialized.key.name"][0].Materialized = true
}
}
statementBuilder := NewMetricQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),


@@ -8,8 +8,6 @@ import (
const (
DBName = "signoz_metrics"
UpdatedMetadataTableName = "distributed_updated_metadata"
UpdatedMetadataLocalTableName = "updated_metadata"
SamplesV4TableName = "distributed_samples_v4"
SamplesV4LocalTableName = "samples_v4"
SamplesV4Agg5mTableName = "distributed_samples_v4_agg_5m"
@@ -43,36 +41,29 @@ var (
offsetBucket = uint64(60 * time.Minute.Milliseconds())
)
// WhichTSTableToUse returns adjusted start, adjusted end, distributed table name, local table name
// in that order
func WhichTSTableToUse(
start, end uint64,
tableHints *metrictypes.MetricTableHints,
) (uint64, uint64, string, string) {
) (uint64, uint64, string) {
// if we have a hint for the table, we need to use it
// the hint will be used to override the default table selection logic
if tableHints != nil {
if tableHints.TimeSeriesTableName != "" {
var distributedTableName string
switch tableHints.TimeSeriesTableName {
case TimeseriesV4LocalTableName:
// adjust the start time to nearest 1 hour
start = start - (start % (oneHourInMilliseconds))
distributedTableName = TimeseriesV4TableName
case TimeseriesV46hrsLocalTableName:
// adjust the start time to nearest 6 hours
start = start - (start % (sixHoursInMilliseconds))
distributedTableName = TimeseriesV46hrsTableName
case TimeseriesV41dayLocalTableName:
// adjust the start time to nearest 1 day
start = start - (start % (oneDayInMilliseconds))
distributedTableName = TimeseriesV41dayTableName
case TimeseriesV41weekLocalTableName:
// adjust the start time to nearest 1 week
start = start - (start % (oneWeekInMilliseconds))
distributedTableName = TimeseriesV41weekTableName
}
return start, end, distributedTableName, tableHints.TimeSeriesTableName
return start, end, tableHints.TimeSeriesTableName
}
}
@@ -80,46 +71,26 @@ func WhichTSTableToUse(
// else if time range is less than 1 day and greater than 6 hours, we need to use the `time_series_v4_6hrs` table
// else if time range is less than 1 week and greater than 1 day, we need to use the `time_series_v4_1day` table
// else we need to use the `time_series_v4_1week` table
var distributedTableName string
var localTableName string
var tableName string
if end-start < sixHoursInMilliseconds {
// adjust the start time to nearest 1 hour
start = start - (start % (oneHourInMilliseconds))
distributedTableName = TimeseriesV4TableName
localTableName = TimeseriesV4LocalTableName
tableName = TimeseriesV4LocalTableName
} else if end-start < oneDayInMilliseconds {
// adjust the start time to nearest 6 hours
start = start - (start % (sixHoursInMilliseconds))
distributedTableName = TimeseriesV46hrsTableName
localTableName = TimeseriesV46hrsLocalTableName
tableName = TimeseriesV46hrsLocalTableName
} else if end-start < oneWeekInMilliseconds {
// adjust the start time to nearest 1 day
start = start - (start % (oneDayInMilliseconds))
distributedTableName = TimeseriesV41dayTableName
localTableName = TimeseriesV41dayLocalTableName
tableName = TimeseriesV41dayLocalTableName
} else {
// adjust the start time to nearest 1 week
start = start - (start % (oneWeekInMilliseconds))
distributedTableName = TimeseriesV41weekTableName
localTableName = TimeseriesV41weekLocalTableName
tableName = TimeseriesV41weekLocalTableName
}
return start, end, distributedTableName, localTableName
}
// CountExpressionForSamplesTable returns the count expression for a given samples table name.
// For non-aggregated tables (distributed_samples_v4, exp_hist), it returns "count(*)".
// For aggregated tables (distributed_samples_v4_agg_5m, distributed_samples_v4_agg_30m), it returns "sum(count)".
func CountExpressionForSamplesTable(tableName string) string {
// Non-aggregated tables use count(*)
if tableName == SamplesV4TableName ||
tableName == SamplesV4LocalTableName ||
tableName == ExpHistogramTableName ||
tableName == ExpHistogramLocalTableName {
return "count(*)"
}
// Aggregated tables use sum(count)
return "sum(count)"
return start, end, tableName
}
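To make the retained selection logic concrete: a standalone sketch of the rounding-and-selection rules described in the comments above, with the millisecond constants inlined (the function here is an illustration of the rules, not the package API):

```go
package main

import "fmt"

const (
	hourMs   = uint64(3_600_000)
	sixHrsMs = 6 * hourMs
	oneDayMs = 24 * hourMs
	weekMs   = 7 * oneDayMs
)

// whichTSTable mirrors the rules above: shorter ranges use finer-grained
// time_series_v4 tables, and start is aligned down to the table's bucket.
func whichTSTable(start, end uint64) (uint64, string) {
	switch {
	case end-start < sixHrsMs:
		return start - start%hourMs, "time_series_v4"
	case end-start < oneDayMs:
		return start - start%sixHrsMs, "time_series_v4_6hrs"
	case end-start < weekMs:
		return start - start%oneDayMs, "time_series_v4_1day"
	default:
		return start - start%weekMs, "time_series_v4_1week"
	}
}

func main() {
	// A 3-hour window selects the base table and aligns start to the hour.
	start, tbl := whichTSTable(1_747_945_619_000, 1_747_956_419_000)
	fmt.Println(start, tbl)
}
```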
// start and end are in milliseconds
@@ -134,6 +105,7 @@ func WhichSamplesTableToUse(
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) string {
// if we have a hint for the table, we need to use it
// the hint will be used to override the default table selection logic
if tableHints != nil {


@@ -30,14 +30,5 @@
"fieldDataType": "string",
"signal": "metrics"
}
],
"materialized.key.name": [
{
"name": "materialized.key.name",
"fieldContext": "attribute",
"fieldDataType": "string",
"materialized": true,
"signal": "metrics"
}
]
}


@@ -108,13 +108,10 @@ func (c *conditionBuilder) conditionFor(
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`match(%s, %s)`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
return fmt.Sprintf(`NOT match(%s, %s)`, sqlbuilder.Escape(tblFieldName), sb.Var(value)), nil
return fmt.Sprintf(`NOT match(%s, %s)`, tblFieldName, sb.Var(value)), nil
// between and not between
case qbtypes.FilterOperatorBetween:
values, ok := value.([]any)
@@ -226,8 +223,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
startNs uint64,
_ uint64,
) (string, error) {
if c.isSpanScopeField(key.Name) {
return c.buildSpanScopeCondition(key, operator, value, startNs)
@@ -288,13 +285,11 @@ func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.Telemetry
case SpanSearchScopeEntryPoint:
if startNs > 0 { // only add time filter if it is a valid time, else do not add
startS := int64(startNs / 1_000_000_000)
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
return sqlbuilder.Escape(fmt.Sprintf("((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s WHERE time >= toDateTime(%d))) AND parent_span_id != ''",
DBName, TopLevelOperationsTableName, startS)), nil
return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s WHERE time >= toDateTime(%d))) AND parent_span_id != ''",
DBName, TopLevelOperationsTableName, startS), nil
}
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
return sqlbuilder.Escape(fmt.Sprintf("((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''",
DBName, TopLevelOperationsTableName)), nil
return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''",
DBName, TopLevelOperationsTableName), nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid span search scope: %s", key.Name)
}


@@ -41,6 +41,13 @@ var (
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"kind": {
Name: "kind",
Description: "Span kind enum (number). Use `kind_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"kind_string": {
Name: "kind_string",
Description: "Span kind enum (string). Known values are ['Client', 'Server', 'Internal', 'Producer', 'Consumer']. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
@@ -127,13 +134,6 @@ var (
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"kind": {
Name: "kind",
Description: "Span kind enum (number). Use `kind_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
}
CalculatedFields = map[string]telemetrytypes.TelemetryFieldKey{
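Since the numeric `kind` is deprecated in favor of `kind_string`, the correspondence between the two is worth keeping at hand; a small sketch of the OTLP span-kind mapping (per opentelemetry-proto, consistent with the `kind = 2 or spanKind = 'Server'` test case later in this diff):

```go
package main

import "fmt"

// OTLP span-kind numbers and their string forms.
var spanKindNames = map[int]string{
	0: "Unspecified",
	1: "Internal",
	2: "Server",
	3: "Client",
	4: "Producer",
	5: "Consumer",
}

func main() {
	fmt.Println(spanKindNames[2]) // Server
}
```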


@@ -200,10 +200,7 @@ func (m *defaultFieldMapper) getColumn(
return indexV3Columns[oldToNew[key.Name]], nil
}
if _, ok := IntrinsicFieldsDeprecated[key.Name]; ok {
// Check if we have a mapping for the deprecated intrinsic field
if _, ok := indexV3Columns[oldToNew[key.Name]]; ok {
return indexV3Columns[oldToNew[key.Name]], nil
}
return indexV3Columns[oldToNew[key.Name]], nil
}
if col, ok := indexV3Columns[key.Name]; ok {


@@ -94,35 +94,6 @@ func TestStatementBuilder(t *testing.T) {
},
expectedErr: nil,
},
{
name: "OR with regexp",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "materialized.key.name REGEXP 'redis-manual' OR service.name = 'redis-manual'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (true OR (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?)) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", true, "redis-manual", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "redis-manual", true, "redis-manual", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,
},
{
name: "legacy httpRoute in group by",
requestType: qbtypes.RequestTypeTimeSeries,
@@ -566,7 +537,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
expectedErr error
}{
{
name: "List query with non-mat selected fields",
name: "List query with mat selected fields",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
@@ -581,38 +552,6 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
},
expectedErr: nil,
},
{
name: "List query with mat selected fields",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Filter: &qbtypes.Filter{
Expression: "materialized.key.name = 'redis-manual'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND trace_id GLOBAL IN __toe AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
Args: []any{uint64(1747945619), uint64(1747983448), "redis-manual", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "List query with mat selected fields using or and regexp",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Filter: &qbtypes.Filter{
Expression: "materialized.key.name REGEXP 'redis-manual' OR service.name = 'redis-manual'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (true OR (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?)) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((match(`attribute_string_materialized$$key$$name`, ?) AND `attribute_string_materialized$$key$$name_exists` = ?) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND trace_id GLOBAL IN __toe AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "redis-manual", true, "redis-manual", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "List query without any filter",
requestType: qbtypes.RequestTypeTrace,
@@ -627,54 +566,6 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
},
expectedErr: nil,
},
{
name: "list query with span scope filter",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Filter: &qbtypes.Filter{
Expression: "isentrypoint = true",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations WHERE time >= toDateTime(1747947419))) AND parent_span_id != '' AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND trace_id GLOBAL IN __toe AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
Args: []any{uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "list query with span scope filter Or mat field",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Filter: &qbtypes.Filter{
Expression: "isentrypoint = true or materialized.key.name = 'redis-manual'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (true OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations WHERE time >= toDateTime(1747947419))) AND parent_span_id != '' OR (`attribute_string_materialized$$key$$name` = ? AND `attribute_string_materialized$$key$$name_exists` = ?)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND trace_id GLOBAL IN __toe AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
Args: []any{uint64(1747945619), uint64(1747983448), "redis-manual", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "list query with deprecated filter field",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
Filter: &qbtypes.Filter{
Expression: "kind = 2 or spanKind = 'Server'",
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (toFloat64(kind) = ? OR kind_string = ?) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND trace_id GLOBAL IN __toe AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
Args: []any{uint64(1747945619), uint64(1747983448), float64(2), "Server", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
}
fm := NewFieldMapper()


@@ -77,21 +77,6 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"materialized.key.name": {
{
Name: "materialized.key.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
"isentrypoint": {
{
Name: "isentrypoint",
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
}
for _, keys := range keysMap {
for _, key := range keys {


@@ -31,11 +31,6 @@ var (
type GettableAuthDomain struct {
*StorableAuthDomain
*AuthDomainConfig
AuthNProviderInfo *AuthNProviderInfo `json:"authNProviderInfo"`
}
type AuthNProviderInfo struct {
RelayStatePath *string `json:"relayStatePath"`
}
type PostableAuthDomain struct {
@@ -108,11 +103,10 @@ func NewAuthDomainFromStorableAuthDomain(storableAuthDomain *StorableAuthDomain)
}, nil
}
func NewGettableAuthDomainFromAuthDomain(authDomain *AuthDomain, authNProviderInfo *AuthNProviderInfo) *GettableAuthDomain {
func NewGettableAuthDomainFromAuthDomain(authDomain *AuthDomain) *GettableAuthDomain {
return &GettableAuthDomain{
StorableAuthDomain: authDomain.StorableAuthDomain(),
AuthDomainConfig: authDomain.AuthDomainConfig(),
AuthNProviderInfo: authNProviderInfo,
}
}


@@ -1,222 +0,0 @@
package metricsexplorertypes
import (
"encoding/json"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
// MetricOrderBy represents the order-by field for metrics queries.
type MetricOrderBy struct {
valuer.String
}
var (
OrderByTimeSeries = MetricOrderBy{valuer.NewString("timeseries")}
OrderBySamples = MetricOrderBy{valuer.NewString("samples")}
)
// TreemapMode indicates which treemap variant the caller requests.
type TreemapMode struct {
valuer.String
}
var (
// TreemapModeTimeSeries represents the treemap based on timeseries counts.
TreemapModeTimeSeries = TreemapMode{valuer.NewString("timeseries")}
// TreemapModeSamples represents the treemap based on sample counts.
TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
)
// StatsRequest represents the payload accepted by the metrics stats endpoint.
type StatsRequest struct {
Filter *qbtypes.Filter `json:"filter,omitempty"`
Start int64 `json:"start"`
End int64 `json:"end"`
Limit int `json:"limit"`
Offset int `json:"offset"`
OrderBy *qbtypes.OrderBy `json:"orderBy,omitempty"`
}
// Validate ensures StatsRequest contains acceptable values.
func (req *StatsRequest) Validate() error {
if req == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
}
if req.Start <= 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid start time %d: start must be greater than 0",
req.Start,
)
}
if req.End <= 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid end time %d: end must be greater than 0",
req.End,
)
}
if req.Start >= req.End {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid time range: start (%d) must be less than end (%d)",
req.Start,
req.End,
)
}
if req.Limit < 1 || req.Limit > 5000 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
}
if req.Offset < 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "offset cannot be negative")
}
return nil
}
// UnmarshalJSON validates input immediately after decoding.
func (req *StatsRequest) UnmarshalJSON(data []byte) error {
type raw StatsRequest
var decoded raw
if err := json.Unmarshal(data, &decoded); err != nil {
return err
}
*req = StatsRequest(decoded)
return req.Validate()
}
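This UnmarshalJSON uses the alias-type trick so validation runs immediately after decoding without recursing into itself. A minimal self-contained sketch of the pattern, with the request trimmed to three fields:

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

type StatsRequest struct {
	Start int64 `json:"start"`
	End   int64 `json:"end"`
	Limit int   `json:"limit"`
}

func (req *StatsRequest) Validate() error {
	if req.Start <= 0 || req.End <= 0 || req.Start >= req.End {
		return errors.New("invalid time range")
	}
	if req.Limit < 1 || req.Limit > 5000 {
		return errors.New("limit must be between 1 and 5000")
	}
	return nil
}

// The alias type has no methods, so json.Unmarshal does not call back into
// this UnmarshalJSON; validation then gates acceptance of the decoded value.
func (req *StatsRequest) UnmarshalJSON(data []byte) error {
	type raw StatsRequest
	var decoded raw
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	*req = StatsRequest(decoded)
	return req.Validate()
}

func main() {
	var r StatsRequest
	err := json.Unmarshal([]byte(`{"start":1,"end":2,"limit":0}`), &r)
	fmt.Println(err) // limit must be between 1 and 5000
}
```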
// Stat represents the summary information returned per metric.
type Stat struct {
MetricName string `json:"metricName"`
Description string `json:"description"`
MetricType metrictypes.Type `json:"type"`
MetricUnit string `json:"unit"`
TimeSeries uint64 `json:"timeseries"`
Samples uint64 `json:"samples"`
}
// StatsResponse represents the aggregated metrics statistics.
type StatsResponse struct {
Metrics []Stat `json:"metrics"`
Total uint64 `json:"total"`
}
type MetricMetadata struct {
Description string `json:"description"`
MetricType metrictypes.Type `json:"type"`
MetricUnit string `json:"unit"`
Temporality metrictypes.Temporality `json:"temporality"`
IsMonotonic bool `json:"isMonotonic"`
}
// MarshalBinary implements cachetypes.Cacheable interface
func (m *MetricMetadata) MarshalBinary() ([]byte, error) {
return json.Marshal(m)
}
// UnmarshalBinary implements cachetypes.Cacheable interface
func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
return json.Unmarshal(data, m)
}
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
type UpdateMetricMetadataRequest struct {
MetricName string `json:"metricName"`
Type metrictypes.Type `json:"type"`
Description string `json:"description"`
Unit string `json:"unit"`
Temporality metrictypes.Temporality `json:"temporality"`
IsMonotonic bool `json:"isMonotonic"`
}
// TreemapRequest represents the payload for the metrics treemap endpoint.
type TreemapRequest struct {
Filter *qbtypes.Filter `json:"filter,omitempty"`
Start int64 `json:"start"`
End int64 `json:"end"`
Limit int `json:"limit"`
Treemap TreemapMode `json:"treemap"`
}
// Validate enforces basic constraints on TreemapRequest.
func (req *TreemapRequest) Validate() error {
if req == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
}
if req.Start <= 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid start time %d: start must be greater than 0",
req.Start,
)
}
if req.End <= 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid end time %d: end must be greater than 0",
req.End,
)
}
if req.Start >= req.End {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid time range: start (%d) must be less than end (%d)",
req.Start,
req.End,
)
}
if req.Limit < 1 || req.Limit > 5000 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
}
if req.Treemap != TreemapModeSamples && req.Treemap != TreemapModeTimeSeries {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid treemap mode %q: supported values are %q or %q",
req.Treemap,
TreemapModeSamples,
TreemapModeTimeSeries,
)
}
return nil
}
// UnmarshalJSON validates treemap requests immediately after decoding.
func (req *TreemapRequest) UnmarshalJSON(data []byte) error {
type raw TreemapRequest
var decoded raw
if err := json.Unmarshal(data, &decoded); err != nil {
return err
}
*req = TreemapRequest(decoded)
return req.Validate()
}
// TreemapEntry represents each node in the treemap response.
type TreemapEntry struct {
MetricName string `json:"metricName"`
Percentage float64 `json:"percentage"`
TotalValue uint64 `json:"totalValue"`
}
// TreemapResponse is the output structure for the treemap endpoint.
type TreemapResponse struct {
TimeSeries []TreemapEntry `json:"timeseries"`
Samples []TreemapEntry `json:"samples"`
}


@@ -1,10 +1,6 @@
package metrictypes
import (
"database/sql/driver"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -21,110 +17,12 @@ var (
Unknown = Temporality{valuer.NewString("")}
)
func (t Temporality) Value() (driver.Value, error) {
switch t {
case Delta:
return "Delta", nil
case Cumulative:
return "Cumulative", nil
case Unspecified:
return "Unspecified", nil
case Unknown:
return "", nil
default:
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "temporality: unsupported value %q", t.StringValue())
}
}
func (t *Temporality) Scan(src interface{}) error {
if src == nil {
*t = Unknown
return nil
}
var val string
switch v := src.(type) {
case string:
val = v
case []byte:
val = string(v)
default:
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "temporality: cannot scan %T", src)
}
switch strings.ToLower(strings.TrimSpace(val)) {
case "delta":
*t = Delta
case "cumulative":
*t = Cumulative
case "unspecified":
*t = Unspecified
default:
*t = Unknown
}
return nil
}
// Type is the type of the metric in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
type Type struct {
valuer.String
}
func (t Type) Value() (driver.Value, error) {
switch t {
case GaugeType:
return "Gauge", nil
case SumType:
return "Sum", nil
case HistogramType:
return "Histogram", nil
case SummaryType:
return "Summary", nil
case ExpHistogramType:
return "ExponentialHistogram", nil
case UnspecifiedType:
return "", nil
default:
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "metric type: unsupported value %q", t.StringValue())
}
}
func (t *Type) Scan(src interface{}) error {
if src == nil {
*t = UnspecifiedType
return nil
}
var val string
switch v := src.(type) {
case string:
val = v
case []byte:
val = string(v)
default:
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "metric type: cannot scan %T", src)
}
switch strings.ToLower(strings.TrimSpace(val)) {
case "gauge":
*t = GaugeType
case "sum":
*t = SumType
case "histogram":
*t = HistogramType
case "summary":
*t = SummaryType
case "exponentialhistogram":
*t = ExpHistogramType
default:
*t = UnspecifiedType
}
return nil
}
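These Value/Scan methods are the standard database/sql round-trip pair for enum-like types. A simplified sketch of the pattern, with the enum reduced to a plain string type (the real types wrap valuer.String):

```go
package main

import (
	"database/sql/driver"
	"fmt"
	"strings"
)

type Temporality string

const (
	Delta      Temporality = "delta"
	Cumulative Temporality = "cumulative"
	Unknown    Temporality = ""
)

// Value maps the enum to its canonical database representation.
func (t Temporality) Value() (driver.Value, error) {
	switch t {
	case Delta:
		return "Delta", nil
	case Cumulative:
		return "Cumulative", nil
	default:
		return "", nil
	}
}

// Scan accepts string or []byte, normalizes case, and falls back to Unknown.
func (t *Temporality) Scan(src interface{}) error {
	if src == nil {
		*t = Unknown
		return nil
	}
	var val string
	switch v := src.(type) {
	case string:
		val = v
	case []byte:
		val = string(v)
	default:
		return fmt.Errorf("temporality: cannot scan %T", src)
	}
	switch strings.ToLower(strings.TrimSpace(val)) {
	case "delta":
		*t = Delta
	case "cumulative":
		*t = Cumulative
	default:
		*t = Unknown
	}
	return nil
}

func main() {
	var t Temporality
	_ = t.Scan([]byte("CUMULATIVE"))
	v, _ := t.Value()
	fmt.Println(t, v) // cumulative Cumulative
}
```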
var (
GaugeType = Type{valuer.NewString("gauge")}
SumType = Type{valuer.NewString("sum")}


@@ -1,4 +1,4 @@
from typing import Callable, Dict, Any
from typing import Callable
from urllib.parse import urljoin
from xml.etree import ElementTree
@@ -71,7 +71,6 @@ def create_saml_client(
"saml_signature_canonicalization_method": "http://www.w3.org/2001/10/xml-exc-c14n#",
"saml.onetimeuse.condition": "false",
"saml.server.signature.keyinfo.xmlSigKeyInfoKeyNameTransformer": "NONE",
"saml_assertion_consumer_url_post": urljoin(f"{signoz.self.host_configs['8080'].base()}", callback_path)
},
"authenticationFlowBindingOverrides": {},
"fullScopeAllowed": True,
@@ -122,31 +121,6 @@ def create_saml_client(
return _create_saml_client
@pytest.fixture(name="update_saml_client_attributes", scope="function")
def update_saml_client_attributes(
idp: types.TestContainerIDP
) -> Callable[[str, Dict[str, Any]], None]:
def _update_saml_client_attributes(client_id: str, attributes: Dict[str, Any]) -> None:
client = KeycloakAdmin(
server_url=idp.container.host_configs["6060"].base(),
username=IDP_ROOT_USERNAME,
password=IDP_ROOT_PASSWORD,
realm_name="master",
)
kc_client_id = client.get_client_id(client_id=client_id)
print("kc_client_id: " + kc_client_id)
payload = client.get_client(client_id=kc_client_id)
for attr_key, attr_value in attributes.items():
payload["attributes"][attr_key] = attr_value
client.update_client(client_id=kc_client_id, payload=payload)
return _update_saml_client_attributes
@pytest.fixture(name="create_oidc_client", scope="function")
def create_oidc_client(
idp: types.TestContainerIDP, signoz: types.SigNoz


@@ -1,5 +1,5 @@
from http import HTTPStatus
from typing import Callable, List, Dict, Any
from typing import Callable, List
import requests
from selenium import webdriver
@@ -26,7 +26,6 @@ def test_create_auth_domain(
signoz: SigNoz,
idp: TestContainerIDP, # pylint: disable=unused-argument
create_saml_client: Callable[[str, str], None],
update_saml_client_attributes: Callable[[str, Dict[str, Any]], None],
get_saml_settings: Callable[[], dict],
create_user_admin: Callable[[], None], # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
@@ -60,42 +59,6 @@ def test_create_auth_domain(
assert response.status_code == HTTPStatus.CREATED
# Get the domains from signoz
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/domains"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
found_domain = None
if len(response.json()["data"]) > 0:
found_domain = next(
(
domain
for domain in response.json()["data"]
if domain["name"] == "saml.integration.test"
),
None,
)
relay_state_path = found_domain["authNProviderInfo"]["relayStatePath"]
assert relay_state_path is not None
# Get the relay state url from domains API
relay_state_url = signoz.self.host_configs["8080"].base() + "/" + relay_state_path
# Update the saml client with new attributes
update_saml_client_attributes(
f"{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}",
{
"saml_idp_initiated_sso_url_name": "idp-initiated-saml-test",
"saml_idp_initiated_sso_relay_state": relay_state_url
}
)
def test_saml_authn(
signoz: SigNoz,
@@ -143,51 +106,3 @@ def test_saml_authn(
assert found_user is not None
assert found_user["role"] == "VIEWER"
def test_idp_initiated_saml_authn(
signoz: SigNoz,
idp: TestContainerIDP, # pylint: disable=unused-argument
driver: webdriver.Chrome,
create_user_idp: Callable[[str, str], None],
idp_login: Callable[[str, str], None],
get_token: Callable[[str, str], str],
get_session_context: Callable[[str], str],
) -> None:
# Create a user in the idp.
create_user_idp("viewer.idp.initiated@saml.integration.test", "password", True)
# Get the session context from signoz which will give the SAML login URL.
session_context = get_session_context("viewer.idp.initiated@saml.integration.test")
assert len(session_context["orgs"]) == 1
assert len(session_context["orgs"][0]["authNSupport"]["callback"]) == 1
idp_initiated_login_url = idp.container.host_configs["6060"].base() + "/realms/master/protocol/saml/clients/idp-initiated-saml-test"
driver.get(idp_initiated_login_url)
idp_login("viewer.idp.initiated@saml.integration.test", "password")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Assert that the user was created in signoz.
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "viewer.idp.initiated@saml.integration.test"
),
None,
)
assert found_user is not None
assert found_user["role"] == "VIEWER"