Compare commits
17 commits — imp/remove ... v0.93.0-rc

Commits in this comparison (SHA1):
- e5ab664483
- a3f32b3d85
- 9c2f127282
- e30de5f13e
- 019083983a
- fdcad997f5
- 03359a40a2
- 4f45801729
- 674556d672
- af987e53ce
- 59d5accd33
- 5a7ad670d8
- 9d04b397ac
- a4f3be5e46
- 8f833fa62c
- 7029233596
- d26efd2833
@@ -24,7 +24,7 @@ services:
    depends_on:
      - zookeeper
  zookeeper:
-    image: bitnami/zookeeper:3.7.1
+    image: signoz/zookeeper:3.7.1
    container_name: zookeeper
    volumes:
      - ${PWD}/fs/tmp/zookeeper:/bitnami/zookeeper

@@ -39,7 +39,7 @@ x-clickhouse-defaults: &clickhouse-defaults
    hard: 262144
x-zookeeper-defaults: &zookeeper-defaults
  !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
  user: root
  deploy:
    labels:

@@ -38,7 +38,7 @@ x-clickhouse-defaults: &clickhouse-defaults
    hard: 262144
x-zookeeper-defaults: &zookeeper-defaults
  !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
  user: root
  deploy:
    labels:

@@ -42,7 +42,7 @@ x-clickhouse-defaults: &clickhouse-defaults
    hard: 262144
x-zookeeper-defaults: &zookeeper-defaults
  !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
  user: root
  labels:
    signoz.io/scrape: "true"

@@ -38,7 +38,7 @@ x-clickhouse-defaults: &clickhouse-defaults
    hard: 262144
x-zookeeper-defaults: &zookeeper-defaults
  !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
  user: root
  labels:
    signoz.io/scrape: "true"
@@ -257,6 +257,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
        s.config.APIServer.Timeout.Max,
    ).Wrap)
+   r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
    r.Use(middleware.NewComment().Wrap)

    apiHandler.RegisterRoutes(r, am)
    apiHandler.RegisterLogsRoutes(r, am)
@@ -48,6 +48,6 @@
    "INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
    "INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
    "METER_EXPLORER": "SigNoz | Meter Explorer",
-   "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer",
-   "METER_EXPLORER_BASE": "SigNoz | Meter Explorer"
+   "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
+   "METER": "SigNoz | Meter"
}

@@ -71,6 +71,6 @@
    "METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
    "API_MONITORING": "SigNoz | External APIs",
    "METER_EXPLORER": "SigNoz | Meter Explorer",
-   "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer",
-   "METER_EXPLORER_BASE": "SigNoz | Meter Explorer"
+   "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
+   "METER": "SigNoz | Meter"
}
@@ -437,10 +437,10 @@ const routes: AppRoutes[] = [
    },

    {
-       path: ROUTES.METER_EXPLORER_BASE,
+       path: ROUTES.METER,
        exact: true,
        component: MeterExplorer,
-       key: 'METER_EXPLORER_BASE',
+       key: 'METER',
        isPrivate: true,
    },
    {
@@ -137,5 +137,11 @@
    h6 {
        color: var(--text-ink-500);
    }
+
+   code {
+       background-color: var(--bg-vanilla-300);
+       border: 1px solid var(--bg-vanilla-300);
+       color: var(--text-ink-500);
+   }
    }
}

@@ -44,13 +44,14 @@
.lightMode {
    .metrics-select-container {
        .ant-select-selector {
-           border: 1px solid var(--bg-slate-300) !important;
+           border: 1px solid var(--bg-vanilla-300) !important;
            background: var(--bg-vanilla-100);
            color: var(--text-ink-100);
        }

        .ant-select-dropdown {
            background: var(--bg-vanilla-100);
            border: 1px solid var(--bg-vanilla-300) !important;
            box-shadow: 0 3px 6px -4px rgba(0, 0, 0, 0.12),
                0 6px 16px 0 rgba(0, 0, 0, 0.08), 0 9px 28px 8px rgba(0, 0, 0, 0.05);
            backdrop-filter: none;
frontend/src/components/QueryBuilderV2/__tests__/utils.test.ts (new file, 536 lines)
@@ -0,0 +1,536 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { convertFiltersToExpression } from '../utils';
|
||||
|
||||
describe('convertFiltersToExpression', () => {
|
||||
it('should handle empty, null, and undefined inputs', () => {
|
||||
// Test null and undefined
|
||||
expect(convertFiltersToExpression(null as any)).toEqual({ expression: '' });
|
||||
expect(convertFiltersToExpression(undefined as any)).toEqual({
|
||||
expression: '',
|
||||
});
|
||||
|
||||
// Test empty filters
|
||||
expect(convertFiltersToExpression({ items: [], op: 'AND' })).toEqual({
|
||||
expression: '',
|
||||
});
|
||||
expect(
|
||||
convertFiltersToExpression({ items: undefined, op: 'AND' } as any),
|
||||
).toEqual({ expression: '' });
|
||||
});
|
||||
|
||||
it('should convert basic comparison operators with proper value formatting', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: '=',
|
||||
value: 'api-gateway',
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'status', type: 'string' },
|
||||
op: '!=',
|
||||
value: 'error',
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'duration', type: 'number' },
|
||||
op: '>',
|
||||
value: 100,
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'count', type: 'number' },
|
||||
op: '<=',
|
||||
value: 50,
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'is_active', type: 'boolean' },
|
||||
op: '=',
|
||||
value: true,
|
||||
},
|
||||
{
|
||||
id: '6',
|
||||
key: { key: 'enabled', type: 'boolean' },
|
||||
op: '=',
|
||||
value: false,
|
||||
},
|
||||
{
|
||||
id: '7',
|
||||
key: { key: 'count', type: 'number' },
|
||||
op: '=',
|
||||
value: 0,
|
||||
},
|
||||
{
|
||||
id: '7',
|
||||
key: { key: 'regex', type: 'string' },
|
||||
op: 'regex',
|
||||
value: '.*',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"service = 'api-gateway' AND status != 'error' AND duration > 100 AND count <= 50 AND is_active = true AND enabled = false AND count = 0 AND regex REGEXP '.*'",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle string value formatting and escaping', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'message', type: 'string' },
|
||||
op: '=',
|
||||
value: "user's data",
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'description', type: 'string' },
|
||||
op: '=',
|
||||
value: '',
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'path', type: 'string' },
|
||||
op: '=',
|
||||
value: '/api/v1/users',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"message = 'user\\'s data' AND description = '' AND path = '/api/v1/users'",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle IN operator with various value types and array formatting', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: 'IN',
|
||||
value: ['api-gateway', 'user-service', 'auth-service'],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'status', type: 'string' },
|
||||
op: 'IN',
|
||||
value: 'success', // Single value should be converted to array
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'IN',
|
||||
value: [], // Empty array
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'name', type: 'string' },
|
||||
op: 'IN',
|
||||
value: ["John's", "Mary's", 'Bob'], // Values with quotes
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"service in ['api-gateway', 'user-service', 'auth-service'] AND status in ['success'] AND tags in [] AND name in ['John\\'s', 'Mary\\'s', 'Bob']",
|
||||
});
|
||||
});
|
||||
|
||||
it('should convert deprecated operators to their modern equivalents', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: 'nin',
|
||||
value: ['api-gateway', 'user-service'],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'message', type: 'string' },
|
||||
op: 'nlike',
|
||||
value: 'error',
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'path', type: 'string' },
|
||||
op: 'nregex',
|
||||
value: '/api/.*',
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: 'NIN', // Test case insensitivity
|
||||
value: ['api-gateway'],
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'user_id', type: 'string' },
|
||||
op: 'nexists',
|
||||
value: '',
|
||||
},
|
||||
{
|
||||
id: '6',
|
||||
key: { key: 'description', type: 'string' },
|
||||
op: 'ncontains',
|
||||
value: 'error',
|
||||
},
|
||||
{
|
||||
id: '7',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'nhas',
|
||||
value: 'production',
|
||||
},
|
||||
{
|
||||
id: '8',
|
||||
key: { key: 'labels', type: 'string' },
|
||||
op: 'nhasany',
|
||||
value: ['env:prod', 'service:api'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"service NOT IN ['api-gateway', 'user-service'] AND message NOT LIKE 'error' AND path NOT REGEXP '/api/.*' AND service NOT IN ['api-gateway'] AND user_id NOT EXISTS AND description NOT CONTAINS 'error' AND NOT has(tags, 'production') AND NOT hasAny(labels, ['env:prod', 'service:api'])",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle non-value operators and function operators', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'user_id', type: 'string' },
|
||||
op: 'EXISTS',
|
||||
value: '', // Value should be ignored for EXISTS
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'user_id', type: 'string' },
|
||||
op: 'EXISTS',
|
||||
value: 'some-value', // Value should be ignored for EXISTS
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'has',
|
||||
value: 'production',
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'hasAny',
|
||||
value: ['production', 'staging'],
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'hasAll',
|
||||
value: ['production', 'monitoring'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"user_id exists AND user_id exists AND has(tags, 'production') AND hasAny(tags, ['production', 'staging']) AND hasAll(tags, ['production', 'monitoring'])",
|
||||
});
|
||||
});
|
||||
|
||||
it('should filter out invalid filters and handle edge cases', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: '=',
|
||||
value: 'api-gateway',
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: undefined, // Invalid filter - should be skipped
|
||||
op: '=',
|
||||
value: 'error',
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: '', type: 'string' }, // Invalid filter with empty key - should be skipped
|
||||
op: '=',
|
||||
value: 'test',
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'status', type: 'string' },
|
||||
op: ' = ', // Test whitespace handling
|
||||
value: 'success',
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: 'In', // Test mixed case handling
|
||||
value: ['api-gateway'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"service = 'api-gateway' AND status = 'success' AND service in ['api-gateway']",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle complex mixed operator scenarios with proper joining', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'service', type: 'string' },
|
||||
op: 'IN',
|
||||
value: ['api-gateway', 'user-service'],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'user_id', type: 'string' },
|
||||
op: 'EXISTS',
|
||||
value: '',
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'has',
|
||||
value: 'production',
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'duration', type: 'number' },
|
||||
op: '>',
|
||||
value: 100,
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'status', type: 'string' },
|
||||
op: 'nin',
|
||||
value: ['error', 'timeout'],
|
||||
},
|
||||
{
|
||||
id: '6',
|
||||
key: { key: 'method', type: 'string' },
|
||||
op: '=',
|
||||
value: 'POST',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"service in ['api-gateway', 'user-service'] AND user_id exists AND has(tags, 'production') AND duration > 100 AND status NOT IN ['error', 'timeout'] AND method = 'POST'",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle all numeric comparison operators and edge cases', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'count', type: 'number' },
|
||||
op: '=',
|
||||
value: 0,
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'score', type: 'number' },
|
||||
op: '>',
|
||||
value: 100,
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'limit', type: 'number' },
|
||||
op: '>=',
|
||||
value: 50,
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'threshold', type: 'number' },
|
||||
op: '<',
|
||||
value: 1000,
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'max_value', type: 'number' },
|
||||
op: '<=',
|
||||
value: 999,
|
||||
},
|
||||
{
|
||||
id: '6',
|
||||
key: { key: 'values', type: 'string' },
|
||||
op: 'IN',
|
||||
value: ['1', '2', '3', '4', '5'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"count = 0 AND score > 100 AND limit >= 50 AND threshold < 1000 AND max_value <= 999 AND values in ['1', '2', '3', '4', '5']",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle boolean values and string comparisons with special characters', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'is_active', type: 'boolean' },
|
||||
op: '=',
|
||||
value: true,
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'is_deleted', type: 'boolean' },
|
||||
op: '=',
|
||||
value: false,
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'email', type: 'string' },
|
||||
op: '=',
|
||||
value: 'user@example.com',
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'description', type: 'string' },
|
||||
op: '=',
|
||||
value: 'Contains "quotes" and \'apostrophes\'',
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'path', type: 'string' },
|
||||
op: '=',
|
||||
value: '/api/v1/users/123?filter=true',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"is_active = true AND is_deleted = false AND email = 'user@example.com' AND description = 'Contains \"quotes\" and \\'apostrophes\\'' AND path = '/api/v1/users/123?filter=true'",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle all function operators and complex array scenarios', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'has',
|
||||
value: 'production',
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'labels', type: 'string' },
|
||||
op: 'hasAny',
|
||||
value: ['env:prod', 'service:api'],
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'metadata', type: 'string' },
|
||||
op: 'hasAll',
|
||||
value: ['version:1.0', 'team:backend'],
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'services', type: 'string' },
|
||||
op: 'IN',
|
||||
value: ['api-gateway', 'user-service', 'auth-service', 'payment-service'],
|
||||
},
|
||||
{
|
||||
id: '5',
|
||||
key: { key: 'excluded_services', type: 'string' },
|
||||
op: 'nin',
|
||||
value: ['legacy-service', 'deprecated-service'],
|
||||
},
|
||||
{
|
||||
id: '6',
|
||||
key: { key: 'status_codes', type: 'string' },
|
||||
op: 'IN',
|
||||
value: ['200', '201', '400', '500'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"has(tags, 'production') AND hasAny(labels, ['env:prod', 'service:api']) AND hasAll(metadata, ['version:1.0', 'team:backend']) AND services in ['api-gateway', 'user-service', 'auth-service', 'payment-service'] AND excluded_services NOT IN ['legacy-service', 'deprecated-service'] AND status_codes in ['200', '201', '400', '500']",
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle specific deprecated operators: nhas, ncontains, nexists', () => {
|
||||
const filters: TagFilter = {
|
||||
items: [
|
||||
{
|
||||
id: '1',
|
||||
key: { key: 'user_id', type: 'string' },
|
||||
op: 'nexists',
|
||||
value: '',
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
key: { key: 'description', type: 'string' },
|
||||
op: 'ncontains',
|
||||
value: 'error',
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
key: { key: 'tags', type: 'string' },
|
||||
op: 'nhas',
|
||||
value: 'production',
|
||||
},
|
||||
{
|
||||
id: '4',
|
||||
key: { key: 'labels', type: 'string' },
|
||||
op: 'nhasany',
|
||||
value: ['env:prod', 'service:api'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
};
|
||||
|
||||
const result = convertFiltersToExpression(filters);
|
||||
expect(result).toEqual({
|
||||
expression:
|
||||
"user_id NOT EXISTS AND description NOT CONTAINS 'error' AND NOT has(tags, 'production') AND NOT hasAny(labels, ['env:prod', 'service:api'])",
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,10 @@
 /* eslint-disable sonarjs/cognitive-complexity */
 import { createAggregation } from 'api/v5/queryRange/prepareQueryRangePayloadV5';
-import { OPERATORS } from 'constants/antlrQueryConstants';
+import {
+   DEPRECATED_OPERATORS_MAP,
+   OPERATORS,
+   QUERY_BUILDER_FUNCTIONS,
+} from 'constants/antlrQueryConstants';
 import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
 import { cloneDeep } from 'lodash-es';
 import { IQueryPair } from 'types/antlrQueryTypes';

@@ -21,7 +25,7 @@ import { EQueryType } from 'types/common/dashboard';
 import { DataSource } from 'types/common/queryBuilder';
 import { extractQueryPairs } from 'utils/queryContextUtils';
 import { unquote } from 'utils/stringUtils';
-import { isFunctionOperator } from 'utils/tokenUtils';
+import { isFunctionOperator, isNonValueOperator } from 'utils/tokenUtils';
 import { v4 as uuid } from 'uuid';

 /**

@@ -87,12 +91,32 @@ export const convertFiltersToExpression = (
        return '';
    }

-   if (isFunctionOperator(op)) {
-       return `${op}(${key.key}, ${value})`;
+   let operator = op.trim().toLowerCase();
+   if (Object.keys(DEPRECATED_OPERATORS_MAP).includes(operator)) {
+       operator =
+           DEPRECATED_OPERATORS_MAP[
+               operator as keyof typeof DEPRECATED_OPERATORS_MAP
+           ];
    }

-   const formattedValue = formatValueForExpression(value, op);
-   return `${key.key} ${op} ${formattedValue}`;
+   if (isNonValueOperator(operator)) {
+       return `${key.key} ${operator}`;
+   }
+
+   if (isFunctionOperator(operator)) {
+       // Get the proper function name from QUERY_BUILDER_FUNCTIONS
+       const functionOperators = Object.values(QUERY_BUILDER_FUNCTIONS);
+       const properFunctionName =
+           functionOperators.find(
+               (func: string) => func.toLowerCase() === operator.toLowerCase(),
+           ) || operator;
+
+       const formattedValue = formatValueForExpression(value, operator);
+       return `${properFunctionName}(${key.key}, ${formattedValue})`;
+   }
+
+   const formattedValue = formatValueForExpression(value, operator);
+   return `${key.key} ${operator} ${formattedValue}`;
 })
 .filter((expression) => expression !== ''); // Remove empty expressions

@@ -117,7 +141,6 @@ export const convertExpressionToFilters = (
 if (!expression) return [];

 const queryPairs = extractQueryPairs(expression);

 const filters: TagFilterItem[] = [];

 queryPairs.forEach((pair) => {

@@ -145,19 +168,36 @@ export const convertFiltersToExpressionWithExistingQuery = (
 filters: TagFilter,
 existingQuery: string | undefined,
 ): { filters: TagFilter; filter: { expression: string } } => {
+   // Check for deprecated operators and replace them with new operators
+   const updatedFilters = cloneDeep(filters);
+
+   // Replace deprecated operators in filter items
+   if (updatedFilters?.items) {
+       updatedFilters.items = updatedFilters.items.map((item) => {
+           const opLower = item.op?.toLowerCase();
+           if (Object.keys(DEPRECATED_OPERATORS_MAP).includes(opLower)) {
+               return {
+                   ...item,
+                   op: DEPRECATED_OPERATORS_MAP[
+                       opLower as keyof typeof DEPRECATED_OPERATORS_MAP
+                   ].toLowerCase(),
+               };
+           }
+           return item;
+       });
+   }
+
    if (!existingQuery) {
        // If no existing query, return filters with a newly generated expression
        return {
-           filters,
-           filter: convertFiltersToExpression(filters),
+           filters: updatedFilters,
+           filter: convertFiltersToExpression(updatedFilters),
        };
    }

    // Extract query pairs from the existing query
    const queryPairs = extractQueryPairs(existingQuery.trim());
    let queryPairsMap: Map<string, IQueryPair> = new Map();

-   const updatedFilters = cloneDeep(filters); // Clone filters to avoid direct mutation
    const nonExistingFilters: TagFilterItem[] = [];
    let modifiedQuery = existingQuery; // We'll modify this query as we proceed
    const visitedPairs: Set<string> = new Set(); // Set to track visited query pairs
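Taken together with the new unit tests above, the reworked helper now trims and lower-cases the operator, maps deprecated operators to their modern equivalents, and special-cases non-value and function operators before formatting. A minimal usage sketch follows; the absolute import path is inferred from the test file's relative import, and the expected string mirrors the test expectations:

```typescript
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

// Path assumed from the test file's `../utils` import.
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';

// Deprecated operators such as `nin` are normalised before the expression is built.
const filters: TagFilter = {
	items: [
		{ id: '1', key: { key: 'service', type: 'string' }, op: '=', value: 'api-gateway' },
		{ id: '2', key: { key: 'status', type: 'string' }, op: 'nin', value: ['error', 'timeout'] },
	],
	op: 'AND',
};

const { expression } = convertFiltersToExpression(filters);
// => "service = 'api-gateway' AND status NOT IN ['error', 'timeout']"
```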
@@ -5,8 +5,11 @@ import { SignalType } from 'components/QuickFilters/types';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
 import { useGetAttributeSuggestions } from 'hooks/queryBuilder/useGetAttributeSuggestions';
+import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKeySuggestions';
 import { useMemo } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
+import { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
 import { Filter as FilterType } from 'types/api/quickFilters/getCustomFilters';
 import { DataSource } from 'types/common/queryBuilder';

@@ -40,6 +43,10 @@ function OtherFilters({
    () => SIGNAL_DATA_SOURCE_MAP[signal as SignalType] === DataSource.LOGS,
    [signal],
 );
+   const isMeterDataSource = useMemo(
+       () => signal && signal === SignalType.METER_EXPLORER,
+       [signal],
+   );

 const {
    data: suggestionsData,

@@ -69,7 +76,22 @@
    },
    {
        queryKey: [REACT_QUERY_KEY.GET_OTHER_FILTERS, inputValue],
-       enabled: !!signal && !isLogDataSource,
+       enabled: !!signal && !isLogDataSource && !isMeterDataSource,
    },
 );

+   const {
+       data: fieldKeysData,
+       isLoading: isLoadingFieldKeys,
+   } = useGetQueryKeySuggestions(
+       {
+           searchText: inputValue,
+           signal: SIGNAL_DATA_SOURCE_MAP[signal as SignalType],
+           signalSource: 'meter',
+       },
+       {
+           queryKey: [REACT_QUERY_KEY.GET_OTHER_FILTERS, inputValue],
+           enabled: !!signal && isMeterDataSource,
+       },
+   );

@@ -77,13 +99,33 @@
    let filterAttributes;
    if (isLogDataSource) {
        filterAttributes = suggestionsData?.payload?.attributes || [];
+   } else if (isMeterDataSource) {
+       const fieldKeys: QueryKeyDataSuggestionsProps[] = Object.values(
+           fieldKeysData?.data?.data?.keys || {},
+       )?.flat();
+       filterAttributes = fieldKeys.map(
+           (attr) =>
+               ({
+                   key: attr.name,
+                   dataType: attr.fieldDataType,
+                   type: attr.fieldContext,
+                   signal: attr.signal,
+               } as BaseAutocompleteData),
+       );
    } else {
        filterAttributes = aggregateKeysData?.payload?.attributeKeys || [];
    }
    return filterAttributes?.filter(
        (attr) => !addedFilters.some((filter) => filter.key === attr.key),
    );
-}, [suggestionsData, aggregateKeysData, addedFilters, isLogDataSource]);
+}, [
+   suggestionsData,
+   aggregateKeysData,
+   addedFilters,
+   isLogDataSource,
+   fieldKeysData,
+   isMeterDataSource,
+]);

 const handleAddFilter = (filter: FilterType): void => {
    setAddedFilters((prev) => [

@@ -99,7 +141,8 @@
 };

 const renderFilters = (): React.ReactNode => {
-   const isLoading = isFetchingSuggestions || isFetchingAggregateKeys;
+   const isLoading =
+       isFetchingSuggestions || isFetchingAggregateKeys || isLoadingFieldKeys;
    if (isLoading) return <OtherFiltersSkeleton />;
    if (!otherFilters?.length)
        return <div className="no-values-found">No values found</div>;
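For clarity, the reshaping done in the meter branch above, shown standalone. The sample payload below is invented purely for illustration; only the field renaming mirrors the diff:

```typescript
// Hypothetical sample of grouped key suggestions returned for signalSource: 'meter'.
const sampleKeys = {
	string: [{ name: 'deployment.environment', fieldDataType: 'string', fieldContext: 'resource', signal: 'meter' }],
	number: [{ name: 'usage.count', fieldDataType: 'number', fieldContext: 'metric', signal: 'meter' }],
};

// Flatten the grouped keys and rename fields into the autocomplete shape the filter list expects.
const filterAttributes = Object.values(sampleKeys)
	.flat()
	.map((attr) => ({
		key: attr.name,
		dataType: attr.fieldDataType,
		type: attr.fieldContext,
		signal: attr.signal,
	}));
// => [{ key: 'deployment.environment', dataType: 'string', type: 'resource', signal: 'meter' }, ...]
```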
@@ -0,0 +1,63 @@
import './styles.scss';

import { Select } from 'antd';
import { DefaultOptionType } from 'antd/es/select';

import { UniversalYAxisUnitMappings, Y_AXIS_CATEGORIES } from './constants';
import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
import { mapMetricUnitToUniversalUnit } from './utils';

function YAxisUnitSelector({
	value,
	onChange,
	placeholder = 'Please select a unit',
	loading = false,
}: YAxisUnitSelectorProps): JSX.Element {
	const universalUnit = mapMetricUnitToUniversalUnit(value);

	const handleSearch = (
		searchTerm: string,
		currentOption: DefaultOptionType | undefined,
	): boolean => {
		if (!currentOption?.value) return false;

		const search = searchTerm.toLowerCase();
		const unitId = currentOption.value.toString().toLowerCase();
		const unitLabel = currentOption.children?.toString().toLowerCase() || '';

		// Check label and id
		if (unitId.includes(search) || unitLabel.includes(search)) return true;

		// Check aliases (from the mapping) using array iteration
		const aliases = Array.from(
			UniversalYAxisUnitMappings[currentOption.value as UniversalYAxisUnit] ?? [],
		);

		return aliases.some((alias) => alias.toLowerCase().includes(search));
	};

	return (
		<div className="y-axis-unit-selector-component">
			<Select
				showSearch
				value={universalUnit}
				onChange={onChange}
				placeholder={placeholder}
				filterOption={(input, option): boolean => handleSearch(input, option)}
				loading={loading}
			>
				{Y_AXIS_CATEGORIES.map((category) => (
					<Select.OptGroup key={category.name} label={category.name}>
						{category.units.map((unit) => (
							<Select.Option key={unit.id} value={unit.id}>
								{unit.name}
							</Select.Option>
						))}
					</Select.OptGroup>
				))}
			</Select>
		</div>
	);
}

export default YAxisUnitSelector;
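A minimal usage sketch for the new selector; the surrounding form component and its state handling are assumptions for illustration, not part of this changeset:

```tsx
import { useState } from 'react';

import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

// Hypothetical panel-options form; only the unit field is shown here.
function PanelUnitField(): JSX.Element {
	// `value` may arrive in any AWS / UCUM / OpenMetrics spelling; the selector
	// normalises it via mapMetricUnitToUniversalUnit before display.
	const [unit, setUnit] = useState<string>('bytes');

	return (
		<YAxisUnitSelector
			value={unit}
			onChange={(next: UniversalYAxisUnit): void => setUnit(next)}
		/>
	);
}

export default PanelUnitField;
```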
@@ -0,0 +1,68 @@
import { fireEvent, render, screen } from '@testing-library/react';

import YAxisUnitSelector from '../YAxisUnitSelector';

describe('YAxisUnitSelector', () => {
	const mockOnChange = jest.fn();

	beforeEach(() => {
		mockOnChange.mockClear();
	});

	it('renders with default placeholder', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		expect(screen.getByText('Please select a unit')).toBeInTheDocument();
	});

	it('renders with custom placeholder', () => {
		render(
			<YAxisUnitSelector
				value=""
				onChange={mockOnChange}
				placeholder="Custom placeholder"
			/>,
		);
		expect(screen.queryByText('Custom placeholder')).toBeInTheDocument();
	});

	it('calls onChange when a value is selected', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		const select = screen.getByRole('combobox');

		fireEvent.mouseDown(select);
		const option = screen.getByText('Bytes (B)');
		fireEvent.click(option);

		expect(mockOnChange).toHaveBeenCalledWith('By', {
			children: 'Bytes (B)',
			key: 'By',
			value: 'By',
		});
	});

	it('filters options based on search input', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		const select = screen.getByRole('combobox');

		fireEvent.mouseDown(select);
		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'byte' } });

		expect(screen.getByText('Bytes/sec')).toBeInTheDocument();
	});

	it('shows all categories and their units', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		const select = screen.getByRole('combobox');

		fireEvent.mouseDown(select);

		// Check for category headers
		expect(screen.getByText('Data')).toBeInTheDocument();
		expect(screen.getByText('Time')).toBeInTheDocument();

		// Check for some common units
		expect(screen.getByText('Bytes (B)')).toBeInTheDocument();
		expect(screen.getByText('Seconds (s)')).toBeInTheDocument();
	});
});
@@ -0,0 +1,39 @@
import {
	getUniversalNameFromMetricUnit,
	mapMetricUnitToUniversalUnit,
} from '../utils';

describe('YAxisUnitSelector utils', () => {
	describe('mapMetricUnitToUniversalUnit', () => {
		it('maps known units correctly', () => {
			expect(mapMetricUnitToUniversalUnit('bytes')).toBe('By');
			expect(mapMetricUnitToUniversalUnit('seconds')).toBe('s');
			expect(mapMetricUnitToUniversalUnit('bytes_per_second')).toBe('By/s');
		});

		it('returns null or self for unknown units', () => {
			expect(mapMetricUnitToUniversalUnit('unknown_unit')).toBe('unknown_unit');
			expect(mapMetricUnitToUniversalUnit('')).toBe(null);
			expect(mapMetricUnitToUniversalUnit(undefined)).toBe(null);
		});
	});

	describe('getUniversalNameFromMetricUnit', () => {
		it('returns human readable names for known units', () => {
			expect(getUniversalNameFromMetricUnit('bytes')).toBe('Bytes (B)');
			expect(getUniversalNameFromMetricUnit('seconds')).toBe('Seconds (s)');
			expect(getUniversalNameFromMetricUnit('bytes_per_second')).toBe('Bytes/sec');
		});

		it('returns original unit for unknown units', () => {
			expect(getUniversalNameFromMetricUnit('unknown_unit')).toBe('unknown_unit');
			expect(getUniversalNameFromMetricUnit('')).toBe('-');
			expect(getUniversalNameFromMetricUnit(undefined)).toBe('-');
		});

		it('handles case variations', () => {
			expect(getUniversalNameFromMetricUnit('bytes')).toBe('Bytes (B)');
			expect(getUniversalNameFromMetricUnit('s')).toBe('Seconds (s)');
		});
	});
});
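These utils are exercised against the alias table defined in constants.ts below. The actual `utils.ts` implementation is not part of this compare view; the following is only a sketch of how such a reverse lookup could work, consistent with the expectations in the tests above:

```typescript
// Sketch only: assumes the UniversalYAxisUnitMappings table from constants.ts below.
import { UniversalYAxisUnitMappings } from './constants';
import { UniversalYAxisUnit, YAxisUnit } from './types';

function mapMetricUnitToUniversalUnitSketch(
	unit: string | undefined,
): UniversalYAxisUnit | string | null {
	if (!unit) return null;
	// A unit that is already a universal id (e.g. 'By') maps to itself.
	if (Object.values(UniversalYAxisUnit).includes(unit as UniversalYAxisUnit)) {
		return unit as UniversalYAxisUnit;
	}
	// Otherwise search every alias set (AWS / UCUM / OpenMetrics spellings) for a match.
	const match = Object.entries(UniversalYAxisUnitMappings).find(([, aliases]) =>
		aliases.has(unit as YAxisUnit),
	);
	// Unknown units fall through unchanged, matching the unit tests above.
	return match ? (match[0] as UniversalYAxisUnit) : unit;
}
```

The display label would then come from indexing `Y_AXIS_UNIT_NAMES` with the resolved universal unit, e.g. `'bytes'` resolves to `By` and renders as `Bytes (B)`.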
frontend/src/components/YAxisUnitSelector/constants.ts (new file, 627 lines)
@@ -0,0 +1,627 @@
|
||||
import { UniversalYAxisUnit, YAxisUnit } from './types';
|
||||
|
||||
// Mapping of universal y-axis units to their AWS, UCUM, and OpenMetrics equivalents
|
||||
export const UniversalYAxisUnitMappings: Record<
|
||||
UniversalYAxisUnit,
|
||||
Set<YAxisUnit>
|
||||
> = {
|
||||
// Time
|
||||
[UniversalYAxisUnit.NANOSECONDS]: new Set([
|
||||
YAxisUnit.UCUM_NANOSECONDS,
|
||||
YAxisUnit.OPEN_METRICS_NANOSECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MICROSECONDS]: new Set([
|
||||
YAxisUnit.AWS_MICROSECONDS,
|
||||
YAxisUnit.UCUM_MICROSECONDS,
|
||||
YAxisUnit.OPEN_METRICS_MICROSECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MILLISECONDS]: new Set([
|
||||
YAxisUnit.AWS_MILLISECONDS,
|
||||
YAxisUnit.UCUM_MILLISECONDS,
|
||||
YAxisUnit.OPEN_METRICS_MILLISECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.SECONDS]: new Set([
|
||||
YAxisUnit.AWS_SECONDS,
|
||||
YAxisUnit.UCUM_SECONDS,
|
||||
YAxisUnit.OPEN_METRICS_SECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MINUTES]: new Set([
|
||||
YAxisUnit.UCUM_MINUTES,
|
||||
YAxisUnit.OPEN_METRICS_MINUTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.HOURS]: new Set([
|
||||
YAxisUnit.UCUM_HOURS,
|
||||
YAxisUnit.OPEN_METRICS_HOURS,
|
||||
]),
|
||||
[UniversalYAxisUnit.DAYS]: new Set([
|
||||
YAxisUnit.UCUM_DAYS,
|
||||
YAxisUnit.OPEN_METRICS_DAYS,
|
||||
]),
|
||||
[UniversalYAxisUnit.WEEKS]: new Set([YAxisUnit.UCUM_WEEKS]),
|
||||
|
||||
// Data
|
||||
[UniversalYAxisUnit.BYTES]: new Set([
|
||||
YAxisUnit.AWS_BYTES,
|
||||
YAxisUnit.UCUM_BYTES,
|
||||
YAxisUnit.OPEN_METRICS_BYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBYTES]: new Set([
|
||||
YAxisUnit.AWS_KILOBYTES,
|
||||
YAxisUnit.UCUM_KILOBYTES,
|
||||
YAxisUnit.OPEN_METRICS_KILOBYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABYTES]: new Set([
|
||||
YAxisUnit.AWS_MEGABYTES,
|
||||
YAxisUnit.UCUM_MEGABYTES,
|
||||
YAxisUnit.OPEN_METRICS_MEGABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABYTES]: new Set([
|
||||
YAxisUnit.AWS_GIGABYTES,
|
||||
YAxisUnit.UCUM_GIGABYTES,
|
||||
YAxisUnit.OPEN_METRICS_GIGABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABYTES]: new Set([
|
||||
YAxisUnit.AWS_TERABYTES,
|
||||
YAxisUnit.UCUM_TERABYTES,
|
||||
YAxisUnit.OPEN_METRICS_TERABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABYTES]: new Set([
|
||||
YAxisUnit.AWS_PETABYTES,
|
||||
YAxisUnit.UCUM_PEBIBYTES,
|
||||
YAxisUnit.OPEN_METRICS_PEBIBYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABYTES]: new Set([
|
||||
YAxisUnit.AWS_EXABYTES,
|
||||
YAxisUnit.UCUM_EXABYTES,
|
||||
YAxisUnit.OPEN_METRICS_EXABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABYTES]: new Set([
|
||||
YAxisUnit.AWS_ZETTABYTES,
|
||||
YAxisUnit.UCUM_ZETTABYTES,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABYTES]: new Set([
|
||||
YAxisUnit.AWS_YOTTABYTES,
|
||||
YAxisUnit.UCUM_YOTTABYTES,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABYTES,
|
||||
]),
|
||||
|
||||
// Data Rate
|
||||
[UniversalYAxisUnit.BYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_BYTES_SECOND,
|
||||
YAxisUnit.UCUM_BYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_BYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_KILOBYTES_SECOND,
|
||||
YAxisUnit.UCUM_KILOBYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_KILOBYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_MEGABYTES_SECOND,
|
||||
YAxisUnit.UCUM_MEGABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_MEGABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_GIGABYTES_SECOND,
|
||||
YAxisUnit.UCUM_GIGABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_GIGABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_TERABYTES_SECOND,
|
||||
YAxisUnit.UCUM_TERABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_TERABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_PETABYTES_SECOND,
|
||||
YAxisUnit.UCUM_PETABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_PETABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_EXABYTES_SECOND,
|
||||
YAxisUnit.UCUM_EXABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_EXABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_ZETTABYTES_SECOND,
|
||||
YAxisUnit.UCUM_ZETTABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_YOTTABYTES_SECOND,
|
||||
YAxisUnit.UCUM_YOTTABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABYTES_SECOND,
|
||||
]),
|
||||
|
||||
// Bits
|
||||
[UniversalYAxisUnit.BITS]: new Set([
|
||||
YAxisUnit.AWS_BITS,
|
||||
YAxisUnit.UCUM_BITS,
|
||||
YAxisUnit.OPEN_METRICS_BITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBITS]: new Set([
|
||||
YAxisUnit.AWS_KILOBITS,
|
||||
YAxisUnit.UCUM_KILOBITS,
|
||||
YAxisUnit.OPEN_METRICS_KILOBITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABITS]: new Set([
|
||||
YAxisUnit.AWS_MEGABITS,
|
||||
YAxisUnit.UCUM_MEGABITS,
|
||||
YAxisUnit.OPEN_METRICS_MEGABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABITS]: new Set([
|
||||
YAxisUnit.AWS_GIGABITS,
|
||||
YAxisUnit.UCUM_GIGABITS,
|
||||
YAxisUnit.OPEN_METRICS_GIGABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABITS]: new Set([
|
||||
YAxisUnit.AWS_TERABITS,
|
||||
YAxisUnit.UCUM_TERABITS,
|
||||
YAxisUnit.OPEN_METRICS_TERABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABITS]: new Set([
|
||||
YAxisUnit.AWS_PETABITS,
|
||||
YAxisUnit.UCUM_PETABITS,
|
||||
YAxisUnit.OPEN_METRICS_PETABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABITS]: new Set([
|
||||
YAxisUnit.AWS_EXABITS,
|
||||
YAxisUnit.UCUM_EXABITS,
|
||||
YAxisUnit.OPEN_METRICS_EXABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABITS]: new Set([
|
||||
YAxisUnit.AWS_ZETTABITS,
|
||||
YAxisUnit.UCUM_ZETTABITS,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABITS]: new Set([
|
||||
YAxisUnit.AWS_YOTTABITS,
|
||||
YAxisUnit.UCUM_YOTTABITS,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABITS,
|
||||
]),
|
||||
|
||||
// Bit Rate
|
||||
[UniversalYAxisUnit.BITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_BITS_SECOND,
|
||||
YAxisUnit.UCUM_BITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_BITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_KILOBITS_SECOND,
|
||||
YAxisUnit.UCUM_KILOBITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_KILOBITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_MEGABITS_SECOND,
|
||||
YAxisUnit.UCUM_MEGABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_MEGABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_GIGABITS_SECOND,
|
||||
YAxisUnit.UCUM_GIGABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_GIGABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_TERABITS_SECOND,
|
||||
YAxisUnit.UCUM_TERABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_TERABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_PETABITS_SECOND,
|
||||
YAxisUnit.UCUM_PETABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_PETABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_EXABITS_SECOND,
|
||||
YAxisUnit.UCUM_EXABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_EXABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_ZETTABITS_SECOND,
|
||||
YAxisUnit.UCUM_ZETTABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_YOTTABITS_SECOND,
|
||||
YAxisUnit.UCUM_YOTTABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABITS_SECOND,
|
||||
]),
|
||||
|
||||
// Count
|
||||
[UniversalYAxisUnit.COUNT]: new Set([
|
||||
YAxisUnit.AWS_COUNT,
|
||||
YAxisUnit.UCUM_COUNT,
|
||||
YAxisUnit.OPEN_METRICS_COUNT,
|
||||
]),
|
||||
[UniversalYAxisUnit.COUNT_SECOND]: new Set([
|
||||
YAxisUnit.AWS_COUNT_SECOND,
|
||||
YAxisUnit.UCUM_COUNT_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_COUNT_SECOND,
|
||||
]),
|
||||
|
||||
// Percent
|
||||
[UniversalYAxisUnit.PERCENT]: new Set([
|
||||
YAxisUnit.AWS_PERCENT,
|
||||
YAxisUnit.UCUM_PERCENT,
|
||||
YAxisUnit.OPEN_METRICS_PERCENT,
|
||||
]),
|
||||
[UniversalYAxisUnit.NONE]: new Set([
|
||||
YAxisUnit.AWS_NONE,
|
||||
YAxisUnit.UCUM_NONE,
|
||||
YAxisUnit.OPEN_METRICS_NONE,
|
||||
]),
|
||||
[UniversalYAxisUnit.PERCENT_UNIT]: new Set([
|
||||
YAxisUnit.OPEN_METRICS_PERCENT_UNIT,
|
||||
]),
|
||||
|
||||
// Count Rate
|
||||
[UniversalYAxisUnit.COUNT_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_COUNTS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_COUNTS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.OPS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_OPS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_OPS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.OPS_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_OPS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_OPS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.REQUESTS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_REQUESTS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_REQUESTS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.REQUESTS_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_REQUESTS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_REQUESTS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.READS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_READS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_READS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.WRITES_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_WRITES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_WRITES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.READS_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_READS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_READS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.WRITES_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_WRITES_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_WRITES_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.IOOPS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_IOPS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_IOPS_SECOND,
|
||||
]),
|
||||
};
|
||||
|
||||
// Mapping of universal y-axis units to their display labels
|
||||
export const Y_AXIS_UNIT_NAMES: Record<UniversalYAxisUnit, string> = {
|
||||
[UniversalYAxisUnit.SECONDS]: 'Seconds (s)',
|
||||
[UniversalYAxisUnit.MILLISECONDS]: 'Milliseconds (ms)',
|
||||
[UniversalYAxisUnit.MICROSECONDS]: 'Microseconds (µs)',
|
||||
[UniversalYAxisUnit.BYTES]: 'Bytes (B)',
|
||||
[UniversalYAxisUnit.KILOBYTES]: 'Kilobytes (KB)',
|
||||
[UniversalYAxisUnit.MEGABYTES]: 'Megabytes (MB)',
|
||||
[UniversalYAxisUnit.GIGABYTES]: 'Gigabytes (GB)',
|
||||
[UniversalYAxisUnit.TERABYTES]: 'Terabytes (TB)',
|
||||
[UniversalYAxisUnit.PETABYTES]: 'Petabytes (PB)',
|
||||
[UniversalYAxisUnit.EXABYTES]: 'Exabytes (EB)',
|
||||
[UniversalYAxisUnit.ZETTABYTES]: 'Zettabytes (ZB)',
|
||||
[UniversalYAxisUnit.YOTTABYTES]: 'Yottabytes (YB)',
|
||||
[UniversalYAxisUnit.BITS]: 'Bits (b)',
|
||||
[UniversalYAxisUnit.KILOBITS]: 'Kilobits (Kb)',
|
||||
[UniversalYAxisUnit.MEGABITS]: 'Megabits (Mb)',
|
||||
[UniversalYAxisUnit.GIGABITS]: 'Gigabits (Gb)',
|
||||
[UniversalYAxisUnit.TERABITS]: 'Terabits (Tb)',
|
||||
[UniversalYAxisUnit.PETABITS]: 'Petabits (Pb)',
|
||||
[UniversalYAxisUnit.EXABITS]: 'Exabits (Eb)',
|
||||
[UniversalYAxisUnit.ZETTABITS]: 'Zettabits (Zb)',
|
||||
[UniversalYAxisUnit.YOTTABITS]: 'Yottabits (Yb)',
|
||||
[UniversalYAxisUnit.BYTES_SECOND]: 'Bytes/sec',
|
||||
[UniversalYAxisUnit.KILOBYTES_SECOND]: 'Kilobytes/sec',
|
||||
[UniversalYAxisUnit.MEGABYTES_SECOND]: 'Megabytes/sec',
|
||||
[UniversalYAxisUnit.GIGABYTES_SECOND]: 'Gigabytes/sec',
|
||||
[UniversalYAxisUnit.TERABYTES_SECOND]: 'Terabytes/sec',
|
||||
[UniversalYAxisUnit.PETABYTES_SECOND]: 'Petabytes/sec',
|
||||
[UniversalYAxisUnit.EXABYTES_SECOND]: 'Exabytes/sec',
|
||||
[UniversalYAxisUnit.ZETTABYTES_SECOND]: 'Zettabytes/sec',
|
||||
[UniversalYAxisUnit.YOTTABYTES_SECOND]: 'Yottabytes/sec',
|
||||
[UniversalYAxisUnit.BITS_SECOND]: 'Bits/sec',
|
||||
[UniversalYAxisUnit.KILOBITS_SECOND]: 'Kilobits/sec',
|
||||
[UniversalYAxisUnit.MEGABITS_SECOND]: 'Megabits/sec',
|
||||
[UniversalYAxisUnit.GIGABITS_SECOND]: 'Gigabits/sec',
|
||||
[UniversalYAxisUnit.TERABITS_SECOND]: 'Terabits/sec',
|
||||
[UniversalYAxisUnit.PETABITS_SECOND]: 'Petabits/sec',
|
||||
[UniversalYAxisUnit.EXABITS_SECOND]: 'Exabits/sec',
|
||||
[UniversalYAxisUnit.ZETTABITS_SECOND]: 'Zettabits/sec',
|
||||
[UniversalYAxisUnit.YOTTABITS_SECOND]: 'Yottabits/sec',
|
||||
[UniversalYAxisUnit.COUNT]: 'Count',
|
||||
[UniversalYAxisUnit.COUNT_SECOND]: 'Count/sec',
|
||||
[UniversalYAxisUnit.PERCENT]: 'Percent (0 - 100)',
|
||||
[UniversalYAxisUnit.NONE]: 'None',
|
||||
[UniversalYAxisUnit.WEEKS]: 'Weeks',
|
||||
[UniversalYAxisUnit.DAYS]: 'Days',
|
||||
[UniversalYAxisUnit.HOURS]: 'Hours',
|
||||
[UniversalYAxisUnit.MINUTES]: 'Minutes',
|
||||
[UniversalYAxisUnit.NANOSECONDS]: 'Nanoseconds',
|
||||
[UniversalYAxisUnit.COUNT_MINUTE]: 'Count/min',
|
||||
[UniversalYAxisUnit.OPS_SECOND]: 'Ops/sec',
|
||||
[UniversalYAxisUnit.OPS_MINUTE]: 'Ops/min',
|
||||
[UniversalYAxisUnit.REQUESTS_SECOND]: 'Requests/sec',
|
||||
[UniversalYAxisUnit.REQUESTS_MINUTE]: 'Requests/min',
|
||||
[UniversalYAxisUnit.READS_SECOND]: 'Reads/sec',
|
||||
[UniversalYAxisUnit.WRITES_SECOND]: 'Writes/sec',
|
||||
[UniversalYAxisUnit.READS_MINUTE]: 'Reads/min',
|
||||
[UniversalYAxisUnit.WRITES_MINUTE]: 'Writes/min',
|
||||
[UniversalYAxisUnit.IOOPS_SECOND]: 'IOPS/sec',
|
||||
[UniversalYAxisUnit.PERCENT_UNIT]: 'Percent (0.0 - 1.0)',
|
||||
};
|
||||
|
||||
// Splitting the universal y-axis units into categories
|
||||
export const Y_AXIS_CATEGORIES = [
|
||||
{
|
||||
name: 'Time',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.SECONDS],
|
||||
id: UniversalYAxisUnit.SECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MILLISECONDS],
|
||||
id: UniversalYAxisUnit.MILLISECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MICROSECONDS],
|
||||
id: UniversalYAxisUnit.MICROSECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.NANOSECONDS],
|
||||
id: UniversalYAxisUnit.NANOSECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MINUTES],
|
||||
id: UniversalYAxisUnit.MINUTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.HOURS],
|
||||
id: UniversalYAxisUnit.HOURS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.DAYS],
|
||||
id: UniversalYAxisUnit.DAYS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.WEEKS],
|
||||
id: UniversalYAxisUnit.WEEKS,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Data',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BYTES],
|
||||
id: UniversalYAxisUnit.BYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBYTES],
|
||||
id: UniversalYAxisUnit.KILOBYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABYTES],
|
||||
id: UniversalYAxisUnit.MEGABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABYTES],
|
||||
id: UniversalYAxisUnit.GIGABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABYTES],
|
||||
id: UniversalYAxisUnit.TERABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABYTES],
|
||||
id: UniversalYAxisUnit.PETABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES],
|
||||
id: UniversalYAxisUnit.EXABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES],
|
||||
id: UniversalYAxisUnit.ZETTABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES],
|
||||
id: UniversalYAxisUnit.YOTTABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BITS],
|
||||
id: UniversalYAxisUnit.BITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBITS],
|
||||
id: UniversalYAxisUnit.KILOBITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABITS],
|
||||
id: UniversalYAxisUnit.MEGABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABITS],
|
||||
id: UniversalYAxisUnit.GIGABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABITS],
|
||||
id: UniversalYAxisUnit.TERABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABITS],
|
||||
id: UniversalYAxisUnit.PETABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS],
|
||||
id: UniversalYAxisUnit.EXABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS],
|
||||
id: UniversalYAxisUnit.ZETTABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS],
|
||||
id: UniversalYAxisUnit.YOTTABITS,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Data Rate',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BYTES_SECOND],
|
||||
id: UniversalYAxisUnit.BYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBYTES_SECOND],
|
||||
id: UniversalYAxisUnit.KILOBYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.MEGABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.GIGABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.TERABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.PETABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.EXABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.ZETTABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.YOTTABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BITS_SECOND],
|
||||
id: UniversalYAxisUnit.BITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBITS_SECOND],
|
||||
id: UniversalYAxisUnit.KILOBITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABITS_SECOND],
|
||||
id: UniversalYAxisUnit.MEGABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABITS_SECOND],
|
||||
id: UniversalYAxisUnit.GIGABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABITS_SECOND],
|
||||
id: UniversalYAxisUnit.TERABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABITS_SECOND],
|
||||
id: UniversalYAxisUnit.PETABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS_SECOND],
|
||||
id: UniversalYAxisUnit.EXABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS_SECOND],
|
||||
id: UniversalYAxisUnit.ZETTABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS_SECOND],
|
||||
id: UniversalYAxisUnit.YOTTABITS_SECOND,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Count',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT],
|
||||
id: UniversalYAxisUnit.COUNT,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT_SECOND],
|
||||
id: UniversalYAxisUnit.COUNT_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT_MINUTE],
|
||||
id: UniversalYAxisUnit.COUNT_MINUTE,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Operations',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.OPS_SECOND],
|
||||
id: UniversalYAxisUnit.OPS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.OPS_MINUTE],
|
||||
id: UniversalYAxisUnit.OPS_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.REQUESTS_SECOND],
|
||||
id: UniversalYAxisUnit.REQUESTS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.REQUESTS_MINUTE],
|
||||
id: UniversalYAxisUnit.REQUESTS_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.READS_SECOND],
|
||||
id: UniversalYAxisUnit.READS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.WRITES_SECOND],
|
||||
id: UniversalYAxisUnit.WRITES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.READS_MINUTE],
|
||||
id: UniversalYAxisUnit.READS_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.WRITES_MINUTE],
|
||||
id: UniversalYAxisUnit.WRITES_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.IOOPS_SECOND],
|
||||
id: UniversalYAxisUnit.IOOPS_SECOND,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Percentage',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PERCENT],
|
||||
id: UniversalYAxisUnit.PERCENT,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PERCENT_UNIT],
|
||||
id: UniversalYAxisUnit.PERCENT_UNIT,
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
frontend/src/components/YAxisUnitSelector/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
import YAxisUnitSelector from './YAxisUnitSelector';

export default YAxisUnitSelector;
frontend/src/components/YAxisUnitSelector/styles.scss (new file, 5 lines)
@@ -0,0 +1,5 @@
.y-axis-unit-selector-component {
	.ant-select {
		width: 220px;
	}
}
frontend/src/components/YAxisUnitSelector/types.ts (new file, 365 lines)
@@ -0,0 +1,365 @@
|
||||
export interface YAxisUnitSelectorProps {
|
||||
value: string | undefined;
|
||||
onChange: (value: UniversalYAxisUnit) => void;
|
||||
placeholder?: string;
|
||||
loading?: boolean;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
export enum UniversalYAxisUnit {
|
||||
// Time
|
||||
WEEKS = 'wk',
|
||||
DAYS = 'd',
|
||||
HOURS = 'h',
|
||||
MINUTES = 'min',
|
||||
SECONDS = 's',
|
||||
MICROSECONDS = 'us',
|
||||
MILLISECONDS = 'ms',
|
||||
NANOSECONDS = 'ns',
|
||||
|
||||
// Data
|
||||
BYTES = 'By',
|
||||
KILOBYTES = 'kBy',
|
||||
MEGABYTES = 'MBy',
|
||||
GIGABYTES = 'GBy',
|
||||
TERABYTES = 'TBy',
|
||||
PETABYTES = 'PBy',
|
||||
EXABYTES = 'EBy',
|
||||
ZETTABYTES = 'ZBy',
|
||||
YOTTABYTES = 'YBy',
|
||||
|
||||
// Data Rate
|
||||
BYTES_SECOND = 'By/s',
|
||||
KILOBYTES_SECOND = 'kBy/s',
|
||||
MEGABYTES_SECOND = 'MBy/s',
|
||||
GIGABYTES_SECOND = 'GBy/s',
|
||||
TERABYTES_SECOND = 'TBy/s',
|
||||
PETABYTES_SECOND = 'PBy/s',
|
||||
EXABYTES_SECOND = 'EBy/s',
|
||||
ZETTABYTES_SECOND = 'ZBy/s',
|
||||
YOTTABYTES_SECOND = 'YBy/s',
|
||||
|
||||
// Bits
|
||||
BITS = 'bit',
|
||||
KILOBITS = 'kbit',
|
||||
MEGABITS = 'Mbit',
|
||||
GIGABITS = 'Gbit',
|
||||
TERABITS = 'Tbit',
|
||||
PETABITS = 'Pbit',
|
||||
EXABITS = 'Ebit',
|
||||
ZETTABITS = 'Zbit',
|
||||
YOTTABITS = 'Ybit',
|
||||
|
||||
// Bit Rate
|
||||
BITS_SECOND = 'bit/s',
|
||||
KILOBITS_SECOND = 'kbit/s',
|
||||
MEGABITS_SECOND = 'Mbit/s',
|
||||
GIGABITS_SECOND = 'Gbit/s',
|
||||
TERABITS_SECOND = 'Tbit/s',
|
||||
PETABITS_SECOND = 'Pbit/s',
|
||||
EXABITS_SECOND = 'Ebit/s',
|
||||
ZETTABITS_SECOND = 'Zbit/s',
|
||||
YOTTABITS_SECOND = 'Ybit/s',
|
||||
|
||||
// Count
|
||||
COUNT = '{count}',
|
||||
COUNT_SECOND = '{count}/s',
|
||||
COUNT_MINUTE = '{count}/min',
|
||||
|
||||
// Operations
|
||||
OPS_SECOND = '{ops}/s',
|
||||
OPS_MINUTE = '{ops}/min',
|
||||
|
||||
// Requests
|
||||
REQUESTS_SECOND = '{req}/s',
|
||||
REQUESTS_MINUTE = '{req}/min',
|
||||
|
||||
// Reads/Writes
|
||||
READS_SECOND = '{read}/s',
|
||||
WRITES_SECOND = '{write}/s',
|
||||
READS_MINUTE = '{read}/min',
|
||||
WRITES_MINUTE = '{write}/min',
|
||||
|
||||
// IO Operations
|
||||
IOOPS_SECOND = '{iops}/s',
|
||||
|
||||
// Percent
|
||||
PERCENT = '%',
|
||||
PERCENT_UNIT = 'percentunit',
|
||||
NONE = '1',
|
||||
}
|
||||
|
||||
export enum YAxisUnit {
|
||||
AWS_SECONDS = 'Seconds',
|
||||
UCUM_SECONDS = 's',
|
||||
OPEN_METRICS_SECONDS = 'seconds',
|
||||
|
||||
AWS_MICROSECONDS = 'Microseconds',
|
||||
UCUM_MICROSECONDS = 'us',
|
||||
OPEN_METRICS_MICROSECONDS = 'microseconds',
|
||||
|
||||
AWS_MILLISECONDS = 'Milliseconds',
|
||||
UCUM_MILLISECONDS = 'ms',
|
||||
OPEN_METRICS_MILLISECONDS = 'milliseconds',
|
||||
|
||||
AWS_BYTES = 'Bytes',
|
||||
UCUM_BYTES = 'By',
|
||||
OPEN_METRICS_BYTES = 'bytes',
|
||||
|
||||
AWS_KILOBYTES = 'Kilobytes',
|
||||
UCUM_KILOBYTES = 'kBy',
|
||||
OPEN_METRICS_KILOBYTES = 'kilobytes',
|
||||
|
||||
AWS_MEGABYTES = 'Megabytes',
|
||||
UCUM_MEGABYTES = 'MBy',
|
||||
OPEN_METRICS_MEGABYTES = 'megabytes',
|
||||
|
||||
AWS_GIGABYTES = 'Gigabytes',
|
||||
UCUM_GIGABYTES = 'GBy',
|
||||
OPEN_METRICS_GIGABYTES = 'gigabytes',
|
||||
|
||||
AWS_TERABYTES = 'Terabytes',
|
||||
UCUM_TERABYTES = 'TBy',
|
||||
OPEN_METRICS_TERABYTES = 'terabytes',
|
||||
|
||||
AWS_PETABYTES = 'Petabytes',
|
||||
UCUM_PETABYTES = 'PBy',
|
||||
OPEN_METRICS_PETABYTES = 'petabytes',
|
||||
|
||||
AWS_EXABYTES = 'Exabytes',
|
||||
UCUM_EXABYTES = 'EBy',
|
||||
OPEN_METRICS_EXABYTES = 'exabytes',
|
||||
|
||||
AWS_ZETTABYTES = 'Zettabytes',
|
||||
UCUM_ZETTABYTES = 'ZBy',
|
||||
OPEN_METRICS_ZETTABYTES = 'zettabytes',
|
||||
|
||||
AWS_YOTTABYTES = 'Yottabytes',
|
||||
UCUM_YOTTABYTES = 'YBy',
|
||||
OPEN_METRICS_YOTTABYTES = 'yottabytes',
|
||||
|
||||
AWS_BYTES_SECOND = 'Bytes/Second',
|
||||
UCUM_BYTES_SECOND = 'By/s',
|
||||
OPEN_METRICS_BYTES_SECOND = 'bytes_per_second',
|
||||
|
||||
AWS_KILOBYTES_SECOND = 'Kilobytes/Second',
|
||||
UCUM_KILOBYTES_SECOND = 'kBy/s',
|
||||
OPEN_METRICS_KILOBYTES_SECOND = 'kilobytes_per_second',
|
||||
|
||||
AWS_MEGABYTES_SECOND = 'Megabytes/Second',
|
||||
UCUM_MEGABYTES_SECOND = 'MBy/s',
|
||||
OPEN_METRICS_MEGABYTES_SECOND = 'megabytes_per_second',
|
||||
|
||||
AWS_GIGABYTES_SECOND = 'Gigabytes/Second',
|
||||
UCUM_GIGABYTES_SECOND = 'GBy/s',
|
||||
OPEN_METRICS_GIGABYTES_SECOND = 'gigabytes_per_second',
|
||||
|
||||
AWS_TERABYTES_SECOND = 'Terabytes/Second',
|
||||
UCUM_TERABYTES_SECOND = 'TBy/s',
|
||||
OPEN_METRICS_TERABYTES_SECOND = 'terabytes_per_second',
|
||||
|
||||
AWS_PETABYTES_SECOND = 'Petabytes/Second',
|
||||
UCUM_PETABYTES_SECOND = 'PBy/s',
|
||||
OPEN_METRICS_PETABYTES_SECOND = 'petabytes_per_second',
|
||||
|
||||
AWS_EXABYTES_SECOND = 'Exabytes/Second',
|
||||
UCUM_EXABYTES_SECOND = 'EBy/s',
|
||||
OPEN_METRICS_EXABYTES_SECOND = 'exabytes_per_second',
|
||||
|
||||
AWS_ZETTABYTES_SECOND = 'Zettabytes/Second',
|
||||
UCUM_ZETTABYTES_SECOND = 'ZBy/s',
|
||||
OPEN_METRICS_ZETTABYTES_SECOND = 'zettabytes_per_second',
|
||||
|
||||
AWS_YOTTABYTES_SECOND = 'Yottabytes/Second',
|
||||
UCUM_YOTTABYTES_SECOND = 'YBy/s',
|
||||
OPEN_METRICS_YOTTABYTES_SECOND = 'yottabytes_per_second',
|
||||
|
||||
AWS_BITS = 'Bits',
|
||||
UCUM_BITS = 'bit',
|
||||
OPEN_METRICS_BITS = 'bits',
|
||||
|
||||
AWS_KILOBITS = 'Kilobits',
|
||||
UCUM_KILOBITS = 'kbit',
|
||||
OPEN_METRICS_KILOBITS = 'kilobits',
|
||||
|
||||
AWS_MEGABITS = 'Megabits',
|
||||
UCUM_MEGABITS = 'Mbit',
|
||||
OPEN_METRICS_MEGABITS = 'megabits',
|
||||
|
||||
AWS_GIGABITS = 'Gigabits',
|
||||
UCUM_GIGABITS = 'Gbit',
|
||||
OPEN_METRICS_GIGABITS = 'gigabits',
|
||||
|
||||
AWS_TERABITS = 'Terabits',
|
||||
UCUM_TERABITS = 'Tbit',
|
||||
OPEN_METRICS_TERABITS = 'terabits',
|
||||
|
||||
AWS_PETABITS = 'Petabits',
|
||||
UCUM_PETABITS = 'Pbit',
|
||||
OPEN_METRICS_PETABITS = 'petabits',
|
||||
|
||||
AWS_EXABITS = 'Exabits',
|
||||
UCUM_EXABITS = 'Ebit',
|
||||
OPEN_METRICS_EXABITS = 'exabits',
|
||||
|
||||
AWS_ZETTABITS = 'Zettabits',
|
||||
UCUM_ZETTABITS = 'Zbit',
|
||||
OPEN_METRICS_ZETTABITS = 'zettabits',
|
||||
|
||||
AWS_YOTTABITS = 'Yottabits',
|
||||
UCUM_YOTTABITS = 'Ybit',
|
||||
OPEN_METRICS_YOTTABITS = 'yottabits',
|
||||
|
||||
AWS_BITS_SECOND = 'Bits/Second',
|
||||
UCUM_BITS_SECOND = 'bit/s',
|
||||
OPEN_METRICS_BITS_SECOND = 'bits_per_second',
|
||||
|
||||
AWS_KILOBITS_SECOND = 'Kilobits/Second',
|
||||
UCUM_KILOBITS_SECOND = 'kbit/s',
|
||||
OPEN_METRICS_KILOBITS_SECOND = 'kilobits_per_second',
|
||||
|
||||
AWS_MEGABITS_SECOND = 'Megabits/Second',
|
||||
UCUM_MEGABITS_SECOND = 'Mbit/s',
|
||||
OPEN_METRICS_MEGABITS_SECOND = 'megabits_per_second',
|
||||
|
||||
AWS_GIGABITS_SECOND = 'Gigabits/Second',
|
||||
UCUM_GIGABITS_SECOND = 'Gbit/s',
|
||||
OPEN_METRICS_GIGABITS_SECOND = 'gigabits_per_second',
|
||||
|
||||
AWS_TERABITS_SECOND = 'Terabits/Second',
|
||||
UCUM_TERABITS_SECOND = 'Tbit/s',
|
||||
OPEN_METRICS_TERABITS_SECOND = 'terabits_per_second',
|
||||
|
||||
AWS_PETABITS_SECOND = 'Petabits/Second',
|
||||
UCUM_PETABITS_SECOND = 'Pbit/s',
|
||||
OPEN_METRICS_PETABITS_SECOND = 'petabits_per_second',
|
||||
|
||||
AWS_EXABITS_SECOND = 'Exabits/Second',
|
||||
UCUM_EXABITS_SECOND = 'Ebit/s',
|
||||
OPEN_METRICS_EXABITS_SECOND = 'exabits_per_second',
|
||||
|
||||
AWS_ZETTABITS_SECOND = 'Zettabits/Second',
|
||||
UCUM_ZETTABITS_SECOND = 'Zbit/s',
|
||||
OPEN_METRICS_ZETTABITS_SECOND = 'zettabits_per_second',
|
||||
|
||||
AWS_YOTTABITS_SECOND = 'Yottabits/Second',
|
||||
UCUM_YOTTABITS_SECOND = 'Ybit/s',
|
||||
OPEN_METRICS_YOTTABITS_SECOND = 'yottabits_per_second',
|
||||
|
||||
AWS_COUNT = 'Count',
|
||||
UCUM_COUNT = '{count}',
|
||||
OPEN_METRICS_COUNT = 'count',
|
||||
|
||||
AWS_COUNT_SECOND = 'Count/Second',
|
||||
UCUM_COUNT_SECOND = '{count}/s',
|
||||
OPEN_METRICS_COUNT_SECOND = 'count_per_second',
|
||||
|
||||
AWS_PERCENT = 'Percent',
|
||||
UCUM_PERCENT = '%',
|
||||
OPEN_METRICS_PERCENT = 'ratio',
|
||||
|
||||
AWS_NONE = 'None',
|
||||
UCUM_NONE = '1',
|
||||
OPEN_METRICS_NONE = 'none',
|
||||
|
||||
UCUM_NANOSECONDS = 'ns',
|
||||
OPEN_METRICS_NANOSECONDS = 'nanoseconds',
|
||||
|
||||
UCUM_MINUTES = 'min',
|
||||
OPEN_METRICS_MINUTES = 'minutes',
|
||||
|
||||
UCUM_HOURS = 'h',
|
||||
OPEN_METRICS_HOURS = 'hours',
|
||||
|
||||
UCUM_DAYS = 'd',
|
||||
OPEN_METRICS_DAYS = 'days',
|
||||
|
||||
UCUM_WEEKS = 'wk',
|
||||
OPEN_METRICS_WEEKS = 'weeks',
|
||||
|
||||
UCUM_KIBIBYTES = 'KiBy',
|
||||
OPEN_METRICS_KIBIBYTES = 'kibibytes',
|
||||
|
||||
UCUM_MEBIBYTES = 'MiBy',
|
||||
OPEN_METRICS_MEBIBYTES = 'mebibytes',
|
||||
|
||||
UCUM_GIBIBYTES = 'GiBy',
|
||||
OPEN_METRICS_GIBIBYTES = 'gibibytes',
|
||||
|
||||
UCUM_TEBIBYTES = 'TiBy',
|
||||
OPEN_METRICS_TEBIBYTES = 'tebibytes',
|
||||
|
||||
UCUM_PEBIBYTES = 'PiBy',
|
||||
OPEN_METRICS_PEBIBYTES = 'pebibytes',
|
||||
|
||||
UCUM_KIBIBYTES_SECOND = 'KiBy/s',
|
||||
OPEN_METRICS_KIBIBYTES_SECOND = 'kibibytes_per_second',
|
||||
|
||||
UCUM_KIBIBITS_SECOND = 'Kibit/s',
|
||||
OPEN_METRICS_KIBIBITS_SECOND = 'kibibits_per_second',
|
||||
|
||||
UCUM_MEBIBYTES_SECOND = 'MiBy/s',
|
||||
OPEN_METRICS_MEBIBYTES_SECOND = 'mebibytes_per_second',
|
||||
|
||||
UCUM_MEBIBITS_SECOND = 'Mibit/s',
|
||||
OPEN_METRICS_MEBIBITS_SECOND = 'mebibits_per_second',
|
||||
|
||||
UCUM_GIBIBYTES_SECOND = 'GiBy/s',
|
||||
OPEN_METRICS_GIBIBYTES_SECOND = 'gibibytes_per_second',
|
||||
|
||||
UCUM_GIBIBITS_SECOND = 'Gibit/s',
|
||||
OPEN_METRICS_GIBIBITS_SECOND = 'gibibits_per_second',
|
||||
|
||||
UCUM_TEBIBYTES_SECOND = 'TiBy/s',
|
||||
OPEN_METRICS_TEBIBYTES_SECOND = 'tebibytes_per_second',
|
||||
|
||||
UCUM_TEBIBITS_SECOND = 'Tibit/s',
|
||||
OPEN_METRICS_TEBIBITS_SECOND = 'tebibits_per_second',
|
||||
|
||||
UCUM_PEBIBYTES_SECOND = 'PiBy/s',
|
||||
OPEN_METRICS_PEBIBYTES_SECOND = 'pebibytes_per_second',
|
||||
|
||||
UCUM_PEBIBITS_SECOND = 'Pibit/s',
|
||||
OPEN_METRICS_PEBIBITS_SECOND = 'pebibits_per_second',
|
||||
|
||||
UCUM_TRUE_FALSE = '{bool}',
|
||||
OPEN_METRICS_TRUE_FALSE = 'boolean_true_false',
|
||||
|
||||
UCUM_YES_NO = '{bool}',
|
||||
OPEN_METRICS_YES_NO = 'boolean_yes_no',
|
||||
|
||||
UCUM_COUNTS_SECOND = '{count}/s',
|
||||
OPEN_METRICS_COUNTS_SECOND = 'counts_per_second',
|
||||
|
||||
UCUM_OPS_SECOND = '{ops}/s',
|
||||
OPEN_METRICS_OPS_SECOND = 'ops_per_second',
|
||||
|
||||
UCUM_REQUESTS_SECOND = '{requests}/s',
|
||||
OPEN_METRICS_REQUESTS_SECOND = 'requests_per_second',
|
||||
|
||||
UCUM_REQUESTS_MINUTE = '{requests}/min',
|
||||
OPEN_METRICS_REQUESTS_MINUTE = 'requests_per_minute',
|
||||
|
||||
UCUM_READS_SECOND = '{reads}/s',
|
||||
OPEN_METRICS_READS_SECOND = 'reads_per_second',
|
||||
|
||||
UCUM_WRITES_SECOND = '{writes}/s',
|
||||
OPEN_METRICS_WRITES_SECOND = 'writes_per_second',
|
||||
|
||||
UCUM_IOPS_SECOND = '{iops}/s',
|
||||
OPEN_METRICS_IOPS_SECOND = 'io_ops_per_second',
|
||||
|
||||
UCUM_COUNTS_MINUTE = '{count}/min',
|
||||
OPEN_METRICS_COUNTS_MINUTE = 'counts_per_minute',
|
||||
|
||||
UCUM_OPS_MINUTE = '{ops}/min',
|
||||
OPEN_METRICS_OPS_MINUTE = 'ops_per_minute',
|
||||
|
||||
UCUM_READS_MINUTE = '{reads}/min',
|
||||
OPEN_METRICS_READS_MINUTE = 'reads_per_minute',
|
||||
|
||||
UCUM_WRITES_MINUTE = '{writes}/min',
|
||||
OPEN_METRICS_WRITES_MINUTE = 'writes_per_minute',
|
||||
|
||||
OPEN_METRICS_PERCENT_UNIT = 'percentunit',
|
||||
}
|
33 frontend/src/components/YAxisUnitSelector/utils.tsx Normal file
@@ -0,0 +1,33 @@
import { UniversalYAxisUnitMappings, Y_AXIS_UNIT_NAMES } from './constants';
import { UniversalYAxisUnit, YAxisUnit } from './types';

export const mapMetricUnitToUniversalUnit = (
	unit: string | undefined,
): UniversalYAxisUnit | null => {
	if (!unit) {
		return null;
	}

	const universalUnit = Object.values(UniversalYAxisUnit).find(
		(u) => UniversalYAxisUnitMappings[u].has(unit as YAxisUnit) || unit === u,
	);

	return universalUnit || (unit as UniversalYAxisUnit) || null;
};

export const getUniversalNameFromMetricUnit = (
	unit: string | undefined,
): string => {
	if (!unit) {
		return '-';
	}

	const universalUnit = mapMetricUnitToUniversalUnit(unit);
	if (!universalUnit) {
		return unit;
	}

	const universalName = Y_AXIS_UNIT_NAMES[universalUnit];

	return universalName || unit || '-';
};
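Note (not part of this diff): a small usage sketch of the two helpers above. The exact resolved unit depends on UniversalYAxisUnitMappings, which is assumed here to map each universal unit to its AWS/UCUM/OpenMetrics spellings.

// Illustrative usage only.
const unit = mapMetricUnitToUniversalUnit('Seconds'); // e.g. UniversalYAxisUnit.SECONDS, if 'Seconds' is in the mappings
const label = getUniversalNameFromMetricUnit('Seconds'); // display name from Y_AXIS_UNIT_NAMES, or the raw unit as fallback
const empty = getUniversalNameFromMetricUnit(undefined); // '-'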
@@ -1,3 +1,5 @@
/* eslint-disable @typescript-eslint/naming-convention */

export const OPERATORS = {
	IN: 'IN',
	LIKE: 'LIKE',
@@ -21,6 +23,44 @@ export const QUERY_BUILDER_FUNCTIONS = {
	HASALL: 'hasAll',
};

export function negateOperator(operatorOrFunction: string): string {
	// Special cases for equals/not equals
	if (operatorOrFunction === OPERATORS['=']) {
		return OPERATORS['!='];
	}
	if (operatorOrFunction === OPERATORS['!=']) {
		return OPERATORS['='];
	}
	// For all other operators and functions, add NOT in front
	return `${OPERATORS.NOT} ${operatorOrFunction}`;
}

export enum DEPRECATED_OPERATORS {
	REGEX = 'regex',
	NIN = 'nin',
	NREGEX = 'nregex',
	NLIKE = 'nlike',
	NILIKE = 'nilike',
	NEXTISTS = 'nexists',
	NCONTAINS = 'ncontains',
	NHAS = 'nhas',
	NHASANY = 'nhasany',
	NHASALL = 'nhasall',
}

export const DEPRECATED_OPERATORS_MAP = {
	[DEPRECATED_OPERATORS.REGEX]: OPERATORS.REGEXP,
	[DEPRECATED_OPERATORS.NIN]: negateOperator(OPERATORS.IN),
	[DEPRECATED_OPERATORS.NREGEX]: negateOperator(OPERATORS.REGEXP),
	[DEPRECATED_OPERATORS.NLIKE]: negateOperator(OPERATORS.LIKE),
	[DEPRECATED_OPERATORS.NILIKE]: negateOperator(OPERATORS.ILIKE),
	[DEPRECATED_OPERATORS.NEXTISTS]: negateOperator(OPERATORS.EXISTS),
	[DEPRECATED_OPERATORS.NCONTAINS]: negateOperator(OPERATORS.CONTAINS),
	[DEPRECATED_OPERATORS.NHAS]: negateOperator(QUERY_BUILDER_FUNCTIONS.HAS),
	[DEPRECATED_OPERATORS.NHASANY]: negateOperator(QUERY_BUILDER_FUNCTIONS.HASANY),
	[DEPRECATED_OPERATORS.NHASALL]: negateOperator(QUERY_BUILDER_FUNCTIONS.HASALL),
};

export const NON_VALUE_OPERATORS = [OPERATORS.EXISTS];

// eslint-disable-next-line @typescript-eslint/naming-convention
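Note (not part of this diff): a short sketch of how the additions above behave, using only values defined in this file.

// Illustrative usage only.
negateOperator(OPERATORS['=']); // returns OPERATORS['!=']
negateOperator(OPERATORS.LIKE); // returns `${OPERATORS.NOT} ${OPERATORS.LIKE}`
DEPRECATED_OPERATORS_MAP[DEPRECATED_OPERATORS.NLIKE]; // same value as negateOperator(OPERATORS.LIKE)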
@@ -82,15 +122,3 @@ export const queryOperatorSuggestions = [
	{ label: OPERATORS.NOT, type: 'operator', info: 'Not' },
	...negationQueryOperatorSuggestions,
];

export function negateOperator(operatorOrFunction: string): string {
	// Special cases for equals/not equals
	if (operatorOrFunction === OPERATORS['=']) {
		return OPERATORS['!='];
	}
	if (operatorOrFunction === OPERATORS['!=']) {
		return OPERATORS['='];
	}
	// For all other operators and functions, add NOT in front
	return `${OPERATORS.NOT} ${operatorOrFunction}`;
}
|
||||
|
||||
@@ -77,9 +77,9 @@ const ROUTES = {
	API_MONITORING: '/api-monitoring/explorer',
	METRICS_EXPLORER_BASE: '/metrics-explorer',
	WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
	METER_EXPLORER_BASE: '/meter-explorer',
	METER_EXPLORER: '/meter-explorer',
	METER_EXPLORER_VIEWS: '/meter-explorer/views',
	METER: '/meter',
	METER_EXPLORER: '/meter/explorer',
	METER_EXPLORER_VIEWS: '/meter/explorer/views',
	HOME_PAGE: '/',
} as const;
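Note (not part of this diff): the meter routes move from the flat '/meter-explorer' paths to a nested '/meter' base. A hypothetical old-to-new mapping, shown only as an illustration for updating stale links:

// Hypothetical mapping, not code from this change.
const METER_ROUTE_REDIRECTS: Record<string, string> = {
	'/meter-explorer': '/meter/explorer',
	'/meter-explorer/views': '/meter/explorer/views',
};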
@@ -23,7 +23,7 @@ export const flattenLabels = (labels: Labels): ILabelRecord[] => {
		if (!hiddenLabels.includes(key)) {
			recs.push({
				key,
				value: labels[key],
				value: labels[key] || '',
			});
		}
	});
|
||||
|
||||
@@ -520,12 +520,6 @@ function ClusterDetails({
|
||||
>
|
||||
Cluster Name
|
||||
</Typography.Text>
|
||||
<Typography.Text
|
||||
type="secondary"
|
||||
className="entity-details-metadata-label"
|
||||
>
|
||||
Cluster Name
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<div className="values-row">
|
||||
<Typography.Text className="entity-details-metadata-value">
|
||||
@@ -533,9 +527,6 @@ function ClusterDetails({
|
||||
{cluster.meta.k8s_cluster_name}
|
||||
</Tooltip>
|
||||
</Typography.Text>
|
||||
<Typography.Text className="entity-details-metadata-value">
|
||||
<Tooltip title="Cluster name">{cluster.meta.k8s_cluster_name}</Tooltip>
|
||||
</Typography.Text>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,351 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
|
||||
import { Time } from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import EntityEvents from '../EntityEvents';
|
||||
|
||||
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="date-time-selection">Date Time</div>
|
||||
),
|
||||
}));
|
||||
|
||||
const mockUseQuery = jest.fn();
|
||||
jest.mock('react-query', () => ({
|
||||
useQuery: (queryKey: any, queryFn: any, options: any): any =>
|
||||
mockUseQuery(queryKey, queryFn, options),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: `${process.env.FRONTEND_API_ENDPOINT}/${ROUTES.INFRASTRUCTURE_MONITORING_KUBERNETES}/`,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
},
|
||||
activeLicenseV3: {
|
||||
event_queue: {
|
||||
created_at: '0',
|
||||
event: LicenseEvent.NO_EVENT,
|
||||
scheduled_at: '0',
|
||||
status: '',
|
||||
updated_at: '0',
|
||||
},
|
||||
license: {
|
||||
license_key: 'test-license-key',
|
||||
license_type: 'trial',
|
||||
org_id: 'test-org-id',
|
||||
plan_id: 'test-plan-id',
|
||||
plan_name: 'test-plan-name',
|
||||
plan_type: 'trial',
|
||||
plan_version: 'test-plan-version',
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
|
||||
const mockUseQueryBuilderData = {
|
||||
handleRunQuery: jest.fn(),
|
||||
stagedQuery: initialQueriesMap[DataSource.METRICS],
|
||||
updateAllQueriesOperators: jest.fn(),
|
||||
currentQuery: initialQueriesMap[DataSource.METRICS],
|
||||
resetQuery: jest.fn(),
|
||||
redirectWithQueryBuilderData: jest.fn(),
|
||||
isStagedQueryUpdated: jest.fn(),
|
||||
handleSetQueryData: jest.fn(),
|
||||
handleSetFormulaData: jest.fn(),
|
||||
handleSetQueryItemData: jest.fn(),
|
||||
handleSetConfig: jest.fn(),
|
||||
removeQueryBuilderEntityByIndex: jest.fn(),
|
||||
removeQueryTypeItemByIndex: jest.fn(),
|
||||
isDefaultQuery: jest.fn(),
|
||||
};
|
||||
|
||||
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
|
||||
...mockUseQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const timeRange = {
|
||||
startTime: 1718236800,
|
||||
endTime: 1718236800,
|
||||
};
|
||||
|
||||
const mockHandleChangeEventFilters = jest.fn();
|
||||
|
||||
const mockFilters: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'pod-name',
|
||||
key: {
|
||||
id: 'pod-name',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
key: 'pod-name',
|
||||
type: 'tag',
|
||||
isJSON: false,
|
||||
isIndexed: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'pod-1',
|
||||
},
|
||||
],
|
||||
op: 'and',
|
||||
};
|
||||
|
||||
const isModalTimeSelection = false;
|
||||
const mockHandleTimeChange = jest.fn();
|
||||
const selectedInterval: Time = '1m';
|
||||
const category = K8sCategory.PODS;
|
||||
const queryKey = 'pod-events';
|
||||
|
||||
const mockEventsData = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
list: [
|
||||
{
|
||||
timestamp: '2024-01-15T10:00:00Z',
|
||||
data: {
|
||||
id: 'event-1',
|
||||
severity_text: 'INFO',
|
||||
body: 'Test event 1',
|
||||
resources_string: { 'pod.name': 'test-pod-1' },
|
||||
attributes_string: { service: 'test-service' },
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamp: '2024-01-15T10:01:00Z',
|
||||
data: {
|
||||
id: 'event-2',
|
||||
severity_text: 'WARN',
|
||||
body: 'Test event 2',
|
||||
resources_string: { 'pod.name': 'test-pod-2' },
|
||||
attributes_string: { service: 'test-service' },
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const mockEmptyEventsData = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
list: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const createMockEvent = (
|
||||
id: string,
|
||||
severity: string,
|
||||
body: string,
|
||||
podName: string,
|
||||
): any => ({
|
||||
timestamp: `2024-01-15T10:${id.padStart(2, '0')}:00Z`,
|
||||
data: {
|
||||
id: `event-${id}`,
|
||||
severity_text: severity,
|
||||
body,
|
||||
resources_string: { 'pod.name': podName },
|
||||
attributes_string: { service: 'test-service' },
|
||||
},
|
||||
});
|
||||
|
||||
const createMockMoreEventsData = (): any => ({
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
list: Array.from({ length: 11 }, (_, i) =>
|
||||
createMockEvent(
|
||||
String(i + 1),
|
||||
['INFO', 'WARN', 'ERROR', 'DEBUG'][i % 4],
|
||||
`Test event ${i + 1}`,
|
||||
`test-pod-${i + 1}`,
|
||||
),
|
||||
),
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const renderEntityEvents = (overrides = {}): any => {
|
||||
const defaultProps = {
|
||||
timeRange,
|
||||
handleChangeEventFilters: mockHandleChangeEventFilters,
|
||||
filters: mockFilters,
|
||||
isModalTimeSelection,
|
||||
handleTimeChange: mockHandleTimeChange,
|
||||
selectedInterval,
|
||||
category,
|
||||
queryKey,
|
||||
...overrides,
|
||||
};
|
||||
|
||||
return render(
|
||||
<EntityEvents
|
||||
timeRange={defaultProps.timeRange}
|
||||
handleChangeEventFilters={defaultProps.handleChangeEventFilters}
|
||||
filters={defaultProps.filters}
|
||||
isModalTimeSelection={defaultProps.isModalTimeSelection}
|
||||
handleTimeChange={defaultProps.handleTimeChange}
|
||||
selectedInterval={defaultProps.selectedInterval}
|
||||
category={defaultProps.category}
|
||||
queryKey={defaultProps.queryKey}
|
||||
/>,
|
||||
);
|
||||
};
|
||||
|
||||
describe('EntityEvents', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: mockEventsData,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should render events list with data', () => {
|
||||
renderEntityEvents();
|
||||
expect(screen.getByText('Prev')).toBeInTheDocument();
|
||||
expect(screen.getByText('Next')).toBeInTheDocument();
|
||||
expect(screen.getByText('Test event 1')).toBeInTheDocument();
|
||||
expect(screen.getByText('Test event 2')).toBeInTheDocument();
|
||||
expect(screen.getByText('INFO')).toBeInTheDocument();
|
||||
expect(screen.getByText('WARN')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders empty state when no events are found', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: mockEmptyEventsData,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
|
||||
renderEntityEvents();
|
||||
expect(screen.getByText(/No events found for this pods/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders loader when fetching events', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
isError: false,
|
||||
isFetching: true,
|
||||
});
|
||||
|
||||
renderEntityEvents();
|
||||
expect(screen.getByTestId('loader')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows pagination controls when events are present', () => {
|
||||
renderEntityEvents();
|
||||
expect(screen.getByText('Prev')).toBeInTheDocument();
|
||||
expect(screen.getByText('Next')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('disables Prev button on first page', () => {
|
||||
renderEntityEvents();
|
||||
const prevButton = screen.getByText('Prev').closest('button');
|
||||
expect(prevButton).toBeDisabled();
|
||||
});
|
||||
|
||||
it('enables Next button when more events are available', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: createMockMoreEventsData(),
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
|
||||
renderEntityEvents();
|
||||
const nextButton = screen.getByText('Next').closest('button');
|
||||
expect(nextButton).not.toBeDisabled();
|
||||
});
|
||||
|
||||
it('navigates to next page when Next button is clicked', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: createMockMoreEventsData(),
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
|
||||
renderEntityEvents();
|
||||
|
||||
const nextButton = screen.getByText('Next').closest('button');
|
||||
expect(nextButton).not.toBeNull();
|
||||
fireEvent.click(nextButton as Element);
|
||||
|
||||
const { calls } = mockUseQuery.mock;
|
||||
const hasPage2Call = calls.some((call) => {
|
||||
const { queryKey: callQueryKey } = call[0] || {};
|
||||
return Array.isArray(callQueryKey) && callQueryKey.includes(2);
|
||||
});
|
||||
expect(hasPage2Call).toBe(true);
|
||||
});
|
||||
|
||||
it('navigates to previous page when Prev button is clicked', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: createMockMoreEventsData(),
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
|
||||
renderEntityEvents();
|
||||
|
||||
const nextButton = screen.getByText('Next').closest('button');
|
||||
expect(nextButton).not.toBeNull();
|
||||
fireEvent.click(nextButton as Element);
|
||||
|
||||
const prevButton = screen.getByText('Prev').closest('button');
|
||||
expect(prevButton).not.toBeNull();
|
||||
fireEvent.click(prevButton as Element);
|
||||
|
||||
const { calls } = mockUseQuery.mock;
|
||||
const hasPage1Call = calls.some((call) => {
|
||||
const { queryKey: callQueryKey } = call[0] || {};
|
||||
return Array.isArray(callQueryKey) && callQueryKey.includes(1);
|
||||
});
|
||||
expect(hasPage1Call).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,374 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
|
||||
import { Time } from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
|
||||
import EntityMetrics from '../EntityMetrics';
|
||||
|
||||
jest.mock('lib/uPlotLib/getUplotChartOptions', () => ({
|
||||
getUPlotChartOptions: jest.fn().mockReturnValue({}),
|
||||
}));
|
||||
|
||||
jest.mock('lib/uPlotLib/utils/getUplotChartData', () => ({
|
||||
getUPlotChartData: jest.fn().mockReturnValue([]),
|
||||
}));
|
||||
|
||||
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="date-time-selection">Date Time</div>
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('components/Uplot', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => <div data-testid="uplot-chart">Uplot Chart</div>,
|
||||
}));
|
||||
|
||||
jest.mock('container/InfraMonitoringK8s/commonUtils', () => ({
|
||||
__esModule: true,
|
||||
getMetricsTableData: jest.fn().mockReturnValue([
|
||||
{
|
||||
rows: [
|
||||
{ data: { timestamp: '2024-01-15T10:00:00Z', value: '42.5' } },
|
||||
{ data: { timestamp: '2024-01-15T10:01:00Z', value: '43.2' } },
|
||||
],
|
||||
columns: [
|
||||
{ key: 'timestamp', label: 'Timestamp', isValueColumn: false },
|
||||
{ key: 'value', label: 'Value', isValueColumn: true },
|
||||
],
|
||||
},
|
||||
]),
|
||||
MetricsTable: jest
|
||||
.fn()
|
||||
.mockImplementation(
|
||||
(): JSX.Element => <div data-testid="metrics-table">Metrics Table</div>,
|
||||
),
|
||||
}));
|
||||
|
||||
const mockUseQueries = jest.fn();
|
||||
jest.mock('react-query', () => ({
|
||||
useQueries: (queryConfigs: any[]): any[] => mockUseQueries(queryConfigs),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: (): boolean => false,
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useDimensions', () => ({
|
||||
useResizeObserver: (): { width: number; height: number } => ({
|
||||
width: 800,
|
||||
height: 600,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useMultiIntersectionObserver', () => ({
|
||||
useMultiIntersectionObserver: (count: number): any => ({
|
||||
visibilities: new Array(count).fill(true),
|
||||
setElement: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
},
|
||||
activeLicenseV3: {
|
||||
event_queue: {
|
||||
created_at: '0',
|
||||
event: LicenseEvent.NO_EVENT,
|
||||
scheduled_at: '0',
|
||||
status: '',
|
||||
updated_at: '0',
|
||||
},
|
||||
license: {
|
||||
license_key: 'test-license-key',
|
||||
license_type: 'trial',
|
||||
org_id: 'test-org-id',
|
||||
plan_id: 'test-plan-id',
|
||||
plan_name: 'test-plan-name',
|
||||
plan_type: 'trial',
|
||||
plan_version: 'test-plan-version',
|
||||
},
|
||||
},
|
||||
featureFlags: [
|
||||
{
|
||||
name: 'DOT_METRICS_ENABLED',
|
||||
active: false,
|
||||
},
|
||||
],
|
||||
} as any);
|
||||
|
||||
const mockEntity = {
|
||||
id: 'test-entity-1',
|
||||
name: 'test-entity',
|
||||
type: 'pod',
|
||||
};
|
||||
|
||||
const mockEntityWidgetInfo = [
|
||||
{
|
||||
title: 'CPU Usage',
|
||||
yAxisUnit: 'percentage',
|
||||
},
|
||||
{
|
||||
title: 'Memory Usage',
|
||||
yAxisUnit: 'bytes',
|
||||
},
|
||||
];
|
||||
|
||||
const mockGetEntityQueryPayload = jest.fn().mockReturnValue([
|
||||
{
|
||||
query: 'cpu_usage',
|
||||
start: 1705315200,
|
||||
end: 1705318800,
|
||||
},
|
||||
{
|
||||
query: 'memory_usage',
|
||||
start: 1705315200,
|
||||
end: 1705318800,
|
||||
},
|
||||
]);
|
||||
|
||||
const mockTimeRange = {
|
||||
startTime: 1705315200,
|
||||
endTime: 1705318800,
|
||||
};
|
||||
|
||||
const mockHandleTimeChange = jest.fn();
|
||||
|
||||
const mockQueries = [
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{ data: { timestamp: '2024-01-15T10:00:00Z', value: '42.5' } },
|
||||
{ data: { timestamp: '2024-01-15T10:01:00Z', value: '43.2' } },
|
||||
],
|
||||
columns: [
|
||||
{ key: 'timestamp', label: 'Timestamp', isValueColumn: false },
|
||||
{ key: 'value', label: 'Value', isValueColumn: true },
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
params: {
|
||||
compositeQuery: {
|
||||
panelType: 'time_series',
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
},
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
table: {
|
||||
rows: [
|
||||
{ data: { timestamp: '2024-01-15T10:00:00Z', value: '1024' } },
|
||||
{ data: { timestamp: '2024-01-15T10:01:00Z', value: '1028' } },
|
||||
],
|
||||
columns: [
|
||||
{ key: 'timestamp', label: 'Timestamp', isValueColumn: false },
|
||||
{ key: 'value', label: 'Value', isValueColumn: true },
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
params: {
|
||||
compositeQuery: {
|
||||
panelType: 'table',
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
},
|
||||
];
|
||||
|
||||
const mockLoadingQueries = [
|
||||
{
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
isError: false,
|
||||
error: null,
|
||||
},
|
||||
{
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
isError: false,
|
||||
error: null,
|
||||
},
|
||||
];
|
||||
|
||||
const mockErrorQueries = [
|
||||
{
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isError: true,
|
||||
error: new Error('API Error'),
|
||||
},
|
||||
{
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
isError: true,
|
||||
error: new Error('Network Error'),
|
||||
},
|
||||
];
|
||||
|
||||
const mockEmptyQueries = [
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [],
|
||||
},
|
||||
},
|
||||
params: {
|
||||
compositeQuery: {
|
||||
panelType: 'time_series',
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
},
|
||||
{
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [],
|
||||
},
|
||||
},
|
||||
params: {
|
||||
compositeQuery: {
|
||||
panelType: 'table',
|
||||
},
|
||||
},
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
},
|
||||
];
|
||||
|
||||
const renderEntityMetrics = (overrides = {}): any => {
|
||||
const defaultProps = {
|
||||
timeRange: mockTimeRange,
|
||||
isModalTimeSelection: false,
|
||||
handleTimeChange: mockHandleTimeChange,
|
||||
selectedInterval: '5m' as Time,
|
||||
entity: mockEntity,
|
||||
entityWidgetInfo: mockEntityWidgetInfo,
|
||||
getEntityQueryPayload: mockGetEntityQueryPayload,
|
||||
queryKey: 'test-query-key',
|
||||
category: K8sCategory.PODS,
|
||||
...overrides,
|
||||
};
|
||||
|
||||
return render(
|
||||
<EntityMetrics
|
||||
timeRange={defaultProps.timeRange}
|
||||
isModalTimeSelection={defaultProps.isModalTimeSelection}
|
||||
handleTimeChange={defaultProps.handleTimeChange}
|
||||
selectedInterval={defaultProps.selectedInterval}
|
||||
entity={defaultProps.entity}
|
||||
entityWidgetInfo={defaultProps.entityWidgetInfo}
|
||||
getEntityQueryPayload={defaultProps.getEntityQueryPayload}
|
||||
queryKey={defaultProps.queryKey}
|
||||
category={defaultProps.category}
|
||||
/>,
|
||||
);
|
||||
};
|
||||
|
||||
describe('EntityMetrics', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockUseQueries.mockReturnValue(mockQueries);
|
||||
});
|
||||
|
||||
it('should render metrics with data', () => {
|
||||
renderEntityMetrics();
|
||||
expect(screen.getByText('CPU Usage')).toBeInTheDocument();
|
||||
expect(screen.getByText('Memory Usage')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('uplot-chart')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('metrics-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders loading state when fetching metrics', () => {
|
||||
mockUseQueries.mockReturnValue(mockLoadingQueries);
|
||||
renderEntityMetrics();
|
||||
expect(screen.getAllByText('CPU Usage')).toHaveLength(1);
|
||||
expect(screen.getAllByText('Memory Usage')).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('renders error state when query fails', () => {
|
||||
mockUseQueries.mockReturnValue(mockErrorQueries);
|
||||
renderEntityMetrics();
|
||||
expect(screen.getByText('API Error')).toBeInTheDocument();
|
||||
expect(screen.getByText('Network Error')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders empty state when no metrics data', () => {
|
||||
mockUseQueries.mockReturnValue(mockEmptyQueries);
|
||||
renderEntityMetrics();
|
||||
expect(screen.getByTestId('uplot-chart')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('metrics-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls handleTimeChange when datetime selection changes', () => {
|
||||
renderEntityMetrics();
|
||||
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders multiple metric widgets', () => {
|
||||
renderEntityMetrics();
|
||||
expect(screen.getByText('CPU Usage')).toBeInTheDocument();
|
||||
expect(screen.getByText('Memory Usage')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles different panel types correctly', () => {
|
||||
renderEntityMetrics();
|
||||
expect(screen.getByTestId('uplot-chart')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('metrics-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('applies intersection observer for visibility', () => {
|
||||
renderEntityMetrics();
|
||||
expect(mockUseQueries).toHaveBeenCalledWith(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
enabled: true,
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('generates correct query payloads', () => {
|
||||
renderEntityMetrics();
|
||||
expect(mockGetEntityQueryPayload).toHaveBeenCalledWith(
|
||||
mockEntity,
|
||||
mockTimeRange.startTime,
|
||||
mockTimeRange.endTime,
|
||||
false,
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,288 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
|
||||
import { Time } from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import EntityTraces from '../EntityTraces';
|
||||
|
||||
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="date-time-selection">Date Time</div>
|
||||
),
|
||||
}));
|
||||
|
||||
const mockUseQuery = jest.fn();
|
||||
jest.mock('react-query', () => ({
|
||||
useQuery: (queryKey: any, queryFn: any, options: any): any =>
|
||||
mockUseQuery(queryKey, queryFn, options),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: '/test-path',
|
||||
}),
|
||||
useNavigate: (): jest.Mock => jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useSafeNavigate', () => ({
|
||||
useSafeNavigate: (): { safeNavigate: jest.Mock } => ({
|
||||
safeNavigate: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
},
|
||||
activeLicenseV3: {
|
||||
event_queue: {
|
||||
created_at: '0',
|
||||
event: LicenseEvent.NO_EVENT,
|
||||
scheduled_at: '0',
|
||||
status: '',
|
||||
updated_at: '0',
|
||||
},
|
||||
license: {
|
||||
license_key: 'test-license-key',
|
||||
license_type: 'trial',
|
||||
org_id: 'test-org-id',
|
||||
plan_id: 'test-plan-id',
|
||||
plan_name: 'test-plan-name',
|
||||
plan_type: 'trial',
|
||||
plan_version: 'test-plan-version',
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
|
||||
const mockUseQueryBuilderData = {
|
||||
handleRunQuery: jest.fn(),
|
||||
stagedQuery: initialQueriesMap[DataSource.METRICS],
|
||||
updateAllQueriesOperators: jest.fn(),
|
||||
currentQuery: initialQueriesMap[DataSource.METRICS],
|
||||
resetQuery: jest.fn(),
|
||||
redirectWithQueryBuilderData: jest.fn(),
|
||||
isStagedQueryUpdated: jest.fn(),
|
||||
handleSetQueryData: jest.fn(),
|
||||
handleSetFormulaData: jest.fn(),
|
||||
handleSetQueryItemData: jest.fn(),
|
||||
handleSetConfig: jest.fn(),
|
||||
removeQueryBuilderEntityByIndex: jest.fn(),
|
||||
removeQueryTypeItemByIndex: jest.fn(),
|
||||
isDefaultQuery: jest.fn(),
|
||||
};
|
||||
|
||||
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
|
||||
...mockUseQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const timeRange = {
|
||||
startTime: 1718236800,
|
||||
endTime: 1718236800,
|
||||
};
|
||||
|
||||
const mockHandleChangeTracesFilters = jest.fn();
|
||||
|
||||
const mockTracesFilters: IBuilderQuery['filters'] = {
|
||||
items: [
|
||||
{
|
||||
id: 'service-name',
|
||||
key: {
|
||||
id: 'service-name',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
key: 'service.name',
|
||||
type: 'tag',
|
||||
isJSON: false,
|
||||
isIndexed: false,
|
||||
},
|
||||
op: '=',
|
||||
value: 'test-service',
|
||||
},
|
||||
],
|
||||
op: 'and',
|
||||
};
|
||||
|
||||
const isModalTimeSelection = false;
|
||||
const mockHandleTimeChange = jest.fn();
|
||||
const selectedInterval: Time = '5m';
|
||||
const category = K8sCategory.PODS;
|
||||
const queryKey = 'pod-traces';
|
||||
const queryKeyFilters = ['service.name'];
|
||||
|
||||
const mockTracesData = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
list: [
|
||||
{
|
||||
timestamp: '2024-01-15T10:00:00Z',
|
||||
data: {
|
||||
trace_id: 'trace-1',
|
||||
span_id: 'span-1',
|
||||
service_name: 'test-service-1',
|
||||
operation_name: 'test-operation-1',
|
||||
duration: 100,
|
||||
status_code: 200,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamp: '2024-01-15T10:01:00Z',
|
||||
data: {
|
||||
trace_id: 'trace-2',
|
||||
span_id: 'span-2',
|
||||
service_name: 'test-service-2',
|
||||
operation_name: 'test-operation-2',
|
||||
duration: 150,
|
||||
status_code: 500,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const mockEmptyTracesData = {
|
||||
payload: {
|
||||
data: {
|
||||
newResult: {
|
||||
data: {
|
||||
result: [
|
||||
{
|
||||
list: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const renderEntityTraces = (overrides = {}): any => {
|
||||
const defaultProps = {
|
||||
timeRange,
|
||||
isModalTimeSelection,
|
||||
handleTimeChange: mockHandleTimeChange,
|
||||
handleChangeTracesFilters: mockHandleChangeTracesFilters,
|
||||
tracesFilters: mockTracesFilters,
|
||||
selectedInterval,
|
||||
queryKey,
|
||||
category,
|
||||
queryKeyFilters,
|
||||
...overrides,
|
||||
};
|
||||
|
||||
return render(
|
||||
<EntityTraces
|
||||
timeRange={defaultProps.timeRange}
|
||||
isModalTimeSelection={defaultProps.isModalTimeSelection}
|
||||
handleTimeChange={defaultProps.handleTimeChange}
|
||||
handleChangeTracesFilters={defaultProps.handleChangeTracesFilters}
|
||||
tracesFilters={defaultProps.tracesFilters}
|
||||
selectedInterval={defaultProps.selectedInterval}
|
||||
queryKey={defaultProps.queryKey}
|
||||
category={defaultProps.category}
|
||||
queryKeyFilters={defaultProps.queryKeyFilters}
|
||||
/>,
|
||||
);
|
||||
};
|
||||
|
||||
describe('EntityTraces', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: mockTracesData,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should render traces list with data', () => {
|
||||
renderEntityTraces();
|
||||
expect(screen.getByText('Previous')).toBeInTheDocument();
|
||||
expect(screen.getByText('Next')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText(/Search Filter : select options from suggested values/),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders empty state when no traces are found', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: mockEmptyTracesData,
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
isFetching: false,
|
||||
});
|
||||
|
||||
renderEntityTraces();
|
||||
expect(screen.getByText(/No traces yet./)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders loader when fetching traces', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
isError: false,
|
||||
isFetching: true,
|
||||
});
|
||||
|
||||
renderEntityTraces();
|
||||
expect(screen.getByText('pending_data_placeholder')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows error state when query fails', () => {
|
||||
mockUseQuery.mockReturnValue({
|
||||
data: { error: 'API Error' },
|
||||
isLoading: false,
|
||||
isError: true,
|
||||
isFetching: false,
|
||||
});
|
||||
|
||||
renderEntityTraces();
|
||||
expect(screen.getByText('API Error')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls handleChangeTracesFilters when query builder search changes', () => {
|
||||
renderEntityTraces();
|
||||
expect(
|
||||
screen.getByText(/Search Filter : select options from suggested values/),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('calls handleTimeChange when datetime selection changes', () => {
|
||||
renderEntityTraces();
|
||||
expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows pagination controls when traces are present', () => {
|
||||
renderEntityTraces();
|
||||
expect(screen.getByText('Previous')).toBeInTheDocument();
|
||||
expect(screen.getByText('Next')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('disables pagination buttons when no more data', () => {
|
||||
renderEntityTraces();
|
||||
const prevButton = screen.getByText('Previous').closest('button');
|
||||
const nextButton = screen.getByText('Next').closest('button');
|
||||
expect(prevButton).toBeDisabled();
|
||||
expect(nextButton).toBeDisabled();
|
||||
});
|
||||
});
|
||||
@@ -4,7 +4,7 @@ import { Skeleton } from 'antd';
|
||||
|
||||
function LoadingContainer(): JSX.Element {
	return (
		<div className="k8s-list-loading-state">
		<div className="k8s-list-loading-state" data-testid="loader">
			<Skeleton.Input
				className="k8s-list-loading-state-item"
				size="large"
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import ClusterDetails from 'container/InfraMonitoringK8s/Clusters/ClusterDetails/ClusterDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('ClusterDetails', () => {
|
||||
const mockCluster = {
|
||||
meta: {
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<ClusterDetails
|
||||
cluster={mockCluster}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<ClusterDetails
|
||||
cluster={mockCluster}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<ClusterDetails
|
||||
cluster={mockCluster}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<ClusterDetails
|
||||
cluster={mockCluster}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<ClusterDetails
|
||||
cluster={mockCluster}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,141 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import DaemonSetDetails from 'container/InfraMonitoringK8s/DaemonSets/DaemonSetDetails/DaemonSetDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('DaemonSetDetails', () => {
|
||||
const mockDaemonSet = {
|
||||
meta: {
|
||||
k8s_daemonset_name: 'test-daemon-set',
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const daemonSetNameElements = screen.getAllByText('test-daemon-set');
|
||||
expect(daemonSetNameElements.length).toBeGreaterThan(0);
|
||||
expect(daemonSetNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,141 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import DeploymentDetails from 'container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('DeploymentDetails', () => {
|
||||
const mockDeployment = {
|
||||
meta: {
|
||||
k8s_deployment_name: 'test-deployment',
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const deploymentNameElements = screen.getAllByText('test-deployment');
|
||||
expect(deploymentNameElements.length).toBeGreaterThan(0);
|
||||
expect(deploymentNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,116 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import JobDetails from 'container/InfraMonitoringK8s/Jobs/JobDetails/JobDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('JobDetails', () => {
|
||||
const mockJob = {
|
||||
meta: {
|
||||
k8s_job_name: 'test-job',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const jobNameElements = screen.getAllByText('test-job');
|
||||
expect(jobNameElements.length).toBeGreaterThan(0);
|
||||
expect(jobNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,136 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import NamespaceDetails from 'container/InfraMonitoringK8s/Namespaces/NamespaceDetails/NamespaceDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('NamespaceDetails', () => {
|
||||
const mockNamespace = {
|
||||
namespaceName: 'test-namespace',
|
||||
meta: {
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,116 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import NodeDetails from 'container/InfraMonitoringK8s/Nodes/NodeDetails/NodeDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('NodeDetails', () => {
|
||||
const mockNode = {
|
||||
meta: {
|
||||
k8s_node_name: 'test-node',
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const nodeNameElements = screen.getAllByText('test-node');
|
||||
expect(nodeNameElements.length).toBeGreaterThan(0);
|
||||
expect(nodeNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
122 frontend/src/container/InfraMonitoringK8s/__tests__/Pods/PodDetails/PodDetails.test.tsx generated Normal file
@@ -0,0 +1,122 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import PodDetails from 'container/InfraMonitoringK8s/Pods/PodDetails/PodDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('PodDetails', () => {
|
||||
const mockPod = {
|
||||
podName: 'test-pod',
|
||||
meta: {
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
k8s_node_name: 'test-node',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const nodeNameElements = screen.getAllByText('test-node');
|
||||
expect(nodeNameElements.length).toBeGreaterThan(0);
|
||||
expect(nodeNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,136 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import StatefulSetDetails from 'container/InfraMonitoringK8s/StatefulSets/StatefulSetDetails/StatefulSetDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('StatefulSetDetails', () => {
|
||||
const mockStatefulSet = {
|
||||
meta: {
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
k8s_statefulset_name: 'test-stateful-set',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const statefulSetNameElements = screen.getAllByText('test-stateful-set');
|
||||
expect(statefulSetNameElements.length).toBeGreaterThan(0);
|
||||
expect(statefulSetNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,73 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import VolumeDetails from 'container/InfraMonitoringK8s/Volumes/VolumeDetails/VolumeDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('VolumeDetails', () => {
|
||||
const mockVolume = {
|
||||
persistentVolumeClaimName: 'test-volume',
|
||||
meta: {
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<VolumeDetails
|
||||
volume={mockVolume}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const volumeNameElements = screen.getAllByText('test-volume');
|
||||
expect(volumeNameElements.length).toBeGreaterThan(0);
|
||||
expect(volumeNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<VolumeDetails
|
||||
volume={mockVolume}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,121 @@
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import * as timezoneHooks from 'providers/Timezone';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
|
||||
const setupCommonMocks = (): void => {
|
||||
const createMockObserver = (): {
|
||||
observe: jest.Mock;
|
||||
unobserve: jest.Mock;
|
||||
disconnect: jest.Mock;
|
||||
} => ({
|
||||
observe: jest.fn(),
|
||||
unobserve: jest.fn(),
|
||||
disconnect: jest.fn(),
|
||||
});
|
||||
|
||||
global.IntersectionObserver = jest.fn().mockImplementation(createMockObserver);
|
||||
global.ResizeObserver = jest.fn().mockImplementation(createMockObserver);
|
||||
|
||||
jest.mock('react-redux', () => ({
|
||||
...jest.requireActual('react-redux'),
|
||||
useSelector: jest.fn(() => ({
|
||||
globalTime: {
|
||||
selectedTime: {
|
||||
startTime: 1713734400000,
|
||||
endTime: 1713738000000,
|
||||
},
|
||||
maxTime: 1713738000000,
|
||||
minTime: 1713734400000,
|
||||
},
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('uplot', () => ({
|
||||
paths: {
|
||||
spline: jest.fn(),
|
||||
bars: jest.fn(),
|
||||
},
|
||||
default: jest.fn(() => ({
|
||||
paths: {
|
||||
spline: jest.fn(),
|
||||
bars: jest.fn(),
|
||||
},
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom-v5-compat', () => ({
|
||||
...jest.requireActual('react-router-dom-v5-compat'),
|
||||
useSearchParams: jest.fn().mockReturnValue([
|
||||
{
|
||||
get: jest.fn(),
|
||||
entries: jest.fn(() => []),
|
||||
set: jest.fn(),
|
||||
},
|
||||
jest.fn(),
|
||||
]),
|
||||
useNavigationType: (): any => 'PUSH',
|
||||
}));
|
||||
|
||||
jest.mock('hooks/useUrlQuery', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(() => ({
|
||||
set: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
get: jest.fn(),
|
||||
has: jest.fn(),
|
||||
entries: jest.fn(() => []),
|
||||
append: jest.fn(),
|
||||
toString: jest.fn(() => ''),
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('lib/getMinMax', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(() => ({
|
||||
minTime: 1713734400000,
|
||||
maxTime: 1713738000000,
|
||||
})),
|
||||
isValidTimeFormat: jest.fn().mockReturnValue(true),
|
||||
}));
|
||||
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
},
|
||||
activeLicenseV3: {
|
||||
event_queue: {
|
||||
created_at: '0',
|
||||
event: LicenseEvent.NO_EVENT,
|
||||
scheduled_at: '0',
|
||||
status: '',
|
||||
updated_at: '0',
|
||||
},
|
||||
license: {
|
||||
license_key: 'test-license-key',
|
||||
license_type: 'trial',
|
||||
org_id: 'test-org-id',
|
||||
plan_id: 'test-plan-id',
|
||||
plan_name: 'test-plan-name',
|
||||
plan_type: 'trial',
|
||||
plan_version: 'test-plan-version',
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
|
||||
jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
|
||||
timezone: {
|
||||
offset: 0,
|
||||
},
|
||||
browserTimezone: {
|
||||
offset: 0,
|
||||
},
|
||||
} as any);
|
||||
|
||||
jest.mock('hooks/useSafeNavigate', () => ({
|
||||
useSafeNavigate: (): any => ({
|
||||
safeNavigate: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
};
|
||||
|
||||
export default setupCommonMocks;
|
||||
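For reference, a condensed sketch (not part of the diff) of the pattern the details-modal tests above follow when consuming this helper: the helper is invoked before the component import, mirroring the import/first disables in those files; the DeploymentDetails path is reused from the test above.

/* eslint-disable import/first */
// eslint-disable-next-line import/order
import setupCommonMocks from '../../commonMocks';

// Register the observer, redux, router and app-context mocks used by the details-modal tests.
setupCommonMocks();

import { render } from '@testing-library/react';
import DeploymentDetails from 'container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails';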
@@ -272,12 +272,11 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 			width: 80,
 			key: 'severity',
 			sorter: (a, b): number =>
-				(a.labels ? a.labels.severity.length : 0) -
-				(b.labels ? b.labels.severity.length : 0),
+				(a?.labels?.severity?.length || 0) - (b?.labels?.severity?.length || 0),
 			render: (value): JSX.Element => {
-				const objectKeys = Object.keys(value);
+				const objectKeys = value ? Object.keys(value) : [];
 				const withSeverityKey = objectKeys.find((e) => e === 'severity') || '';
-				const severityValue = value[withSeverityKey];
+				const severityValue = withSeverityKey ? value[withSeverityKey] : '-';

 				return <Typography>{severityValue}</Typography>;
 			},
@@ -290,7 +289,7 @@ function ListAlert({ allAlertRules, refetch }: ListAlertProps): JSX.Element {
 			align: 'center',
 			width: 100,
 			render: (value): JSX.Element => {
-				const objectKeys = Object.keys(value);
+				const objectKeys = value ? Object.keys(value) : [];
 				const withOutSeverityKeys = objectKeys.filter((e) => e !== 'severity');

 				if (withOutSeverityKeys.length === 0) {
@@ -256,7 +256,6 @@ function LogsExplorerViewsContainer({
 	} = useGetExplorerQueryRange(
 		listChartQuery,
 		PANEL_TYPES.TIME_SERIES,
-		// ENTITY_VERSION_V4,
 		ENTITY_VERSION_V5,
 		{
 			enabled:
@@ -279,7 +278,6 @@ function LogsExplorerViewsContainer({
 	} = useGetExplorerQueryRange(
 		requestData,
 		panelType,
-		// ENTITY_VERSION_V4,
 		ENTITY_VERSION_V5,
 		{
 			keepPreviousData: true,
@@ -0,0 +1,92 @@
|
||||
.meter-explorer-breakdown {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
||||
.meter-explorer-date-time {
|
||||
display: flex;
|
||||
min-height: 30px;
|
||||
justify-content: end;
|
||||
border-bottom: 1px solid var(--bg-slate-500);
|
||||
padding: 10px 16px;
|
||||
}
|
||||
|
||||
.meter-explorer-graphs {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 20px;
|
||||
gap: 36px;
|
||||
|
||||
.meter-column-graph {
|
||||
.row-card {
|
||||
background-color: var(--bg-ink-400);
|
||||
padding-left: 10px;
|
||||
height: 32px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
|
||||
.section-title {
|
||||
color: var(--bg-vanilla-400);
|
||||
font-family: Inter;
|
||||
font-size: 14px;
|
||||
font-style: normal;
|
||||
font-weight: 500;
|
||||
line-height: 20px;
|
||||
letter-spacing: -0.07px;
|
||||
}
|
||||
}
|
||||
|
||||
.graph-description {
|
||||
padding: 10px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.meter-page-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
align-items: flex-start;
|
||||
gap: 10px;
|
||||
.meter-graph {
|
||||
height: 400px;
|
||||
padding: 10px;
|
||||
width: 100%;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.total {
|
||||
.meter-column-graph {
|
||||
.meter-page-grid {
|
||||
grid-template-columns: repeat(3, 1fr);
|
||||
|
||||
.meter-graph {
|
||||
height: 200px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.meter-explorer-breakdown {
|
||||
.meter-explorer-date-time {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.meter-explorer-graphs {
|
||||
.meter-column-graph {
|
||||
.row-card {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
|
||||
.section-title {
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
200 frontend/src/container/MeterExplorer/Breakdown/BreakDown.tsx Normal file
@@ -0,0 +1,200 @@
|
||||
import './BreakDown.styles.scss';
|
||||
|
||||
import { Typography } from 'antd';
|
||||
// import useFilterConfig from 'components/QuickFilters/hooks/useFilterConfig';
|
||||
// import { SignalType } from 'components/QuickFilters/types';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Card, CardContainer } from 'container/GridCardLayout/styles';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
// import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { useCallback } from 'react';
|
||||
import { useDispatch } from 'react-redux';
|
||||
import { useHistory, useLocation } from 'react-router-dom';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
// import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
import {
|
||||
getLogCountWidgetData,
|
||||
getLogSizeWidgetData,
|
||||
getMetricCountWidgetData,
|
||||
getSpanCountWidgetData,
|
||||
getSpanSizeWidgetData,
|
||||
getTotalLogSizeWidgetData,
|
||||
getTotalMetricDatapointCountWidgetData,
|
||||
getTotalTraceSizeWidgetData,
|
||||
} from './graphs';
|
||||
|
||||
type MetricSection = {
|
||||
id: string;
|
||||
title: string;
|
||||
graphs: Widgets[];
|
||||
};
|
||||
|
||||
const sections: MetricSection[] = [
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Total',
|
||||
graphs: [
|
||||
getTotalLogSizeWidgetData(),
|
||||
getTotalTraceSizeWidgetData(),
|
||||
getTotalMetricDatapointCountWidgetData(),
|
||||
],
|
||||
},
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Logs',
|
||||
graphs: [getLogCountWidgetData(), getLogSizeWidgetData()],
|
||||
},
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Traces',
|
||||
graphs: [getSpanCountWidgetData(), getSpanSizeWidgetData()],
|
||||
},
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Metrics',
|
||||
graphs: [getMetricCountWidgetData()],
|
||||
},
|
||||
];
|
||||
|
||||
function Section(section: MetricSection): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const { title, graphs } = section;
|
||||
const history = useHistory();
|
||||
const { pathname } = useLocation();
|
||||
const dispatch = useDispatch();
|
||||
const urlQuery = useUrlQuery();
|
||||
|
||||
const onDragSelect = useCallback(
|
||||
(start: number, end: number) => {
|
||||
const startTimestamp = Math.trunc(start);
|
||||
const endTimestamp = Math.trunc(end);
|
||||
|
||||
urlQuery.set(QueryParams.startTime, startTimestamp.toString());
|
||||
urlQuery.set(QueryParams.endTime, endTimestamp.toString());
|
||||
const generatedUrl = `${pathname}?${urlQuery.toString()}`;
|
||||
history.push(generatedUrl);
|
||||
|
||||
if (startTimestamp !== endTimestamp) {
|
||||
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
|
||||
}
|
||||
},
|
||||
[dispatch, history, pathname, urlQuery],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="meter-column-graph">
|
||||
<CardContainer className="row-card" isDarkMode={isDarkMode}>
|
||||
<Typography.Text className="section-title">{title}</Typography.Text>
|
||||
</CardContainer>
|
||||
<div className="meter-page-grid">
|
||||
{graphs.map((widget) => (
|
||||
<Card
|
||||
key={widget?.id}
|
||||
isDarkMode={isDarkMode}
|
||||
$panelType={PANEL_TYPES.BAR}
|
||||
className="meter-graph"
|
||||
>
|
||||
<GridCard widget={widget} onDragSelect={onDragSelect} version="v5" />
|
||||
</Card>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// function FilterDropdown({ attrKey }: { attrKey: string }): JSX.Element {
|
||||
// const {
|
||||
// data: keyValueSuggestions,
|
||||
// isLoading: isLoadingKeyValueSuggestions,
|
||||
// } = useGetQueryKeyValueSuggestions({
|
||||
// key: attrKey,
|
||||
// signal: DataSource.METRICS,
|
||||
// signalSource: 'meter',
|
||||
// options: {
|
||||
// keepPreviousData: true,
|
||||
// },
|
||||
// });
|
||||
|
||||
// const responseData = keyValueSuggestions?.data as any;
|
||||
// const values = responseData?.data?.values || {};
|
||||
// const stringValues = values.stringValues || [];
|
||||
// const numberValues = values.numberValues || [];
|
||||
|
||||
// const stringOptions = stringValues.filter(
|
||||
// (value: string | null | undefined): value is string =>
|
||||
// value !== null && value !== undefined && value !== '',
|
||||
// );
|
||||
|
||||
// const numberOptions = numberValues
|
||||
// .filter(
|
||||
// (value: number | null | undefined): value is number =>
|
||||
// value !== null && value !== undefined,
|
||||
// )
|
||||
// .map((value: number) => value.toString());
|
||||
|
||||
// const vals = [...stringOptions, ...numberOptions];
|
||||
|
||||
// return (
|
||||
// <div className="filter-dropdown">
|
||||
// <Typography.Text>{attrKey}</Typography.Text>
|
||||
// <Select
|
||||
// loading={isLoadingKeyValueSuggestions}
|
||||
// options={vals?.map((suggestion: any) => ({
|
||||
// label: suggestion,
|
||||
// value: suggestion,
|
||||
// }))}
|
||||
// placeholder={`Select ${attrKey}`}
|
||||
// />
|
||||
// </div>
|
||||
// );
|
||||
// }
|
||||
|
||||
function BreakDown(): JSX.Element {
|
||||
// const { customFilters } = useFilterConfig({
|
||||
// signal: SignalType.METER_EXPLORER,
|
||||
// config: [],
|
||||
// });
|
||||
|
||||
return (
|
||||
<div className="meter-explorer-breakdown">
|
||||
<section className="meter-explorer-date-time">
|
||||
{/* {customFilters.map((filter) => (
|
||||
<FilterDropdown key={filter.key} attrKey={filter.key} />
|
||||
))} */}
|
||||
<DateTimeSelectionV2 showAutoRefresh={false} />
|
||||
</section>
|
||||
<section className="meter-explorer-graphs">
|
||||
<section className="total">
|
||||
<Section
|
||||
id={sections[0].id}
|
||||
title={sections[0].title}
|
||||
graphs={sections[0].graphs}
|
||||
/>
|
||||
</section>
|
||||
{sections.map((section, idx) => {
|
||||
if (idx === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
return (
|
||||
<Section
|
||||
key={section.id}
|
||||
id={section.id}
|
||||
title={section.title}
|
||||
graphs={section.graphs}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</section>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default BreakDown;
|
||||
390 frontend/src/container/MeterExplorer/Breakdown/graphs.ts Normal file
@@ -0,0 +1,390 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetWidgetQueryBuilderProps } from 'container/MetricsApplication/types';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
interface GetWidgetQueryProps {
|
||||
title: string;
|
||||
description: string;
|
||||
queryData: IBuilderQuery[];
|
||||
queryFormulas?: IBuilderFormula[];
|
||||
panelTypes?: PANEL_TYPES;
|
||||
yAxisUnit?: string;
|
||||
columnUnits?: Record<string, string>;
|
||||
}
|
||||
|
||||
interface GetWidgetQueryPropsReturn extends GetWidgetQueryBuilderProps {
|
||||
description?: string;
|
||||
nullZeroValues: string;
|
||||
columnUnits?: Record<string, string>;
|
||||
}
|
||||
|
||||
export const getWidgetQueryBuilder = ({
|
||||
query,
|
||||
title = '',
|
||||
panelTypes,
|
||||
yAxisUnit = '',
|
||||
fillSpans = false,
|
||||
id,
|
||||
nullZeroValues,
|
||||
description,
|
||||
}: GetWidgetQueryPropsReturn): Widgets => ({
|
||||
description: description || '',
|
||||
id: id || uuid(),
|
||||
isStacked: false,
|
||||
nullZeroValues: nullZeroValues || '',
|
||||
opacity: '1',
|
||||
panelTypes,
|
||||
query,
|
||||
timePreferance: 'GLOBAL_TIME',
|
||||
title,
|
||||
yAxisUnit,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
selectedLogFields: [],
|
||||
selectedTracesFields: [],
|
||||
fillSpans,
|
||||
});
|
||||
|
||||
export function getWidgetQuery(
|
||||
props: GetWidgetQueryProps,
|
||||
): GetWidgetQueryPropsReturn {
|
||||
const { title, description, panelTypes, yAxisUnit, columnUnits } = props;
|
||||
return {
|
||||
title,
|
||||
yAxisUnit: yAxisUnit || 'none',
|
||||
panelTypes: panelTypes || PANEL_TYPES.TIME_SERIES,
|
||||
fillSpans: false,
|
||||
description,
|
||||
nullZeroValues: 'zero',
|
||||
columnUnits,
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
promql: [],
|
||||
builder: {
|
||||
queryData: props.queryData,
|
||||
queryFormulas: (props.queryFormulas as IBuilderFormula[]) || [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
},
|
||||
};
|
||||
}
|
||||
export const getTotalLogSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.log.size',
|
||||
id: 'signoz.meter.log.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'sum',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Total size of log records ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.VALUE,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getTotalTraceSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.span.size',
|
||||
id: 'signoz.meter.span.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'sum',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Total size of spans ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.VALUE,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getTotalMetricDatapointCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.metric.datapoint.count',
|
||||
id: 'signoz.meter.metric.datapoint.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'sum',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Total metric datapoints ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.VALUE,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getLogCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.log.count',
|
||||
id: 'signoz.meter.log.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Count of log records ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getLogSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.log.size',
|
||||
id: 'signoz.meter.log.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'size',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Size of log records ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getSpanCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.span.count',
|
||||
id: 'signoz.meter.span.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Count of spans ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getSpanSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.span.size',
|
||||
id: 'signoz.meter.span.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'size',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Size of spans ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getMetricCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.metric.datapoint.count',
|
||||
id: 'signoz.meter.metric.datapoint.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Count of metric datapoints ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
@@ -141,6 +141,19 @@
|
||||
background: var(--bg-vanilla-500);
|
||||
}
|
||||
}
|
||||
|
||||
.meter-explorer-content-section {
|
||||
.explore-content {
|
||||
.time-series-view-panel {
|
||||
background: var(--bg-vanilla-100);
|
||||
border-color: var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.meter-explorer-quick-filters-section {
|
||||
border-right: 1px solid var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -43,7 +43,7 @@ function Explorer(): JSX.Element {
 		() =>
 			updateAllQueriesOperators(
 				initialQueryMeterWithType,
-				PANEL_TYPES.TIME_SERIES,
+				PANEL_TYPES.BAR,
 				DataSource.METRICS,
 				'meter' as 'meter' | '',
 			),
@@ -54,7 +54,7 @@ function Explorer(): JSX.Element {
 		() =>
 			updateAllQueriesOperators(
 				currentQuery || initialQueryMeterWithType,
-				PANEL_TYPES.TIME_SERIES,
+				PANEL_TYPES.BAR,
 				DataSource.METRICS,
 				'meter' as 'meter' | '',
 			),
@@ -75,7 +75,7 @@ function Explorer(): JSX.Element {

 	const dashboardEditView = generateExportToDashboardLink({
 		query: queryToExport || exportDefaultQuery,
-		panelType: PANEL_TYPES.TIME_SERIES,
+		panelType: PANEL_TYPES.BAR,
 		dashboardId: dashboard.id,
 		widgetId,
 	});
@@ -69,7 +69,7 @@ function TimeSeries(): JSX.Element {
 			GetMetricQueryRange(
 				{
 					query: payload,
-					graphType: PANEL_TYPES.TIME_SERIES,
+					graphType: PANEL_TYPES.BAR,
 					selectedTime: 'GLOBAL_TIME',
 					globalSelectedInterval: globalSelectedTime,
 					params: {
@@ -131,6 +131,7 @@ function TimeSeries(): JSX.Element {
 							data={datapoint}
 							dataSource={DataSource.METRICS}
 							yAxisUnit={yAxisUnit}
+							panelType={PANEL_TYPES.BAR}
 						/>
 					</div>
 				))}
@@ -266,8 +266,8 @@ export const defaultMoreMenuItems: SidebarItem[] = [
 		itemKey: 'external-apis',
 	},
 	{
-		key: ROUTES.METER_EXPLORER,
-		label: 'Meter Explorer',
+		key: ROUTES.METER,
+		label: 'Cost Meter',
 		icon: <ChartArea size={16} />,
 		isNew: false,
 		isEnabled: false,
@@ -0,0 +1,160 @@
|
||||
import { Button, Popover, Spin, Tooltip } from 'antd';
|
||||
import GroupByIcon from 'assets/CustomIcons/GroupByIcon';
|
||||
import { OPERATORS } from 'constants/antlrQueryConstants';
|
||||
import { useTraceActions } from 'hooks/trace/useTraceActions';
|
||||
import { ArrowDownToDot, ArrowUpFromDot, Copy, Ellipsis } from 'lucide-react';
|
||||
import { useCallback, useMemo, useState } from 'react';
|
||||
|
||||
interface AttributeRecord {
|
||||
field: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
interface AttributeActionsProps {
|
||||
record: AttributeRecord;
|
||||
}
|
||||
|
||||
export default function AttributeActions({
|
||||
record,
|
||||
}: AttributeActionsProps): JSX.Element {
|
||||
const [isOpen, setIsOpen] = useState<boolean>(false);
|
||||
const [isFilterInLoading, setIsFilterInLoading] = useState<boolean>(false);
|
||||
const [isFilterOutLoading, setIsFilterOutLoading] = useState<boolean>(false);
|
||||
|
||||
const {
|
||||
onAddToQuery,
|
||||
onGroupByAttribute,
|
||||
onCopyFieldName,
|
||||
onCopyFieldValue,
|
||||
} = useTraceActions();
|
||||
|
||||
const textToCopy = useMemo(() => {
|
||||
const str = record.value == null ? '' : String(record.value);
|
||||
// Remove surrounding double-quotes only (e.g., JSON-encoded string values)
|
||||
return str.replace(/^"|"$/g, '');
|
||||
}, [record.value]);
|
||||
|
||||
const handleFilterIn = useCallback(async (): Promise<void> => {
|
||||
if (!onAddToQuery || isFilterInLoading) return;
|
||||
setIsFilterInLoading(true);
|
||||
try {
|
||||
await Promise.resolve(
|
||||
onAddToQuery(record.field, record.value, OPERATORS['=']),
|
||||
);
|
||||
} finally {
|
||||
setIsFilterInLoading(false);
|
||||
}
|
||||
}, [onAddToQuery, record.field, record.value, isFilterInLoading]);
|
||||
|
||||
const handleFilterOut = useCallback(async (): Promise<void> => {
|
||||
if (!onAddToQuery || isFilterOutLoading) return;
|
||||
setIsFilterOutLoading(true);
|
||||
try {
|
||||
await Promise.resolve(
|
||||
onAddToQuery(record.field, record.value, OPERATORS['!=']),
|
||||
);
|
||||
} finally {
|
||||
setIsFilterOutLoading(false);
|
||||
}
|
||||
}, [onAddToQuery, record.field, record.value, isFilterOutLoading]);
|
||||
|
||||
const handleGroupBy = useCallback((): void => {
|
||||
if (onGroupByAttribute) {
|
||||
onGroupByAttribute(record.field);
|
||||
}
|
||||
setIsOpen(false);
|
||||
}, [onGroupByAttribute, record.field]);
|
||||
|
||||
const handleCopyFieldName = useCallback((): void => {
|
||||
if (onCopyFieldName) {
|
||||
onCopyFieldName(record.field);
|
||||
}
|
||||
setIsOpen(false);
|
||||
}, [onCopyFieldName, record.field]);
|
||||
|
||||
const handleCopyFieldValue = useCallback((): void => {
|
||||
if (onCopyFieldValue) {
|
||||
onCopyFieldValue(textToCopy);
|
||||
}
|
||||
setIsOpen(false);
|
||||
}, [onCopyFieldValue, textToCopy]);
|
||||
|
||||
const moreActionsContent = (
|
||||
<div className="attribute-actions-menu">
|
||||
<Button
|
||||
className="group-by-clause"
|
||||
type="text"
|
||||
icon={<GroupByIcon />}
|
||||
onClick={handleGroupBy}
|
||||
block
|
||||
>
|
||||
Group By Attribute
|
||||
</Button>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<Copy size={14} />}
|
||||
onClick={handleCopyFieldName}
|
||||
block
|
||||
>
|
||||
Copy Field Name
|
||||
</Button>
|
||||
<Button
|
||||
type="text"
|
||||
icon={<Copy size={14} />}
|
||||
onClick={handleCopyFieldValue}
|
||||
block
|
||||
>
|
||||
Copy Field Value
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="action-btn">
|
||||
<Tooltip title="Filter for value">
|
||||
<Button
|
||||
className="filter-btn periscope-btn"
|
||||
aria-label="Filter for value"
|
||||
disabled={isFilterInLoading}
|
||||
icon={
|
||||
isFilterInLoading ? (
|
||||
<Spin size="small" />
|
||||
) : (
|
||||
<ArrowDownToDot size={14} style={{ transform: 'rotate(90deg)' }} />
|
||||
)
|
||||
}
|
||||
onClick={handleFilterIn}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip title="Filter out value">
|
||||
<Button
|
||||
className="filter-btn periscope-btn"
|
||||
aria-label="Filter out value"
|
||||
disabled={isFilterOutLoading}
|
||||
icon={
|
||||
isFilterOutLoading ? (
|
||||
<Spin size="small" />
|
||||
) : (
|
||||
<ArrowUpFromDot size={14} style={{ transform: 'rotate(90deg)' }} />
|
||||
)
|
||||
}
|
||||
onClick={handleFilterOut}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Popover
|
||||
open={isOpen}
|
||||
onOpenChange={setIsOpen}
|
||||
arrow={false}
|
||||
content={moreActionsContent}
|
||||
rootClassName="attribute-actions-content"
|
||||
trigger="hover"
|
||||
placement="bottomLeft"
|
||||
>
|
||||
<Button
|
||||
icon={<Ellipsis size={14} />}
|
||||
className="filter-btn periscope-btn"
|
||||
/>
|
||||
</Popover>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
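A minimal usage sketch (illustrative, not part of the diff): AttributeActions takes a field/value record from the attributes list, so a caller would render it roughly as below — the 'http.method' / 'GET' literals are made-up example values.

import AttributeActions from './AttributeActions';

// Hypothetical row renderer; the record values here are illustrative only.
function ExampleAttributeRow(): JSX.Element {
	return <AttributeActions record={{ field: 'http.method', value: 'GET' }} />;
}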
@@ -24,6 +24,13 @@
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
justify-content: flex-start;
|
||||
position: relative;
|
||||
|
||||
&:hover {
|
||||
.action-btn {
|
||||
display: flex;
|
||||
}
|
||||
}
|
||||
|
||||
.item-key {
|
||||
color: var(--bg-vanilla-100);
|
||||
@@ -40,11 +47,12 @@
|
||||
padding: 2px 8px;
|
||||
align-items: center;
|
||||
width: fit-content;
|
||||
max-width: 100%;
|
||||
max-width: calc(100% - 120px); /* Reserve space for action buttons */
|
||||
gap: 8px;
|
||||
border-radius: 50px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-slate-500);
|
||||
|
||||
.item-value {
|
||||
color: var(--bg-vanilla-400);
|
||||
font-family: Inter;
|
||||
@@ -55,6 +63,35 @@
|
||||
letter-spacing: 0.56px;
|
||||
}
|
||||
}
|
||||
|
||||
.action-btn {
|
||||
display: none;
|
||||
position: absolute;
|
||||
right: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
gap: 4px;
|
||||
background: rgba(0, 0, 0, 0.8);
|
||||
border-radius: 4px;
|
||||
padding: 2px;
|
||||
|
||||
.filter-btn {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
border: none;
|
||||
box-shadow: none;
|
||||
border-radius: 2px;
|
||||
background: var(--bg-slate-400);
|
||||
padding: 4px;
|
||||
gap: 3px;
|
||||
height: 24px;
|
||||
width: 24px;
|
||||
|
||||
&:hover {
|
||||
background: var(--bg-slate-300);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,6 +100,36 @@
|
||||
}
|
||||
}
|
||||
|
||||
.attribute-actions-menu {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
|
||||
.ant-btn {
|
||||
text-align: left;
|
||||
height: auto;
|
||||
padding: 6px 12px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
|
||||
&:hover {
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
|
||||
.group-by-clause {
|
||||
color: var(--text-primary);
|
||||
}
|
||||
}
|
||||
|
||||
.attribute-actions-content {
|
||||
.ant-popover-inner {
|
||||
padding: 8px;
|
||||
min-width: 160px;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.attributes-corner {
|
||||
.attributes-container {
|
||||
@@ -79,6 +146,18 @@
|
||||
color: var(--bg-ink-400);
|
||||
}
|
||||
}
|
||||
|
||||
.action-btn {
|
||||
background: rgba(255, 255, 255, 0.9);
|
||||
|
||||
.filter-btn {
|
||||
background: var(--bg-vanilla-200);
|
||||
|
||||
&:hover {
|
||||
background: var(--bg-vanilla-100);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,4 +165,12 @@
|
||||
border-top: 1px solid var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
|
||||
.attribute-actions-menu {
|
||||
.ant-btn {
|
||||
&:hover {
|
||||
background-color: var(--bg-vanilla-200);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,11 +2,13 @@ import './Attributes.styles.scss';
|
||||
|
||||
import { Input, Tooltip, Typography } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import CopyClipboardHOC from 'components/Logs/CopyClipboardHOC';
|
||||
import { flattenObject } from 'container/LogDetailedView/utils';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
|
||||
import NoData from '../NoData/NoData';
|
||||
import AttributeActions from './AttributeActions';
|
||||
|
||||
interface IAttributesProps {
|
||||
span: Span;
|
||||
@@ -53,10 +55,13 @@ function Attributes(props: IAttributesProps): JSX.Element {
|
||||
</Typography.Text>
|
||||
<div className="value-wrapper">
|
||||
<Tooltip title={item.value}>
|
||||
<Typography.Text className="item-value" ellipsis>
|
||||
{item.value}
|
||||
</Typography.Text>
|
||||
<CopyClipboardHOC entityKey={item.value} textToCopy={item.value}>
|
||||
<Typography.Text className="item-value" ellipsis>
|
||||
{item.value}
|
||||
</Typography.Text>
|
||||
</CopyClipboardHOC>
|
||||
</Tooltip>
|
||||
<AttributeActions record={item} />
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
|
||||
@@ -4,6 +4,7 @@ import logEvent from 'api/common/logEvent';
|
||||
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
|
||||
import Uplot from 'components/Uplot';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
|
||||
import { getLocalStorageGraphVisibilityState } from 'container/GridCardLayout/GridCard/utils';
|
||||
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
|
||||
@@ -54,6 +55,7 @@ function TimeSeriesView({
|
||||
isFilterApplied,
|
||||
dataSource,
|
||||
setWarning,
|
||||
panelType = PANEL_TYPES.TIME_SERIES,
|
||||
}: TimeSeriesViewProps): JSX.Element {
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
@@ -191,6 +193,7 @@ function TimeSeriesView({
|
||||
maxTimeScale,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
panelType,
|
||||
tzDate: (timestamp: number) =>
|
||||
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value),
|
||||
timezone: timezone.value,
|
||||
@@ -259,6 +262,7 @@ interface TimeSeriesViewProps {
|
||||
isFilterApplied: boolean;
|
||||
dataSource: DataSource;
|
||||
setWarning?: Dispatch<SetStateAction<Warning | undefined>>;
|
||||
panelType?: PANEL_TYPES;
|
||||
}
|
||||
|
||||
TimeSeriesView.defaultProps = {
|
||||
@@ -266,6 +270,7 @@ TimeSeriesView.defaultProps = {
|
||||
yAxisUnit: 'short',
|
||||
error: undefined,
|
||||
setWarning: undefined,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
};
|
||||
|
||||
export default TimeSeriesView;
|
||||
|
||||
@@ -234,7 +234,7 @@ export const routesToSkip = [
|
||||
ROUTES.UN_AUTHORIZED,
|
||||
ROUTES.NOT_FOUND,
|
||||
ROUTES.METER_EXPLORER,
|
||||
ROUTES.METER_EXPLORER_BASE,
|
||||
ROUTES.METER,
|
||||
ROUTES.METER_EXPLORER_VIEWS,
|
||||
ROUTES.SOMETHING_WENT_WRONG,
|
||||
];
|
||||
|
||||
@@ -25,6 +25,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
metricName,
|
||||
signalSource,
|
||||
}: QueryKeyRequestProps,
|
||||
options?: UseQueryOptions<
|
||||
AxiosResponse<QueryKeySuggestionsResponseProps>,
|
||||
@@ -42,6 +43,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
metricName,
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
signalSource,
|
||||
];
|
||||
}, [
|
||||
options?.queryKey,
|
||||
@@ -50,6 +52,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
metricName,
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
signalSource,
|
||||
]);
|
||||
return useQuery<AxiosResponse<QueryKeySuggestionsResponseProps>, AxiosError>({
|
||||
queryKey,
|
||||
@@ -60,6 +63,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
metricName,
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
signalSource,
|
||||
}),
|
||||
...options,
|
||||
});
|
||||
|
||||
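The hook change above threads a new signalSource field through the query key and the request. A hedged sketch of the request shape, showing only the fields visible in this hunk; the values are placeholders and the remaining QueryKeyRequestProps fields are elided:

// Example (not part of the diff): the suggestions request now carries signalSource.
export const exampleQueryKeySuggestionsRequest = {
	metricName: '',
	fieldContext: 'attribute',
	fieldDataType: 'string',
	signalSource: 'meter', // new field, also added to the react-query key above (value is a placeholder)
};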
frontend/src/hooks/trace/useTraceActions.ts (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
import { getAggregateKeys } from 'api/queryBuilder/getAttributeKeys';
|
||||
import { convertFiltersToExpressionWithExistingQuery } from 'components/QueryBuilderV2/utils';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
import { QueryBuilderKeys } from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { chooseAutocompleteFromCustomValue } from 'lib/newQueryBuilder/chooseAutocompleteFromCustomValue';
|
||||
import { useCallback } from 'react';
|
||||
import { useQueryClient } from 'react-query';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
export interface UseTraceActionsReturn {
|
||||
onAddToQuery: (
|
||||
fieldKey: string,
|
||||
fieldValue: string,
|
||||
operator: string,
|
||||
) => Promise<void>;
|
||||
onGroupByAttribute: (fieldKey: string) => Promise<void>;
|
||||
onCopyFieldName: (fieldName: string) => void;
|
||||
onCopyFieldValue: (fieldValue: string) => void;
|
||||
}
|
||||
|
||||
export const useTraceActions = (): UseTraceActionsReturn => {
|
||||
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
||||
const queryClient = useQueryClient();
|
||||
const { notifications } = useNotifications();
|
||||
const [, setCopy] = useCopyToClipboard();
|
||||
|
||||
const removeExistingFieldFilters = useCallback(
|
||||
(filters: TagFilterItem[], fieldKey: BaseAutocompleteData): TagFilterItem[] =>
|
||||
filters.filter((filter: TagFilterItem) => filter.key?.key !== fieldKey.key),
|
||||
[],
|
||||
);
|
||||
|
||||
const getAutocompleteKey = useCallback(
|
||||
async (fieldKey: string): Promise<BaseAutocompleteData> => {
|
||||
const keysAutocompleteResponse = await queryClient.fetchQuery(
|
||||
[QueryBuilderKeys.GET_AGGREGATE_KEYS, fieldKey],
|
||||
async () =>
|
||||
getAggregateKeys({
|
||||
searchText: fieldKey,
|
||||
aggregateOperator:
|
||||
currentQuery.builder.queryData[0].aggregateOperator || '',
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateAttribute:
|
||||
currentQuery.builder.queryData[0].aggregateAttribute?.key || '',
|
||||
}),
|
||||
);
|
||||
|
||||
const keysAutocomplete: BaseAutocompleteData[] =
|
||||
keysAutocompleteResponse.payload?.attributeKeys || [];
|
||||
|
||||
return chooseAutocompleteFromCustomValue(
|
||||
keysAutocomplete,
|
||||
fieldKey,
|
||||
false,
|
||||
DataTypes.String,
|
||||
);
|
||||
},
|
||||
[queryClient, currentQuery.builder.queryData],
|
||||
);
|
||||
|
||||
const onAddToQuery = useCallback(
|
||||
async (
|
||||
fieldKey: string,
|
||||
fieldValue: string,
|
||||
operator: string,
|
||||
): Promise<void> => {
|
||||
try {
|
||||
const existAutocompleteKey = await getAutocompleteKey(fieldKey);
|
||||
const currentOperator = getOperatorValue(operator);
|
||||
|
||||
const nextQuery: Query = {
|
||||
...currentQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: currentQuery.builder.queryData.map((item) => {
|
||||
// Get existing filters and remove any for the same field
|
||||
const currentFilters = item.filters?.items || [];
|
||||
const cleanedFilters = removeExistingFieldFilters(
|
||||
currentFilters,
|
||||
existAutocompleteKey,
|
||||
);
|
||||
|
||||
// Add the new filter to the cleaned list
|
||||
const newFilters = [
|
||||
...cleanedFilters,
|
||||
{
|
||||
id: uuid(),
|
||||
key: existAutocompleteKey,
|
||||
op: currentOperator,
|
||||
value: fieldValue,
|
||||
},
|
||||
];
|
||||
|
||||
const convertedFilter = convertFiltersToExpressionWithExistingQuery(
|
||||
{
|
||||
items: newFilters,
|
||||
op: item.filters?.op || 'AND',
|
||||
},
|
||||
item.filter?.expression || '',
|
||||
);
|
||||
|
||||
return {
|
||||
...item,
|
||||
dataSource: DataSource.TRACES,
|
||||
filters: convertedFilter.filters,
|
||||
filter: convertedFilter.filter,
|
||||
};
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
redirectWithQueryBuilderData(nextQuery, {}, ROUTES.TRACES_EXPLORER);
|
||||
} catch {
|
||||
notifications.error({ message: SOMETHING_WENT_WRONG });
|
||||
}
|
||||
},
|
||||
[
|
||||
currentQuery,
|
||||
notifications,
|
||||
getAutocompleteKey,
|
||||
redirectWithQueryBuilderData,
|
||||
removeExistingFieldFilters,
|
||||
],
|
||||
);
|
||||
|
||||
const onGroupByAttribute = useCallback(
|
||||
async (fieldKey: string): Promise<void> => {
|
||||
try {
|
||||
const existAutocompleteKey = await getAutocompleteKey(fieldKey);
|
||||
|
||||
const nextQuery: Query = {
|
||||
...currentQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: currentQuery.builder.queryData.map((item) => ({
|
||||
...item,
|
||||
dataSource: DataSource.TRACES,
|
||||
groupBy: [...item.groupBy, existAutocompleteKey],
|
||||
})),
|
||||
},
|
||||
};
|
||||
|
||||
redirectWithQueryBuilderData(nextQuery, {}, ROUTES.TRACES_EXPLORER);
|
||||
} catch {
|
||||
notifications.error({ message: SOMETHING_WENT_WRONG });
|
||||
}
|
||||
},
|
||||
[
|
||||
currentQuery,
|
||||
notifications,
|
||||
getAutocompleteKey,
|
||||
redirectWithQueryBuilderData,
|
||||
],
|
||||
);
|
||||
|
||||
const onCopyFieldName = useCallback(
|
||||
(fieldName: string): void => {
|
||||
setCopy(fieldName);
|
||||
notifications.success({
|
||||
message: 'Field name copied to clipboard',
|
||||
});
|
||||
},
|
||||
[setCopy, notifications],
|
||||
);
|
||||
|
||||
const onCopyFieldValue = useCallback(
|
||||
(fieldValue: string): void => {
|
||||
setCopy(fieldValue);
|
||||
notifications.success({
|
||||
message: 'Field value copied to clipboard',
|
||||
});
|
||||
},
|
||||
[setCopy, notifications],
|
||||
);
|
||||
|
||||
return {
|
||||
onAddToQuery,
|
||||
onGroupByAttribute,
|
||||
onCopyFieldName,
|
||||
onCopyFieldValue,
|
||||
};
|
||||
};
|
||||
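For orientation, a small consumer sketch for the new hook. The handler names and the import path come from the file above; the button markup is purely illustrative, and '=' stands in for the OPERATORS['='] constant used by the real AttributeActions component:

// Example (not part of the diff): consuming useTraceActions directly.
import { useTraceActions } from 'hooks/trace/useTraceActions';

function ExampleUsage({ field, value }: { field: string; value: string }): JSX.Element {
	const { onAddToQuery, onCopyFieldValue } = useTraceActions();

	return (
		<button
			type="button"
			// adds/replaces a filter for this field and redirects to the traces explorer
			onClick={(): Promise<void> => onAddToQuery(field, value, '=')}
			// copies the raw value and shows a success notification
			onContextMenu={(): void => onCopyFieldValue(value)}
		>
			Filter for {field}
		</button>
	);
}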
@@ -14,7 +14,7 @@ export type AlertHeaderProps = {
|
||||
state: string;
|
||||
alert: string;
|
||||
id: string;
|
||||
labels: Record<string, string>;
|
||||
labels: Record<string, string | undefined> | undefined;
|
||||
disabled: boolean;
|
||||
};
|
||||
};
|
||||
@@ -23,13 +23,14 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
|
||||
const { alertRuleState } = useAlertRule();
|
||||
const [updatedName, setUpdatedName] = useState(alertName);
|
||||
|
||||
const labelsWithoutSeverity = useMemo(
|
||||
() =>
|
||||
Object.fromEntries(
|
||||
const labelsWithoutSeverity = useMemo(() => {
|
||||
if (labels) {
|
||||
return Object.fromEntries(
|
||||
Object.entries(labels).filter(([key]) => key !== 'severity'),
|
||||
),
|
||||
[labels],
|
||||
);
|
||||
);
|
||||
}
|
||||
return {};
|
||||
}, [labels]);
|
||||
|
||||
return (
|
||||
<div className="alert-info">
|
||||
@@ -43,7 +44,7 @@ function AlertHeader({ alertDetails }: AlertHeaderProps): JSX.Element {
|
||||
</div>
|
||||
</div>
|
||||
<div className="bottom-section">
|
||||
{labels.severity && <AlertSeverity severity={labels.severity} />}
|
||||
{labels?.severity && <AlertSeverity severity={labels.severity} />}
|
||||
|
||||
{/* // TODO(shaheer): Get actual data when we are able to get alert firing from state from API */}
|
||||
{/* <AlertStatus
|
||||
|
||||
@@ -2,19 +2,25 @@ import './MeterExplorer.styles.scss';
|
||||
|
||||
import RouteTab from 'components/RouteTab';
|
||||
import { TabRoutes } from 'components/RouteTab/types';
|
||||
import ROUTES from 'constants/routes';
|
||||
import history from 'lib/history';
|
||||
import { useLocation } from 'react-use';
|
||||
|
||||
import { Explorer, Views } from './constants';
|
||||
import { Explorer, Meter, Views } from './constants';
|
||||
|
||||
function MeterExplorerPage(): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
const routes: TabRoutes[] = [Explorer, Views];
|
||||
const routes: TabRoutes[] = [Meter, Explorer, Views];
|
||||
|
||||
return (
|
||||
<div className="meter-explorer-page">
|
||||
<RouteTab routes={routes} activeKey={pathname} history={history} />
|
||||
<RouteTab
|
||||
routes={routes}
|
||||
activeKey={pathname}
|
||||
history={history}
|
||||
defaultActiveKey={ROUTES.METER}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { TabRoutes } from 'components/RouteTab/types';
|
||||
import ROUTES from 'constants/routes';
|
||||
import BreakDownPage from 'container/MeterExplorer/Breakdown/BreakDown';
|
||||
import ExplorerPage from 'container/MeterExplorer/Explorer';
|
||||
import { Compass, TowerControl } from 'lucide-react';
|
||||
import SaveView from 'pages/SaveView';
|
||||
@@ -30,3 +31,14 @@ export const Views: TabRoutes = {
|
||||
route: ROUTES.METER_EXPLORER_VIEWS,
|
||||
key: ROUTES.METER_EXPLORER_VIEWS,
|
||||
};
|
||||
|
||||
export const Meter: TabRoutes = {
|
||||
Component: BreakDownPage,
|
||||
name: (
|
||||
<div className="tab-item">
|
||||
<TowerControl size={16} /> Meter
|
||||
</div>
|
||||
),
|
||||
route: ROUTES.METER,
|
||||
key: ROUTES.METER,
|
||||
};
|
||||
|
||||
@@ -59,7 +59,7 @@
|
||||
}
|
||||
|
||||
.signup-page-content {
|
||||
width: 720px;
|
||||
width: 540px;
|
||||
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
@@ -102,7 +102,7 @@
|
||||
flex-direction: column;
|
||||
|
||||
.ant-input {
|
||||
width: 60%;
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ import afterLogin from 'AppRoutes/utils';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import { ArrowRight } from 'lucide-react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
@@ -23,7 +22,6 @@ import { FormContainer, Label } from './styles';
|
||||
import { isPasswordNotValidMessage, isPasswordValid } from './utils';
|
||||
|
||||
type FormValues = {
|
||||
firstName: string;
|
||||
email: string;
|
||||
organizationName: string;
|
||||
password: string;
|
||||
@@ -114,10 +112,9 @@ function SignUp(): JSX.Element {
|
||||
|
||||
const signUp = async (values: FormValues): Promise<void> => {
|
||||
try {
|
||||
const { organizationName, password, firstName, email } = values;
|
||||
const { organizationName, password, email } = values;
|
||||
const response = await signUpApi({
|
||||
email,
|
||||
name: firstName,
|
||||
orgDisplayName: organizationName,
|
||||
password,
|
||||
token: params.get('token') || undefined,
|
||||
@@ -142,11 +139,10 @@ function SignUp(): JSX.Element {
|
||||
|
||||
const acceptInvite = async (values: FormValues): Promise<void> => {
|
||||
try {
|
||||
const { password, email, firstName } = values;
|
||||
const { password, email } = values;
|
||||
await accept({
|
||||
password,
|
||||
token: params.get('token') || '',
|
||||
displayName: firstName,
|
||||
});
|
||||
const loginResponse = await loginApi({
|
||||
email,
|
||||
@@ -208,7 +204,6 @@ function SignUp(): JSX.Element {
|
||||
if (!isPasswordValid(values.password)) {
|
||||
logEvent('Account Creation Page - Invalid Password', {
|
||||
email: values.email,
|
||||
name: values.firstName,
|
||||
});
|
||||
setIsPasswordPolicyError(true);
|
||||
setLoading(false);
|
||||
@@ -219,7 +214,6 @@ function SignUp(): JSX.Element {
|
||||
await signUp(values);
|
||||
logEvent('Account Created Successfully', {
|
||||
email: values.email,
|
||||
name: values.firstName,
|
||||
});
|
||||
} else {
|
||||
await acceptInvite(values);
|
||||
@@ -235,11 +229,6 @@ function SignUp(): JSX.Element {
|
||||
})();
|
||||
};
|
||||
|
||||
const getIsNameVisible = (): boolean =>
|
||||
!(form.getFieldValue('firstName') === 0 && !isSignUp);
|
||||
|
||||
const isNameVisible = getIsNameVisible();
|
||||
|
||||
const handleValuesChange: (changedValues: Partial<FormValues>) => void = (
|
||||
changedValues,
|
||||
) => {
|
||||
@@ -260,7 +249,6 @@ function SignUp(): JSX.Element {
|
||||
loading ||
|
||||
!values.email ||
|
||||
(!precheck.sso && (!values.password || !values.confirmPassword)) ||
|
||||
(!isDetailsDisable && !values.firstName) ||
|
||||
confirmPasswordError ||
|
||||
isPasswordPolicyError
|
||||
);
|
||||
@@ -288,8 +276,8 @@ function SignUp(): JSX.Element {
|
||||
>
|
||||
<div className="signup-form-header">
|
||||
<Typography.Paragraph className="signup-form-header-text">
|
||||
Create your account to monitor, trace, and troubleshoot your applications
|
||||
effortlessly.
|
||||
You're almost in. Create a password to start monitoring your
|
||||
applications with SigNoz.
|
||||
</Typography.Paragraph>
|
||||
</div>
|
||||
|
||||
@@ -307,47 +295,22 @@ function SignUp(): JSX.Element {
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
|
||||
{isNameVisible && (
|
||||
<div className="first-name-container">
|
||||
<Label htmlFor="signupFirstName">Name</Label>{' '}
|
||||
<FormContainer.Item noStyle name="firstName">
|
||||
<Input
|
||||
placeholder="Your Name"
|
||||
required
|
||||
id="signupFirstName"
|
||||
disabled={isDetailsDisable && form.getFieldValue('firstName')}
|
||||
/>
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="org-name-container">
|
||||
<Label htmlFor="organizationName">Organization Name</Label>{' '}
|
||||
<FormContainer.Item noStyle name="organizationName">
|
||||
<Input
|
||||
placeholder="Your Company"
|
||||
id="organizationName"
|
||||
disabled={isDetailsDisable}
|
||||
/>
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
|
||||
{!precheck.sso && (
|
||||
<div className="password-section">
|
||||
<>
|
||||
<div className="password-container">
|
||||
<label htmlFor="Password">Password</label>{' '}
|
||||
<Label htmlFor="currentPassword">Password</Label>
|
||||
<FormContainer.Item noStyle name="password">
|
||||
<Input.Password required id="currentPassword" />
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
|
||||
<div className="password-container">
|
||||
<label htmlFor="ConfirmPassword">Confirm Password</label>{' '}
|
||||
<Label htmlFor="confirmPassword">Confirm Password</Label>
|
||||
<FormContainer.Item noStyle name="confirmPassword">
|
||||
<Input.Password required id="confirmPassword" />
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
<div className="password-error-container">
|
||||
@@ -382,9 +345,9 @@ function SignUp(): JSX.Element {
|
||||
loading={loading}
|
||||
disabled={isValidForm()}
|
||||
className="periscope-btn primary next-btn"
|
||||
icon={<ArrowRight size={12} />}
|
||||
block
|
||||
>
|
||||
Sign Up
|
||||
Access My Workspace
|
||||
</Button>
|
||||
</div>
|
||||
</FormContainer>
|
||||
|
||||
@@ -61,6 +61,7 @@ import {
|
||||
QueryBuilderData,
|
||||
} from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { sanitizeOrderByForExplorer } from 'utils/sanitizeOrderBy';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
export const QueryBuilderContext = createContext<QueryBuilderContextType>({
|
||||
@@ -102,6 +103,12 @@ export function QueryBuilderProvider({
|
||||
|
||||
const currentPathnameRef = useRef<string | null>(location.pathname);
|
||||
|
||||
// This is used to determine if the query was called from the handleRunQuery function - which means manual trigger from Stage and Run button
|
||||
const [
|
||||
calledFromHandleRunQuery,
|
||||
setCalledFromHandleRunQuery,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
@@ -184,6 +191,17 @@ export function QueryBuilderProvider({
|
||||
} as BaseAutocompleteData,
|
||||
};
|
||||
|
||||
// Explorer pages: sanitize stale orderBy before first query
|
||||
const isExplorer =
|
||||
location.pathname === ROUTES.LOGS_EXPLORER ||
|
||||
location.pathname === ROUTES.TRACES_EXPLORER;
|
||||
if (isExplorer) {
|
||||
const sanitizedOrderBy = sanitizeOrderByForExplorer(currentElement);
|
||||
return calledFromHandleRunQuery
|
||||
? currentElement
|
||||
: { ...currentElement, orderBy: sanitizedOrderBy };
|
||||
}
|
||||
|
||||
return currentElement;
|
||||
});
|
||||
|
||||
@@ -215,7 +233,7 @@ export function QueryBuilderProvider({
|
||||
|
||||
return nextQuery;
|
||||
},
|
||||
[initialDataSource],
|
||||
[initialDataSource, location.pathname, calledFromHandleRunQuery],
|
||||
);
|
||||
|
||||
const initQueryBuilderData = useCallback(
|
||||
@@ -428,6 +446,7 @@ export function QueryBuilderProvider({
|
||||
|
||||
const newQuery: IBuilderQuery = {
|
||||
...initialBuilderQuery,
|
||||
source: queries?.[0]?.source || '',
|
||||
queryName: createNewBuilderItemName({ existNames, sourceNames: alphabet }),
|
||||
expression: createNewBuilderItemName({
|
||||
existNames,
|
||||
@@ -522,6 +541,8 @@ export function QueryBuilderProvider({
|
||||
setCurrentQuery((prevState) => {
|
||||
if (prevState.builder.queryData.length >= MAX_QUERIES) return prevState;
|
||||
|
||||
console.log('prevState', prevState.builder.queryData);
|
||||
|
||||
const newQuery = createNewBuilderQuery(prevState.builder.queryData);
|
||||
|
||||
return {
|
||||
@@ -532,6 +553,7 @@ export function QueryBuilderProvider({
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSupersetQuery((prevState) => {
|
||||
if (prevState.builder.queryData.length >= MAX_QUERIES) return prevState;
|
||||
@@ -867,6 +889,12 @@ export function QueryBuilderProvider({
|
||||
|
||||
const handleRunQuery = useCallback(
|
||||
(shallUpdateStepInterval?: boolean, newQBQuery?: boolean) => {
|
||||
const isExplorer =
|
||||
location.pathname === ROUTES.LOGS_EXPLORER ||
|
||||
location.pathname === ROUTES.TRACES_EXPLORER;
|
||||
if (isExplorer) {
|
||||
setCalledFromHandleRunQuery(true);
|
||||
}
|
||||
let currentQueryData = currentQuery;
|
||||
|
||||
if (newQBQuery) {
|
||||
@@ -911,7 +939,14 @@ export function QueryBuilderProvider({
|
||||
queryType,
|
||||
});
|
||||
},
|
||||
[currentQuery, queryType, maxTime, minTime, redirectWithQueryBuilderData],
|
||||
[
|
||||
location.pathname,
|
||||
currentQuery,
|
||||
queryType,
|
||||
maxTime,
|
||||
minTime,
|
||||
redirectWithQueryBuilderData,
|
||||
],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -921,6 +956,7 @@ export function QueryBuilderProvider({
|
||||
setStagedQuery(null);
|
||||
// reset the last used query to 0 when navigating away from the page
|
||||
setLastUsedQuery(0);
|
||||
setCalledFromHandleRunQuery(false);
|
||||
}
|
||||
}, [location.pathname]);
|
||||
|
||||
|
||||
@@ -48,7 +48,7 @@ export interface RuleCondition {
|
||||
seasonality?: string;
|
||||
}
|
||||
export interface Labels {
|
||||
[key: string]: string;
|
||||
[key: string]: string | undefined;
|
||||
}
|
||||
|
||||
export interface AlertRuleStats {
|
||||
|
||||
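The Labels change above widens the index signature to string | undefined, which is why the AlertHeader read earlier in this diff switched to optional chaining and a guarded useMemo. A small self-contained sketch of the type-level effect (names are illustrative, not part of the diff):

// Example (not part of the diff): effect of widening the index signature.
interface Labels {
	[key: string]: string | undefined;
}

const labels: Labels = { severity: 'critical' };

// labels.severity is now typed as string | undefined, so guard before use:
const severity: string = labels.severity ?? 'unknown';

// Filtering entries still works, but each value must be treated as possibly undefined:
const withoutSeverity = Object.fromEntries(
	Object.entries(labels).filter(([key]) => key !== 'severity'),
);

console.log(severity, withoutSeverity);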
@@ -1,5 +1,4 @@
|
||||
export interface Props {
|
||||
name: string;
|
||||
orgDisplayName: string;
|
||||
email: string;
|
||||
password: string;
|
||||
|
||||
frontend/src/utils/__tests__/sanitizeOrderBy.test.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
OrderByPayload,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { getParsedAggregationOptionsForOrderBy } from 'utils/aggregationConverter';
|
||||
import { sanitizeOrderByForExplorer } from 'utils/sanitizeOrderBy';
|
||||
|
||||
jest.mock('utils/aggregationConverter', () => ({
|
||||
getParsedAggregationOptionsForOrderBy: jest.fn(),
|
||||
}));
|
||||
|
||||
const buildQuery = (overrides: Partial<IBuilderQuery> = {}): IBuilderQuery => ({
|
||||
queryName: 'A',
|
||||
dataSource: DataSource.TRACES,
|
||||
aggregateOperator: '',
|
||||
aggregateAttribute: undefined,
|
||||
aggregations: [],
|
||||
timeAggregation: '',
|
||||
spaceAggregation: '',
|
||||
temporality: '',
|
||||
functions: [],
|
||||
filter: { expression: '' } as any,
|
||||
filters: { items: [], op: 'AND' } as any,
|
||||
groupBy: [],
|
||||
expression: '',
|
||||
disabled: false,
|
||||
having: [] as any,
|
||||
limit: null,
|
||||
stepInterval: 60 as any,
|
||||
orderBy: [],
|
||||
legend: '',
|
||||
...overrides,
|
||||
});
|
||||
|
||||
describe('sanitizeOrderByForExplorer', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('keeps only orderBy items that are present in groupBy keys or aggregation keys (including alias)', () => {
|
||||
(getParsedAggregationOptionsForOrderBy as jest.Mock).mockReturnValue([
|
||||
{
|
||||
key: 'count()',
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
},
|
||||
{
|
||||
key: 'avg(duration)',
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
},
|
||||
{
|
||||
key: 'latency',
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
},
|
||||
]);
|
||||
|
||||
const orderBy: OrderByPayload[] = [
|
||||
{ columnName: 'service.name', order: 'asc' },
|
||||
{ columnName: 'count()', order: 'desc' },
|
||||
{ columnName: 'avg(duration)', order: 'asc' },
|
||||
{ columnName: 'latency', order: 'asc' }, // alias
|
||||
{ columnName: 'not-allowed', order: 'desc' }, // invalid orderBy
|
||||
{ columnName: 'timestamp', order: 'desc' }, // invalid orderBy
|
||||
];
|
||||
|
||||
const query = buildQuery({
|
||||
groupBy: [
|
||||
{
|
||||
key: 'service.name',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
type: 'resource',
|
||||
isJSON: false,
|
||||
},
|
||||
] as any,
|
||||
orderBy,
|
||||
});
|
||||
|
||||
const result = sanitizeOrderByForExplorer(query);
|
||||
|
||||
expect(result).toEqual([
|
||||
{ columnName: 'service.name', order: 'asc' },
|
||||
{ columnName: 'count()', order: 'desc' },
|
||||
{ columnName: 'avg(duration)', order: 'asc' },
|
||||
{ columnName: 'latency', order: 'asc' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('returns empty when none of the orderBy items are allowed', () => {
|
||||
(getParsedAggregationOptionsForOrderBy as jest.Mock).mockReturnValue([
|
||||
{
|
||||
key: 'count()',
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
},
|
||||
]);
|
||||
|
||||
const query = buildQuery({
|
||||
groupBy: [],
|
||||
orderBy: [
|
||||
{ columnName: 'foo', order: 'asc' },
|
||||
{ columnName: 'bar', order: 'desc' },
|
||||
],
|
||||
});
|
||||
|
||||
const result = sanitizeOrderByForExplorer(query);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles missing orderBy by returning an empty array', () => {
|
||||
(getParsedAggregationOptionsForOrderBy as jest.Mock).mockReturnValue([]);
|
||||
|
||||
const query = buildQuery({ orderBy: [] });
|
||||
const result = sanitizeOrderByForExplorer(query);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
@@ -124,6 +124,6 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
|
||||
API_MONITORING_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
MESSAGING_QUEUES_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER_EXPLORER_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER_EXPLORER_VIEWS: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
};
|
||||
|
||||
frontend/src/utils/sanitizeOrderBy.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
import * as Sentry from '@sentry/react';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
OrderByPayload,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { getParsedAggregationOptionsForOrderBy } from './aggregationConverter';
|
||||
|
||||
export function sanitizeOrderByForExplorer(
|
||||
query: IBuilderQuery,
|
||||
): OrderByPayload[] {
|
||||
const allowed = new Set<string>();
|
||||
(query.groupBy || []).forEach((g) => g?.key && allowed.add(g.key));
|
||||
getParsedAggregationOptionsForOrderBy(query).forEach((agg) => {
|
||||
// agg.key is the expression or alias (e.g., count(), avg(quantity), 'alias')
|
||||
if ((agg as any)?.key) allowed.add((agg as any).key as string);
|
||||
});
|
||||
|
||||
const current = query.orderBy || [];
|
||||
|
||||
const hasInvalidOrderBy = current.some((o) => !allowed.has(o.columnName));
|
||||
|
||||
if (hasInvalidOrderBy) {
|
||||
Sentry.captureEvent({
|
||||
message: `Invalid orderBy: current: ${JSON.stringify(
|
||||
current,
|
||||
)} - allowed: ${JSON.stringify(Array.from(allowed))}`,
|
||||
level: 'warning',
|
||||
});
|
||||
}
|
||||
return current.filter((o) => allowed.has(o.columnName));
|
||||
}
|
||||
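A brief usage sketch of the new utility, mirroring what the QueryBuilderProvider change earlier in this diff does for explorer pages. The query object below is a minimal stub cast to IBuilderQuery and only populates the fields the function reads directly; the aggregation-derived keys come from getParsedAggregationOptionsForOrderBy, so the expected output is hedged accordingly:

// Example (not part of the diff): dropping stale orderBy entries before the first explorer query.
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { sanitizeOrderByForExplorer } from 'utils/sanitizeOrderBy';

const query = ({
	aggregations: [],
	groupBy: [{ key: 'service.name' }],
	orderBy: [
		{ columnName: 'service.name', order: 'asc' }, // kept: present in groupBy
		{ columnName: 'timestamp', order: 'desc' }, // expected to be dropped: not in groupBy or aggregations
	],
} as unknown) as IBuilderQuery;

// Expected to keep only 'service.name' (plus any keys the aggregation converter derives),
// and to report the dropped entry to Sentry as a warning.
const sanitized = sanitizeOrderByForExplorer(query);
console.log(sanitized);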
@@ -100,16 +100,36 @@ export function isFunctionOperator(operator: string): boolean {
|
||||
const functionOperators = Object.values(QUERY_BUILDER_FUNCTIONS);
|
||||
|
||||
const sanitizedOperator = operator.trim();
|
||||
// Check if it's a direct function operator
|
||||
if (functionOperators.includes(sanitizedOperator)) {
|
||||
// Check if it's a direct function operator (case-insensitive)
|
||||
if (
|
||||
functionOperators.some(
|
||||
(func) => func.toLowerCase() === sanitizedOperator.toLowerCase(),
|
||||
)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if it's a NOT function operator (e.g., "NOT has")
|
||||
if (sanitizedOperator.toUpperCase().startsWith(OPERATORS.NOT)) {
|
||||
const operatorWithoutNot = sanitizedOperator.substring(4).toLowerCase();
|
||||
return functionOperators.includes(operatorWithoutNot);
|
||||
return functionOperators.some(
|
||||
(func) => func.toLowerCase() === operatorWithoutNot,
|
||||
);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function isNonValueOperator(operator: string): boolean {
|
||||
const upperOperator = operator.toUpperCase();
|
||||
// Check if it's a direct non-value operator
|
||||
if (NON_VALUE_OPERATORS.includes(upperOperator)) {
|
||||
return true;
|
||||
}
|
||||
// Check if it's a NOT non-value operator (e.g., "NOT EXISTS")
|
||||
if (upperOperator.startsWith(OPERATORS.NOT)) {
|
||||
const operatorWithoutNot = upperOperator.substring(4).trim(); // Remove "NOT " prefix
|
||||
return NON_VALUE_OPERATORS.includes(operatorWithoutNot);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
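The queryUtils change above makes the function-operator check case-insensitive for both the bare and the NOT-prefixed forms. A hedged illustration of the intended behaviour; the import path is an assumption, and 'has' is assumed to be one of the QUERY_BUILDER_FUNCTIONS values:

// Example (not part of the diff): expected behaviour after the case-insensitive check.
import { isFunctionOperator, isNonValueOperator } from 'utils/queryUtils'; // path is an assumption

console.log(isFunctionOperator('has')); // true
console.log(isFunctionOperator('HAS')); // now also true (previously required an exact match)
console.log(isFunctionOperator('NOT has')); // true, same case-insensitive comparison after the NOT prefix
console.log(isFunctionOperator('NOT HAS')); // now also true

console.log(isNonValueOperator('EXISTS')); // true
console.log(isNonValueOperator('not exists')); // true; the "NOT " prefix handling is unchanged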
@@ -2175,54 +2175,10 @@
|
||||
resolved "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz"
|
||||
integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==
|
||||
|
||||
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.14.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.17.2", "@babel/runtime@^7.17.8", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.19.0", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.4.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
|
||||
version "7.21.0"
|
||||
resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz"
|
||||
integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.13.11"
|
||||
|
||||
"@babel/runtime@^7.13.10":
|
||||
version "7.23.6"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d"
|
||||
integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.14.6":
|
||||
version "7.22.15"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.15.tgz#38f46494ccf6cf020bd4eed7124b425e83e523b8"
|
||||
integrity sha512-T0O+aa+4w0u06iNmapipJXMV4HoUir03hpx3/YqXXhu9xim3w+dVphjFWl1OH8NbZHw5Lbm9k45drDkgq2VNNA==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.18.6":
|
||||
version "7.27.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.0.tgz#fbee7cf97c709518ecc1f590984481d5460d4762"
|
||||
integrity sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2":
|
||||
version "7.23.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.2.tgz#062b0ac103261d68a966c4c7baf2ae3e62ec3885"
|
||||
integrity sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.3.1":
|
||||
version "7.23.1"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.1.tgz#72741dc4d413338a91dcb044a86f3c0bc402646d"
|
||||
integrity sha512-hC2v6p8ZSI/W0HUzh3V8C5g+NwSKzKPtJwSpTjwl0o297GP9+ZLQSkdvHz46CM3LqyoXxq+5G9komY+eSqSO0g==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.7.6":
|
||||
version "7.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1"
|
||||
integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.14.5", "@babel/runtime@^7.14.6", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.17.2", "@babel/runtime@^7.17.8", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.6", "@babel/runtime@^7.19.0", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2", "@babel/runtime@^7.3.1", "@babel/runtime@^7.4.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
|
||||
version "7.28.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.28.2.tgz#2ae5a9d51cc583bd1f5673b3bb70d6d819682473"
|
||||
integrity sha512-KHp2IflsnGywDjBWDkR9iEqiWSpc8GIi0lgTT3mOElT0PP1tG26P4tmFI2YvAdzgq9RGyoHZQEIEdZy6Ec5xCA==
|
||||
|
||||
"@babel/template@^7.18.10", "@babel/template@^7.20.7", "@babel/template@^7.3.3":
|
||||
version "7.20.7"
|
||||
@@ -15291,16 +15247,6 @@ regenerator-runtime@^0.11.0:
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
|
||||
integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==
|
||||
|
||||
regenerator-runtime@^0.13.11:
|
||||
version "0.13.11"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9"
|
||||
integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==
|
||||
|
||||
regenerator-runtime@^0.14.0:
|
||||
version "0.14.0"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45"
|
||||
integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==
|
||||
|
||||
regenerator-transform@^0.15.1:
|
||||
version "0.15.1"
|
||||
resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.1.tgz"
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -97,7 +98,12 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
r = r.WithContext(ctx)
|
||||
comment := ctxtypes.CommentFromContext(ctx)
|
||||
comment.Set("auth_type", "api_key")
|
||||
comment.Set("user_id", claims.UserID)
|
||||
comment.Set("org_id", claims.OrgID)
|
||||
|
||||
r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment))
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sharder"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -50,7 +51,12 @@ func (a *Auth) Wrap(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
r = r.WithContext(ctx)
|
||||
comment := ctxtypes.CommentFromContext(ctx)
|
||||
comment.Set("auth_type", "jwt")
|
||||
comment.Set("user_id", claims.UserID)
|
||||
comment.Set("org_id", claims.OrgID)
|
||||
|
||||
r = r.WithContext(ctxtypes.NewContextWithComment(ctx, comment))
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
})
|
||||
|
||||
pkg/http/middleware/comment.go (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
)
|
||||
|
||||
type Comment struct{}
|
||||
|
||||
func NewComment() *Comment {
|
||||
return &Comment{}
|
||||
}
|
||||
|
||||
func (middleware *Comment) Wrap(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
|
||||
comment := ctxtypes.CommentFromContext(req.Context())
|
||||
comment.Merge(ctxtypes.CommentFromHTTPRequest(req))
|
||||
|
||||
req = req.WithContext(ctxtypes.NewContextWithComment(req.Context(), comment))
|
||||
next.ServeHTTP(rw, req)
|
||||
})
|
||||
}
|
||||
@@ -2,16 +2,11 @@ package middleware
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"log/slog"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/gorilla/mux"
|
||||
semconv "go.opentelemetry.io/otel/semconv/v1.26.0"
|
||||
)
|
||||
@@ -55,9 +50,6 @@ func (middleware *Logging) Wrap(next http.Handler) http.Handler {
|
||||
string(semconv.HTTPRouteKey), path,
|
||||
}
|
||||
|
||||
logCommentKVs := middleware.getLogCommentKVs(req)
|
||||
req = req.WithContext(context.WithValue(req.Context(), common.LogCommentKey, logCommentKVs))
|
||||
|
||||
badResponseBuffer := new(bytes.Buffer)
|
||||
writer := newBadResponseLoggingWriter(rw, badResponseBuffer)
|
||||
next.ServeHTTP(writer, req)
|
||||
@@ -85,67 +77,3 @@ func (middleware *Logging) Wrap(next http.Handler) http.Handler {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *Logging) getLogCommentKVs(r *http.Request) map[string]string {
|
||||
referrer := r.Header.Get("Referer")
|
||||
|
||||
var path, dashboardID, alertID, page, client, viewName, tab string
|
||||
|
||||
if referrer != "" {
|
||||
referrerURL, _ := url.Parse(referrer)
|
||||
client = "browser"
|
||||
path = referrerURL.Path
|
||||
|
||||
if strings.Contains(path, "/dashboard") {
|
||||
// Split the path into segments
|
||||
pathSegments := strings.Split(referrerURL.Path, "/")
|
||||
// The dashboard ID should be the segment after "/dashboard/"
|
||||
// Loop through pathSegments to find "dashboard" and then take the next segment as the ID
|
||||
for i, segment := range pathSegments {
|
||||
if segment == "dashboard" && i < len(pathSegments)-1 {
|
||||
// Return the next segment, which should be the dashboard ID
|
||||
dashboardID = pathSegments[i+1]
|
||||
}
|
||||
}
|
||||
page = "dashboards"
|
||||
} else if strings.Contains(path, "/alerts") {
|
||||
urlParams := referrerURL.Query()
|
||||
alertID = urlParams.Get("ruleId")
|
||||
page = "alerts"
|
||||
} else if strings.Contains(path, "logs") && strings.Contains(path, "explorer") {
|
||||
page = "logs-explorer"
|
||||
viewName = referrerURL.Query().Get("viewName")
|
||||
} else if strings.Contains(path, "/trace") || strings.Contains(path, "traces-explorer") {
|
||||
page = "traces-explorer"
|
||||
viewName = referrerURL.Query().Get("viewName")
|
||||
} else if strings.Contains(path, "/services") {
|
||||
page = "services"
|
||||
tab = referrerURL.Query().Get("tab")
|
||||
if tab == "" {
|
||||
tab = "OVER_METRICS"
|
||||
}
|
||||
} else if strings.Contains(path, "/metrics") {
|
||||
page = "metrics-explorer"
|
||||
}
|
||||
} else {
|
||||
client = "api"
|
||||
}
|
||||
|
||||
var email string
|
||||
claims, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err == nil {
|
||||
email = claims.Email
|
||||
}
|
||||
|
||||
kvs := map[string]string{
|
||||
"path": path,
|
||||
"dashboardID": dashboardID,
|
||||
"alertID": alertID,
|
||||
"source": page,
|
||||
"client": client,
|
||||
"viewName": viewName,
|
||||
"servicesTab": tab,
|
||||
"email": email,
|
||||
}
|
||||
return kvs
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"runtime/debug"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
@@ -12,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/SigNoz/signoz/pkg/variables"
|
||||
@@ -166,49 +166,9 @@ func (a *API) logEvent(ctx context.Context, referrer string, event *qbtypes.QBEv
|
||||
return
|
||||
}
|
||||
|
||||
properties["referrer"] = referrer
|
||||
|
||||
logsExplorerMatched, _ := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer)
|
||||
traceExplorerMatched, _ := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer)
|
||||
metricsExplorerMatched, _ := regexp.MatchString(`/metrics-explorer/explorer(?:\?.*)?$`, referrer)
|
||||
dashboardMatched, _ := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
|
||||
alertMatched, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
|
||||
|
||||
switch {
|
||||
case dashboardMatched:
|
||||
properties["module_name"] = "dashboard"
|
||||
case alertMatched:
|
||||
properties["module_name"] = "rule"
|
||||
case metricsExplorerMatched:
|
||||
properties["module_name"] = "metrics-explorer"
|
||||
case logsExplorerMatched:
|
||||
properties["module_name"] = "logs-explorer"
|
||||
case traceExplorerMatched:
|
||||
properties["module_name"] = "traces-explorer"
|
||||
default:
|
||||
return
|
||||
}
|
||||
|
||||
if dashboardMatched {
|
||||
if dashboardIDRegex, err := regexp.Compile(`/dashboard/([a-f0-9\-]+)/`); err == nil {
|
||||
if matches := dashboardIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
|
||||
properties["dashboard_id"] = matches[1]
|
||||
}
|
||||
}
|
||||
|
||||
if widgetIDRegex, err := regexp.Compile(`widgetId=([a-f0-9\-]+)`); err == nil {
|
||||
if matches := widgetIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
|
||||
properties["widget_id"] = matches[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if alertMatched {
|
||||
if alertIDRegex, err := regexp.Compile(`ruleId=(\d+)`); err == nil {
|
||||
if matches := alertIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
|
||||
properties["rule_id"] = matches[1]
|
||||
}
|
||||
}
|
||||
comments := ctxtypes.CommentFromContext(ctx).Map()
|
||||
for key, value := range comments {
|
||||
properties[key] = value
|
||||
}
|
||||
|
||||
if !event.HasData {
|
||||
|
||||
@@ -490,7 +490,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
|
||||
key string
|
||||
}
|
||||
seriesMap := make(map[seriesKey]*qbtypes.TimeSeries, estimatedSeries)
|
||||
var queryName string
|
||||
|
||||
for _, bucket := range buckets {
|
||||
var tsData *qbtypes.TimeSeriesData
|
||||
@@ -499,11 +498,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
|
||||
continue
|
||||
}
|
||||
|
||||
// Preserve the query name from the first bucket
|
||||
if queryName == "" && tsData.QueryName != "" {
|
||||
queryName = tsData.QueryName
|
||||
}
|
||||
|
||||
for _, aggBucket := range tsData.Aggregations {
|
||||
for _, series := range aggBucket.Series {
|
||||
// Create series key from labels
|
||||
@@ -549,7 +543,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
|
||||
|
||||
// Convert map back to slice
|
||||
result := &qbtypes.TimeSeriesData{
|
||||
QueryName: queryName,
|
||||
Aggregations: make([]*qbtypes.AggregationBucket, 0, len(aggMap)),
|
||||
}
|
||||
|
||||
@@ -738,9 +731,7 @@ func (bc *bucketCache) trimResultToFluxBoundary(result *qbtypes.Result, fluxBoun
|
||||
case qbtypes.RequestTypeTimeSeries:
|
||||
// Trim time series data
|
||||
if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok && tsData != nil {
|
||||
trimmedData := &qbtypes.TimeSeriesData{
|
||||
QueryName: tsData.QueryName,
|
||||
}
|
||||
trimmedData := &qbtypes.TimeSeriesData{}
|
||||
|
||||
for _, aggBucket := range tsData.Aggregations {
|
||||
trimmedBucket := &qbtypes.AggregationBucket{
|
||||
@@ -807,7 +798,6 @@ func (bc *bucketCache) filterResultToTimeRange(result *qbtypes.Result, startMs,
|
||||
case qbtypes.RequestTypeTimeSeries:
|
||||
if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok {
|
||||
filteredData := &qbtypes.TimeSeriesData{
|
||||
QueryName: tsData.QueryName,
|
||||
Aggregations: make([]*qbtypes.AggregationBucket, 0, len(tsData.Aggregations)),
|
||||
}
|
||||
|
||||
|
||||
@@ -169,9 +169,8 @@ func TestBucketCache_Put_And_Get(t *testing.T) {
|
||||
assert.Equal(t, []string{"test warning"}, cached.Warnings)
|
||||
|
||||
// Verify the time series data
|
||||
tsData, ok := cached.Value.(*qbtypes.TimeSeriesData)
|
||||
_, ok := cached.Value.(*qbtypes.TimeSeriesData)
|
||||
require.True(t, ok)
|
||||
assert.Equal(t, "A", tsData.QueryName)
|
||||
}
|
||||
|
||||
func TestBucketCache_PartialHit(t *testing.T) {
|
||||
@@ -1077,7 +1076,6 @@ func TestBucketCache_FilteredCachedResults(t *testing.T) {
|
||||
// Verify the cached result only contains values within the requested range
|
||||
tsData, ok := cached.Value.(*qbtypes.TimeSeriesData)
|
||||
require.True(t, ok)
|
||||
assert.Equal(t, "A", tsData.QueryName)
|
||||
require.Len(t, tsData.Aggregations, 1)
|
||||
require.Len(t, tsData.Aggregations[0].Series, 1)
|
||||
|
||||
|
||||
@@ -110,6 +110,10 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any
|
||||
|
||||
if req.RequestType == qbtypes.RequestTypeTimeSeries && req.FormatOptions != nil && req.FormatOptions.FillGaps {
|
||||
for name := range typedResults {
|
||||
if req.SkipFillGaps(name) {
|
||||
continue
|
||||
}
|
||||
|
||||
funcs := []qbtypes.Function{{Name: qbtypes.FunctionNameFillZero}}
|
||||
funcs = q.prepareFillZeroArgsWithStep(funcs, req, req.StepIntervalForQuery(name))
|
||||
// empty time series if it doesn't exist
|
||||
|
||||
@@ -23,6 +23,10 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
|
||||
intervalWarn = "Query %s is requesting aggregation interval %v seconds, which is smaller than the minimum allowed interval of %v seconds for selected time range. Using the minimum instead"
|
||||
)
|
||||
|
||||
type querier struct {
|
||||
logger *slog.Logger
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
@@ -121,6 +125,8 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
PanelType: req.RequestType.StringValue(),
|
||||
}
|
||||
|
||||
intervalWarnings := []string{}
|
||||
|
||||
// First pass: collect all metric names that need temporality
|
||||
metricNames := make([]string, 0)
|
||||
for idx, query := range req.CompositeQuery.Queries {
|
||||
@@ -147,9 +153,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
@@ -162,9 +170,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
@@ -181,9 +191,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
@@ -290,6 +302,16 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
qbResp, qbErr := q.run(ctx, orgID, queries, req, steps, event)
|
||||
if qbResp != nil {
|
||||
qbResp.QBEvent = event
|
||||
if len(intervalWarnings) != 0 && req.RequestType == qbtypes.RequestTypeTimeSeries {
|
||||
if qbResp.Warning == nil {
|
||||
qbResp.Warning = &qbtypes.QueryWarnData{
|
||||
Warnings: make([]qbtypes.QueryWarnDataAdditional, len(intervalWarnings)),
|
||||
}
|
||||
for idx := range intervalWarnings {
|
||||
qbResp.Warning.Warnings[idx] = qbtypes.QueryWarnDataAdditional{Message: intervalWarnings[idx]}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return qbResp, qbErr
|
||||
}
|
||||
@@ -363,6 +385,15 @@ func (q *querier) run(
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch v := result.Value.(type) {
|
||||
case *qbtypes.TimeSeriesData:
|
||||
v.QueryName = name
|
||||
case *qbtypes.ScalarData:
|
||||
v.QueryName = name
|
||||
case *qbtypes.RawData:
|
||||
v.QueryName = name
|
||||
}
|
||||
|
||||
results[name] = result.Value
|
||||
warnings = append(warnings, result.Warnings...)
|
||||
warningsDocURL = result.WarningsDocURL
|
||||
|
||||
@@ -3640,28 +3640,8 @@ func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNam
return seriesList, getPersonalisedError(rows.Err())
}

func logCommentKVs(ctx context.Context) map[string]string {
kv := ctx.Value(common.LogCommentKey)
if kv == nil {
return nil
}
logCommentKVs, ok := kv.(map[string]string)
if !ok {
return nil
}
return logCommentKVs
}

// GetTimeSeriesResultV3 runs the query and returns list of time series
func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query string) ([]*v3.Series, error) {

ctxArgs := map[string]interface{}{"query": query}
for k, v := range logCommentKVs(ctx) {
ctxArgs[k] = v
}

defer utils.Elapsed("GetTimeSeriesResultV3", ctxArgs)()

// Hook up query progress reporting if requested.
queryId := ctx.Value("queryId")
if queryId != nil {
@@ -3725,20 +3705,12 @@ func (r *ClickHouseReader) GetTimeSeriesResultV3(ctx context.Context, query stri

// GetListResultV3 runs the query and returns list of rows
func (r *ClickHouseReader) GetListResultV3(ctx context.Context, query string) ([]*v3.Row, error) {

ctxArgs := map[string]interface{}{"query": query}
for k, v := range logCommentKVs(ctx) {
ctxArgs[k] = v
}

defer utils.Elapsed("GetListResultV3", ctxArgs)()

rows, err := r.db.Query(ctx, query)

if err != nil {
zap.L().Error("error while reading time series result", zap.Error(err))
return nil, errors.New(err.Error())
}

defer rows.Close()

var (
@@ -220,6 +220,7 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)
r.Use(middleware.NewComment().Wrap)

am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
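middleware.NewComment() is only registered in this hunk; its implementation is not shown in the diff. As a rough sketch of the pattern (assumed, not the actual SigNoz middleware), such a middleware seeds a key/value comment map into the request context so the query layer can attach it to ClickHouse queries:

```go
package main

import (
	"context"
	"fmt"
	"net/http"
)

// commentKey is an unexported context key type, as is idiomatic for context values.
type commentKey struct{}

// withComment wraps a handler and stores request metadata in the context.
// The exact fields recorded by SigNoz are an assumption here.
func withComment(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		comment := map[string]string{
			"path":   r.URL.Path,
			"client": "api",
		}
		ctx := context.WithValue(r.Context(), commentKey{}, comment)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

// commentFromContext retrieves the comment map, or an empty result if unset.
func commentFromContext(ctx context.Context) map[string]string {
	c, _ := ctx.Value(commentKey{}).(map[string]string)
	return c
}

func main() {
	h := withComment(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, commentFromContext(r.Context()))
	}))
	_ = h // wire into a router with r.Use-style wrapping in a real server
}
```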
@@ -1,5 +0,0 @@
package common

type LogCommentContextKeyType string

const LogCommentKey LogCommentContextKeyType = "logComment"
@@ -43,17 +43,17 @@ var (
// FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationConverter
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataConverter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateConverter
case "percent", "percentunit":
case "percent", "percentunit", "%":
return PercentConverter
case "bool", "bool_yes_no", "bool_true_false", "bool_1_0":
return BoolConverter
case "cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm":
case "cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm", "{count}/s", "{ops}/s", "{req}/s", "{read}/s", "{write}/s", "{iops}/s", "{count}/min", "{ops}/min", "{read}/min", "{write}/min":
return ThroughputConverter
default:
return NoneConverter
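The effect of this hunk is that UCUM/OTel-style unit IDs resolve to the same converter families as the existing Grafana-style IDs. An abridged standalone sketch of that mapping (the full table lives in FromUnit above; this is not the package's API):

```go
package main

import "fmt"

// converterFor resolves a unit ID to a converter family name.
// Abridged illustration of the FromUnit switch; not the full unit table.
func converterFor(unit string) string {
	switch unit {
	case "ns", "us", "ms", "s", "m", "h", "d", "min":
		return "duration"
	case "bytes", "kbytes", "mbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
		return "data"
	case "Bps", "bps", "By/s", "bit/s", "kBy/s", "Mbit/s":
		return "data rate"
	case "percent", "percentunit", "%":
		return "percent"
	case "reqps", "{req}/s", "{count}/min":
		return "throughput"
	default:
		return "none"
	}
}

func main() {
	// OTel-style IDs land in the same families as their legacy equivalents.
	fmt.Println(converterFor("min"), converterFor("m"))          // duration duration
	fmt.Println(converterFor("By"), converterFor("bytes"))       // data data
	fmt.Println(converterFor("%"), converterFor("percent"))      // percent percent
	fmt.Println(converterFor("{req}/s"), converterFor("reqps"))  // throughput throughput
}
```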
@@ -60,7 +60,7 @@ func (*dataConverter) Name() string {

func FromDataUnit(u Unit) float64 {
switch u {
case "bytes": // base 2
case "bytes", "By": // base 2
return Byte
case "decbytes": // base 10
return Byte
@@ -68,23 +68,23 @@ func FromDataUnit(u Unit) float64 {
return Bit
case "decbits": // base 10
return Bit
case "kbytes": // base 2
case "kbytes", "kBy": // base 2
return Kibibyte
case "decKbytes", "deckbytes": // base 10
return Kilobyte
case "mbytes": // base 2
case "mbytes", "MBy": // base 2
return Mebibyte
case "decMbytes", "decmbytes": // base 10
return Megabyte
case "gbytes": // base 2
case "gbytes", "GBy": // base 2
return Gibibyte
case "decGbytes", "decgbytes": // base 10
return Gigabyte
case "tbytes": // base 2
case "tbytes", "TBy": // base 2
return Tebibyte
case "decTbytes", "dectbytes": // base 10
return Terabyte
case "pbytes": // base 2
case "pbytes", "PBy": // base 2
return Pebibyte
case "decPbytes", "decpbytes": // base 10
return Petabyte
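Data unit conversion works by scaling through a common byte factor; note that in this hunk the UCUM-style IDs ("By", "kBy", "MBy", and so on) are grouped with the existing base-2 cases. A small standalone sketch of the factor arithmetic (abridged, not the package's API):

```go
package main

import "fmt"

// factor returns bytes per one unit. The alias grouping follows the hunk
// above: UCUM-style IDs share the base-2 factors of their legacy twins.
func factor(unit string) float64 {
	switch unit {
	case "bytes", "By":
		return 1
	case "kbytes", "kBy":
		return 1 << 10
	case "mbytes", "MBy":
		return 1 << 20
	case "gbytes", "GBy":
		return 1 << 30
	default:
		return 1
	}
}

// convert re-expresses value given in one unit in another unit.
func convert(value float64, from, to string) float64 {
	return value * factor(from) / factor(to)
}

func main() {
	fmt.Println(convert(1024, "By", "kBy"))  // 1
	fmt.Println(convert(1, "MBy", "kbytes")) // 1024
}
```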
@@ -59,51 +59,51 @@ func FromDataRateUnit(u Unit) float64 {
switch u {
case "binBps": // bytes/sec(IEC)
return BytePerSecond
case "Bps": // bytes/sec(SI)
case "Bps", "By/s": // bytes/sec(SI)
return BytePerSecond
case "binbps": // bits/sec(IEC)
return BitPerSecond
case "bps": // bits/sec(SI)
case "bps", "bit/s": // bits/sec(SI)
return BitPerSecond
case "KiBs": // kibibytes/sec
return KibibytePerSecond
case "Kibits": // kibibits/sec
return KibibitPerSecond
case "KBs": // kilobytes/sec
case "KBs", "kBy/s": // kilobytes/sec
return KilobytePerSecond
case "Kbits": // kilobits/sec
case "Kbits", "kbit/s": // kilobits/sec
return KilobitPerSecond
case "MiBs": // mebibytes/sec
return MebibytePerSecond
case "Mibits": // mebibits/sec
return MebibitPerSecond
case "MBs": // megabytes/sec
case "MBs", "MBy/s": // megabytes/sec
return MegabytePerSecond
case "Mbits": // megabits/sec
case "Mbits", "Mbit/s": // megabits/sec
return MegabitPerSecond
case "GiBs": // gibibytes/sec
return GibibytePerSecond
case "Gibits": // gibibits/sec
return GibibitPerSecond
case "GBs": // gigabytes/sec
case "GBs", "GBy/s": // gigabytes/sec
return GigabytePerSecond
case "Gbits": // gigabits/sec
case "Gbits", "Gbit/s": // gigabits/sec
return GigabitPerSecond
case "TiBs": // tebibytes/sec
return TebibytePerSecond
case "Tibits": // tebibits/sec
return TebibitPerSecond
case "TBs": // terabytes/sec
case "TBs", "TBy/s": // terabytes/sec
return TerabytePerSecond
case "Tbits": // terabits/sec
case "Tbits", "Tbit/s": // terabits/sec
return TerabitPerSecond
case "PiBs": // pebibytes/sec
return PebibytePerSecond
case "Pibits": // pebibits/sec
return PebibitPerSecond
case "PBs": // petabytes/sec
case "PBs", "PBy/s": // petabytes/sec
return PetabytePerSecond
case "Pbits": // petabits/sec
case "Pbits", "Pbit/s": // petabits/sec
return PetabitPerSecond
default:
return 1
@@ -36,10 +36,16 @@ func TestDataRate(t *testing.T) {

// 8 bits = 1 byte
assert.Equal(t, Value{F: 1, U: "binBps"}, dataRateConverter.Convert(Value{F: 8, U: "binbps"}, "binBps"))
// 8 bits = 1 byte
assert.Equal(t, Value{F: 1, U: "Bps"}, dataRateConverter.Convert(Value{F: 8, U: "bps"}, "Bps"))
// 8 bits = 1 byte
assert.Equal(t, Value{F: 1, U: "By/s"}, dataRateConverter.Convert(Value{F: 8, U: "bit/s"}, "By/s"))
// 1024 bytes = 1 kbytes
assert.Equal(t, Value{F: 1, U: "KiBs"}, dataRateConverter.Convert(Value{F: 1024, U: "binBps"}, "KiBs"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "binbps"}, dataRateConverter.Convert(Value{F: 1, U: "binBps"}, "binbps"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "bit/s"}, dataRateConverter.Convert(Value{F: 1, U: "Bps"}, "bit/s"))
// 1 mbytes = 1024 kbytes
assert.Equal(t, Value{F: 1, U: "MiBs"}, dataRateConverter.Convert(Value{F: 1024, U: "KiBs"}, "MiBs"))
// 1 kbytes = 1024 bytes
@@ -57,6 +63,10 @@ func TestDataRate(t *testing.T) {
// 1 gbytes = 1024 * 1024 kbytes
assert.Equal(t, Value{F: 1024 * 1024, U: "KiBs"}, dataRateConverter.Convert(Value{F: 1, U: "GiBs"}, "KiBs"))
// 1 gbytes = 1024 * 1024 * 1024 bytes
assert.Equal(t, Value{F: (1024 * 1024 * 1024 * 8) / 1024, U: "Kibits"}, dataRateConverter.Convert(Value{F: 1, U: "GiBs"}, "Kibits"))
// 1 gbytes = 1024 * 1024 * 1024 bytes
assert.Equal(t, Value{F: float64(1024*1024*1024) / 1000.0, U: "kBy/s"}, dataRateConverter.Convert(Value{F: 1, U: "GiBs"}, "kBy/s"))
// 1 gbytes = 1024 * 1024 * 1024 bytes
assert.Equal(t, Value{F: 1024 * 1024 * 1024, U: "binBps"}, dataRateConverter.Convert(Value{F: 1, U: "GiBs"}, "binBps"))
// 1024 * 1024 bytes = 1 mbytes
assert.Equal(t, Value{F: 1, U: "MiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024, U: "binBps"}, "MiBs"))
@@ -10,8 +10,10 @@ func TestData(t *testing.T) {
dataConverter := NewDataConverter()
// 8 bits = 1 byte
assert.Equal(t, Value{F: 1, U: "bytes"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "bytes"))
assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
// 1024 bytes = 1 kbytes
assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
// 1 mbytes = 1024 kbytes
@@ -20,6 +22,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
// 1024 kbytes = 1 mbytes
assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
// 1 mbytes = 1024 * 1024 bytes
assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
// 1024 mbytes = 1 gbytes
@@ -42,6 +45,10 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
// 1 pbytes = 1024 tbytes
assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
// 1024 pbytes = 1 tbytes
assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
}
@@ -13,7 +13,7 @@ func (*percentConverter) Name() string {

func FromPercentUnit(u Unit) float64 {
switch u {
case "percent":
case "percent", "%":
return 1
case "percentunit":
return 100
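With "%" now an alias of "percent", the conversion arithmetic is unchanged: values scale through a factor of 1 for percent and "%", and 100 for percentunit. A quick standalone check that mirrors the test expectations below:

```go
package main

import "fmt"

// percentFactor mirrors FromPercentUnit: "%" is an alias of "percent".
func percentFactor(unit string) float64 {
	switch unit {
	case "percent", "%":
		return 1
	case "percentunit":
		return 100
	default:
		return 1
	}
}

// convert scales a value from one percent unit to another.
func convert(value float64, from, to string) float64 {
	return value * percentFactor(from) / percentFactor(to)
}

func main() {
	fmt.Println(convert(1, "percentunit", "percent")) // 100
	fmt.Println(convert(1, "%", "percentunit"))       // 0.01
	fmt.Println(convert(1, "%", "percent"))           // 1
}
```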
@@ -13,4 +13,5 @@ func TestPercentConverter(t *testing.T) {
assert.Equal(t, Value{F: 100, U: "percent"}, percentConverter.Convert(Value{F: 1, U: "percentunit"}, "percent"))
assert.Equal(t, Value{F: 1, U: "percentunit"}, percentConverter.Convert(Value{F: 100, U: "percent"}, "percentunit"))
assert.Equal(t, Value{F: 0.01, U: "percentunit"}, percentConverter.Convert(Value{F: 1, U: "percent"}, "percentunit"))
assert.Equal(t, Value{F: 1, U: "percent"}, percentConverter.Convert(Value{F: 1, U: "%"}, "percent"))
}
@@ -41,7 +41,7 @@ func FromTimeUnit(u Unit) Duration {
return Decisecond
case "s":
return Second
case "m":
case "m", "min":
return Minute
case "h":
return Hour
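Here "min" (the UCUM spelling) resolves to the same Minute factor as "m". A minimal standalone sketch of the duration factor lookup and a conversion through it (illustrative, not the package's API):

```go
package main

import (
	"fmt"
	"time"
)

// durationFactor mirrors the idea of FromTimeUnit for a few units;
// "min" is now an alias of "m".
func durationFactor(unit string) time.Duration {
	switch unit {
	case "s":
		return time.Second
	case "m", "min":
		return time.Minute
	case "h":
		return time.Hour
	default:
		return time.Nanosecond
	}
}

// convert re-expresses a duration value given in one unit in another unit.
func convert(value float64, from, to string) float64 {
	return value * float64(durationFactor(from)) / float64(durationFactor(to))
}

func main() {
	fmt.Println(convert(60, "min", "h")) // 1
	fmt.Println(convert(1, "m", "s"))    // 60
}
```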
@@ -24,6 +24,8 @@ func TestDurationConvert(t *testing.T) {
assert.Equal(t, Value{F: 60, U: "s"}, timeConverter.Convert(Value{F: 1, U: "m"}, "s"))
// 60 m = 1 h
assert.Equal(t, Value{F: 1, U: "h"}, timeConverter.Convert(Value{F: 60, U: "m"}, "h"))
// 60 min = 1 h
assert.Equal(t, Value{F: 1, U: "h"}, timeConverter.Convert(Value{F: 60, U: "min"}, "h"))
// 168 h = 1 w
assert.Equal(t, Value{F: 1, U: "w"}, timeConverter.Convert(Value{F: 168, U: "h"}, "w"))
// 1 h = 60 m
@@ -20,7 +20,7 @@ func (*dataFormatter) Name() string {

func (f *dataFormatter) Format(value float64, unit string) string {
switch unit {
case "bytes":
case "bytes", "By":
return humanize.IBytes(uint64(value))
case "decbytes":
return humanize.Bytes(uint64(value))
@@ -28,23 +28,23 @@ func (f *dataFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value * converter.Bit))
case "decbits":
return humanize.Bytes(uint64(value * converter.Bit))
case "kbytes":
case "kbytes", "kBy":
return humanize.IBytes(uint64(value * converter.Kibibit))
case "decKbytes", "deckbytes":
return humanize.IBytes(uint64(value * converter.Kilobit))
case "mbytes":
case "mbytes", "MBy":
return humanize.IBytes(uint64(value * converter.Mebibit))
case "decMbytes", "decmbytes":
return humanize.Bytes(uint64(value * converter.Megabit))
case "gbytes":
case "gbytes", "GBy":
return humanize.IBytes(uint64(value * converter.Gibibit))
case "decGbytes", "decgbytes":
return humanize.Bytes(uint64(value * converter.Gigabit))
case "tbytes":
case "tbytes", "TBy":
return humanize.IBytes(uint64(value * converter.Tebibit))
case "decTbytes", "dectbytes":
return humanize.Bytes(uint64(value * converter.Terabit))
case "pbytes":
case "pbytes", "PBy":
return humanize.IBytes(uint64(value * converter.Pebibit))
case "decPbytes", "decpbytes":
return humanize.Bytes(uint64(value * converter.Petabit))
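A trimmed-down illustration of what the formatter change means in practice: "By" is rendered with IEC (base-2) suffixes exactly like "bytes", matching the data formatter tests later in this diff. Only two branches of the real switch are reproduced here; the humanize import is the same library the formatter uses, everything else is a sketch.

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

// formatData reproduces two branches of the data formatter's switch:
// "By" shares the IEC rendering of "bytes", while dec* units keep SI rendering.
func formatData(value float64, unit string) string {
	switch unit {
	case "bytes", "By":
		return humanize.IBytes(uint64(value))
	case "decbytes":
		return humanize.Bytes(uint64(value))
	}
	return fmt.Sprintf("%g", value)
}

func main() {
	fmt.Println(formatData(1, "By"))    // 1 B
	fmt.Println(formatData(1024, "By")) // 1.0 KiB, same as "bytes"
}
```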
@@ -22,51 +22,51 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
switch unit {
case "binBps":
return humanize.IBytes(uint64(value)) + "/s"
case "Bps":
case "Bps", "By/s":
return humanize.Bytes(uint64(value)) + "/s"
case "binbps":
return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
case "bps":
case "bps", "bit/s":
return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
case "KiBs":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
case "Kibits":
return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
case "KBs":
case "KBs", "kBy/s":
return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
case "Kbits":
case "Kbits", "kbit/s":
return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
case "MiBs":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
case "Mibits":
return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
case "MBs":
case "MBs", "MBy/s":
return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
case "Mbits":
case "Mbits", "Mbit/s":
return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
case "GiBs":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
case "Gibits":
return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
case "GBs":
case "GBs", "GBy/s":
return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
case "Gbits":
case "Gbits", "Gbit/s":
return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
case "TiBs":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
case "Tibits":
return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
case "TBs":
case "TBs", "TBy/s":
return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
case "Tbits":
case "Tbits", "Tbit/s":
return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
case "PiBs":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
case "Pibits":
return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
case "PBs":
case "PBs", "PBy/s":
return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
case "Pbits":
case "Pbits", "Pbit/s":
return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
}
// When unit is not matched, return the value as it is.
@@ -10,14 +10,25 @@ func TestData(t *testing.T) {
dataFormatter := NewDataFormatter()

assert.Equal(t, "1 B", dataFormatter.Format(1, "bytes"))
assert.Equal(t, "1 B", dataFormatter.Format(1, "By"))
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
}
@@ -18,17 +18,17 @@ var (

func FromUnit(u string) Formatter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationFormatter
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataFormatter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateFormatter
case "percent", "percentunit":
case "percent", "percentunit", "%":
return PercentFormatter
case "bool", "bool_yes_no", "bool_true_false", "bool_1_0":
return BoolFormatter
case "cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm":
case "cps", "ops", "reqps", "rps", "wps", "iops", "cpm", "opm", "rpm", "wpm", "{count}/s", "{ops}/s", "{req}/s", "{read}/s", "{write}/s", "{iops}/s", "{count}/min", "{ops}/min", "{read}/min", "{write}/min":
return ThroughputFormatter
default:
return NoneFormatter
@@ -22,7 +22,7 @@ func toPercentUnit(value float64, decimals DecimalCount) string {

func (f *percentFormatter) Format(value float64, unit string) string {
switch unit {
case "percent":
case "percent", "%":
return toPercent(value, nil)
case "percentunit":
return toPercentUnit(value, nil)
@@ -22,25 +22,25 @@ func simpleCountUnit(value float64, decimals *int, symbol string) string {

func (f *throughputFormatter) Format(value float64, unit string) string {
switch unit {
case "cps":
case "cps", "{count}/s":
return simpleCountUnit(value, nil, "c/s")
case "ops":
case "ops", "{ops}/s":
return simpleCountUnit(value, nil, "op/s")
case "reqps":
case "reqps", "{req}/s":
return simpleCountUnit(value, nil, "req/s")
case "rps":
case "rps", "{read}/s":
return simpleCountUnit(value, nil, "r/s")
case "wps":
case "wps", "{write}/s":
return simpleCountUnit(value, nil, "w/s")
case "iops":
case "iops", "{iops}/s":
return simpleCountUnit(value, nil, "iops")
case "cpm":
case "cpm", "{count}/min":
return simpleCountUnit(value, nil, "c/m")
case "opm":
case "opm", "{ops}/min":
return simpleCountUnit(value, nil, "op/m")
case "rpm":
case "rpm", "{read}/min":
return simpleCountUnit(value, nil, "r/m")
case "wpm":
case "wpm", "{write}/min":
return simpleCountUnit(value, nil, "w/m")
}
// When unit is not matched, return the value as it is.
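The new placeholder-style IDs ("{req}/s", "{count}/min", and so on) map onto the same display suffixes as the legacy throughput IDs. Below is a simplified stand-in for simpleCountUnit (the real helper's rounding and decimals may differ) that reproduces the test expectations that follow:

```go
package main

import "fmt"

// suffixFor mirrors the throughput formatter's mapping of the new
// OTel-style IDs onto the same display suffixes as the legacy IDs (abridged).
func suffixFor(unit string) string {
	switch unit {
	case "reqps", "{req}/s":
		return "req/s"
	case "cpm", "{count}/min":
		return "c/m"
	case "iops", "{iops}/s":
		return "iops"
	default:
		return unit
	}
}

// formatCount is a simplified stand-in for simpleCountUnit: it shortens
// thousands and millions before appending the suffix.
func formatCount(value float64, unit string) string {
	switch {
	case value >= 1e6:
		return fmt.Sprintf("%gM %s", value/1e6, suffixFor(unit))
	case value >= 1e3:
		return fmt.Sprintf("%gK %s", value/1e3, suffixFor(unit))
	default:
		return fmt.Sprintf("%g %s", value, suffixFor(unit))
	}
}

func main() {
	fmt.Println(formatCount(10, "{req}/s"))     // 10 req/s
	fmt.Println(formatCount(1000, "{req}/s"))   // 1K req/s
	fmt.Println(formatCount(10, "{count}/min")) // 10 c/m
}
```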
@@ -10,6 +10,11 @@ func TestThroughput(t *testing.T) {
throughputFormatter := NewThroughputFormatter()

assert.Equal(t, "10 req/s", throughputFormatter.Format(10, "reqps"))
assert.Equal(t, "10 req/s", throughputFormatter.Format(10, "{req}/s"))
assert.Equal(t, "1K req/s", throughputFormatter.Format(1000, "reqps"))
assert.Equal(t, "1K req/s", throughputFormatter.Format(1000, "{req}/s"))
assert.Equal(t, "1M req/s", throughputFormatter.Format(1000000, "reqps"))
assert.Equal(t, "1M req/s", throughputFormatter.Format(1000000, "{req}/s"))
assert.Equal(t, "10 c/m", throughputFormatter.Format(10, "cpm"))
assert.Equal(t, "10 c/m", throughputFormatter.Format(10, "{count}/min"))
}
@@ -26,7 +26,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
return toMilliSeconds(value)
case "s":
return toSeconds(value)
case "m":
case "m", "min":
return toMinutes(value)
case "h":
return toHours(value)
@@ -26,4 +26,5 @@ func TestDuration(t *testing.T) {
assert.Equal(t, "1.82 min", durationFormatter.Format(109200000000, "ns"))
assert.Equal(t, "1.27 day", durationFormatter.Format(109800000000000, "ns"))
assert.Equal(t, "2 day", durationFormatter.Format(172800000, "ms"))
assert.Equal(t, "1 hour", durationFormatter.Format(60, "min"))
}
@@ -7,7 +7,7 @@ import (
"sync"
"time"

"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
opentracing "github.com/opentracing/opentracing-go"
@@ -369,12 +369,10 @@ func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) {
rule.SetEvaluationTimestamp(t)
}(time.Now())

kvs := map[string]string{
"alertID": rule.ID(),
"source": "alerts",
"client": "query-service",
}
ctx = context.WithValue(ctx, common.LogCommentKey, kvs)
comment := ctxtypes.CommentFromContext(ctx)
comment.Set("rule_id", rule.ID())
comment.Set("auth_type", "internal")
ctx = ctxtypes.NewContextWithComment(ctx, comment)

_, err := rule.Eval(ctx, ts)
if err != nil {
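This hunk swaps the ad-hoc common.LogCommentKey context value for the typed ctxtypes comment carrier. A standalone sketch of the pattern (the comment type here is a stand-in for illustration, not the ctxtypes implementation):

```go
package main

import (
	"context"
	"fmt"
)

// comment is a stand-in for the ctxtypes comment carrier used in the hunk:
// a typed key/value bag attached to the context instead of a map stored
// under a package-level string key.
type comment map[string]string

func (c comment) Set(k, v string) { c[k] = v }

// commentCtxKey is an unexported key type, so other packages cannot collide.
type commentCtxKey struct{}

func newContextWithComment(ctx context.Context, c comment) context.Context {
	return context.WithValue(ctx, commentCtxKey{}, c)
}

func commentFromContext(ctx context.Context) comment {
	if c, ok := ctx.Value(commentCtxKey{}).(comment); ok {
		return c
	}
	return comment{}
}

func main() {
	ctx := context.Background()
	// Same shape as the rule evaluation path above: annotate the context
	// with rule metadata before running the query.
	c := commentFromContext(ctx)
	c.Set("rule_id", "test-rule") // placeholder ID
	c.Set("auth_type", "internal")
	ctx = newContextWithComment(ctx, c)

	fmt.Println(commentFromContext(ctx)) // map[auth_type:internal rule_id:test-rule]
}
```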
Some files were not shown because too many files have changed in this diff.