Compare commits

..

32 Commits

Author SHA1 Message Date
Yunus M  abcfa880bb  chore: address review comments  2025-08-06 13:38:58 +05:30
Yunus M  7c7e496f2f  chore: fix type errors  2025-08-06 13:25:28 +05:30
Yunus M  b3551bf140  feat: handle save views  2025-08-06 13:23:15 +05:30
Yunus M  7363cf43b1  feat: update attribute keys api to accept meter as datasource  2025-08-06 13:23:15 +05:30
Yunus M  f515b077bf  feat: update attribute keys api to accept signal source  2025-08-06 13:23:15 +05:30
Yunus M  07659f91c3  feat: integrate meter api changes  2025-08-06 13:23:11 +05:30
Yunus M  0595c526a7  feat: remove quick filters  2025-08-06 13:19:57 +05:30
Yunus M  8509146589  feat: meter explorer - initial commit  2025-08-06 13:18:24 +05:30
Vikrant Gupta  aedc9353d2  Merge branch 'main' into feat/telemetry-meter  2025-08-06 13:03:21 +05:30
vikrantgupta25  436fcb7453  feat(telemetrymeter): use meter aggregate keys  2025-08-06 13:02:45 +05:30
vikrantgupta25  c3238d761a  feat(telemetrymeter): add the statement builder for the ranged cache queries  2025-08-05 18:03:04 +05:30
vikrantgupta25  855b381160  feat(telemetrymeter): incorporate the new changes to stmnt builder  2025-08-05 15:02:49 +05:30
Vikrant Gupta  0b97a6764e  Merge branch 'main' into feat/telemetry-meter  2025-08-05 14:59:54 +05:30
Vikrant Gupta  f069adb195  Merge branch 'main' into feat/telemetry-meter  2025-08-04 16:54:18 +05:30
vikrantgupta25  e642ec9427  feat(telemetrymeter): added quick filters for meter explorer  2025-08-04 16:49:22 +05:30
vikrantgupta25  26e2e14a7f  feat(telemetrymeter): better naming for source in metadata  2025-08-04 03:00:06 +05:30
vikrantgupta25  e522817df5  feat(telemetrymeter): introduce source for query  2025-08-04 02:49:09 +05:30
Vikrant Gupta  8370deaf35  Merge branch 'main' into feat/telemetry-meter  2025-08-03 18:02:30 +05:30
vikrantgupta25  0a71e0f0b4  feat(telemetrymeter): cleanup the types  2025-08-03 01:56:09 +05:30
vikrantgupta25  3e41397ef7  feat(telemetrymeter): deprecate the signal and use aggregation instead  2025-08-03 01:38:04 +05:30
vikrantgupta25  a193794403  feat(telemetrymeter): deprecate the signal and use aggregation instead  2025-08-03 01:36:50 +05:30
vikrantgupta25  9158b25d4d  feat(telemetrymeter): deprecate the signal and use aggregation instead  2025-08-03 01:32:23 +05:30
vikrantgupta25  254c7f8396  feat(telemetrymeter): metadata changes and aggregate attribute changes  2025-08-02 16:36:08 +05:30
vikrantgupta25  240ce72c9a  feat(telemetrymeter): metadata changes and aggregate attribute changes  2025-08-02 14:41:01 +05:30
vikrantgupta25  712fa3e041  feat(telemetrymeter): step interval improvements  2025-07-31 19:02:21 +05:30
vikrantgupta25  27305e6cc6  feat(telemetry/meter): improve error messages  2025-07-31 17:25:57 +05:30
vikrantgupta25  bd762d02e3  feat(telemetry/meter): test query range API fixes  2025-07-31 16:45:48 +05:30
Vikrant Gupta  73bc95a56a  Merge branch 'main' into feat/telemetry-meter  2025-07-31 16:04:24 +05:30
vikrantgupta25  540ad965db  feat(telemetry/meter): fix stmnt builder tests  2025-07-31 13:05:35 +05:30
Vikrant Gupta  f4cf7eb92e  Merge branch 'main' into feat/telemetry-meter  2025-07-31 13:04:40 +05:30
vikrantgupta25  e7a5266cd3  feat(telemetry/meter): added metadata setup for meter  2025-07-31 01:41:55 +05:30
vikrantgupta25  71e17a760c  feat(telemetry/meter): added base setup for telemetry meter signal  2025-07-31 00:10:10 +05:30
128 changed files with 913 additions and 4110 deletions

View File

@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.0
image: signoz/signoz-schema-migrator:v0.128.2
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.0
image: signoz/signoz-schema-migrator:v0.128.2
container_name: schema-migrator-async
command:
- async

View File

@@ -1,29 +0,0 @@
services:
signoz-otel-collector:
image: signoz/signoz-otel-collector:v0.128.2
container_name: signoz-otel-collector-dev
command:
- --config=/etc/otel-collector-config.yaml
- --feature-gates=-pkg.translator.prometheus.NormalizeName
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
- LOW_CARDINAL_EXCEPTION_GROUPING=false
ports:
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
- "13133:13133" # health check extension
healthcheck:
test:
- CMD
- wget
- --spider
- -q
- localhost:13133
interval: 30s
timeout: 5s
retries: 3
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"

View File

@@ -1,96 +0,0 @@
receivers:
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-collector
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
timeout: 2s
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
enable_exp_histogram: true
dimensions:
- name: service.namespace
default: default
- name: deployment.environment
default: default
# This is added to ensure the uniqueness of the timeseries
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
clickhousetraces:
datasource: tcp://host.docker.internal:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
signozclickhousemetrics:
dsn: tcp://host.docker.internal:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://host.docker.internal:9000/signoz_logs
timeout: 10s
use_new_schema: true
service:
telemetry:
logs:
encoding: json
extensions:
- health_check
- pprof
pipelines:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]

View File

@@ -61,17 +61,6 @@ devenv-postgres: ## Run postgres in devenv
@cd .devenv/docker/postgres; \
docker compose -f compose.yaml up -d
.PHONY: devenv-signoz-otel-collector
devenv-signoz-otel-collector: ## Run signoz-otel-collector in devenv (requires clickhouse to be running)
@cd .devenv/docker/signoz-otel-collector; \
docker compose -f compose.yaml up -d
.PHONY: devenv-up
devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhouse and signoz-otel-collector for local development
@echo "Development environment is ready!"
@echo " - ClickHouse: http://localhost:8123"
@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"
##############################################################
# go commands
##############################################################

View File

@@ -121,8 +121,6 @@ telemetrystore:
timeout_before_checking_execution_speed: 0
max_bytes_to_read: 0
max_result_rows: 0
ignore_data_skipping_indices: ""
secondary_indices_enable_bulk_filtering: false
##################### Prometheus #####################
prometheus:

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.92.1
image: signoz/signoz:v0.91.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -207,7 +207,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.0
image: signoz/signoz-otel-collector:v0.128.2
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -231,7 +231,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.0
image: signoz/signoz-schema-migrator:v0.128.2
deploy:
restart_policy:
condition: on-failure

View File

@@ -115,7 +115,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.92.1
image: signoz/signoz:v0.91.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -148,7 +148,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.0
image: signoz/signoz-otel-collector:v0.128.2
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -174,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.0
image: signoz/signoz-schema-migrator:v0.128.2
deploy:
restart_policy:
condition: on-failure

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.92.1}
image: signoz/signoz:${VERSION:-v0.91.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -211,7 +211,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -237,7 +237,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-sync
command:
- sync
@@ -248,7 +248,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.92.1}
image: signoz/signoz:${VERSION:-v0.91.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -143,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.0}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -165,7 +165,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-sync
command:
- sync
@@ -177,7 +177,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.0}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2}
container_name: schema-migrator-async
command:
- async

View File

@@ -44,35 +44,20 @@ Before diving in, make sure you have these tools installed:
SigNoz has three main components: Clickhouse, Backend, and Frontend. Let's set them up one by one.
### 1. Setting up ClickHouse
### 1. Setting up Clickhouse
First, we need to get ClickHouse running:
First, we need to get Clickhouse running:
```bash
make devenv-clickhouse
```
This command:
- Starts ClickHouse in a single-shard, single-replica cluster
- Starts Clickhouse in a single-shard, single-replica cluster
- Sets up Zookeeper
- Runs the latest schema migrations
### 2. Setting up SigNoz OpenTelemetry Collector
Next, start the OpenTelemetry Collector to receive telemetry data:
```bash
make devenv-signoz-otel-collector
```
This command:
- Starts the SigNoz OpenTelemetry Collector
- Listens on port 4317 (gRPC) and 4318 (HTTP) for incoming telemetry data
- Forwards data to ClickHouse for storage
> 💡 **Quick Setup**: Use `make devenv-up` to start both ClickHouse and OTel Collector together
### 3. Starting the Backend
### 2. Starting the Backend
1. Run the backend server:
```bash
@@ -88,7 +73,7 @@ This command:
> 💡 **Tip**: The API server runs at `http://localhost:8080/` by default
### 4. Setting up the Frontend
### 3. Setting up the Frontend
1. Navigate to the frontend directory:
```bash
@@ -113,25 +98,3 @@ This command:
> 💡 **Tip**: `yarn dev` will automatically rebuild when you make changes to the code
Now you're all set to start developing! Happy coding! 🎉
## Verifying Your Setup
To verify everything is working correctly:
1. **Check ClickHouse**: `curl http://localhost:8123/ping` (should return "Ok.")
2. **Check OTel Collector**: `curl http://localhost:13133` (should return health status)
3. **Check Backend**: `curl http://localhost:8080/api/v1/health` (should return `{"status":"ok"}`)
4. **Check Frontend**: Open `http://localhost:3301` in your browser
## How to send test data?
You can now send telemetry data to your local SigNoz instance:
- **OTLP gRPC**: `localhost:4317`
- **OTLP HTTP**: `localhost:4318`
For example, using `curl` to send a test trace:
```bash
curl -X POST http://localhost:4318/v1/traces \
-H "Content-Type: application/json" \
-d '{"resourceSpans":[{"resource":{"attributes":[{"key":"service.name","value":{"stringValue":"test-service"}}]},"scopeSpans":[{"spans":[{"traceId":"12345678901234567890123456789012","spanId":"1234567890123456","name":"test-span","startTimeUnixNano":"1609459200000000000","endTimeUnixNano":"1609459201000000000"}]}]}]}'
```

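The curl call above can equally be expressed in TypeScript. A minimal sketch, assuming the collector's default OTLP HTTP receiver on `localhost:4318` and the same hand-built payload; real instrumentation would use the official OpenTelemetry JS SDK instead:

```typescript
// Sketch: POST a single test span to a local OTLP HTTP receiver.
// The endpoint and payload shape mirror the curl example above.
const payload = {
  resourceSpans: [
    {
      resource: {
        attributes: [
          { key: 'service.name', value: { stringValue: 'test-service' } },
        ],
      },
      scopeSpans: [
        {
          spans: [
            {
              traceId: '12345678901234567890123456789012',
              spanId: '1234567890123456',
              name: 'test-span',
              startTimeUnixNano: '1609459200000000000',
              endTimeUnixNano: '1609459201000000000',
            },
          ],
        },
      ],
    },
  ],
};

fetch('http://localhost:4318/v1/traces', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(payload),
}).then((res) => console.log('collector responded with', res.status));
```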
View File

@@ -8,15 +8,14 @@ import {
export const getValueSuggestions = (
props: QueryKeyValueRequestProps,
): Promise<AxiosResponse<QueryKeyValueSuggestionsResponseProps>> => {
const { signal, key, searchText, signalSource, metricName } = props;
const { signal, key, searchText, signalSource } = props;
const encodedSignal = encodeURIComponent(signal);
const encodedKey = encodeURIComponent(key);
const encodedMetricName = encodeURIComponent(metricName || '');
const encodedSearchText = encodeURIComponent(searchText);
const encodedSource = encodeURIComponent(signalSource || '');
return axios.get(
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&source=${encodedSource}`,
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&source=${encodedSource}`,
);
};

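For reference, a usage sketch of the helper with the `metricName`-free signature shown above. The argument values are illustrative, and the import is omitted because the module path is not visible in this compare view:

```typescript
// getValueSuggestions is the helper defined in the diff above; import it
// from wherever it lives in the codebase.
getValueSuggestions({
  signal: 'metrics',
  key: 'service.name',
  searchText: 'front',
  signalSource: 'meter',
}).then((response) => {
  // response: AxiosResponse<QueryKeyValueSuggestionsResponseProps>
  console.log(response.data);
});
```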
View File

@@ -385,7 +385,7 @@ export function convertV5ResponseToLegacy(
data: {
resultType: 'scalar',
result: webTables,
warnings: v5Data?.data?.warning || [],
warnings: v5Data?.data?.warnings || [],
},
warning: v5Data?.warning || undefined,
},

View File

@@ -127,7 +127,6 @@
box-sizing: border-box;
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.6);
font-family: 'Space Mono', monospace !important;
color: var(--bg-vanilla-100) !important;
ul {
width: 100% !important;
@@ -163,6 +162,7 @@
overflow: hidden;
font-family: 'Space Mono', monospace !important;
color: var(--bg-vanilla-100) !important;
.cm-completionIcon {
display: none !important;
@@ -331,14 +331,13 @@
ul {
li {
color: var(--bg-ink-300) !important;
&:hover {
background: var(--bg-vanilla-300) !important;
}
&[aria-selected='true'] {
color: var(--bg-ink-500) !important;
background: var(--bg-vanilla-300) !important;
font-weight: 600 !important;
}
}
}

View File

@@ -271,13 +271,16 @@
ul {
li {
color: var(--bg-ink-300) !important;
&:hover {
background-color: var(--bg-vanilla-300) !important;
color: var(--bg-ink-500) !important;
font-weight: 600;
}
&:hover,
&[aria-selected='true'] {
background: var(--bg-vanilla-300) !important;
color: var(--bg-ink-500) !important;
font-weight: 600 !important;
font-weight: 600;
}
}
}

View File

@@ -585,7 +585,7 @@
&:hover,
&[aria-selected='true'] {
background-color: var(--bg-vanilla-300) !important;
font-weight: 600 !important;
font-weight: 600;
}
}
}

View File

@@ -155,7 +155,6 @@ function QuerySearch({
// Reference to the editor view for programmatic autocompletion
const editorRef = useRef<EditorView | null>(null);
const lastKeyRef = useRef<string>('');
const lastFetchedKeyRef = useRef<string>('');
const lastValueRef = useRef<string>('');
const isMountedRef = useRef<boolean>(true);
@@ -213,9 +212,6 @@ function QuerySearch({
setKeySuggestions([]);
return;
}
lastFetchedKeyRef.current = searchText || '';
const response = await getKeySuggestions({
signal: dataSource,
searchText: searchText || '',
@@ -383,7 +379,6 @@ function QuerySearch({
searchText: sanitizedSearchText,
signal: dataSource,
signalSource: signalSource as 'meter' | '',
metricName: debouncedMetricName ?? undefined,
});
// Skip updates if component unmounted or key changed
@@ -475,7 +470,6 @@ function QuerySearch({
activeKey,
dataSource,
isLoadingSuggestions,
debouncedMetricName,
signalSource,
toggleSuggestions,
],
@@ -821,7 +815,7 @@ function QuerySearch({
option.label.toLowerCase().includes(searchText),
);
if (options.length === 0 && lastFetchedKeyRef.current !== searchText) {
if (options.length === 0 && lastKeyRef.current !== searchText) {
debouncedFetchKeySuggestions(searchText);
}

View File

@@ -54,7 +54,6 @@ const quickFiltersListURL = `${BASE_URL}/api/v1/orgs/me/filters/${SIGNAL}`;
const saveQuickFiltersURL = `${BASE_URL}/api/v1/orgs/me/filters`;
const quickFiltersSuggestionsURL = `${BASE_URL}/api/v3/filter_suggestions`;
const quickFiltersAttributeValuesURL = `${BASE_URL}/api/v3/autocomplete/attribute_values`;
const fieldsValuesURL = `${BASE_URL}/api/v1/fields/values`;
const FILTER_OS_DESCRIPTION = 'os.description';
const FILTER_K8S_DEPLOYMENT_NAME = 'k8s.deployment.name';
@@ -78,11 +77,7 @@ const setupServer = (): void => {
putHandler(await req.json());
return res(ctx.status(200), ctx.json({}));
}),
rest.get(quickFiltersAttributeValuesURL, (req, res, ctx) =>
res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
),
rest.get(fieldsValuesURL, (req, res, ctx) =>
rest.get(quickFiltersAttributeValuesURL, (_, res, ctx) =>
res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
),
);

View File

@@ -23,7 +23,7 @@ export enum SignalType {
LOGS = 'logs',
API_MONITORING = 'api_monitoring',
EXCEPTIONS = 'exceptions',
METER_EXPLORER = 'meter',
METER_EXPLORER = 'meter_explorer',
}
export interface IQuickFiltersConfig {
@@ -54,5 +54,5 @@ export enum QuickFiltersSource {
TRACES_EXPLORER = 'traces-explorer',
API_MONITORING = 'api-monitoring',
EXCEPTIONS = 'exceptions',
METER_EXPLORER = 'meter',
METER_EXPLORER = 'meter-explorer',
}

View File

@@ -6,7 +6,6 @@ export const GlobalShortcuts = {
NavigateToAlerts: 'a+shift',
NavigateToExceptions: 'e+shift',
NavigateToMessagingQueues: 'm+shift',
ToggleSidebar: 'b+shift',
};
export const GlobalShortcutsName = {
@@ -17,7 +16,6 @@ export const GlobalShortcutsName = {
NavigateToAlerts: 'shift+a',
NavigateToExceptions: 'shift+e',
NavigateToMessagingQueues: 'shift+m',
ToggleSidebar: 'shift+b',
};
export const GlobalShortcutsDescription = {
@@ -28,5 +26,4 @@ export const GlobalShortcutsDescription = {
NavigateToAlerts: 'Navigate to alerts page',
NavigateToExceptions: 'Navigate to Exceptions page',
NavigateToMessagingQueues: 'Navigate to Messaging Queues page',
ToggleSidebar: 'Toggle sidebar visibility',
};

View File

@@ -1,176 +0,0 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import logEvent from 'api/common/logEvent';
import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts';
import { USER_PREFERENCES } from 'constants/userPreferences';
import {
KeyboardHotkeysProvider,
useKeyboardHotkeys,
} from 'hooks/hotkeys/useKeyboardHotkeys';
import { QueryClient, QueryClientProvider } from 'react-query';
// Mock dependencies
jest.mock('api/common/logEvent', () => jest.fn());
// Mock the AppContext
const mockUpdateUserPreferenceInContext = jest.fn();
const SHIFT_B_KEYBOARD_SHORTCUT = '{Shift>}b{/Shift}';
jest.mock('providers/App/App', () => ({
useAppContext: jest.fn(() => ({
userPreferences: [
{
name: USER_PREFERENCES.SIDENAV_PINNED,
value: false,
},
],
updateUserPreferenceInContext: mockUpdateUserPreferenceInContext,
})),
}));
function TestComponent({
mockHandleShortcut,
}: {
mockHandleShortcut: () => void;
}): JSX.Element {
const { registerShortcut } = useKeyboardHotkeys();
registerShortcut(GlobalShortcuts.ToggleSidebar, mockHandleShortcut);
return <div data-testid="test">Test</div>;
}
describe('Sidebar Toggle Shortcut', () => {
let queryClient: QueryClient;
beforeEach(() => {
queryClient = new QueryClient({
defaultOptions: {
queries: {
retry: false,
},
mutations: {
retry: false,
},
},
});
jest.clearAllMocks();
});
afterEach(() => {
jest.clearAllMocks();
});
describe('Global Shortcuts Constants', () => {
it('should have the correct shortcut key combination', () => {
expect(GlobalShortcuts.ToggleSidebar).toBe('b+shift');
});
});
describe('Keyboard Shortcut Registration', () => {
it('should register the sidebar toggle shortcut correctly', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn();
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent mockHandleShortcut={mockHandleShortcut} />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
// Trigger the shortcut
await user.keyboard(SHIFT_B_KEYBOARD_SHORTCUT);
expect(mockHandleShortcut).toHaveBeenCalled();
});
it('should not trigger shortcut in input fields', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn();
function TestComponent(): JSX.Element {
const { registerShortcut } = useKeyboardHotkeys();
registerShortcut(GlobalShortcuts.ToggleSidebar, mockHandleShortcut);
return (
<div>
<input data-testid="input-field" />
<div data-testid="test">Test</div>
</div>
);
}
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
// Focus on input field
const inputField = screen.getByTestId('input-field');
await user.click(inputField);
// Try to trigger shortcut while focused on input
await user.keyboard('{Shift>}b{/Shift}');
// Should not trigger the shortcut
expect(mockHandleShortcut).not.toHaveBeenCalled();
});
});
describe('Sidebar Toggle Functionality', () => {
it('should log the toggle event with correct parameters', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn(() => {
logEvent('Global Shortcut: Sidebar Toggle', {
previousState: false,
newState: true,
});
});
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent mockHandleShortcut={mockHandleShortcut} />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
await user.keyboard(SHIFT_B_KEYBOARD_SHORTCUT);
expect(logEvent).toHaveBeenCalledWith('Global Shortcut: Sidebar Toggle', {
previousState: false,
newState: true,
});
});
it('should update user preference in context', async () => {
const user = userEvent.setup();
const mockHandleShortcut = jest.fn(() => {
const save = {
name: USER_PREFERENCES.SIDENAV_PINNED,
value: true,
};
mockUpdateUserPreferenceInContext(save);
});
render(
<QueryClientProvider client={queryClient}>
<KeyboardHotkeysProvider>
<TestComponent mockHandleShortcut={mockHandleShortcut} />
</KeyboardHotkeysProvider>
</QueryClientProvider>,
);
await user.keyboard(SHIFT_B_KEYBOARD_SHORTCUT);
expect(mockUpdateUserPreferenceInContext).toHaveBeenCalledWith({
name: USER_PREFERENCES.SIDENAV_PINNED,
value: true,
});
});
});
});

View File

@@ -10,10 +10,8 @@ import setLocalStorageApi from 'api/browser/localstorage/set';
import getChangelogByVersion from 'api/changelog/getChangelogByVersion';
import logEvent from 'api/common/logEvent';
import manageCreditCardApi from 'api/v1/portal/create';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import getUserLatestVersion from 'api/v1/version/getLatestVersion';
import getUserVersion from 'api/v1/version/getVersion';
import { AxiosError } from 'axios';
import cx from 'classnames';
import ChangelogModal from 'components/ChangelogModal/ChangelogModal';
import ChatSupportGateway from 'components/ChatSupportGateway/ChatSupportGateway';
@@ -24,12 +22,10 @@ import { Events } from 'constants/events';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import ROUTES from 'constants/routes';
import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts';
import { USER_PREFERENCES } from 'constants/userPreferences';
import SideNav from 'container/SideNav';
import TopNav from 'container/TopNav';
import dayjs from 'dayjs';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
@@ -72,10 +68,8 @@ import {
LicensePlatform,
LicenseState,
} from 'types/api/licensesV3/getActive';
import { UserPreference } from 'types/api/preferences/preference';
import AppReducer from 'types/reducer/app';
import { USER_ROLES } from 'types/roles';
import { showErrorNotification } from 'utils/error';
import { eventEmitter } from 'utils/getEventEmitter';
import {
getFormattedDate,
@@ -668,85 +662,10 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
</div>
);
const sideNavPinnedPreference = userPreferences?.find(
const sideNavPinned = userPreferences?.find(
(preference) => preference.name === USER_PREFERENCES.SIDENAV_PINNED,
)?.value as boolean;
// Add loading state to prevent layout shift during initial load
const [isSidebarLoaded, setIsSidebarLoaded] = useState(false);
// Get sidebar state from localStorage as fallback until preferences are loaded
const getSidebarStateFromLocalStorage = useCallback((): boolean => {
try {
const storedValue = getLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED);
return storedValue === 'true';
} catch {
return false;
}
}, []);
// Set sidebar as loaded after user preferences are fetched
useEffect(() => {
if (userPreferences !== null) {
setIsSidebarLoaded(true);
}
}, [userPreferences]);
// Use localStorage value as fallback until preferences are loaded
const isSideNavPinned = isSidebarLoaded
? sideNavPinnedPreference
: getSidebarStateFromLocalStorage();
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
const { updateUserPreferenceInContext } = useAppContext();
const { mutate: updateUserPreferenceMutation } = useMutation(
updateUserPreference,
{
onError: (error) => {
showErrorNotification(notifications, error as AxiosError);
},
},
);
const handleToggleSidebar = useCallback((): void => {
const newState = !isSideNavPinned;
logEvent('Global Shortcut: Sidebar Toggle', {
previousState: isSideNavPinned,
newState,
});
// Save to localStorage immediately for instant feedback
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, newState.toString());
// Update the context immediately
const save = {
name: USER_PREFERENCES.SIDENAV_PINNED,
value: newState,
};
updateUserPreferenceInContext(save as UserPreference);
// Make the API call in the background
updateUserPreferenceMutation({
name: USER_PREFERENCES.SIDENAV_PINNED,
value: newState,
});
}, [
isSideNavPinned,
updateUserPreferenceInContext,
updateUserPreferenceMutation,
]);
// Register the sidebar toggle shortcut
useEffect(() => {
registerShortcut(GlobalShortcuts.ToggleSidebar, handleToggleSidebar);
return (): void => {
deregisterShortcut(GlobalShortcuts.ToggleSidebar);
};
}, [registerShortcut, deregisterShortcut, handleToggleSidebar]);
const SHOW_TRIAL_EXPIRY_BANNER =
showTrialExpiryBanner && !showPaymentFailedWarning;
const SHOW_WORKSPACE_RESTRICTED_BANNER = showWorkspaceRestricted;
@@ -820,14 +739,14 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
className={cx(
'app-layout',
isDarkMode ? 'darkMode dark' : 'lightMode',
isSideNavPinned ? 'side-nav-pinned' : '',
sideNavPinned ? 'side-nav-pinned' : '',
SHOW_WORKSPACE_RESTRICTED_BANNER ? 'isWorkspaceRestricted' : '',
SHOW_TRIAL_EXPIRY_BANNER ? 'isTrialExpired' : '',
SHOW_PAYMENT_FAILED_BANNER ? 'isPaymentFailed' : '',
)}
>
{isToDisplayLayout && !renderFullScreen && (
<SideNav isPinned={isSideNavPinned} />
<SideNav isPinned={sideNavPinned} />
)}
<div
className={cx('app-content', {

View File

@@ -28,7 +28,6 @@ import getTimeString from 'lib/getTimeString';
import history from 'lib/history';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { useTimezone } from 'providers/Timezone';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
@@ -314,7 +313,7 @@ function ChartPreview({
featureFlags?.find((flag) => flag.name === FeatureKeys.ANOMALY_DETECTION)
?.active || false;
const isWarning = !isEmpty(queryResponse.data?.warning);
const isWarning = !!queryResponse.data?.warning?.message;
return (
<div className="alert-chart-container" ref={graphRef}>
<ChartContainer>

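The two `isWarning` variants above behave differently for warning objects that exist but carry no message. A small sketch of the difference, assuming a minimal `Warning` shape (the real type lives in `types/api`):

```typescript
import { isEmpty } from 'lodash-es';

// Assumed minimal shape for illustration.
type Warning = { message?: string };

const emptyWarning: Warning = { message: '' };

// isEmpty only checks for own enumerable keys, so an object with an empty
// message still counts as non-empty and would flag a warning.
console.log(!isEmpty(emptyWarning)); // true

// Checking the message directly flags a warning only when there is text.
console.log(Boolean(emptyWarning.message)); // false
```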
View File

@@ -240,18 +240,8 @@ function FormAlertRules({
const queryData = currentQuery.builder.queryData[index];
const updatedFunctions = updateFunctions(queryData);
// Only update if functions actually changed to avoid resetting aggregateAttribute
const currentFunctions = queryData.functions || [];
const functionsChanged = !isEqual(currentFunctions, updatedFunctions);
if (functionsChanged) {
const updatedQueryData = {
...queryData,
functions: updatedFunctions,
};
handleSetQueryData(index, updatedQueryData);
}
queryData.functions = updatedFunctions;
handleSetQueryData(index, queryData);
}
};

View File

@@ -301,7 +301,7 @@ function GridCardGraph({
widget={widget}
queryResponse={queryResponse}
errorMessage={errorMessage}
isWarning={!isEmpty(queryResponse.data?.warning)}
isWarning={!!queryResponse.data?.warning?.message}
version={version}
threshold={threshold}
headerMenuList={menuList}

View File

@@ -73,8 +73,6 @@ export default function TableRow({
{tableColumns.map((column) => {
if (!column.render) return <td>Empty</td>;
if (!column.key) return null;
const element: ColumnTypeRender<Record<string, unknown>> = column.render(
log[column.key as keyof Record<string, unknown>],
log,
@@ -99,7 +97,6 @@ export default function TableRow({
fontSize={fontSize}
columnKey={column.key as string}
onClick={handleShowLogDetails}
className={column.key as string}
>
{cloneElement(children, props)}
</TableCellStyled>

View File

@@ -136,7 +136,7 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
key={column.key}
fontSize={tableViewProps?.fontSize}
// eslint-disable-next-line react/jsx-props-no-spreading
{...(isDragColumn && { className: `dragHandler ${column.key}` })}
{...(isDragColumn && { className: 'dragHandler' })}
columnKey={column.key as string}
>
{(column.title as string).replace(/^\w/, (c) => c.toUpperCase())}

View File

@@ -1,5 +1,4 @@
import { TelemetryFieldKey } from 'api/v5/v5';
import { isEmpty } from 'lodash-es';
import { IField } from 'types/api/logs/fields';
import {
IBuilderQuery,
@@ -9,13 +8,11 @@ import {
export const convertKeysToColumnFields = (
keys: TelemetryFieldKey[],
): IField[] =>
keys
.filter((item) => !isEmpty(item.name))
.map((item) => ({
dataType: item.fieldDataType ?? '',
name: item.name,
type: item.fieldContext ?? '',
}));
keys.map((item) => ({
dataType: item.fieldDataType ?? '',
name: item.name,
type: item.fieldContext ?? '',
}));
/**
* Determines if a query represents a trace-to-logs navigation
* by checking for the presence of a trace_id filter.

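A quick usage sketch of the mapper: the plain-`map` variant no longer filters out keys with empty names, and undefined `fieldDataType`/`fieldContext` fall back to `''`. The sample input is made up, and the mapper's import path is illustrative since it is not shown in this view:

```typescript
import { TelemetryFieldKey } from 'api/v5/v5';
// Illustrative import; the mapper's actual module path is not shown here.
import { convertKeysToColumnFields } from './utils';

const keys = [
  { name: 'service.name', fieldDataType: 'string', fieldContext: 'resource' },
  { name: '' },
] as TelemetryFieldKey[];

const fields = convertKeysToColumnFields(keys);
// [
//   { dataType: 'string', name: 'service.name', type: 'resource' },
//   { dataType: '', name: '', type: '' },   // empty name is kept
// ]
```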
View File

@@ -0,0 +1,220 @@
import { render, screen } from '@testing-library/react';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import * as useOptionsMenuHooks from 'container/OptionsMenu';
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
import * as appContextHooks from 'providers/App/App';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import * as timezoneHooks from 'providers/Timezone';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { useSearchParams } from 'react-router-dom-v5-compat';
import store from 'store';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import { DataSource } from 'types/common/queryBuilder';
import Explorer from '../Explorer';
const mockSetSearchParams = jest.fn();
const queryClient = new QueryClient();
const mockUpdateAllQueriesOperators = jest.fn();
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
currentQuery: initialQueriesMap[DataSource.METRICS],
resetQuery: jest.fn(),
redirectWithQueryBuilderData: jest.fn(),
isStagedQueryUpdated: jest.fn(),
handleSetQueryData: jest.fn(),
handleSetFormulaData: jest.fn(),
handleSetQueryItemData: jest.fn(),
handleSetConfig: jest.fn(),
removeQueryBuilderEntityByIndex: jest.fn(),
removeQueryTypeItemByIndex: jest.fn(),
isDefaultQuery: jest.fn(),
};
jest.mock('react-router-dom-v5-compat', () => {
const actual = jest.requireActual('react-router-dom-v5-compat');
return {
...actual,
useSearchParams: jest.fn(),
useNavigationType: (): any => 'PUSH',
};
});
jest.mock('hooks/useDimensions', () => ({
useResizeObserver: (): { width: number; height: number } => ({
width: 800,
height: 400,
}),
}));
jest.mock('react-query', () => ({
...jest.requireActual('react-query'),
useQueryClient: jest.fn().mockReturnValue({
getQueriesData: jest.fn(),
}),
}));
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
}),
}));
jest.mock('hooks/useNotifications', () => ({
useNotifications: (): any => ({
notifications: {
error: jest.fn(),
},
}),
}));
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useSelector: (): any => ({
globalTime: {
selectedTime: {
startTime: 1713734400000,
endTime: 1713738000000,
},
maxTime: 1713738000000,
minTime: 1713734400000,
},
}),
}));
jest.spyOn(useUpdateDashboardHooks, 'useUpdateDashboard').mockReturnValue({
mutate: jest.fn(),
isLoading: false,
} as any);
jest.spyOn(useOptionsMenuHooks, 'useOptionsMenu').mockReturnValue({
options: {
selectColumns: [],
},
} as any);
jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
timezone: {
offset: 0,
},
browserTimezone: {
offset: 0,
},
} as any);
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
user: {
role: 'admin',
},
activeLicenseV3: {
event_queue: {
created_at: '0',
event: LicenseEvent.NO_EVENT,
scheduled_at: '0',
status: '',
updated_at: '0',
},
license: {
license_key: 'test-license-key',
license_type: 'trial',
org_id: 'test-org-id',
plan_id: 'test-plan-id',
plan_name: 'test-plan-name',
plan_type: 'trial',
plan_version: 'test-plan-version',
},
},
} as any);
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
describe('Explorer', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should render Explorer query builder with metrics datasource selected', () => {
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
stagedQuery: initialQueriesMap[DataSource.TRACES],
} as any);
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
initialQueriesMap[DataSource.METRICS],
PANEL_TYPES.TIME_SERIES,
DataSource.METRICS,
);
});
it('should enable one chart per query toggle when oneChartPerQuery=true in URL', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).toBeChecked();
});
it('should disable one chart per query toggle when oneChartPerQuery=false in URL', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
mockSetSearchParams,
]);
render(
<QueryClientProvider client={queryClient}>
<MemoryRouter>
<Provider store={store}>
<ErrorModalProvider>
<Explorer />
</ErrorModalProvider>
</Provider>
</MemoryRouter>
</QueryClientProvider>,
);
const toggle = screen.getByRole('switch');
expect(toggle).not.toBeChecked();
});
});

View File

@@ -13,7 +13,6 @@ import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
@@ -136,7 +135,7 @@ function Explorer(): JSX.Element {
/>
</div>
<div className="explore-header-right-actions">
{!isEmpty(warning) && <WarningPopover warningData={warning} />}
{warning?.message && <WarningPopover warningData={warning} />}
<DateTimeSelector showAutoRefresh />
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery(true, true)}

View File

@@ -1,6 +1,5 @@
import { Temporality } from 'api/metricsExplorer/getMetricDetails';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
import { initialQueriesMap } from 'constants/queryBuilder';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -111,15 +110,6 @@ export function getMetricDetailsQuery(
isJSON: false,
dataType: DataTypes.String,
},
aggregations: [
{
metricName,
timeAggregation: timeAggregation as TimeAggregation,
spaceAggregation: spaceAggregation as SpaceAggregation,
reduceTo: 'avg',
temporality: '',
},
],
aggregateOperator,
timeAggregation,
spaceAggregation,

View File

@@ -1,7 +1,6 @@
import './MySettings.styles.scss';
import { Radio, RadioChangeEvent, Switch, Tag } from 'antd';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import { AxiosError } from 'axios';
@@ -110,9 +109,6 @@ function MySettings(): JSX.Element {
// Optimistically update the UI
setSideNavPinned(checked);
// Save to localStorage immediately for instant feedback
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, checked.toString());
// Update the context immediately
const save = {
name: USER_PREFERENCES.SIDENAV_PINNED,
@@ -134,8 +130,6 @@ function MySettings(): JSX.Element {
name: USER_PREFERENCES.SIDENAV_PINNED,
value: !checked,
} as UserPreference);
// Also revert localStorage
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, (!checked).toString());
showErrorNotification(notifications, error as AxiosError);
},
},

View File

@@ -6,7 +6,6 @@ import { Card } from 'container/GridCardLayout/styles';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { isEmpty } from 'lodash-es';
import { memo } from 'react';
import { Warning } from 'types/api';
@@ -39,7 +38,7 @@ function WidgetGraph({
<div className="header">
<div className="header-left">
<PlotTag queryType={currentQuery.queryType} panelType={selectedGraph} />
{!isEmpty(queryResponse.data?.warning) && (
{queryResponse.data?.warning?.message && (
<WarningPopover warningData={queryResponse.data?.warning as Warning} />
)}
</div>

View File

@@ -6,7 +6,6 @@ import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKey
import useDebounce from 'hooks/useDebounce';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { has } from 'lodash-es';
import { AllTraceFilterKeyValue } from 'pages/TracesExplorer/Filter/filterUtils';
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useState } from 'react';
@@ -453,9 +452,7 @@ const useOptionsMenu = ({
() => ({
addColumn: {
isFetching: isSearchedAttributesFetchingV5,
value:
preferences?.columns.filter((item) => has(item, 'name')) ||
defaultOptionsQuery.selectColumns.filter((item) => has(item, 'name')),
value: preferences?.columns || defaultOptionsQuery.selectColumns,
options: optionsFromAttributeKeys || [],
onFocus: handleFocus,
onBlur: handleBlur,

View File

@@ -4,7 +4,7 @@ import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
export type AgregatorFilterProps = Pick<AutoCompleteProps, 'disabled'> & {
query: IBuilderQuery;
onChange: (value: BaseAutocompleteData, isEditMode?: boolean) => void;
onChange: (value: BaseAutocompleteData) => void;
defaultValue?: string;
onSelect?: (value: BaseAutocompleteData) => void;
index?: number;

View File

@@ -137,7 +137,7 @@ export const AggregatorFilter = memo(function AggregatorFilter({
if (metricData) {
// Update the aggregateAttribute with the fetched type information
onChange(metricData, true);
onChange(metricData);
}
}
}, [
@@ -223,16 +223,11 @@ export const AggregatorFilter = memo(function AggregatorFilter({
);
const handleBlur = useCallback(async () => {
if (searchText && searchText !== queryAggregation.metricName) {
if (searchText) {
const aggregateAttributes = await getResponseAttributes();
handleChangeCustomValue(searchText, aggregateAttributes);
}
}, [
getResponseAttributes,
handleChangeCustomValue,
searchText,
queryAggregation?.metricName,
]);
}, [getResponseAttributes, handleChangeCustomValue, searchText]);
const handleChange = useCallback(
(

View File

@@ -1,7 +1,7 @@
.span-details-drawer {
display: flex;
flex-direction: column;
width: 450px;
width: 330px;
border-left: 1px solid var(--bg-slate-400);
overflow-y: auto;
@@ -176,34 +176,6 @@
justify-content: center;
width: 100%;
padding: 4px 8px;
margin-right: 8px;
gap: 4px;
.tab-label {
display: flex;
align-items: center;
}
.count-badge {
display: flex;
align-items: center;
justify-content: center;
min-width: 20px;
height: 20px;
padding: 0 6px;
border-radius: 10px;
background: rgba(171, 189, 255, 0.1);
color: var(--bg-vanilla-400);
font-variant-numeric: lining-nums tabular-nums slashed-zero;
font-feature-settings: 'dlig' on, 'salt' on;
font-family: Inter;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.065px;
text-transform: uppercase;
}
}
.attributes-tab-btn:hover,

View File

@@ -54,10 +54,7 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
icon={<Bookmark size="14" />}
className="attributes-tab-btn"
>
<span className="tab-label">Attributes</span>
<span className="count-badge">
{Object.keys(span.tagMap || {}).length}
</span>
Attributes
</Button>
),
key: 'attributes',
@@ -66,8 +63,7 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
{
label: (
<Button type="text" icon={<Anvil size="14" />} className="events-tab-btn">
<span className="tab-label">Events</span>
<span className="count-badge">{span.event?.length || 0}</span>
Events
</Button>
),
key: 'events',
@@ -86,14 +82,7 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
icon={<Link2 size="14" />}
className="linked-spans-tab-btn"
>
<span className="tab-label">Links</span>
<span className="count-badge">
{
(
span.references?.filter((ref: any) => ref.refType !== 'CHILD_OF') || []
).length
}
</span>
Links
</Button>
),
key: 'linked-spans',

View File

@@ -20,7 +20,6 @@ function TimeSeriesViewContainer({
dataSource = DataSource.TRACES,
isFilterApplied,
setWarning,
setIsLoadingQueries,
}: TimeSeriesViewProps): JSX.Element {
const { stagedQuery, currentQuery, panelType } = useQueryBuilder();
@@ -84,14 +83,6 @@ function TimeSeriesViewContainer({
[data, isValidToConvertToMs],
);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
return (
<TimeSeriesView
isFilterApplied={isFilterApplied}
@@ -110,7 +101,6 @@ interface TimeSeriesViewProps {
dataSource?: DataSource;
isFilterApplied: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}
TimeSeriesViewContainer.defaultProps = {

View File

@@ -372,7 +372,7 @@ function DateTimeSelection({
})),
},
};
return encodeURIComponent(JSON.stringify(updatedCompositeQuery));
return JSON.stringify(updatedCompositeQuery);
}, [currentQuery]);
const onSelectHandler = useCallback(

View File

@@ -49,14 +49,9 @@ import { getListColumns, transformDataWithDate } from './utils';
interface ListViewProps {
isFilterApplied: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}
function ListView({
isFilterApplied,
setWarning,
setIsLoadingQueries,
}: ListViewProps): JSX.Element {
function ListView({ isFilterApplied, setWarning }: ListViewProps): JSX.Element {
const {
stagedQuery,
panelType: panelTypeFromQueryBuilder,
@@ -167,14 +162,6 @@ function ListView({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [data?.payload, data?.warning]);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
const dataLength =
data?.payload?.data?.newResult?.data?.result[0]?.list?.length;
const totalCount = useMemo(() => dataLength || 0, [dataLength]);

View File

@@ -16,10 +16,8 @@ import { GlobalReducer } from 'types/reducer/globalTime';
function TableView({
setWarning,
setIsLoadingQueries,
}: {
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}): JSX.Element {
const { stagedQuery, panelType } = useQueryBuilder();
@@ -28,7 +26,7 @@ function TableView({
GlobalReducer
>((state) => state.globalTime);
const { data, isLoading, isFetching, isError, error } = useGetQueryRange(
const { data, isLoading, isError, error } = useGetQueryRange(
{
query: stagedQuery || initialQueriesMap.traces,
graphType: panelType || PANEL_TYPES.TABLE,
@@ -51,14 +49,6 @@ function TableView({
},
);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
const queryTableData = useMemo(
() =>
data?.payload?.data?.newResult?.data?.result ||

View File

@@ -40,13 +40,11 @@ import { ActionsContainer, Container } from './styles';
interface TracesViewProps {
isFilterApplied: boolean;
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
}
function TracesView({
isFilterApplied,
setWarning,
setIsLoadingQueries,
}: TracesViewProps): JSX.Element {
const { stagedQuery, panelType } = useQueryBuilder();
const [orderBy, setOrderBy] = useState<string>('timestamp:desc');
@@ -119,14 +117,6 @@ function TracesView({
[responseData],
);
useEffect(() => {
if (isLoading || isFetching) {
setIsLoadingQueries(true);
} else {
setIsLoadingQueries(false);
}
}, [isLoading, isFetching, setIsLoadingQueries]);
useEffect(() => {
if (!isLoading && !isFetching && !isError && (tableData || []).length !== 0) {
logEvent('Traces Explorer: Data present', {

View File

@@ -212,7 +212,7 @@ export const useQueryOperations: UseQueryOperations = ({
);
const handleChangeAggregatorAttribute = useCallback(
(value: BaseAutocompleteData, isEditMode?: boolean): void => {
(value: BaseAutocompleteData): void => {
const newQuery: IBuilderQuery = {
...query,
aggregateAttribute: value,
@@ -226,32 +226,30 @@ export const useQueryOperations: UseQueryOperations = ({
handleMetricAggregateAtributeTypes(newQuery.aggregateAttribute);
}
if (!isEditMode) {
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregateOperator = MetricAggregateOperator.RATE;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregateOperator = MetricAggregateOperator.RATE;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
} else {
newQuery.timeAggregation = '';
}
newQuery.spaceAggregation = '';
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
// If not typed - time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
newQuery.spaceAggregation = MetricAggregateOperator.AVG;
} else {
newQuery.timeAggregation = '';
}
newQuery.spaceAggregation = '';
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
// If not typed - time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
newQuery.aggregateOperator = MetricAggregateOperator.AVG;
newQuery.timeAggregation = MetricAggregateOperator.AVG;
newQuery.spaceAggregation = MetricAggregateOperator.AVG;
} else {
newQuery.aggregateOperator = MetricAggregateOperator.COUNT;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
newQuery.spaceAggregation = MetricAggregateOperator.SUM;
}
newQuery.aggregateOperator = MetricAggregateOperator.COUNT;
newQuery.timeAggregation = MetricAggregateOperator.RATE;
newQuery.spaceAggregation = MetricAggregateOperator.SUM;
}
}
}
@@ -264,63 +262,61 @@ export const useQueryOperations: UseQueryOperations = ({
handleMetricAggregateAtributeTypes(newQuery.aggregateAttribute);
}
if (!isEditMode) {
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.SUM) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.RATE,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
} else if (newQuery.aggregateAttribute?.type === ATTRIBUTE_TYPES.GAUGE) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
} else {
newQuery.aggregations = [
{
timeAggregation: '',
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
},
];
}
newQuery.aggregateOperator = '';
newQuery.spaceAggregation = '';
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
// If not typed - time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else {
newQuery.aggregations = [
{
timeAggregation: '',
timeAggregation: MetricAggregateOperator.COUNT,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
];
}
newQuery.aggregateOperator = '';
newQuery.spaceAggregation = '';
// Handled query with unknown metric to avoid 400 and 500 errors
// With metric value typed and not available then - time - 'avg', space - 'avg'
// If not typed - time - 'rate', space - 'sum', op - 'count'
if (isEmpty(newQuery.aggregateAttribute?.type)) {
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.AVG,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.AVG,
},
];
} else {
newQuery.aggregations = [
{
timeAggregation: MetricAggregateOperator.COUNT,
metricName: newQuery.aggregateAttribute?.key || '',
temporality: '',
spaceAggregation: MetricAggregateOperator.SUM,
},
];
}
}
}
}

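Distilled from the branch logic above, the metric defaults reduce to a small decision table (the defaults themselves are the same on both sides of this hunk; what differs is the `isEditMode` guard and how the aggregations array is populated). A sketch under assumed enum string literals, since `ATTRIBUTE_TYPES` and `MetricAggregateOperator` values are not shown in this diff:

```typescript
// Illustrative distillation only; enum literals are assumptions.
interface AggregationDefaults {
  aggregateOperator: string;
  timeAggregation: string;
  spaceAggregation: string;
}

function defaultsForMetric(type?: string, key?: string): AggregationDefaults {
  if (type === 'Sum') {
    return { aggregateOperator: 'rate', timeAggregation: 'rate', spaceAggregation: '' };
  }
  if (type === 'Gauge') {
    return { aggregateOperator: 'avg', timeAggregation: 'avg', spaceAggregation: '' };
  }
  if (type) {
    // Other known types: no default time aggregation.
    return { aggregateOperator: '', timeAggregation: '', spaceAggregation: '' };
  }
  // Unknown metric type: pick safe defaults so the query API does not 400/500.
  if (key) {
    return { aggregateOperator: 'avg', timeAggregation: 'avg', spaceAggregation: 'avg' };
  }
  return { aggregateOperator: 'count', timeAggregation: 'rate', spaceAggregation: 'sum' };
}
```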
View File

@@ -10,7 +10,6 @@ export const useGetQueryKeyValueSuggestions = ({
signal,
searchText,
signalSource,
metricName,
}: {
key: string;
signal: 'traces' | 'logs' | 'metrics';
@@ -19,26 +18,17 @@ export const useGetQueryKeyValueSuggestions = ({
options?: UseQueryOptions<
SuccessResponse<QueryKeyValueSuggestionsResponseProps> | ErrorResponse
>;
metricName?: string;
}): UseQueryResult<
AxiosResponse<QueryKeyValueSuggestionsResponseProps>,
AxiosError
> =>
useQuery<AxiosResponse<QueryKeyValueSuggestionsResponseProps>, AxiosError>({
queryKey: [
'queryKeyValueSuggestions',
key,
signal,
searchText,
signalSource,
metricName,
],
queryKey: ['queryKeyValueSuggestions', key, signal, searchText, signalSource],
queryFn: () =>
getValueSuggestions({
signal,
key,
searchText: searchText || '',
signalSource: signalSource as 'meter' | '',
metricName: metricName || '',
}),
});

View File

@@ -40,7 +40,9 @@ function validateMetricNameForMetricsDataSource(query: Query): boolean {
// Check if any METRICS data source queries exist
const metricsQueries = queryData.filter(
(queryItem) => queryItem.dataSource === DataSource.METRICS,
(queryItem) =>
queryItem.dataSource === DataSource.METRICS ||
queryItem.dataSource === DataSource.METER,
);
// If no METRICS queries, validation passes

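The widened filter means meter queries now pass through the same metric-name validation as metrics queries. A self-contained sketch of the predicate; the enum string values are assumptions, as the real `DataSource` enum in `types/common/queryBuilder` is not shown here:

```typescript
// Illustrative stand-in for the real enum; string values are assumed.
enum DataSource {
  METRICS = 'metrics',
  METER = 'meter',
  TRACES = 'traces',
  LOGS = 'logs',
}

interface QueryItem {
  dataSource: DataSource;
}

// Queries that must carry a metric name after this change: METRICS and METER.
const needsMetricName = (q: QueryItem): boolean =>
  q.dataSource === DataSource.METRICS || q.dataSource === DataSource.METER;
```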
View File

@@ -17,9 +17,9 @@ const getChartData = ({
// eslint-disable-next-line sonarjs/cognitive-complexity
} => {
const uniqueTimeLabels = new Set<number>();
queryData?.forEach((data) => {
data.queryData?.forEach((query) => {
query.values?.forEach((value) => {
queryData.forEach((data) => {
data.queryData.forEach((query) => {
query.values.forEach((value) => {
uniqueTimeLabels.add(value[0]);
});
});
@@ -27,8 +27,8 @@ const getChartData = ({
const labels = Array.from(uniqueTimeLabels).sort((a, b) => a - b);
const response =
queryData?.map(({ queryData, query: queryG, legend: legendG }) =>
const response = queryData.map(
({ queryData, query: queryG, legend: legendG }) =>
queryData.map((e) => {
const { values = [], metric, legend, queryName } = e || {};
const labelNames = getLabelName(
@@ -61,7 +61,7 @@ const getChartData = ({
second: filledDataValues.map((e) => e.second || 0),
};
}),
) || [];
);
const modifiedData = response
.flat()

View File

@@ -490,7 +490,6 @@ export const defaultOutput = {
pageSize: 0,
queryName: 'A',
reduceTo: 'avg',
source: '',
spaceAggregation: 'sum',
stepInterval: 240,
timeAggregation: 'rate',

View File

@@ -28,7 +28,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { isEmpty, isEqual, isNull } from 'lodash-es';
import { isEqual, isNull } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
@@ -361,7 +361,7 @@ function LogsExplorer(): JSX.Element {
/>
}
warningElement={
!isEmpty(warning) ? <WarningPopover warningData={warning} /> : <div />
warning?.message ? <WarningPopover warningData={warning} /> : <div />
}
rightActions={
<RightToolbarActions


@@ -6,7 +6,7 @@ import cx from 'classnames';
import { CardContainer } from 'container/GridCardLayout/styles';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { ChevronDown, ChevronUp } from 'lucide-react';
import { useCallback, useRef, useState } from 'react';
import { useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -129,22 +129,23 @@ function MetricPage(): JSX.Element {
},
];
const renderedGraphCountRef = useRef(0);
const [renderedGraphCount, setRenderedGraphCount] = useState(0);
const hasLoggedRef = useRef(false);
const checkIfDataExists = useCallback((isDataAvailable: boolean): void => {
const checkIfDataExists = (isDataAvailable: boolean): void => {
if (isDataAvailable) {
renderedGraphCountRef.current += 1;
const newCount = renderedGraphCount + 1;
setRenderedGraphCount(newCount);
// Only log when first graph has rendered and we haven't logged yet
if (renderedGraphCountRef.current === 1 && !hasLoggedRef.current) {
if (newCount === 1 && !hasLoggedRef.current) {
logEvent('MQ Kafka: Metric view', {
graphRendered: true,
});
hasLoggedRef.current = true;
}
}
}, []);
};
return (
<div className="metric-page">


@@ -69,7 +69,6 @@ function TracesExplorer(): JSX.Element {
// Get panel type from URL
const panelTypesFromUrl = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);
const [isLoadingQueries, setIsLoadingQueries] = useState<boolean>(false);
const [selectedView, setSelectedView] = useState<ExplorerViews>(() =>
getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
@@ -319,12 +318,11 @@ function TracesExplorer(): JSX.Element {
/>
}
warningElement={
!isEmpty(warning) ? <WarningPopover warningData={warning} /> : <div />
warning?.message ? <WarningPopover warningData={warning} /> : <div />
}
rightActions={
<RightToolbarActions
onStageRunQuery={(): void => handleRunQuery(true, true)}
isLoadingQueries={isLoadingQueries}
/>
}
/>
@@ -346,21 +344,13 @@ function TracesExplorer(): JSX.Element {
{selectedView === ExplorerViews.LIST && (
<div className="trace-explorer-list-view">
<ListView
isFilterApplied={isFilterApplied}
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
<ListView isFilterApplied={isFilterApplied} setWarning={setWarning} />
</div>
)}
{selectedView === ExplorerViews.TRACE && (
<div className="trace-explorer-traces-view">
<TracesView
isFilterApplied={isFilterApplied}
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
<TracesView isFilterApplied={isFilterApplied} setWarning={setWarning} />
</div>
)}
@@ -370,17 +360,13 @@ function TracesExplorer(): JSX.Element {
dataSource={DataSource.TRACES}
isFilterApplied={isFilterApplied}
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
</div>
)}
{selectedView === ExplorerViews.TABLE && (
<div className="trace-explorer-table-view">
<TableView
setWarning={setWarning}
setIsLoadingQueries={setIsLoadingQueries}
/>
<TableView setWarning={setWarning} />
</div>
)}
</div>


@@ -1,7 +1,6 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable no-empty */
import { TelemetryFieldKey } from 'api/v5/v5';
import { has } from 'lodash-es';
import { useEffect, useState } from 'react';
import { DataSource } from 'types/common/queryBuilder';
@@ -9,14 +8,6 @@ import logsLoaderConfig from '../configs/logsLoaderConfig';
import tracesLoaderConfig from '../configs/tracesLoaderConfig';
import { FormattingOptions, Preferences } from '../types';
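// Map stored columns that still use the legacy `key` field to the newer `name` field.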
const migrateColumns = (columns: any): any =>
columns.map((column: any) => {
if (has(column, 'key') && !has(column, 'name')) {
return { ...column, name: column.key };
}
return column;
});
// Generic preferences loader that works with any config
async function preferencesLoader<T>(config: {
priority: readonly string[];
@@ -35,16 +26,11 @@ async function preferencesLoader<T>(config: {
const validColumnsResult = results.find(
({ result }) => result.columns?.length,
);
const validFormattingResult = results.find(({ result }) => result.formatting);
const migratedColumns = validColumnsResult?.result.columns
? migrateColumns(validColumnsResult?.result.columns)
: undefined;
// Combine valid results or fall back to the defaults
const finalResult = {
columns: migratedColumns || config.default().columns,
columns: validColumnsResult?.result.columns || config.default().columns,
formatting:
validFormattingResult?.result.formatting || config.default().formatting,
};


@@ -46,7 +46,6 @@ export interface QueryKeyValueRequestProps {
key: string;
searchText: string;
signalSource?: 'meter' | '';
metricName?: string;
}
export type SignalType = 'traces' | 'logs' | 'metrics';


@@ -427,7 +427,7 @@ export type QueryRangeDataV5 =
export interface QueryRangeResponseV5 {
type: RequestType;
data: QueryRangeDataV5 & { warning?: string[] };
data: QueryRangeDataV5 & { warnings?: string[] };
meta: ExecStats;
warning?: Warning;
}

go.mod

@@ -70,7 +70,6 @@ require (
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.39.0
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
golang.org/x/net v0.41.0
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.15.0
golang.org/x/text v0.26.0
@@ -284,6 +283,7 @@ require (
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.3.0 // indirect
golang.org/x/mod v0.25.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.33.0 // indirect


@@ -31,7 +31,6 @@ func NewAPI(
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
@@ -40,8 +39,6 @@ func NewAPI(
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
@@ -69,7 +66,7 @@ func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
return
}
keys, complete, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
keys, err := api.telemetryMetadataStore.GetKeys(ctx, fieldKeySelector)
if err != nil {
render.Error(w, err)
return
@@ -77,7 +74,7 @@ func (api *API) GetFieldsKeys(w http.ResponseWriter, r *http.Request) {
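// If fewer keys than the requested limit came back, the listing is complete.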
response := fieldKeysResponse{
Keys: keys,
Complete: complete,
Complete: len(keys) < fieldKeySelector.Limit,
}
render.Success(w, http.StatusOK, response)
@@ -100,13 +97,13 @@ func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
return
}
allValues, allComplete, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
allValues, err := api.telemetryMetadataStore.GetAllValues(ctx, fieldValueSelector)
if err != nil {
render.Error(w, err)
return
}
relatedValues, relatedComplete, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
relatedValues, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector)
if err != nil {
// we don't want to return an error if we fail to get related values
relatedValues = []string{}
@@ -119,8 +116,11 @@ func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) {
}
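// The same page-size heuristic applies to values: if any value list filled
// the requested limit, more results may exist and the set is not complete.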
response := fieldValuesResponse{
Values: values,
Complete: allComplete && relatedComplete,
Values: values,
Complete: len(values.StringValues) < fieldValueSelector.Limit &&
len(values.BoolValues) < fieldValueSelector.Limit &&
len(values.NumberValues) < fieldValueSelector.Limit &&
len(values.RelatedValues) < fieldValueSelector.Limit,
}
render.Success(w, http.StatusOK, response)


@@ -7,11 +7,8 @@ import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -19,12 +16,11 @@ import (
)
type handler struct {
module dashboard.Module
providerSettings factory.ProviderSettings
module dashboard.Module
}
func NewHandler(module dashboard.Module, providerSettings factory.ProviderSettings) dashboard.Handler {
return &handler{module: module, providerSettings: providerSettings}
func NewHandler(module dashboard.Module) dashboard.Handler {
return &handler{module: module}
}
func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
@@ -50,13 +46,6 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) {
return
}
if querybuilder.QBV5Enabled {
dashboardMigrator := transition.NewDashboardMigrateV5(handler.providerSettings.Logger, nil, nil)
if req["version"] != "v5" {
dashboardMigrator.Migrate(ctx, req)
}
}
dashboard, err := handler.module.Create(ctx, orgID, claims.Email, valuer.MustNewUUID(claims.UserID), req)
if err != nil {
render.Error(rw, err)


@@ -401,7 +401,7 @@ func (q *querier) run(
}
}
resp.Warning = &qbtypes.QueryWarnData{
resp.Warning = qbtypes.QueryWarnData{
Message: "Encountered warnings",
Url: warningsDocURL,
Warnings: warns,
@@ -515,39 +515,26 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
// createRangedQuery creates a copy of the query with a different time range
func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtypes.TimeRange) qbtypes.Query {
// this is called in a goroutine, so we create a copy of the query to avoid race conditions
switch qt := originalQuery.(type) {
case *promqlQuery:
queryCopy := qt.query.Copy()
return newPromqlQuery(q.logger, q.promEngine, queryCopy, timeRange, qt.requestType, qt.vars)
return newPromqlQuery(q.logger, q.promEngine, qt.query, timeRange, qt.requestType, qt.vars)
case *chSQLQuery:
queryCopy := qt.query.Copy()
argsCopy := make([]any, len(qt.args))
copy(argsCopy, qt.args)
return newchSQLQuery(q.logger, q.telemetryStore, queryCopy, argsCopy, timeRange, qt.kind, qt.vars)
return newchSQLQuery(q.logger, q.telemetryStore, qt.query, qt.args, timeRange, qt.kind, qt.vars)
case *builderQuery[qbtypes.TraceAggregation]:
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.LogAggregation]:
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.MetricAggregation]:
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
if qt.spec.Source == telemetrytypes.SourceMeter {
return newBuilderQuery(q.telemetryStore, q.meterStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
return newBuilderQuery(q.telemetryStore, q.meterStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
}
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
default:
return nil
}


@@ -50,7 +50,6 @@ func newProvider(
telemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
@@ -59,8 +58,6 @@ func newProvider(
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
)
@@ -72,13 +69,12 @@ func newProvider(
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
@@ -93,7 +89,6 @@ func newProvider(
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
@@ -102,7 +97,6 @@ func newProvider(
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
settings,
telemetrylogs.DefaultFullTextColumn,
logFieldMapper,
logConditionBuilder,


@@ -12,6 +12,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/SigNoz/signoz/pkg/prometheus"
@@ -385,6 +386,7 @@ func (r *ClickHouseReader) buildResourceSubQuery(tags []model.TagQueryParam, svc
}
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
if r.indexTable == "" {
return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
}
@@ -393,220 +395,121 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
if apiErr != nil {
return nil, apiErr
}
// Build parallel arrays for arrayZip approach
var ops []string
var svcs []string
serviceOperationsMap := make(map[string][]string)
for svc, opsList := range *topLevelOps {
// Cap operations to 1500 per service (same as original logic)
cappedOps := opsList[:int(math.Min(1500, float64(len(opsList))))]
serviceOperationsMap[svc] = cappedOps
// Add to parallel arrays
for _, op := range cappedOps {
ops = append(ops, op)
svcs = append(svcs, svc)
}
}
fmt.Printf("Operation pairs count: %d\n", len(ops))
// Build resource subquery for all services, but only include our target services
targetServices := make([]string, 0, len(*topLevelOps))
for svc := range *topLevelOps {
targetServices = append(targetServices, svc)
}
resourceSubQuery, err := r.buildResourceSubQueryForServices(queryParams.Tags, targetServices, *queryParams.Start, *queryParams.End)
if err != nil {
zap.L().Error("Error building resource subquery", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
// Build the optimized single query using arrayZip for tuple creation
query := fmt.Sprintf(`
SELECT
resource_string_service$$name AS serviceName,
quantile(0.99)(duration_nano) AS p99,
avg(duration_nano) AS avgDuration,
count(*) AS numCalls,
countIf(statusCode = 2) AS numErrors
FROM %s.%s
WHERE (name, resource_string_service$$name) IN arrayZip(@ops, @svcs)
AND timestamp >= @start
AND timestamp <= @end
AND ts_bucket_start >= @start_bucket
AND ts_bucket_start <= @end_bucket
AND (resource_fingerprint GLOBAL IN %s)
GROUP BY serviceName
ORDER BY numCalls DESC`,
r.TraceDB, r.traceTableName, resourceSubQuery,
)
args := []interface{}{
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
// Important: wrap slices with clickhouse.Array for IN/array params
clickhouse.Named("ops", ops),
clickhouse.Named("svcs", svcs),
}
fmt.Printf("Query: %s\n", query)
// Execute the single optimized query
rows, err := r.db.Query(ctx, query, args...)
if err != nil {
zap.L().Error("Error executing optimized services query", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
}
defer rows.Close()
// Process results
serviceItems := []model.ServiceItem{}
var wg sync.WaitGroup
// limit the number of concurrent queries to not overload the clickhouse server
sem := make(chan struct{}, 10)
var mtx sync.RWMutex
for rows.Next() {
var serviceItem model.ServiceItem
err := rows.ScanStruct(&serviceItem)
if err != nil {
zap.L().Error("Error scanning service item", zap.Error(err))
continue
}
for svc, ops := range *topLevelOps {
sem <- struct{}{}
wg.Add(1)
go func(svc string, ops []string) {
defer wg.Done()
defer func() { <-sem }()
var serviceItem model.ServiceItem
var numErrors uint64
// Skip services with zero calls (match original behavior)
if serviceItem.NumCalls == 0 {
continue
}
// Add data warning for this service
if ops, exists := serviceOperationsMap[serviceItem.ServiceName]; exists {
// Even if the total number of operations within the time range is low while
// the number of top-level operations is high, we want to warn the user about
// the issue with the instrumentation
serviceItem.DataWarning = model.DataWarning{
TopLevelOps: ops,
TopLevelOps: (*topLevelOps)[svc],
}
}
// Calculate derived fields
serviceItem.CallRate = float64(serviceItem.NumCalls) / float64(queryParams.Period)
if serviceItem.NumCalls > 0 {
serviceItem.ErrorRate = float64(serviceItem.NumErrors) * 100 / float64(serviceItem.NumCalls)
}
// default max_query_size = 262144
// Assuming the average item in `ops` is 50 bytes,
// we can fit 262144/50 = 5242 items in the `ops` array.
// Although we could make it as big as ~5k, we cap the number of items
// in the `ops` array at 1500.
serviceItems = append(serviceItems, serviceItem)
ops = ops[:int(math.Min(1500, float64(len(ops))))]
query := fmt.Sprintf(
`SELECT
quantile(0.99)(duration_nano) as p99,
avg(duration_nano) as avgDuration,
count(*) as numCalls
FROM %s.%s
WHERE resource_string_service$$name = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end`,
r.TraceDB, r.traceTableName,
)
errorQuery := fmt.Sprintf(
`SELECT
count(*) as numErrors
FROM %s.%s
WHERE resource_string_service$$name = @serviceName AND name In @names AND timestamp>= @start AND timestamp<= @end AND statusCode=2`,
r.TraceDB, r.traceTableName,
)
args := []interface{}{}
args = append(args,
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
clickhouse.Named("serviceName", svc),
clickhouse.Named("names", ops),
)
resourceSubQuery, err := r.buildResourceSubQuery(queryParams.Tags, svc, *queryParams.Start, *queryParams.End)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return
}
query += `
AND (
resource_fingerprint GLOBAL IN ` +
resourceSubQuery +
`) AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket`
args = append(args,
clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
)
err = r.db.QueryRow(
ctx,
query,
args...,
).ScanStruct(&serviceItem)
if serviceItem.NumCalls == 0 {
return
}
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return
}
errorQuery += `
AND (
resource_fingerprint GLOBAL IN ` +
resourceSubQuery +
`) AND ts_bucket_start >= @start_bucket AND ts_bucket_start <= @end_bucket`
err = r.db.QueryRow(ctx, errorQuery, args...).Scan(&numErrors)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return
}
serviceItem.ServiceName = svc
serviceItem.NumErrors = numErrors
mtx.Lock()
serviceItems = append(serviceItems, serviceItem)
mtx.Unlock()
}(svc, ops)
}
wg.Wait()
if err = rows.Err(); err != nil {
zap.L().Error("Error iterating over service results", zap.Error(err))
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
for idx := range serviceItems {
serviceItems[idx].CallRate = float64(serviceItems[idx].NumCalls) / float64(queryParams.Period)
serviceItems[idx].ErrorRate = float64(serviceItems[idx].NumErrors) * 100 / float64(serviceItems[idx].NumCalls)
}
return &serviceItems, nil
}
// buildResourceSubQueryForServices builds a resource subquery that includes only specific services
// This maintains service context while optimizing for multiple services in a single query
func (r *ClickHouseReader) buildResourceSubQueryForServices(tags []model.TagQueryParam, targetServices []string, start, end time.Time) (string, error) {
if len(targetServices) == 0 {
return "", fmt.Errorf("no target services provided")
}
if len(tags) == 0 {
// For exact parity with per-service behavior, build via resource builder with only service filter
filterSet := v3.FilterSet{}
filterSet.Items = append(filterSet.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "service.name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: v3.FilterOperatorIn,
Value: targetServices,
})
resourceSubQuery, err := resource.BuildResourceSubQuery(
r.TraceDB,
r.traceResourceTableV3,
start.Unix()-1800,
end.Unix(),
&filterSet,
[]v3.AttributeKey{},
v3.AttributeKey{},
false)
if err != nil {
zap.L().Error("Error building resource subquery for services", zap.Error(err))
return "", err
}
return resourceSubQuery, nil
}
// Convert tags to filter set
filterSet := v3.FilterSet{}
for _, tag := range tags {
// Skip the collector id as we don't add it to traces
if tag.Key == "signoz.collector.id" {
continue
}
var it v3.FilterItem
it.Key = v3.AttributeKey{
Key: tag.Key,
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
}
switch tag.Operator {
case model.NotInOperator:
it.Operator = v3.FilterOperatorNotIn
it.Value = tag.StringValues
case model.InOperator:
it.Operator = v3.FilterOperatorIn
it.Value = tag.StringValues
default:
return "", fmt.Errorf("operator %s not supported", tag.Operator)
}
filterSet.Items = append(filterSet.Items, it)
}
// Add service filter to limit to our target services
filterSet.Items = append(filterSet.Items, v3.FilterItem{
Key: v3.AttributeKey{
Key: "service.name",
DataType: v3.AttributeKeyDataTypeString,
Type: v3.AttributeKeyTypeResource,
},
Operator: v3.FilterOperatorIn,
Value: targetServices,
})
// Build resource subquery with service-specific filtering
resourceSubQuery, err := resource.BuildResourceSubQuery(
r.TraceDB,
r.traceResourceTableV3,
start.Unix()-1800,
end.Unix(),
&filterSet,
[]v3.AttributeKey{},
v3.AttributeKey{},
false)
if err != nil {
zap.L().Error("Error building resource subquery for services", zap.Error(err))
return "", err
}
return resourceSubQuery, nil
}
// buildServiceInClause creates a properly quoted IN clause for service names
func (r *ClickHouseReader) buildServiceInClause(services []string) string {
var quotedServices []string
for _, svc := range services {
// Escape single quotes and wrap in quotes
escapedSvc := strings.ReplaceAll(svc, "'", "\\'")
quotedServices = append(quotedServices, fmt.Sprintf("'%s'", escapedSvc))
}
return strings.Join(quotedServices, ", ")
}
func getStatusFilters(query string, statusParams []string, excludeMap map[string]struct{}) string {
// status can only take two values, and if both are selected then they are equivalent to none selected
if _, ok := excludeMap["status"]; ok {
@@ -783,6 +686,7 @@ func addExistsOperator(item model.TagQuery, tagMapType string, not bool) (string
}
return fmt.Sprintf(" AND %s (%s)", notStr, strings.Join(tagOperatorPair, " OR ")), args
}
func (r *ClickHouseReader) GetEntryPointOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, error) {
// Step 1: Get top operations for the given service
topOps, err := r.GetTopOperations(ctx, queryParams)
@@ -1512,6 +1416,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
}(ttlPayload)
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
}
func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
// uuid is used as transaction id
uuidWithHyphen := uuid.New()
@@ -2264,6 +2169,7 @@ func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams
return &getNextPrevErrorIDsResponse, nil
}
func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *model.GetErrorParams) (string, time.Time, *model.ApiError) {
var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
@@ -2999,6 +2905,7 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3
return &attributeValues, nil
}
func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, orgID valuer.UUID, metricName, serviceName string) (*v3.MetricMetadataResponse, error) {
unixMilli := common.PastDayRoundOff()
@@ -5273,6 +5180,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
return &response, nil
}
func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, req *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError) {
var args []interface{}
@@ -6025,6 +5933,7 @@ func (r *ClickHouseReader) CheckForLabelsInMetric(ctx context.Context, metricNam
}
return hasLE, nil
}
func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
cachedMetadata := make(map[string]*model.UpdateMetricsMetadata)
var missingMetrics []string


@@ -484,7 +484,7 @@ func parseAggregateAttributeRequest(r *http.Request) (*v3.AggregateAttributeRequ
limit = 50
}
if dataSource != v3.DataSourceMetrics && dataSource != v3.DataSourceMeter {
if dataSource != v3.DataSourceMetrics {
if err := aggregateOperator.Validate(); err != nil {
return nil, err
}
@@ -604,7 +604,7 @@ func parseFilterAttributeKeyRequest(r *http.Request) (*v3.FilterAttributeKeyRequ
return nil, err
}
if dataSource != v3.DataSourceMetrics && dataSource != v3.DataSourceMeter {
if dataSource != v3.DataSourceMetrics {
if err := aggregateOperator.Validate(); err != nil {
return nil, err
}


@@ -971,9 +971,7 @@ func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleS
RuleStore: m.ruleStore,
MaintenanceStore: m.maintenanceStore,
Logger: m.logger,
SLogger: m.opts.SLogger,
Reader: m.reader,
Querier: m.opts.Querier,
Cache: m.cache,
ManagerOpts: m.opts,
NotifyFunc: m.prepareTestNotifyFunc(),


@@ -543,11 +543,6 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
return resultVector, nil
}
if queryResult == nil {
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
return resultVector, nil
}
for _, series := range queryResult.Series {
smpl, shouldAlert := r.ShouldAlert(*series)
if shouldAlert {


@@ -318,7 +318,7 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed {
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules, providerSettings)
handlers := signoz.NewHandlers(modules)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,


@@ -498,7 +498,7 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules, providerSettings)
handlers := signoz.NewHandlers(modules)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
LogsParsingPipelineController: controller,


@@ -379,7 +379,7 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules, providerSettings)
handlers := signoz.NewHandlers(modules)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,


@@ -594,7 +594,7 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
emailing := emailingtest.New()
analytics := analyticstest.New()
modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
handlers := signoz.NewHandlers(modules, providerSettings)
handlers := signoz.NewHandlers(modules)
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
Reader: reader,


@@ -3,12 +3,10 @@ package querybuilder
import (
"context"
"fmt"
"log/slog"
"strings"
chparser "github.com/AfterShip/clickhouse-sql-parser/parser"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -16,7 +14,6 @@ import (
)
type aggExprRewriter struct {
logger *slog.Logger
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
@@ -27,17 +24,13 @@ type aggExprRewriter struct {
var _ qbtypes.AggExprRewriter = (*aggExprRewriter)(nil)
func NewAggExprRewriter(
settings factory.ProviderSettings,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
return &aggExprRewriter{
logger: set.Logger(),
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
@@ -77,7 +70,7 @@ func (r *aggExprRewriter) Rewrite(
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
}
visitor := newExprVisitor(r.logger, keys,
visitor := newExprVisitor(keys,
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
@@ -124,7 +117,6 @@ func (r *aggExprRewriter) RewriteMulti(
// exprVisitor walks FunctionExpr nodes and applies the mappers.
type exprVisitor struct {
chparser.DefaultASTVisitor
logger *slog.Logger
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
@@ -137,7 +129,6 @@ type exprVisitor struct {
}
func newExprVisitor(
logger *slog.Logger,
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
@@ -146,7 +137,6 @@ func newExprVisitor(
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
logger: logger,
fieldKeys: fieldKeys,
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
@@ -193,7 +183,6 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
FieldKeys: v.fieldKeys,
FieldMapper: v.fieldMapper,
ConditionBuilder: v.conditionBuilder,


@@ -2,11 +2,7 @@ package querybuilder
import (
"context"
"encoding/json"
"fmt"
"math"
"reflect"
"strconv"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
@@ -120,58 +116,3 @@ func GroupByKeys(keys []qbtypes.GroupByKey) []string {
}
return k
}
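// FormatValueForContains renders an arbitrary value as the plain string used
// for substring matching, formatting large whole floats without scientific notation.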
func FormatValueForContains(value any) string {
if value == nil {
return ""
}
switch v := value.(type) {
case string:
return v
case []byte:
return string(v)
case json.Number:
return v.String()
case float64:
if v == math.Trunc(v) && v >= -1e15 && v <= 1e15 {
return fmt.Sprintf("%.0f", v)
}
return strconv.FormatFloat(v, 'f', -1, 64)
case float32:
return strconv.FormatFloat(float64(v), 'f', -1, 32)
case int, int8, int16, int32, int64:
return fmt.Sprintf("%d", v)
case uint, uint8, uint16, uint32, uint64:
return fmt.Sprintf("%d", v)
case bool:
return strconv.FormatBool(v)
case fmt.Stringer:
return v.String()
default:
// fallback - try to convert through reflection
rv := reflect.ValueOf(value)
switch rv.Kind() {
case reflect.Float32, reflect.Float64:
f := rv.Float()
if f == math.Trunc(f) && f >= -1e15 && f <= 1e15 {
return fmt.Sprintf("%.0f", f)
}
return strconv.FormatFloat(f, 'f', -1, 64)
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return strconv.FormatInt(rv.Int(), 10)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return strconv.FormatUint(rv.Uint(), 10)
default:
return fmt.Sprintf("%v", value)
}
}
}


@@ -1,275 +0,0 @@
package querybuilder
import (
"encoding/json"
"fmt"
"math"
"testing"
"github.com/stretchr/testify/assert"
)
type customStringer struct {
value string
}
func (c customStringer) String() string {
return c.value
}
type customInt int64
type customFloat float64
type customUint uint64
func TestFormatValueForContains(t *testing.T) {
tests := []struct {
name string
input any
expected string
}{
{
name: "nil value",
input: nil,
expected: "",
},
{
name: "string value",
input: "hello world",
expected: "hello world",
},
{
name: "empty string",
input: "",
expected: "",
},
{
name: "string with special characters",
input: "test@#$%^&*()_+-=",
expected: "test@#$%^&*()_+-=",
},
{
name: "byte slice",
input: []byte("byte slice test"),
expected: "byte slice test",
},
{
name: "empty byte slice",
input: []byte{},
expected: "",
},
{
name: "json.Number integer",
input: json.Number("521509198310"),
expected: "521509198310",
},
{
name: "json.Number float",
input: json.Number("3.14159"),
expected: "3.14159",
},
{
name: "json.Number scientific notation",
input: json.Number("1.23e+10"),
expected: "1.23e+10",
},
{
name: "float64 whole number",
input: float64(42),
expected: "42",
},
{
name: "float64 decimal",
input: float64(3.14159),
expected: "3.14159",
},
{
name: "float64 large whole number",
input: float64(521509198310),
expected: "521509198310",
},
{
name: "float64 at positive threshold",
input: float64(1e15),
expected: "1000000000000000",
},
{
name: "float64 above positive threshold",
input: float64(1e16),
expected: "10000000000000000",
},
{
name: "float64 at negative threshold",
input: float64(-1e15),
expected: "-1000000000000000",
},
{
name: "float64 negative decimal",
input: float64(-123.456),
expected: "-123.456",
},
{
name: "float64 zero",
input: float64(0),
expected: "0",
},
{
name: "float32 whole number",
input: float32(42),
expected: "42",
},
{
name: "float32 decimal",
input: float32(3.14),
expected: "3.14",
},
{
name: "int",
input: int(123),
expected: "123",
},
{
name: "int negative",
input: int(-456),
expected: "-456",
},
{
name: "int8 max",
input: int8(127),
expected: "127",
},
{
name: "int8 min",
input: int8(-128),
expected: "-128",
},
{
name: "int16",
input: int16(32767),
expected: "32767",
},
{
name: "int32",
input: int32(2147483647),
expected: "2147483647",
},
{
name: "int64",
input: int64(9223372036854775807),
expected: "9223372036854775807",
},
{
name: "uint",
input: uint(123),
expected: "123",
},
{
name: "uint8 max",
input: uint8(255),
expected: "255",
},
{
name: "uint16",
input: uint16(65535),
expected: "65535",
},
{
name: "uint32",
input: uint32(4294967295),
expected: "4294967295",
},
{
name: "uint64 large",
input: uint64(18446744073709551615),
expected: "18446744073709551615",
},
{
name: "bool true",
input: true,
expected: "true",
},
{
name: "bool false",
input: false,
expected: "false",
},
{
name: "custom stringer",
input: customStringer{value: "custom string value"},
expected: "custom string value",
},
{
name: "custom int type",
input: customInt(12345),
expected: "12345",
},
{
name: "custom float type whole number",
input: customFloat(67890),
expected: "67890",
},
{
name: "custom float type decimal",
input: customFloat(123.456),
expected: "123.456",
},
{
name: "custom uint type",
input: customUint(99999),
expected: "99999",
},
{
name: "struct fallback",
input: struct{ Name string }{Name: "test"},
expected: "{test}",
},
{
name: "slice fallback",
input: []int{1, 2, 3},
expected: "[1 2 3]",
},
{
name: "map fallback",
input: map[string]int{"a": 1, "b": 2},
expected: fmt.Sprintf("%v", map[string]int{"a": 1, "b": 2}),
},
{
name: "float64 infinity",
input: math.Inf(1),
expected: "+Inf",
},
{
name: "float64 negative infinity",
input: math.Inf(-1),
expected: "-Inf",
},
{
name: "float64 NaN",
input: math.NaN(),
expected: "NaN",
},
{
name: "float64 very small positive",
input: float64(0.000000123),
expected: "0.000000123",
},
{
name: "float64 very small negative",
input: float64(-0.000000123),
expected: "-0.000000123",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := FormatValueForContains(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}
func TestFormatValueForContains_LargeNumberScientificNotation(t *testing.T) {
largeNumber := float64(521509198310)
result := FormatValueForContains(largeNumber)
assert.Equal(t, "521509198310", result)
assert.NotEqual(t, "5.2150919831e+11", result)
}


@@ -1,15 +0,0 @@
package querybuilder
import (
"os"
"strings"
)
var QBV5Enabled = false
func init() {
v := os.Getenv("ENABLE_QB_V5")
if strings.ToLower(v) == "true" || strings.ToLower(v) == "1" {
QBV5Enabled = true
}
}


@@ -42,17 +42,6 @@ func QueryStringToKeysSelectors(query string) []*telemetrytypes.FieldKeySelector
FieldContext: key.FieldContext,
FieldDataType: key.FieldDataType,
})
if key.FieldContext != telemetrytypes.FieldContextUnspecified {
// e.g. span.kind in metrics or metric.max_count in spans should still search on span.kind
// see note in where_clause_visitor.go in VisitKey(...)
keys = append(keys, &telemetrytypes.FieldKeySelector{
Name: key.FieldContext.StringValue() + "." + key.Name,
Signal: key.Signal,
FieldContext: key.FieldContext,
FieldDataType: key.FieldDataType,
})
}
}
}


@@ -32,12 +32,6 @@ func TestQueryToKeys(t *testing.T) {
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "resource.service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{


@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -35,8 +34,7 @@ func valueForIndexFilter(op qbtypes.FilterOperator, key *telemetrytypes.Telemetr
}
return values
}
// resource table expects string value
return fmt.Sprintf(`%%%v%%`, value)
return value
}
func keyIndexFilter(key *telemetrytypes.TelemetryFieldKey) any {
@@ -55,16 +53,6 @@ func (b *defaultConditionBuilder) ConditionFor(
return "true", nil
}
switch op {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := b.fm.ColumnFor(ctx, key)
if err != nil {
return "", err


@@ -5,7 +5,6 @@ import (
"testing"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
@@ -78,18 +77,6 @@ func TestConditionBuilder(t *testing.T) {
expected: "LOWER(simpleJSONExtractString(labels, 'k8s.namespace.name')) LIKE LOWER(?) AND labels LIKE ? AND LOWER(labels) LIKE LOWER(?)",
expectedArgs: []any{"%banana%", "%k8s.namespace.name%", `%k8s.namespace.name%banana%`},
},
{
name: "Contains operator - string attribute number value",
key: &telemetrytypes.TelemetryFieldKey{
Name: "company.id",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
op: qbtypes.FilterOperatorContains,
value: 521509198310,
expected: "LOWER(simpleJSONExtractString(labels, 'company.id')) LIKE LOWER(?) AND labels LIKE ? AND LOWER(labels) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%", "%company.id%", `%company.id%521509198310%`},
},
{
name: "string_not_contains",
key: &telemetrytypes.TelemetryFieldKey{


@@ -3,10 +3,8 @@ package resourcefilter
import (
"context"
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -36,7 +34,6 @@ var signalConfigs = map[telemetrytypes.Signal]signalConfig{
// Generic resource filter statement builder
type resourceFilterStatementBuilder[T any] struct {
logger *slog.Logger
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
metadataStore telemetrytypes.MetadataStore
@@ -55,14 +52,11 @@ var (
// Constructor functions
func NewTraceResourceFilterStatementBuilder(
settings factory.ProviderSettings,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{
logger: set.Logger(),
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
@@ -71,7 +65,6 @@ func NewTraceResourceFilterStatementBuilder(
}
func NewLogResourceFilterStatementBuilder(
settings factory.ProviderSettings,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
@@ -79,9 +72,7 @@ func NewLogResourceFilterStatementBuilder(
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
logger: set.Logger(),
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
@@ -102,7 +93,6 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
for idx := range keySelectors {
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
}
return keySelectors
@@ -127,7 +117,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName))
keySelectors := b.getKeySelectors(query)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -157,7 +147,6 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// warnings would be encountered as part of the main condition already
filterWhereClause, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fieldMapper,
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,


@@ -33,8 +33,6 @@ func ToNanoSecs(epoch uint64) uint64 {
return temp * uint64(math.Pow(10, float64(19-count)))
}
// TODO(srikanthccv): should these be rounded to the nearest multiple of 60 instead of 5 if step > 60?
// That would make the graph look nice, but "nice" should be less important than usefulness
func RecommendedStepInterval(start, end uint64) uint64 {
start = ToNanoSecs(start)
end = ToNanoSecs(end)
@@ -162,6 +160,29 @@ func AdjustedMetricTimeRange(start, end, step uint64, mq qbtypes.QueryBuilderQue
return start, end
}
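// GCD returns the greatest common divisor of a and b using the Euclidean algorithm.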
func GCD(a, b int64) int64 {
for b != 0 {
a, b = b, a%b
}
return a
}
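// LCM returns the least common multiple of a and b. Note that a*b may
// overflow int64 for large inputs.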
func LCM(a, b int64) int64 {
return (a * b) / GCD(a, b)
}
// LCMList computes the LCM of a list of int64 numbers.
func LCMList(nums []int64) int64 {
if len(nums) == 0 {
return 1
}
result := nums[0]
for _, num := range nums[1:] {
result = LCM(result, num)
}
return result
}
func AssignReservedVars(vars map[string]any, start, end uint64) {
start = ToNanoSecs(start)
end = ToNanoSecs(end)


@@ -3,7 +3,6 @@ package querybuilder
import (
"context"
"fmt"
"log/slog"
"strconv"
"strings"
@@ -21,7 +20,6 @@ var searchTroubleshootingGuideURL = "https://signoz.io/docs/userguide/search-tro
// filterExpressionVisitor implements the FilterQueryVisitor interface
// to convert the parsed filter expressions into ClickHouse WHERE clause
type filterExpressionVisitor struct {
logger *slog.Logger
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
warnings []string
@@ -43,7 +41,6 @@ type filterExpressionVisitor struct {
}
type FilterExprVisitorOpts struct {
Logger *slog.Logger
FieldMapper qbtypes.FieldMapper
ConditionBuilder qbtypes.ConditionBuilder
FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
@@ -61,7 +58,6 @@ type FilterExprVisitorOpts struct {
// newFilterExpressionVisitor creates a new filterExpressionVisitor
func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
return &filterExpressionVisitor{
logger: opts.Logger,
fieldMapper: opts.FieldMapper,
conditionBuilder: opts.ConditionBuilder,
fieldKeys: opts.FieldKeys,
@@ -748,19 +744,6 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
fieldKeysForName = filteredKeys
}
// if the data type is explicitly provided, filter out the remaining keys;
// for example, with level:string = 'value' we don't want to search on
// anything other than the string attributes
if fieldKey.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, item := range fieldKeysForName {
if item.FieldDataType == fieldKey.FieldDataType {
filteredKeys = append(filteredKeys, item)
}
}
fieldKeysForName = filteredKeys
}
// for the body JSON search, we need to add a search on the body field even
// if there is a field with the same name as an attribute/resource attribute.
// Since it is ORed with fieldKeysForName, it will not produce an empty result
@@ -790,35 +773,15 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
}
if len(fieldKeysForName) > 1 && !v.keysWithWarnings[keyName] {
warnMsg := fmt.Sprintf(
"Key `%s` is ambiguous, found %d different combinations of field context / data type: %v.",
v.mainWarnURL = "https://signoz.io/docs/userguide/field-context-data-types/"
// this is a warning state; we must have an unambiguous key
v.warnings = append(v.warnings, fmt.Sprintf(
"key `%s` is ambiguous, found %d different combinations of field context / data type: %v",
fieldKey.Name,
len(fieldKeysForName),
fieldKeysForName,
)
mixedFieldContext := map[string]bool{}
for _, item := range fieldKeysForName {
mixedFieldContext[item.FieldContext.StringValue()] = true
}
if mixedFieldContext[telemetrytypes.FieldContextResource.StringValue()] &&
mixedFieldContext[telemetrytypes.FieldContextAttribute.StringValue()] {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, item := range fieldKeysForName {
if item.FieldContext != telemetrytypes.FieldContextResource {
continue
}
filteredKeys = append(filteredKeys, item)
}
fieldKeysForName = filteredKeys
warnMsg += " " + "Using `resource` context by default. To query attributes explicitly, " +
fmt.Sprintf("use the fully qualified name (e.g., 'attribute.%s')", fieldKey.Name)
}
v.mainWarnURL = "https://signoz.io/docs/userguide/field-context-data-types/"
// this is a warning state; we must have an unambiguous key
v.warnings = append(v.warnings, warnMsg)
))
v.keysWithWarnings[keyName] = true
v.logger.Warn("ambiguous key", "field_key_name", fieldKey.Name) //nolint:sloglint
}
return fieldKeysForName


@@ -1,7 +1,6 @@
package signoz
import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/apdex"
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
@@ -31,14 +30,14 @@ type Handlers struct {
TraceFunnel tracefunnel.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings) Handlers {
func NewHandlers(modules Modules) Handlers {
return Handlers{
Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter),
Preference: implpreference.NewHandler(modules.Preference),
User: impluser.NewHandler(modules.User),
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings),
Dashboard: impldashboard.NewHandler(modules.Dashboard),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
}


@@ -35,7 +35,7 @@ func TestNewHandlers(t *testing.T) {
emailing := emailingtest.New()
modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil)
handlers := NewHandlers(modules, providerSettings)
handlers := NewHandlers(modules)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {


@@ -77,12 +77,7 @@ func NewSQLSchemaProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedMap[
)
}
func NewSQLMigrationProviderFactories(
sqlstore sqlstore.SQLStore,
sqlschema sqlschema.SQLSchema,
telemetryStore telemetrystore.TelemetryStore,
providerSettings factory.ProviderSettings,
) factory.NamedMap[factory.ProviderFactory[sqlmigration.SQLMigration, sqlmigration.Config]] {
func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.NamedMap[factory.ProviderFactory[sqlmigration.SQLMigration, sqlmigration.Config]] {
return factory.MustNewNamedMap(
sqlmigration.NewAddDataMigrationsFactory(),
sqlmigration.NewAddOrganizationFactory(),
@@ -129,21 +124,13 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewUpdateUserInviteFactory(sqlstore, sqlschema),
sqlmigration.NewUpdateOrgDomainFactory(sqlstore, sqlschema),
sqlmigration.NewAddFactorIndexesFactory(sqlstore, sqlschema),
sqlmigration.NewQueryBuilderV5MigrationFactory(sqlstore, telemetryStore),
sqlmigration.NewAddMeterQuickFiltersFactory(sqlstore, sqlschema),
)
}
func NewTelemetryStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]] {
return factory.MustNewNamedMap(
clickhousetelemetrystore.NewFactory(
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewSettingsFactory(s)
}),
telemetrystore.TelemetryStoreHookFactoryFunc(func(s string) factory.ProviderFactory[telemetrystore.TelemetryStoreHook, telemetrystore.Config] {
return telemetrystorehook.NewLoggingFactory()
}),
),
clickhousetelemetrystore.NewFactory(telemetrystorehook.NewSettingsFactory(), telemetrystorehook.NewLoggingFactory()),
)
}


@@ -40,12 +40,7 @@ func TestNewProviderFactories(t *testing.T) {
})
assert.NotPanics(t, func() {
NewSQLMigrationProviderFactories(
sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual),
sqlschematest.New(map[string]*sqlschema.Table{}, map[string][]*sqlschema.UniqueConstraint{}, map[string]sqlschema.Index{}),
telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual),
instrumentationtest.New().ToProviderSettings(),
)
NewSQLMigrationProviderFactories(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), sqlschematest.New(map[string]*sqlschema.Table{}, map[string][]*sqlschema.UniqueConstraint{}, map[string]sqlschema.Index{}))
})
assert.NotPanics(t, func() {


@@ -201,7 +201,7 @@ func New(
ctx,
providerSettings,
config.SQLMigration,
NewSQLMigrationProviderFactories(sqlstore, sqlschema, telemetrystore, providerSettings),
NewSQLMigrationProviderFactories(sqlstore, sqlschema),
)
if err != nil {
return nil, err
@@ -268,7 +268,7 @@ func New(
modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings)
handlers := NewHandlers(modules)
// Create a list of all stats collectors
statsCollectors := []statsreporter.StatsCollector{


@@ -106,7 +106,7 @@ func (migration *addMeterQuickFilters) Up(ctx context.Context, db *bun.DB) error
},
OrgID: orgID,
Filter: string(meterJSON),
Signal: signal{valuer.NewString("meter")},
Signal: signal{valuer.NewString("meter_explorer")},
timeAuditable: timeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
@@ -114,15 +114,13 @@ func (migration *addMeterQuickFilters) Up(ctx context.Context, db *bun.DB) error
})
}
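// Upsert the meter quick filters: on an (org_id, signal) conflict, refresh
// the stored filter and its updated_at timestamp.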
if len(meterFiltersToInsert) > 0 {
_, err = tx.NewInsert().
Model(&meterFiltersToInsert).
On("CONFLICT (org_id, signal) DO UPDATE").
Set("filter = EXCLUDED.filter, updated_at = EXCLUDED.updated_at").
Exec(ctx)
if err != nil {
return err
}
_, err = tx.NewInsert().
Model(&meterFiltersToInsert).
On("CONFLICT (org_id, signal) DO UPDATE").
Set("filter = EXCLUDED.filter, updated_at = EXCLUDED.updated_at").
Exec(ctx)
if err != nil {
return err
}
if err := tx.Commit(); err != nil {


@@ -1,300 +0,0 @@
package sqlmigration
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type queryBuilderV5Migration struct {
store sqlstore.SQLStore
telemetryStore telemetrystore.TelemetryStore
logger *slog.Logger
}
func NewQueryBuilderV5MigrationFactory(
store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("query_builder_v5_migration"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return newQueryBuilderV5Migration(ctx, c, store, telemetryStore, ps.Logger)
})
}
func newQueryBuilderV5Migration(
_ context.Context,
_ Config, store sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
logger *slog.Logger,
) (SQLMigration, error) {
return &queryBuilderV5Migration{store: store, telemetryStore: telemetryStore, logger: logger}, nil
}
func (migration *queryBuilderV5Migration) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
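// getTraceDuplicateKeys returns span tag keys that appear with more than one
// tagType ('tag' and 'resource'), i.e. keys that are ambiguous across contexts.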
func (migration *queryBuilderV5Migration) getTraceDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT tagKey
FROM signoz_traces.distributed_span_attributes_keys
WHERE tagType IN ('tag', 'resource')
GROUP BY tagKey
HAVING COUNT(DISTINCT tagType) > 1
ORDER BY tagKey
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
return nil, fmt.Errorf("failed to query trace duplicate keys: %w", err)
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
return nil, fmt.Errorf("failed to scan trace duplicate key: %w", err)
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *queryBuilderV5Migration) getLogDuplicateKeys(ctx context.Context) ([]string, error) {
query := `
SELECT name
FROM (
SELECT DISTINCT name FROM signoz_logs.distributed_logs_attribute_keys
INTERSECT
SELECT DISTINCT name FROM signoz_logs.distributed_logs_resource_keys
)
ORDER BY name
`
rows, err := migration.telemetryStore.ClickhouseDB().Query(ctx, query)
if err != nil {
return nil, fmt.Errorf("failed to query log duplicate keys: %w", err)
}
defer rows.Close()
var keys []string
for rows.Next() {
var key string
if err := rows.Scan(&key); err != nil {
return nil, fmt.Errorf("failed to scan log duplicate key: %w", err)
}
keys = append(keys, key)
}
return keys, nil
}
func (migration *queryBuilderV5Migration) Up(ctx context.Context, db *bun.DB) error {
// fetch keys that have both attribute and resource attribute types
logsKeys, err := migration.getLogDuplicateKeys(ctx)
if err != nil {
return err
}
tracesKeys, err := migration.getTraceDuplicateKeys(ctx)
if err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
if err := migration.migrateDashboards(ctx, tx, logsKeys, tracesKeys); err != nil {
return err
}
if err := migration.migrateSavedViews(ctx, tx, logsKeys, tracesKeys); err != nil {
return err
}
if err := migration.migrateRules(ctx, tx, logsKeys, tracesKeys); err != nil {
return err
}
return tx.Commit()
}
func (migration *queryBuilderV5Migration) Down(ctx context.Context, db *bun.DB) error {
// this migration is not reversible as we're transforming the structure
return nil
}
func (migration *queryBuilderV5Migration) migrateDashboards(
ctx context.Context,
tx bun.Tx,
logsKeys []string,
tracesKeys []string,
) error {
var dashboards []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err := tx.NewSelect().
Table("dashboard").
Column("id", "data").
Scan(ctx, &dashboards)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
dashboardMigrator := transition.NewDashboardMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, dashboard := range dashboards {
updated := dashboardMigrator.Migrate(ctx, dashboard.Data)
if updated {
dataJSON, err := json.Marshal(dashboard.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("dashboard").
Set("data = ?", string(dataJSON)).
Where("id = ?", dashboard.ID).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
}
func (migration *queryBuilderV5Migration) migrateSavedViews(
ctx context.Context,
tx bun.Tx,
logsKeys []string,
tracesKeys []string,
) error {
var savedViews []struct {
ID string `bun:"id"`
Data string `bun:"data"`
}
err := tx.NewSelect().
Table("saved_views").
Column("id", "data").
Scan(ctx, &savedViews)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
savedViewsMigrator := transition.NewSavedViewMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, savedView := range savedViews {
var data map[string]any
if err := json.Unmarshal([]byte(savedView.Data), &data); err != nil {
continue // invalid JSON
}
updated := savedViewsMigrator.Migrate(ctx, data)
if updated {
dataJSON, err := json.Marshal(data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("saved_views").
Set("data = ?", string(dataJSON)).
Where("id = ?", savedView.ID).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
}
func (migration *queryBuilderV5Migration) migrateRules(
ctx context.Context,
tx bun.Tx,
logsKeys []string,
tracesKeys []string,
) error {
// Fetch all rules
var rules []struct {
ID string `bun:"id"`
Data map[string]any `bun:"data"`
}
err := tx.NewSelect().
Table("rule").
Column("id", "data").
Scan(ctx, &rules)
if err != nil {
if err == sql.ErrNoRows {
return nil
}
return err
}
alertsMigrator := transition.NewAlertMigrateV5(migration.logger, logsKeys, tracesKeys)
for _, rule := range rules {
migration.logger.InfoContext(ctx, "migrating rule", "rule_id", rule.ID)
updated := alertsMigrator.Migrate(ctx, rule.Data)
if updated {
fmt.Println("updated rule", rule.ID)
dataJSON, err := json.Marshal(rule.Data)
if err != nil {
return err
}
_, err = tx.NewUpdate().
Table("rule").
Set("data = ?", string(dataJSON)).
Where("id = ?", rule.ID).
Exec(ctx)
if err != nil {
return err
}
}
}
return nil
}
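The 300-line file removed above is the ClickHouse-dependent query_builder_v5 migration, which is plausibly why the earlier hunk drops telemetrystore from NewSQLMigrationProviderFactories. A hypothetical sketch of the wiring that disappears with it; the list and handle names are illustrative, and the real factory list is not shown in this excerpt:

// Hypothetical: how the removed factory would have been registered before
// this change. The constructor signature matches the removed file above.
factories := []factory.ProviderFactory[SQLMigration, Config]{
	NewQueryBuilderV5MigrationFactory(sqlstore, telemetrystore),
	// ... the other migrations ...
}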

View File

@@ -7,7 +7,6 @@ import (
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"golang.org/x/exp/maps"
@@ -31,16 +30,6 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
return "", err
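This is the first of several condition builders in this diff (two more follow below) that drop the same switch normalizing values for the CONTAINS/LIKE family of operators. Judging from the deleted test cases, where the value 521509198310 becomes the LIKE argument "%521509198310%", the helper stringifies the value and wraps it in wildcards. A hypothetical reconstruction, not the real pkg/querybuilder code:

package querybuilder

import (
	"fmt"
	"strings"
)

// formatValueForContains is a hypothetical stand-in for the removed
// querybuilder.FormatValueForContains call, inferred from the deleted test
// expectations (521509198310 -> "%521509198310%"). The real helper may
// differ, for instance in whether it escapes LIKE metacharacters at all.
func formatValueForContains(value any) string {
	s := fmt.Sprintf("%v", value)
	// Assumption: escape % and _ so user input matches literally.
	s = strings.NewReplacer(`%`, `\%`, `_`, `\_`).Replace(s)
	return "%" + s + "%"
}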

View File

@@ -111,30 +111,6 @@ func TestConditionFor(t *testing.T) {
expectedArgs: []any{"%admin%"},
expectedError: nil,
},
{
name: "Contains operator - string attribute number value",
key: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorContains,
value: 521509198310,
expectedSQL: "LOWER(attributes_string['user.id']) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%", true},
expectedError: nil,
},
{
name: "Contains operator - body",
key: telemetrytypes.TelemetryFieldKey{
Name: "body",
},
operator: qbtypes.FilterOperatorContains,
value: 521509198310,
expectedSQL: "LOWER(body) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%"},
expectedError: nil,
},
{
name: "Contains operator - string attribute",
key: telemetrytypes.TelemetryFieldKey{

View File

@@ -4,7 +4,6 @@ import (
"fmt"
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -20,7 +19,6 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
keys := buildCompleteFieldKeyMap()
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,

View File

@@ -6,8 +6,8 @@ import (
"testing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
@@ -21,13 +21,14 @@ func TestFilterExprLogs(t *testing.T) {
keys := buildCompleteFieldKeyMap()
opts := querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: DefaultFullTextColumn,
JsonBodyPrefix: BodyJSONStringSearchPrefix,
JsonKeyToKey: GetBodyJSONKey,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: GetBodyJSONKey,
}
testCases := []struct {
@@ -1405,14 +1406,6 @@ func TestFilterExprLogs(t *testing.T) {
expectedArgs: []any{"%error%", true},
expectedErrorContains: "",
},
{
category: "number contains body",
query: "body CONTAINS 521509198310",
shouldPass: true,
expectedQuery: "WHERE LOWER(body) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%"},
expectedErrorContains: "",
},
{
category: "CONTAINS operator",
query: "level CONTAINS \"critical\"",

View File

@@ -26,12 +26,12 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
var err error
var parsedValue any
if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
valueType = telemetrytypes.FieldDataTypeBool
} else if parsedValue, err = strconv.ParseInt(valueStr, 10, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeInt64
} else if parsedValue, err = strconv.ParseFloat(valueStr, 64); err == nil {
valueType = telemetrytypes.FieldDataTypeFloat64
} else if parsedValue, err = strconv.ParseBool(valueStr); err == nil {
valueType = telemetrytypes.FieldDataTypeBool
} else {
parsedValue = valueStr
valueType = telemetrytypes.FieldDataTypeString
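The hunk above swaps the probe order between bool-first and int-first. The order only matters for inputs such as "1" and "0", which both strconv.ParseBool and strconv.ParseInt accept; a runnable demonstration:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// "1" satisfies both probes, so probe order decides the inferred type.
	b, err := strconv.ParseBool("1")
	fmt.Println(b, err) // true <nil>

	i, err := strconv.ParseInt("1", 10, 64)
	fmt.Println(i, err) // 1 <nil>

	// "true" satisfies only the bool probe, so it reaches the bool branch
	// under either ordering.
	_, err = strconv.ParseInt("true", 10, 64)
	fmt.Println(err != nil) // true
}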

View File

@@ -68,7 +68,7 @@ func (b *logQueryStatementBuilder) Build(
end = querybuilder.ToNanoSecs(end)
keySelectors := getKeySelectors(query)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -121,7 +121,6 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []
for idx := range keySelectors {
keySelectors[idx].Signal = telemetrytypes.SignalLogs
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
}
return keySelectors
@@ -553,7 +552,6 @@ func (b *logQueryStatementBuilder) addFilterCondition(
if query.Filter != nil && query.Filter.Expression != "" {
// add filter expression
preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
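This hunk, and the matching ones in the meter and metrics builders below, drops an unused middle return value from GetKeysMulti: every call site shown was discarding it with a blank identifier. A sketch of the resulting shape; the interface and package names follow the test mock's naming (telemetrytypestest.NewMockMetadataStore) and may differ, the map type is grounded in the key maps the tests build, and the dropped middle value's type is not visible in this diff, so it is omitted rather than guessed:

package telemetrytypes // assumption, per the mock package's name

import "context"

type MetadataStore interface {
	// Two-value form after this change. Previously there was a middle
	// return value, discarded as `_` at every call site in this diff.
	// The selector slice element type is also an assumption.
	GetKeysMulti(ctx context.Context, selectors []*FieldKeySelector) (map[string][]*TelemetryFieldKey, error)
}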

View File

@@ -27,7 +27,6 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
mockMetadataStore.KeysMap = keysMap
return resourcefilter.NewLogResourceFilterStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
fm,
cb,
mockMetadataStore,
@@ -120,7 +119,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -213,7 +212,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
@@ -322,7 +321,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()

View File

@@ -6,6 +6,4 @@ const (
LogsV2LocalTableName = "logs_v2"
TagAttributesV2TableName = "distributed_tag_attributes_v2"
TagAttributesV2LocalTableName = "tag_attributes_v2"
LogAttributeKeysTblName = "distributed_logs_attribute_keys"
LogResourceKeysTblName = "distributed_logs_resource_keys"
)

View File

@@ -27,13 +27,6 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"body": {
{
Name: "body",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.status_code": {
{
Name: "http.status_code",

View File

@@ -5,7 +5,6 @@ import (
"fmt"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -26,17 +25,6 @@ func (c *conditionBuilder) ConditionFor(
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
column, err := c.fm.ColumnFor(ctx, key)
if err != nil {
// if we don't have a column, we can't build a condition for related values

File diff suppressed because it is too large

View File

@@ -40,7 +40,6 @@ func TestGetKeys(t *testing.T) {
mockTelemetryStore,
telemetrytraces.DBName,
telemetrytraces.TagAttributesV2TableName,
telemetrytraces.SpanAttributesKeysTblName,
telemetrytraces.SpanIndexV3TableName,
telemetrymetrics.DBName,
telemetrymetrics.AttributesMetadataTableName,
@@ -49,8 +48,6 @@ func TestGetKeys(t *testing.T) {
telemetrylogs.DBName,
telemetrylogs.LogsV2TableName,
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
DBName,
AttributesMetadataLocalTableName,
)
@@ -66,14 +63,14 @@ func TestGetKeys(t *testing.T) {
query := `SELECT.*`
mock.ExpectQuery(query).
WithArgs("%http.method%", telemetrytypes.FieldDataTypeString.TagDataType(), 11).
WithArgs("%http.method%", telemetrytypes.FieldDataTypeString.TagDataType(), 10).
WillReturnRows(cmock.NewRows([]cmock.ColumnType{
{Name: "tag_key", Type: "String"},
{Name: "tag_type", Type: "String"},
{Name: "tag_data_type", Type: "String"},
{Name: "priority", Type: "UInt8"},
}, [][]any{{"http.method", "tag", "String", 1}, {"http.method", "tag", "String", 1}}))
keys, _, err := metadata.GetKeys(context.Background(), &telemetrytypes.FieldKeySelector{
keys, err := metadata.GetKeys(context.Background(), &telemetrytypes.FieldKeySelector{
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
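The same two-value change applied to GetKeys here, along with one fewer expected query argument (10 instead of 11), plausibly tracking the key-table arguments removed from the constructor call. For reference, the mock-programming pattern this test uses, condensed from the hunk; the mock and cmock handles come from the test's ClickHouse mock setup, which the diff elides:

// Program the ClickHouse mock: any query matching the regex, invoked with
// these args, returns the canned rows below.
query := `SELECT.*`
mock.ExpectQuery(query).
	WithArgs("%http.method%", telemetrytypes.FieldDataTypeString.TagDataType(), 10).
	WillReturnRows(cmock.NewRows([]cmock.ColumnType{
		{Name: "tag_key", Type: "String"},
		{Name: "tag_type", Type: "String"},
		{Name: "tag_data_type", Type: "String"},
		{Name: "priority", Type: "UInt8"},
	}, [][]any{
		{"http.method", "tag", "String", 1},
		{"http.method", "tag", "String", 1},
	}))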

View File

@@ -51,7 +51,7 @@ func (b *meterQueryStatementBuilder) Build(
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
keySelectors := telemetrymetrics.GetKeySelectors(query)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -141,7 +141,6 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
)
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
@@ -224,7 +223,6 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
@@ -288,7 +286,6 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
)
if query.Filter != nil && query.Filter.Expression != "" {
filterWhere, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,

View File

@@ -6,7 +6,6 @@ import (
"slices"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -29,16 +28,6 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
tblFieldName, err := c.fm.FieldFor(ctx, key)
if err != nil {
return "", err

View File

@@ -133,19 +133,6 @@ func TestConditionFor(t *testing.T) {
expectedSQL: "",
expectedError: qbtypes.ErrInValues,
},
{
name: "Contains operator - string attribute",
key: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorContains,
value: 521509198310,
expectedSQL: "LOWER(JSONExtractString(labels, 'user.id')) LIKE LOWER(?)",
expectedArgs: []any{"%521509198310%"},
expectedError: nil,
},
{
name: "Not In operator - metric_name",
key: telemetrytypes.TelemetryFieldKey{

View File

@@ -67,10 +67,6 @@ func GetKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation])
for idx := range keySelectors {
keySelectors[idx].Signal = telemetrytypes.SignalMetrics
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
keySelectors[idx].MetricContext = &telemetrytypes.MetricContext{
MetricName: query.Aggregations[0].MetricName,
}
}
return keySelectors
}
@@ -84,7 +80,7 @@ func (b *MetricQueryStatementBuilder) Build(
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
keySelectors := GetKeySelectors(query)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
@@ -298,7 +294,6 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
if query.Filter != nil && query.Filter.Expression != "" {
preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
Logger: b.logger,
FieldMapper: b.fm,
ConditionBuilder: b.cb,
FieldKeys: keys,
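Because this rendering interleaves old and new lines, the removed block in the GetKeySelectors hunk is easier to read in isolation. Before the change, every key selector was additionally scoped to the metric being aggregated, presumably restricting attribute-key lookups per metric; reconstructed from the hunk:

// Before: each selector carried the queried metric's name.
for idx := range keySelectors {
	keySelectors[idx].Signal = telemetrytypes.SignalMetrics
	keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
	keySelectors[idx].MetricContext = &telemetrytypes.MetricContext{
		MetricName: query.Aggregations[0].MetricName,
	}
}
// After: only Signal and SelectorMatchType are set.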

Some files were not shown because too many files have changed in this diff.