Mirror of https://github.com/SigNoz/signoz.git (synced 2025-12-27 09:22:12 +00:00)

Compare commits (2): b31ef0937e, 917c7be6c8
.gitignore (vendored): 1 line changed
@@ -49,7 +49,6 @@ ee/query-service/tests/test-deploy/data/
# local data
*.backup
*.db
**/db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
bin/
Makefile: 6 lines changed
@@ -72,12 +72,6 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
	@echo "  - ClickHouse: http://localhost:8123"
	@echo "  - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"

.PHONY: devenv-clickhouse-clean
devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
	@echo "Removing ClickHouse data..."
	@rm -rf .devenv/docker/clickhouse/fs/tmp/*
	@echo "ClickHouse data cleaned!"

##############################################################
# go commands
##############################################################
@@ -849,71 +849,6 @@ paths:
      summary: Deprecated create session by email password
      tags:
        - sessions
  /api/v1/logs/promote_paths:
    get:
      deprecated: false
      description: This endpoint promotes and indexes paths
      operationId: ListPromotedAndIndexedPaths
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    items:
                      $ref: '#/components/schemas/PromotetypesPromotePath'
                    nullable: true
                    type: array
                  status:
                    type: string
                type: object
          description: OK
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      summary: Promote and index paths
      tags:
        - logs
    post:
      deprecated: false
      description: This endpoint promotes and indexes paths
      operationId: HandlePromoteAndIndexPaths
      requestBody:
        content:
          application/json:
            schema:
              items:
                $ref: '#/components/schemas/PromotetypesPromotePath'
              nullable: true
              type: array
      responses:
        "201":
          description: Created
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      summary: Promote and index paths
      tags:
        - logs
  /api/v1/org/preferences:
    get:
      deprecated: false
@@ -2202,26 +2137,6 @@ components:
      type: object
    PreferencetypesValue:
      type: object
    PromotetypesPromotePath:
      properties:
        indexes:
          items:
            $ref: '#/components/schemas/PromotetypesWrappedIndex'
          type: array
        path:
          type: string
        promote:
          type: boolean
      type: object
    PromotetypesWrappedIndex:
      properties:
        column_type:
          type: string
        granularity:
          type: integer
        type:
          type: string
      type: object
    RenderErrorResponse:
      properties:
        error:
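For orientation, a minimal sketch of what a client call against the removed POST endpoint looked like, built from the schema excerpt above. The base URL and error-handling details are assumptions, not part of the spec.

// Illustrative only: exercises the removed POST /api/v1/logs/promote_paths
// contract. The localhost base URL below is an assumption.
type PromotePath = {
  path: string;
  promote: boolean;
  indexes?: { column_type?: string; granularity?: number; type?: string }[];
};

async function promotePaths(paths: PromotePath[]): Promise<void> {
  const res = await fetch('http://localhost:8080/api/v1/logs/promote_paths', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(paths), // schema: nullable array of PromotetypesPromotePath
  });
  if (res.status !== 201) {
    // per the spec, 400 and 500 carry a RenderErrorResponse body
    throw new Error(`promote_paths failed with status ${res.status}`);
  }
}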
@@ -6,7 +6,6 @@ import logEvent from 'api/common/logEvent';
import AppLoading from 'components/AppLoading/AppLoading';
import { CmdKPalette } from 'components/cmdKPalette/cmdKPalette';
import NotFound from 'components/NotFound';
import { ShiftHoldOverlayController } from 'components/ShiftOverlay/ShiftHoldOverlayController';
import Spinner from 'components/Spinner';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
@@ -369,9 +368,6 @@ function App(): JSX.Element {
      <NotificationProvider>
        <ErrorModalProvider>
          {isLoggedInState && <CmdKPalette userRole={user.role} />}
          {isLoggedInState && (
            <ShiftHoldOverlayController userRole={user.role} />
          )}
          <PrivateRoute>
            <ResourceProvider>
              <QueryBuilderProvider>
@@ -1,27 +0,0 @@
import { createShortcutActions } from '../../constants/shortcutActions';
import { useCmdK } from '../../providers/cmdKProvider';
import { ShiftOverlay } from './ShiftOverlay';
import { useShiftHoldOverlay } from './useShiftHoldOverlay';

type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';
export function ShiftHoldOverlayController({
  userRole,
}: {
  userRole: UserRole;
}): JSX.Element | null {
  const { open: isCmdKOpen } = useCmdK();
  const noop = (): void => undefined;

  const actions = createShortcutActions({
    navigate: noop,
    handleThemeChange: noop,
  });

  const visible = useShiftHoldOverlay({
    isModalOpen: isCmdKOpen,
  });

  return (
    <ShiftOverlay visible={visible} actions={actions} userRole={userRole} />
  );
}
@@ -1,77 +0,0 @@
import './shiftOverlay.scss';

import { useMemo } from 'react';
import ReactDOM from 'react-dom';

import { formatShortcut } from './formatShortcut';

export type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';
export type CmdAction = {
  id: string;
  name: string;
  shortcut?: string[];
  keywords?: string;
  section?: string;
  roles?: UserRole[];
  perform: () => void;
};

interface ShortcutProps {
  label: string;
  keyHint: React.ReactNode;
}

function Shortcut({ label, keyHint }: ShortcutProps): JSX.Element {
  return (
    <div className="shift-overlay__item">
      <span className="shift-overlay__label">{label}</span>
      <kbd className="shift-overlay__kbd">{keyHint}</kbd>
    </div>
  );
}

interface ShiftOverlayProps {
  visible: boolean;
  actions: CmdAction[];
  userRole: UserRole;
}

export function ShiftOverlay({
  visible,
  actions,
  userRole,
}: ShiftOverlayProps): JSX.Element | null {
  const navigationActions = useMemo(() => {
    // RBAC filter: show action if no roles set OR current user role is included
    const permitted = actions.filter(
      (a) => !a.roles || a.roles.includes(userRole),
    );

    // Navigation only + must have shortcut
    return permitted.filter(
      (a) =>
        a.section?.toLowerCase() === 'navigation' &&
        a.shortcut &&
        a.shortcut.length > 0,
    );
  }, [actions, userRole]);

  if (!visible || navigationActions.length === 0) {
    return null;
  }

  return ReactDOM.createPortal(
    <div className="shift-overlay">
      <div className="shift-overlay__panel">
        {navigationActions.map((action) => (
          <Shortcut
            key={action.id}
            label={action.name.replace(/^Go to\s+/i, '')}
            keyHint={formatShortcut(action.shortcut)}
          />
        ))}
      </div>
    </div>,
    document.body,
  );
}
@@ -1,102 +0,0 @@
import '@testing-library/jest-dom';

import { render, screen } from '@testing-library/react';

import type { CmdAction } from '../ShiftOverlay';
import { ShiftOverlay } from '../ShiftOverlay';

jest.mock('../formatShortcut', () => ({
  formatShortcut: (shortcut: string[]): string => shortcut.join('+'),
}));

const baseActions: CmdAction[] = [
  {
    id: '1',
    name: 'Go to Traces',
    section: 'navigation',
    shortcut: ['Shift', 'T'],
    perform: jest.fn(),
  },
  {
    id: '2',
    name: 'Go to Metrics',
    section: 'navigation',
    shortcut: ['Shift', 'M'],
    roles: ['ADMIN'], // ✅ now UserRole[]
    perform: jest.fn(),
  },
  {
    id: '3',
    name: 'Create Alert',
    section: 'actions',
    shortcut: ['A'],
    perform: jest.fn(),
  },
  {
    id: '4',
    name: 'Go to Logs',
    section: 'navigation',
    perform: jest.fn(),
  },
];

describe('ShiftOverlay', () => {
  it('renders nothing when not visible', () => {
    const { container } = render(
      <ShiftOverlay visible={false} actions={baseActions} userRole="ADMIN" />,
    );

    expect(container.firstChild).toBeNull();
  });

  it('renders nothing when no navigation shortcuts exist', () => {
    const { container } = render(
      <ShiftOverlay
        visible
        actions={[
          {
            id: 'x',
            name: 'Create Alert',
            section: 'actions',
            perform: jest.fn(),
          },
        ]}
        userRole="ADMIN"
      />,
    );

    expect(container.firstChild).toBeNull();
  });

  it('renders navigation shortcuts in a portal', () => {
    render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

    expect(document.body.querySelector('.shift-overlay')).toBeInTheDocument();

    expect(screen.getByText('Traces')).toBeInTheDocument();
    expect(screen.getByText('Metrics')).toBeInTheDocument();

    expect(screen.getByText('Shift+T')).toBeInTheDocument();
    expect(screen.getByText('Shift+M')).toBeInTheDocument();
  });

  it('applies RBAC filtering correctly', () => {
    render(<ShiftOverlay visible actions={baseActions} userRole="VIEWER" />);

    expect(screen.getByText('Traces')).toBeInTheDocument();
    expect(screen.queryByText('Metrics')).not.toBeInTheDocument();
  });

  it('strips "Go to" prefix from labels', () => {
    render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

    expect(screen.getByText('Traces')).toBeInTheDocument();
    expect(screen.queryByText('Go to Traces')).not.toBeInTheDocument();
  });

  it('does not render actions without shortcuts', () => {
    render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

    expect(screen.queryByText('Logs')).not.toBeInTheDocument();
  });
});
@@ -1,144 +0,0 @@
import { act, renderHook } from '@testing-library/react';

import { useShiftHoldOverlay } from '../useShiftHoldOverlay';

jest.useFakeTimers();

function pressShift(target: EventTarget = window): void {
  const event = new KeyboardEvent('keydown', {
    key: 'Shift',
    bubbles: true,
  });
  Object.defineProperty(event, 'target', { value: target });
  window.dispatchEvent(event);
}

function releaseShift(): void {
  window.dispatchEvent(
    new KeyboardEvent('keyup', {
      key: 'Shift',
      bubbles: true,
    }),
  );
}

describe('useShiftHoldOverlay', () => {
  afterEach(() => {
    jest.clearAllTimers();
  });

  it('shows overlay after holding Shift for 600ms', () => {
    const { result } = renderHook(() => useShiftHoldOverlay({}));

    act(() => {
      pressShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(true);
  });

  it('does not show overlay if Shift is released early', () => {
    const { result } = renderHook(() => useShiftHoldOverlay({}));

    act(() => {
      pressShift();
      jest.advanceTimersByTime(300);
      releaseShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(false);
  });

  it('hides overlay on Shift key release', () => {
    const { result } = renderHook(() => useShiftHoldOverlay({}));

    act(() => {
      pressShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(true);

    act(() => {
      releaseShift();
    });

    expect(result.current).toBe(false);
  });

  it('does not activate when modal is open', () => {
    const { result } = renderHook(() =>
      useShiftHoldOverlay({ isModalOpen: true }),
    );

    act(() => {
      pressShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(false);
  });

  it('does not activate in typing context (input)', () => {
    const input = document.createElement('input');
    document.body.appendChild(input);

    const { result } = renderHook(() => useShiftHoldOverlay({}));

    act(() => {
      pressShift(input);
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(false);

    document.body.removeChild(input);
  });

  it('cleans up on window blur', () => {
    const { result } = renderHook(() => useShiftHoldOverlay({}));

    act(() => {
      pressShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(true);

    act(() => {
      window.dispatchEvent(new Event('blur'));
    });

    expect(result.current).toBe(false);
  });

  it('cleans up on document visibility change', () => {
    const { result } = renderHook(() => useShiftHoldOverlay({}));

    act(() => {
      pressShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(true);

    act(() => {
      document.dispatchEvent(new Event('visibilitychange'));
    });

    expect(result.current).toBe(false);
  });

  it('does nothing when disabled', () => {
    const { result } = renderHook(() => useShiftHoldOverlay({ disabled: true }));

    act(() => {
      pressShift();
      jest.advanceTimersByTime(600);
    });

    expect(result.current).toBe(false);
  });
});
@@ -1,44 +0,0 @@
import './shiftOverlay.scss';

import { ArrowUp, ChevronUp, Command, Option } from 'lucide-react';
import { ReactNode } from 'react';

export function formatShortcut(shortcut?: string[]): ReactNode {
  if (!shortcut || shortcut.length === 0) return null;

  const combo = shortcut.find((s) => typeof s === 'string' && s.trim());
  if (!combo) return null;

  return combo.split('+').map((key) => {
    const k = key.trim().toLowerCase();

    let node: ReactNode;
    switch (k) {
      case 'shift':
        node = <ArrowUp size={14} />;
        break;
      case 'cmd':
      case 'meta':
        node = <Command size={14} />;
        break;
      case 'alt':
        node = <Option size={14} />;
        break;
      case 'ctrl':
      case 'control':
        node = <ChevronUp size={14} />;
        break;
      case 'arrowup':
        node = <ArrowUp size={14} />;
        break;
      default:
        node = k.toUpperCase();
    }

    return (
      <span key={`shortcut-${k}`} className="shift-overlay__key">
        {node}
      </span>
    );
  });
}
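For reference, a small sketch of how the deleted helper rendered a combo string; the output shape is inferred from the switch above, not shown in the diff itself.

// formatShortcut takes the first non-empty combo string and splits it on '+':
// 'shift' maps to an <ArrowUp /> glyph, unrecognized keys are upper-cased.
const hint = formatShortcut(['shift+t']);
// -> [<span className="shift-overlay__key"><ArrowUp size={14} /></span>,
//     <span className="shift-overlay__key">T</span>]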
@@ -1,75 +0,0 @@
.shift-overlay {
  position: fixed;
  bottom: 20px;
  left: 50%;
  transform: translateX(-50%);
  z-index: 9999;
  pointer-events: none;

  &__panel {
    display: flex;
    gap: 20px;
    padding: 8px 12px;

    background: var(--bg-ink-500);
    color: var(--bg-vanilla-300);

    border-radius: 8px;
    font-size: 13px;
    line-height: 1.2;

    box-shadow: 0 6px 20px var(--bg-ink-500);
    animation: shift-overlay-fade-in 120ms ease-out;
  }

  &__item {
    display: flex;
    align-items: center;
    gap: 6px;
    white-space: nowrap;
  }

  &__label {
    opacity: 0.9;
  }

  &__kbd {
    font-family: monospace;
    font-size: 12px;
    padding: 2px 6px;
    display: flex;

    border-radius: 4px;
    background: var(--bg-slate-100);
  }

  &__key {
    display: inline-flex;
    align-items: center;
    justify-content: center;

    min-width: 15px;
    height: 20px;

    border-radius: 4px;

    background-color: var(--bg-slate-100);

    font-size: 12px;
    font-weight: 500;
    line-height: 1;
    color: var(--bg-vanilla-300);
    flex-shrink: 0;
  }
}

@keyframes shift-overlay-fade-in {
  from {
    opacity: 0;
    transform: translateY(-4px);
  }
  to {
    opacity: 1;
    transform: translateY(0);
  }
}
@@ -1,87 +0,0 @@
import { useEffect, useRef, useState } from 'react';

const HOLD_DELAY_MS = 500;

function isTypingContext(target: EventTarget | null): boolean {
  if (!(target instanceof HTMLElement)) return false;

  const tag = target.tagName;
  return tag === 'INPUT' || tag === 'TEXTAREA' || target.isContentEditable;
}

interface UseShiftHoldOverlayOptions {
  disabled?: boolean;
  isModalOpen?: boolean;
}

export function useShiftHoldOverlay({
  disabled = false,
  isModalOpen = false,
}: UseShiftHoldOverlayOptions): boolean {
  const [visible, setVisible] = useState<boolean>(false);

  const timerRef = useRef<number | null>(null);
  const isHoldingRef = useRef<boolean>(false);

  useEffect((): (() => void) | void => {
    if (disabled) return;

    function cleanup(): void {
      isHoldingRef.current = false;

      if (timerRef.current !== null) {
        window.clearTimeout(timerRef.current);
        timerRef.current = null;
      }

      setVisible(false);
    }

    function onKeyDown(e: KeyboardEvent): void {
      if (e.key !== 'Shift') return;
      if (e.repeat) return;

      // Suppress in bad contexts
      if (
        isModalOpen ||
        e.metaKey ||
        e.ctrlKey ||
        e.altKey ||
        isTypingContext(e.target)
      ) {
        return;
      }

      isHoldingRef.current = true;

      timerRef.current = window.setTimeout(() => {
        if (isHoldingRef.current) {
          setVisible(true);
        }
      }, HOLD_DELAY_MS);
    }

    function onKeyUp(e: KeyboardEvent): void {
      if (e.key !== 'Shift') return;
      cleanup();
    }

    function onBlur(): void {
      cleanup();
    }

    window.addEventListener('keydown', onKeyDown);
    window.addEventListener('keyup', onKeyUp);
    window.addEventListener('blur', onBlur);
    document.addEventListener('visibilitychange', cleanup);

    return (): void => {
      window.removeEventListener('keydown', onKeyDown);
      window.removeEventListener('keyup', onKeyUp);
      window.removeEventListener('blur', onBlur);
      document.removeEventListener('visibilitychange', cleanup);
    };
  }, [disabled, isModalOpen]);

  return visible;
}
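Note that the tests earlier in this diff advance fake timers by 600ms while HOLD_DELAY_MS here is 500ms, so they comfortably cross the hold threshold. A minimal consumer of the deleted hook, mirroring the ShiftHoldOverlayController above (component name is illustrative):

// Sketch of typical usage, following the controller component in this diff.
function OverlayGate({ modalOpen }: { modalOpen: boolean }): JSX.Element | null {
  // visible becomes true only after Shift has been held for HOLD_DELAY_MS
  const visible = useShiftHoldOverlay({ isModalOpen: modalOpen });
  return visible ? <div className="shift-overlay" /> : null;
}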
@@ -159,6 +159,7 @@ describe('CmdKPalette', () => {

    expect(screen.getByText(HOME_LABEL)).toBeInTheDocument();
    expect(screen.getByText('Go to Dashboards')).toBeInTheDocument();
    expect(screen.getByText('Open Sidebar')).toBeInTheDocument();
    expect(screen.getByText('Switch to Dark Mode')).toBeInTheDocument();
  });
@@ -9,12 +9,34 @@ import {
  CommandList,
  CommandShortcut,
} from '@signozhq/command';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import updateUserPreference from 'api/v1/user/preferences/name/update';
import { AxiosError } from 'axios';
import ROUTES from 'constants/routes';
import { USER_PREFERENCES } from 'constants/userPreferences';
import { useThemeMode } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import { useNotifications } from 'hooks/useNotifications';
import history from 'lib/history';
import {
  BellDot,
  BugIcon,
  DraftingCompass,
  Expand,
  HardDrive,
  Home,
  LayoutGrid,
  ListMinus,
  ScrollText,
  Settings,
} from 'lucide-react';
import React, { useEffect } from 'react';
import { useMutation } from 'react-query';
import { UserPreference } from 'types/api/preferences/preference';
import { showErrorNotification } from 'utils/error';

import { createShortcutActions } from '../../constants/shortcutActions';
import { useAppContext } from '../../providers/App/App';
import { useCmdK } from '../../providers/cmdKProvider';

type CmdAction = {
@@ -36,8 +58,19 @@ export function CmdKPalette({
}): JSX.Element | null {
  const { open, setOpen } = useCmdK();

  const { updateUserPreferenceInContext } = useAppContext();
  const { notifications } = useNotifications();
  const { setAutoSwitch, setTheme, theme } = useThemeMode();

  const { mutate: updateUserPreferenceMutation } = useMutation(
    updateUserPreference,
    {
      onError: (error) => {
        showErrorNotification(notifications, error as AxiosError);
      },
    },
  );

  // toggle palette with ⌘/Ctrl+K
  function handleGlobalCmdK(
    e: KeyboardEvent,
@@ -78,10 +111,164 @@ export function CmdKPalette({
    history.push(key);
  }

  const actions = createShortcutActions({
    navigate: onClickHandler,
    handleThemeChange,
  });
  function handleOpenSidebar(): void {
    setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, 'true');
    const save = { name: USER_PREFERENCES.SIDENAV_PINNED, value: true };
    updateUserPreferenceInContext(save as UserPreference);
    updateUserPreferenceMutation({
      name: USER_PREFERENCES.SIDENAV_PINNED,
      value: true,
    });
  }

  function handleCloseSidebar(): void {
    setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, 'false');
    const save = { name: USER_PREFERENCES.SIDENAV_PINNED, value: false };
    updateUserPreferenceInContext(save as UserPreference);
    updateUserPreferenceMutation({
      name: USER_PREFERENCES.SIDENAV_PINNED,
      value: false,
    });
  }

  const actions: CmdAction[] = [
    {
      id: 'home',
      name: 'Go to Home',
      shortcut: ['shift + h'],
      keywords: 'home',
      section: 'Navigation',
      icon: <Home size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.HOME),
    },
    {
      id: 'dashboards',
      name: 'Go to Dashboards',
      shortcut: ['shift + d'],
      keywords: 'dashboards',
      section: 'Navigation',
      icon: <LayoutGrid size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.ALL_DASHBOARD),
    },
    {
      id: 'services',
      name: 'Go to Services',
      shortcut: ['shift + s'],
      keywords: 'services monitoring',
      section: 'Navigation',
      icon: <HardDrive size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.APPLICATION),
    },
    {
      id: 'traces',
      name: 'Go to Traces',
      shortcut: ['shift + t'],
      keywords: 'traces',
      section: 'Navigation',
      icon: <DraftingCompass size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.TRACES_EXPLORER),
    },
    {
      id: 'logs',
      name: 'Go to Logs',
      shortcut: ['shift + l'],
      keywords: 'logs',
      section: 'Navigation',
      icon: <ScrollText size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.LOGS),
    },
    {
      id: 'alerts',
      name: 'Go to Alerts',
      shortcut: ['shift + a'],
      keywords: 'alerts',
      section: 'Navigation',
      icon: <BellDot size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.LIST_ALL_ALERT),
    },
    {
      id: 'exceptions',
      name: 'Go to Exceptions',
      shortcut: ['shift + e'],
      keywords: 'exceptions errors',
      section: 'Navigation',
      icon: <BugIcon size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.ALL_ERROR),
    },
    {
      id: 'messaging-queues',
      name: 'Go to Messaging Queues',
      shortcut: ['shift + m'],
      keywords: 'messaging queues mq',
      section: 'Navigation',
      icon: <ListMinus size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.MESSAGING_QUEUES_OVERVIEW),
    },
    {
      id: 'my-settings',
      name: 'Go to Account Settings',
      keywords: 'account settings',
      section: 'Navigation',
      icon: <Settings size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => onClickHandler(ROUTES.MY_SETTINGS),
    },

    // Settings
    {
      id: 'open-sidebar',
      name: 'Open Sidebar',
      keywords: 'sidebar navigation menu expand',
      section: 'Settings',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => handleOpenSidebar(),
    },
    {
      id: 'collapse-sidebar',
      name: 'Collapse Sidebar',
      keywords: 'sidebar navigation menu collapse',
      section: 'Settings',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => handleCloseSidebar(),
    },
    {
      id: 'dark-mode',
      name: 'Switch to Dark Mode',
      keywords: 'theme dark mode appearance',
      section: 'Settings',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => handleThemeChange(THEME_MODE.DARK),
    },
    {
      id: 'light-mode',
      name: 'Switch to Light Mode [Beta]',
      keywords: 'theme light mode appearance',
      section: 'Settings',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => handleThemeChange(THEME_MODE.LIGHT),
    },
    {
      id: 'system-theme',
      name: 'Switch to System Theme',
      keywords: 'system theme appearance',
      section: 'Settings',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
      perform: (): void => handleThemeChange(THEME_MODE.SYSTEM),
    },
  ];

  // RBAC filter: show action if no roles set OR current user role is included
  const permitted = actions.filter(
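The hunk is truncated at the filter call. Judging by the identical comment and predicate in ShiftOverlay.tsx earlier in this diff, the expression presumably continues as:

// Assumed continuation, matching the RBAC filter used in ShiftOverlay.tsx:
const permitted = actions.filter(
  (a) => !a.roles || a.roles.includes(userRole),
);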
@@ -1,263 +0,0 @@
import ROUTES from 'constants/routes';
import { GlobalShortcutsName } from 'constants/shortcuts/globalShortcuts';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import {
  BarChart2,
  BellDot,
  BugIcon,
  Compass,
  DraftingCompass,
  Expand,
  HardDrive,
  Home,
  LayoutGrid,
  ListMinus,
  ScrollText,
  Settings,
  TowerControl,
  Workflow,
} from 'lucide-react';
import React from 'react';

export type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';

export type CmdAction = {
  id: string;
  name: string;
  shortcut?: string[];
  keywords?: string;
  section?: string;
  icon?: React.ReactNode;
  roles?: UserRole[];
  perform: () => void;
};

type ActionDeps = {
  navigate: (path: string) => void;
  handleThemeChange: (mode: string) => void;
};

export function createShortcutActions(deps: ActionDeps): CmdAction[] {
  const { navigate, handleThemeChange } = deps;

  return [
    {
      id: 'home',
      name: 'Go to Home',
      shortcut: [GlobalShortcutsName.NavigateToHome],
      keywords: 'home',
      section: 'Navigation',
      icon: <Home size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.HOME),
    },
    {
      id: 'dashboards',
      name: 'Go to Dashboards',
      shortcut: [GlobalShortcutsName.NavigateToDashboards],
      keywords: 'dashboards',
      section: 'Navigation',
      icon: <LayoutGrid size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.ALL_DASHBOARD),
    },
    {
      id: 'services',
      name: 'Go to Services',
      shortcut: [GlobalShortcutsName.NavigateToServices],
      keywords: 'services monitoring',
      section: 'Navigation',
      icon: <HardDrive size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.APPLICATION),
    },
    {
      id: 'alerts',
      name: 'Go to Alerts',
      shortcut: [GlobalShortcutsName.NavigateToAlerts],
      keywords: 'alerts',
      section: 'Navigation',
      icon: <BellDot size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.LIST_ALL_ALERT),
    },
    {
      id: 'exceptions',
      name: 'Go to Exceptions',
      shortcut: [GlobalShortcutsName.NavigateToExceptions],
      keywords: 'exceptions errors',
      section: 'Navigation',
      icon: <BugIcon size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.ALL_ERROR),
    },
    {
      id: 'messaging-queues',
      name: 'Go to Messaging Queues',
      shortcut: [GlobalShortcutsName.NavigateToMessagingQueues],
      keywords: 'messaging queues mq',
      section: 'Navigation',
      icon: <ListMinus size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.MESSAGING_QUEUES_OVERVIEW),
    },

    // logs
    {
      id: 'logs',
      name: 'Go to Logs',
      shortcut: [GlobalShortcutsName.NavigateToLogs],
      keywords: 'logs',
      section: 'Logs',
      icon: <ScrollText size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.LOGS),
    },
    {
      id: 'logs',
      name: 'Go to Logs Pipelines',
      shortcut: [GlobalShortcutsName.NavigateToLogsPipelines],
      keywords: 'logs pipelines',
      section: 'Logs',
      icon: <Workflow size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.LOGS_PIPELINES),
    },
    {
      id: 'logs',
      name: 'Go to Logs Views',
      shortcut: [GlobalShortcutsName.NavigateToLogsViews],
      keywords: 'logs views',
      section: 'Logs',
      icon: <TowerControl size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.LOGS_SAVE_VIEWS),
    },

    // metrics
    {
      id: 'metrics-summary',
      name: 'Go to Metrics Summary',
      shortcut: [GlobalShortcutsName.NavigateToMetricsSummary],
      keywords: 'metrics summary',
      section: 'Metrics',
      icon: <BarChart2 size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.METRICS_EXPLORER),
    },
    {
      id: 'metrics-explorer',
      name: 'Go to Metrics Explorer',
      shortcut: [GlobalShortcutsName.NavigateToMetricsExplorer],
      keywords: 'metrics explorer',
      section: 'Metrics',
      icon: <Compass size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.METRICS_EXPLORER_EXPLORER),
    },
    {
      id: 'metrics-views',
      name: 'Go to Metrics Views',
      shortcut: [GlobalShortcutsName.NavigateToMetricsViews],
      keywords: 'metrics views',
      section: 'Metrics',
      icon: <TowerControl size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.METRICS_EXPLORER_VIEWS),
    },

    // Traces
    {
      id: 'traces',
      name: 'Go to Traces',
      shortcut: [GlobalShortcutsName.NavigateToTraces],
      keywords: 'traces',
      section: 'Traces',
      icon: <DraftingCompass size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.TRACES_EXPLORER),
    },
    {
      id: 'traces-funnel',
      name: 'Go to Traces Funnels',
      shortcut: [GlobalShortcutsName.NavigateToTracesFunnel],
      keywords: 'traces funnel',
      section: 'Traces',
      icon: <DraftingCompass size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.TRACES_FUNNELS),
    },

    // Common actions
    {
      id: 'dark-mode',
      name: 'Switch to Dark Mode',
      keywords: 'theme dark mode appearance',
      section: 'Common',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => handleThemeChange(THEME_MODE.DARK),
    },
    {
      id: 'light-mode',
      name: 'Switch to Light Mode [Beta]',
      keywords: 'theme light mode appearance',
      section: 'Common',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => handleThemeChange(THEME_MODE.LIGHT),
    },
    {
      id: 'system-theme',
      name: 'Switch to System Theme',
      keywords: 'system theme appearance',
      section: 'Common',
      icon: <Expand size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => handleThemeChange(THEME_MODE.SYSTEM),
    },

    // settings sub-pages
    {
      id: 'my-settings',
      name: 'Go to Account Settings',
      shortcut: [GlobalShortcutsName.NavigateToSettings],
      keywords: 'account settings',
      section: 'Settings',
      icon: <Settings size={14} />,
      roles: ['ADMIN', 'EDITOR', 'VIEWER'],
      perform: (): void => navigate(ROUTES.MY_SETTINGS),
    },
    {
      id: 'my-settings-ingestion',
      name: 'Go to Account Settings Ingestion',
      shortcut: [GlobalShortcutsName.NavigateToSettingsIngestion],
      keywords: 'account settings',
      section: 'Settings',
      icon: <Settings size={14} />,
      roles: ['ADMIN', 'EDITOR'],
      perform: (): void => navigate(ROUTES.INGESTION_SETTINGS),
    },

    {
      id: 'my-settings-billing',
      name: 'Go to Account Settings Billing',
      shortcut: [GlobalShortcutsName.NavigateToSettingsBilling],
      keywords: 'account settings billing',
      section: 'Settings',
      icon: <Settings size={14} />,
      roles: ['ADMIN', 'EDITOR'],
      perform: (): void => navigate(ROUTES.BILLING),
    },
    {
      id: 'my-settings-api-keys',
      name: 'Go to Account Settings API Keys',
      shortcut: [GlobalShortcutsName.NavigateToSettingsAPIKeys],
      keywords: 'account settings api keys',
      section: 'Settings',
      icon: <Settings size={14} />,
      roles: ['ADMIN', 'EDITOR'],
      perform: (): void => navigate(ROUTES.API_KEYS),
    },
  ];
}
@@ -1,57 +1,25 @@
export const GlobalShortcuts = {
  NavigateToServices: 'shift+s',
  NavigateToDashboards: 'shift+d',
  NavigateToAlerts: 'shift+a',
  NavigateToExceptions: 'shift+e',
  NavigateToMessagingQueues: 'shift+q',
  ToggleSidebar: 'shift+b',
  NavigateToHome: 'shift+h',

  // logs
  NavigateToLogs: 'shift+l',
  NavigateToLogsPipelines: 'shift+l+p',
  NavigateToLogsViews: 'shift+l+v',

  // traces
  NavigateToTraces: 'shift+t',
  NavigateToTracesFunnel: 'shift+t+f',
  NavigateToTracesViews: 'shift+t+v',

  // metrics
  NavigateToMetricsSummary: 'shift+m',
  NavigateToMetricsExplorer: 'shift+m+e',
  NavigateToMetricsViews: 'shift+m+v',

  // settings
  NavigateToSettings: 'shift+g',
  NavigateToSettingsIngestion: 'shift+g+i',
  NavigateToSettingsBilling: 'shift+g+b',
  NavigateToSettingsAPIKeys: 'shift+g+k',
  NavigateToSettingsNotificationChannels: 'shift+g+n',
  NavigateToServices: 's+shift',
  NavigateToTraces: 't+shift',
  NavigateToLogs: 'l+shift',
  NavigateToDashboards: 'd+shift',
  NavigateToAlerts: 'a+shift',
  NavigateToExceptions: 'e+shift',
  NavigateToMessagingQueues: 'm+shift',
  ToggleSidebar: 'b+shift',
  NavigateToHome: 'h+shift',
};

export const GlobalShortcutsName = {
  NavigateToServices: 'shift+s',
  NavigateToTraces: 'shift+t',
  NavigateToLogs: 'shift+l',
  NavigateToDashboards: 'shift+d',
  NavigateToAlerts: 'shift+a',
  NavigateToExceptions: 'shift+e',
  NavigateToMessagingQueues: 'shift+q',
  NavigateToMessagingQueues: 'shift+m',
  ToggleSidebar: 'shift+b',
  NavigateToHome: 'shift+h',
  NavigateToTracesFunnel: 'shift+t+f',
  NavigateToTracesViews: 'shift+t+v',
  NavigateToMetricsSummary: 'shift+m',
  NavigateToMetricsExplorer: 'shift+m+e',
  NavigateToMetricsViews: 'shift+m+v',
  NavigateToSettings: 'shift+g',
  NavigateToSettingsIngestion: 'shift+g+i',
  NavigateToSettingsBilling: 'shift+g+b',
  NavigateToSettingsAPIKeys: 'shift+g+k',
  NavigateToSettingsNotificationChannels: 'shift+g+n',
  NavigateToLogs: 'shift+l',
  NavigateToLogsPipelines: 'shift+l+p',
  NavigateToLogsViews: 'shift+l+v',
};

export const GlobalShortcutsDescription = {
@@ -64,17 +32,4 @@ export const GlobalShortcutsDescription = {
  NavigateToExceptions: 'Navigate to Exceptions List',
  NavigateToMessagingQueues: 'Navigate to Messaging Queues',
  ToggleSidebar: 'Toggle sidebar visibility',
  NavigateToTracesFunnel: 'Navigate to Traces Funnel',
  NavigateToTracesViews: 'Navigate to Traces Views',
  NavigateToMetricsSummary: 'Navigate to Metrics Summary',
  NavigateToMetricsExplorer: 'Navigate to Metrics Explorer',
  NavigateToMetricsViews: 'Navigate to Metrics Views',
  NavigateToSettings: 'Navigate to Settings',
  NavigateToSettingsIngestion: 'Navigate to Ingestion Settings',
  NavigateToSettingsBilling: 'Navigate to Billing Settings',
  NavigateToSettingsAPIKeys: 'Navigate to API Keys Settings',
  NavigateToSettingsNotificationChannels:
    'Navigate to Notification Channels Settings',
  NavigateToLogsPipelines: 'Navigate to Logs Pipelines',
  NavigateToLogsViews: 'Navigate to Logs Views',
};
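These GlobalShortcuts values feed the keyboard-hotkeys registry. A minimal sketch of registration and cleanup, following the SideNav useEffect shown further down in this compare view:

// Sketch based on the SideNav pattern later in this diff.
useEffect(() => {
  registerShortcut(GlobalShortcuts.NavigateToHome, () =>
    onClickHandler(ROUTES.HOME, null),
  );
  return (): void => {
    // every registered shortcut is deregistered on unmount
    deregisterShortcut(GlobalShortcuts.NavigateToHome);
  };
}, [registerShortcut, deregisterShortcut, onClickHandler]);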
@@ -10,20 +10,6 @@ import {
import { QueryClient, QueryClientProvider } from 'react-query';

// Mock dependencies
jest.mock('providers/cmdKProvider', () => ({
  useCmdK: (): {
    open: boolean;
    setOpen: React.Dispatch<React.SetStateAction<boolean>>;
    openCmdK: () => void;
    closeCmdK: () => void;
  } => ({
    open: false,
    setOpen: jest.fn(),
    openCmdK: jest.fn(),
    closeCmdK: jest.fn(),
  }),
}));

jest.mock('api/common/logEvent', () => jest.fn());

// Mock the AppContext
@@ -77,7 +63,7 @@ describe('Sidebar Toggle Shortcut', () => {

describe('Global Shortcuts Constants', () => {
  it('should have the correct shortcut key combination', () => {
    expect(GlobalShortcuts.ToggleSidebar).toBe('shift+b');
    expect(GlobalShortcuts.ToggleSidebar).toBe('b+shift');
  });
});
@@ -67,6 +67,7 @@ function WidgetGraphComponent({
}: WidgetGraphComponentProps): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const [deleteModal, setDeleteModal] = useState(false);
  const [hovered, setHovered] = useState(false);
  const { notifications } = useNotifications();
  const { pathname, search } = useLocation();

@@ -315,6 +316,18 @@ function WidgetGraphComponent({
      style={{
        height: '100%',
      }}
      onMouseOver={(): void => {
        setHovered(true);
      }}
      onFocus={(): void => {
        setHovered(true);
      }}
      onMouseOut={(): void => {
        setHovered(false);
      }}
      onBlur={(): void => {
        setHovered(false);
      }}
      id={widget.id}
      className="widget-graph-component-container"
    >
@@ -364,6 +377,7 @@ function WidgetGraphComponent({

      <div className="drag-handle">
        <WidgetHeader
          parentHover={hovered}
          title={widget?.title}
          widget={widget}
          onView={handleOnView}
@@ -99,12 +99,6 @@
      height: calc(100% - 30px);
    }
  }

  &:hover {
    .widget-header-more-options {
      visibility: visible;
    }
  }
}

.widget-full-view {
@@ -51,6 +51,10 @@
    visibility: visible;
  }

  .widget-header-hover {
    visibility: visible;
  }

  .widget-api-actions {
    padding-right: 0.25rem;
  }
@@ -181,6 +181,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -203,6 +204,7 @@ describe('WidgetHeader', () => {
        title="Empty Widget"
        widget={emptyWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -225,6 +227,7 @@ describe('WidgetHeader', () => {
        title={TABLE_WIDGET_TITLE}
        widget={tableWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -252,6 +255,7 @@ describe('WidgetHeader', () => {
        title={TABLE_WIDGET_TITLE}
        widget={tableWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -294,6 +298,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={errorResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -335,6 +340,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={warningResponse}
        isWarning
        isFetchingResponse={false}
@@ -364,6 +370,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={fetchingResponse}
        isWarning={false}
        isFetchingResponse
@@ -382,6 +389,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -406,6 +414,7 @@ describe('WidgetHeader', () => {
        title={TABLE_WIDGET_TITLE}
        widget={tableWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -424,6 +433,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -444,6 +454,7 @@ describe('WidgetHeader', () => {
        title={TEST_WIDGET_TITLE}
        widget={mockWidget}
        onView={mockOnView}
        parentHover={false}
        queryResponse={mockQueryResponse}
        isWarning={false}
        isFetchingResponse={false}
@@ -48,6 +48,7 @@ interface IWidgetHeaderProps {
  onView: VoidFunction;
  onDelete?: VoidFunction;
  onClone?: VoidFunction;
  parentHover: boolean;
  queryResponse: UseQueryResult<
    SuccessResponse<MetricRangePayloadProps, unknown> & {
      warning?: Warning;
@@ -68,6 +69,7 @@ function WidgetHeader({
  onView,
  onDelete,
  onClone,
  parentHover,
  queryResponse,
  threshold,
  headerMenuList,
@@ -313,6 +315,8 @@ function WidgetHeader({
      <MoreOutlined
        data-testid="widget-header-options"
        className={`widget-header-more-options ${
          parentHover ? 'widget-header-hover' : ''
        } ${
          globalSearchAvailable ? 'widget-header-more-options-visible' : ''
        }`}
      />
@@ -92,14 +92,14 @@ function BodyTitleRenderer({

    if (isObject) {
      // For objects/arrays, stringify the entire structure
      copyText = JSON.stringify(value, null, 2);
      copyText = `"${cleanedKey}": ${JSON.stringify(value, null, 2)}`;
    } else if (parentIsArray) {
      // array elements
      copyText = `${value}`;
      // For array elements, copy just the value
      copyText = `"${cleanedKey}": ${value}`;
    } else {
      // primitive values
      const valueStr = typeof value === 'string' ? value : String(value);
      copyText = valueStr;
      // For primitive values, format as JSON key-value pair
      const valueStr = typeof value === 'string' ? `"${value}"` : String(value);
      copyText = `"${cleanedKey}": ${valueStr}`;
    }

    setCopy(copyText);
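The net effect of this change is that every copied value now carries its JSON key. A condensed restatement of the new branch logic as a pure function (the function name is illustrative; cleanedKey, value, isObject, and parentIsArray are the component's own variables):

// Condensed restatement of the branch above, for clarity.
function formatCopyText(
  cleanedKey: string,
  value: unknown,
  isObject: boolean,
  parentIsArray: boolean,
): string {
  if (isObject) return `"${cleanedKey}": ${JSON.stringify(value, null, 2)}`;
  if (parentIsArray) return `"${cleanedKey}": ${value}`; // e.g. "items[*].0": arrayElement
  const valueStr = typeof value === 'string' ? `"${value}"` : String(value);
  return `"${cleanedKey}": ${valueStr}`; // e.g. "user.name": "John"
}

These shapes match the updated BodyTitleRenderer test expectations later in this diff.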
@@ -60,8 +60,7 @@ const BodyContent: React.FC<{
  fieldData: Record<string, string>;
  record: DataType;
  bodyHtml: { __html: string };
  textToCopy: string;
}> = React.memo(({ fieldData, record, bodyHtml, textToCopy }) => {
}> = React.memo(({ fieldData, record, bodyHtml }) => {
  const { isLoading, treeData, error } = useAsyncJSONProcessing(
    fieldData.value,
    record.field === 'body',
@@ -93,13 +92,11 @@ const BodyContent: React.FC<{

  if (record.field === 'body') {
    return (
      <CopyClipboardHOC entityKey="body" textToCopy={textToCopy}>
        <span
          style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
        >
          <span dangerouslySetInnerHTML={bodyHtml} />
        </span>
      </CopyClipboardHOC>
      <span
        style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
      >
        <span dangerouslySetInnerHTML={bodyHtml} />
      </span>
    );
  }

@@ -175,12 +172,7 @@ export default function TableViewActions(
  switch (record.field) {
    case 'body':
      return (
        <BodyContent
          fieldData={fieldData}
          record={record}
          bodyHtml={bodyHtml}
          textToCopy={textToCopy}
        />
        <BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
      );

    case 'timestamp':
@@ -202,7 +194,6 @@ export default function TableViewActions(
    record,
    fieldData,
    bodyHtml,
    textToCopy,
    formatTimezoneAdjustedTimestamp,
    cleanTimestamp,
  ]);
@@ -211,12 +202,7 @@ export default function TableViewActions(
  if (record.field === 'body') {
    return (
      <div className={cx('value-field', isOpen ? 'open-popover' : '')}>
        <BodyContent
          fieldData={fieldData}
          record={record}
          bodyHtml={bodyHtml}
          textToCopy={textToCopy}
        />
        <BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
        {!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
          <span className="action-btn">
            <Tooltip title="Filter for value">
@@ -1,54 +1,16 @@
import { fireEvent, render, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';

import TableViewActions from '../TableViewActions';
import useAsyncJSONProcessing from '../useAsyncJSONProcessing';

// Mock data for tests
let mockCopyToClipboard: jest.Mock;
let mockNotificationsSuccess: jest.Mock;

// Mock the components and hooks
jest.mock('components/Logs/CopyClipboardHOC', () => ({
  __esModule: true,
  default: ({
    children,
    textToCopy,
    entityKey,
  }: {
    children: React.ReactNode;
    textToCopy: string;
    entityKey: string;
  }): JSX.Element => (
    // eslint-disable-next-line jsx-a11y/click-events-have-key-events
    <div
      className="CopyClipboardHOC"
      data-testid={`copy-clipboard-${entityKey}`}
      data-text-to-copy={textToCopy}
      onClick={(): void => {
        if (mockCopyToClipboard) {
          mockCopyToClipboard(textToCopy);
        }
        if (mockNotificationsSuccess) {
          mockNotificationsSuccess({
            message: `${entityKey} copied to clipboard`,
            key: `${entityKey} copied to clipboard`,
          });
        }
      }}
      role="button"
      tabIndex={0}
    >
      {children}
    </div>
  default: ({ children }: { children: React.ReactNode }): JSX.Element => (
    <div className="CopyClipboardHOC">{children}</div>
  ),
}));

jest.mock('../useAsyncJSONProcessing', () => ({
  __esModule: true,
  default: jest.fn(),
}));

jest.mock('providers/Timezone', () => ({
  useTimezone: (): {
    formatTimezoneAdjustedTimestamp: (timestamp: string) => string;
@@ -91,19 +53,6 @@ describe('TableViewActions', () => {
    onGroupByAttribute: jest.fn(),
  };

  beforeEach(() => {
    mockCopyToClipboard = jest.fn();
    mockNotificationsSuccess = jest.fn();

    // Default mock for useAsyncJSONProcessing
    const mockUseAsyncJSONProcessing = jest.mocked(useAsyncJSONProcessing);
    mockUseAsyncJSONProcessing.mockReturnValue({
      isLoading: false,
      treeData: null,
      error: null,
    });
  });

  it('should render without crashing', () => {
    render(
      <TableViewActions
@@ -178,60 +127,4 @@ describe('TableViewActions', () => {
      container.querySelector(ACTION_BUTTON_TEST_ID),
    ).not.toBeInTheDocument();
  });

  it('should copy non-JSON body text without quotes when user clicks on body', () => {
    // Setup: body field with surrounding quotes
    const bodyValueWithQuotes =
      '"FeatureFlag \'kafkaQueueProblems\' is enabled, sleeping 1 second"';
    const expectedCopiedText =
      "FeatureFlag 'kafkaQueueProblems' is enabled, sleeping 1 second";

    const bodyProps = {
      fieldData: {
        field: 'body',
        value: bodyValueWithQuotes,
      },
      record: {
        key: 'body-key',
        field: 'body',
        value: bodyValueWithQuotes,
      },
      isListViewPanel: false,
      isfilterInLoading: false,
      isfilterOutLoading: false,
      onClickHandler: jest.fn(),
      onGroupByAttribute: jest.fn(),
    };

    // Render component with body field
    render(
      <TableViewActions
        fieldData={bodyProps.fieldData}
        record={bodyProps.record}
        isListViewPanel={bodyProps.isListViewPanel}
        isfilterInLoading={bodyProps.isfilterInLoading}
        isfilterOutLoading={bodyProps.isfilterOutLoading}
        onClickHandler={bodyProps.onClickHandler}
        onGroupByAttribute={bodyProps.onGroupByAttribute}
      />,
    );

    // Find the clickable copy area for body
    const copyArea = screen.getByTestId('copy-clipboard-body');

    // Verify it has the correct text to copy (without quotes)
    expect(copyArea).toHaveAttribute('data-text-to-copy', expectedCopiedText);

    // Action: User clicks on body content
    fireEvent.click(copyArea);

    // Assert: Text was copied without surrounding quotes
    expect(mockCopyToClipboard).toHaveBeenCalledWith(expectedCopiedText);

    // Assert: Success notification shown
    expect(mockNotificationsSuccess).toHaveBeenCalledWith({
      message: 'body copied to clipboard',
      key: 'body copied to clipboard',
    });
  });
});
@@ -51,7 +51,7 @@ describe('BodyTitleRenderer', () => {
    await user.click(screen.getByText('name'));

    await waitFor(() => {
      expect(mockSetCopy).toHaveBeenCalledWith('John');
      expect(mockSetCopy).toHaveBeenCalledWith('"user.name": "John"');
      expect(mockNotification).toHaveBeenCalledWith(
        expect.objectContaining({
          message: expect.stringContaining('user.name'),
@@ -75,7 +75,7 @@ describe('BodyTitleRenderer', () => {
    await user.click(screen.getByText('0'));

    await waitFor(() => {
      expect(mockSetCopy).toHaveBeenCalledWith('arrayElement');
      expect(mockSetCopy).toHaveBeenCalledWith('"items[*].0": arrayElement');
    });
  });

@@ -96,8 +96,9 @@ describe('BodyTitleRenderer', () => {

    await waitFor(() => {
      const callArg = mockSetCopy.mock.calls[0][0];
      const expectedJson = JSON.stringify(testObject, null, 2);
      expect(callArg).toBe(expectedJson);
      expect(callArg).toContain('"user.metadata":');
      expect(callArg).toContain('"id": 123');
      expect(callArg).toContain('"active": true');
      expect(mockNotification).toHaveBeenCalledWith(
        expect.objectContaining({
          message: expect.stringContaining('object copied'),
@@ -363,6 +363,7 @@ export const WidgetHeaderProps: any = {
    title: 'Table - Panel',
    yAxisUnit: 'none',
  },
  parentHover: false,
  queryResponse: {
    status: 'success',
    isLoading: false,
@@ -679,42 +679,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
    registerShortcut(GlobalShortcuts.NavigateToExceptions, () =>
      onClickHandler(ROUTES.ALL_ERROR, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToTracesFunnel, () =>
      onClickHandler(ROUTES.TRACES_FUNNELS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToTracesViews, () =>
      onClickHandler(ROUTES.TRACES_SAVE_VIEWS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToMetricsSummary, () =>
      onClickHandler(ROUTES.METRICS_EXPLORER, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToMetricsExplorer, () =>
      onClickHandler(ROUTES.METRICS_EXPLORER_EXPLORER, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToMetricsViews, () =>
      onClickHandler(ROUTES.METRICS_EXPLORER_VIEWS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToSettings, () =>
      onClickHandler(ROUTES.SETTINGS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToSettingsIngestion, () =>
      onClickHandler(ROUTES.INGESTION_SETTINGS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToSettingsBilling, () =>
      onClickHandler(ROUTES.BILLING, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToSettingsAPIKeys, () =>
      onClickHandler(ROUTES.API_KEYS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToSettingsNotificationChannels, () =>
      onClickHandler(ROUTES.ALL_CHANNELS, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToLogsPipelines, () =>
      onClickHandler(ROUTES.LOGS_PIPELINES, null),
    );
    registerShortcut(GlobalShortcuts.NavigateToLogsViews, () =>
      onClickHandler(ROUTES.LOGS_SAVE_VIEWS, null),
    );

    return (): void => {
      deregisterShortcut(GlobalShortcuts.NavigateToHome);
      deregisterShortcut(GlobalShortcuts.NavigateToServices);
@@ -724,18 +689,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
      deregisterShortcut(GlobalShortcuts.NavigateToAlerts);
      deregisterShortcut(GlobalShortcuts.NavigateToExceptions);
      deregisterShortcut(GlobalShortcuts.NavigateToMessagingQueues);
      deregisterShortcut(GlobalShortcuts.NavigateToTracesFunnel);
      deregisterShortcut(GlobalShortcuts.NavigateToMetricsSummary);
      deregisterShortcut(GlobalShortcuts.NavigateToMetricsExplorer);
      deregisterShortcut(GlobalShortcuts.NavigateToMetricsViews);
      deregisterShortcut(GlobalShortcuts.NavigateToSettings);
      deregisterShortcut(GlobalShortcuts.NavigateToSettingsIngestion);
      deregisterShortcut(GlobalShortcuts.NavigateToSettingsBilling);
      deregisterShortcut(GlobalShortcuts.NavigateToSettingsAPIKeys);
      deregisterShortcut(GlobalShortcuts.NavigateToSettingsNotificationChannels);
      deregisterShortcut(GlobalShortcuts.NavigateToLogsPipelines);
      deregisterShortcut(GlobalShortcuts.NavigateToLogsViews);
      deregisterShortcut(GlobalShortcuts.NavigateToTracesViews);
    };
  }, [deregisterShortcut, onClickHandler, registerShortcut]);
@@ -5,20 +5,16 @@
&-virtuoso {
background: rgba(171, 189, 255, 0.04);
}
&-list-container {
&-list-container .logs-loading-skeleton {
height: 100%;

.logs-loading-skeleton {
height: 100%;
border: 1px solid var(--bg-slate-500);
border-top: none;
color: var(--bg-vanilla-400);
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 8px 0;
}
border: 1px solid var(--bg-slate-500);
border-top: none;
color: var(--bg-vanilla-400);
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 8px 0;
}

&-empty-content {

@@ -1,18 +1,11 @@
|
||||
import { render } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { useEffect } from 'react';
|
||||
|
||||
import {
|
||||
KeyboardHotkeysProvider,
|
||||
useKeyboardHotkeys,
|
||||
} from '../useKeyboardHotkeys';
|
||||
|
||||
jest.mock('../../../providers/cmdKProvider', () => ({
|
||||
useCmdK: (): { open: boolean } => ({
|
||||
open: false,
|
||||
}),
|
||||
}));
|
||||
|
||||
function TestComponentWithRegister({
|
||||
handleShortcut,
|
||||
}: {
|
||||
@@ -20,13 +13,14 @@ function TestComponentWithRegister({
|
||||
}): JSX.Element {
|
||||
const { registerShortcut } = useKeyboardHotkeys();
|
||||
|
||||
useEffect(() => {
|
||||
registerShortcut('a', handleShortcut);
|
||||
}, [registerShortcut, handleShortcut]);
|
||||
registerShortcut('a', handleShortcut);
|
||||
|
||||
return <span>Test Component</span>;
|
||||
return (
|
||||
<div>
|
||||
<span>Test Component</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function TestComponentWithDeRegister({
|
||||
handleShortcut,
|
||||
}: {
|
||||
@@ -34,18 +28,21 @@ function TestComponentWithDeRegister({
|
||||
}): JSX.Element {
|
||||
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
|
||||
|
||||
useEffect(() => {
|
||||
registerShortcut('b', handleShortcut);
|
||||
deregisterShortcut('b');
|
||||
}, [registerShortcut, deregisterShortcut, handleShortcut]);
|
||||
registerShortcut('b', handleShortcut);
|
||||
|
||||
return <span>Test Component</span>;
|
||||
// Deregister the shortcut before triggering it
|
||||
deregisterShortcut('b');
|
||||
|
||||
return (
|
||||
<div>
|
||||
<span>Test Component</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
describe('KeyboardHotkeysProvider', () => {
|
||||
it('registers and triggers shortcuts correctly', async () => {
|
||||
const handleShortcut = jest.fn();
|
||||
const user = userEvent.setup();
|
||||
|
||||
render(
|
||||
<KeyboardHotkeysProvider>
|
||||
@@ -53,15 +50,15 @@ describe('KeyboardHotkeysProvider', () => {
|
||||
</KeyboardHotkeysProvider>,
|
||||
);
|
||||
|
||||
// fires on keyup
|
||||
await user.keyboard('{a}');
|
||||
// Trigger the registered shortcut
|
||||
await userEvent.keyboard('a');
|
||||
|
||||
expect(handleShortcut).toHaveBeenCalledTimes(1);
|
||||
// Assert that the handleShortcut function has been called
|
||||
expect(handleShortcut).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('does not trigger deregistered shortcuts', async () => {
|
||||
it('deregisters shortcuts correctly', () => {
|
||||
const handleShortcut = jest.fn();
|
||||
const user = userEvent.setup();
|
||||
|
||||
render(
|
||||
<KeyboardHotkeysProvider>
|
||||
@@ -69,8 +66,10 @@ describe('KeyboardHotkeysProvider', () => {
|
||||
</KeyboardHotkeysProvider>,
|
||||
);
|
||||
|
||||
await user.keyboard('{b}');
|
||||
// Try to trigger the deregistered shortcut
|
||||
userEvent.keyboard('b');
|
||||
|
||||
// Assert that the handleShortcut function has NOT been called
|
||||
expect(handleShortcut).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,21 +8,20 @@ import {
useRef,
} from 'react';

import { useCmdK } from '../../providers/cmdKProvider';

interface KeyboardHotkeysContextReturnValue {
/**
* @param keyCombo provide the string for which the subsequent callback should be triggered. Example 'ctrl+a'
* @param keyCombination provide the string for which the subsequent callback should be triggered. Example 'ctrl+a'
* @param callback the callback that should be triggered when the above key combination is being pressed
* @returns void
*/
registerShortcut: (keyCombo: string, callback: () => void) => void;
registerShortcut: (keyCombination: string, callback: () => void) => void;

/**
*
* @param keyCombo provide the string for which we want to deregister the callback
* @param keyCombination provide the string for which we want to deregister the callback
* @returns void
*/
deregisterShortcut: (keyCombo: string) => void;
deregisterShortcut: (keyCombination: string) => void;
}

const KeyboardHotkeysContext = createContext<KeyboardHotkeysContextReturnValue>(
@@ -34,7 +33,7 @@ const KeyboardHotkeysContext = createContext<KeyboardHotkeysContextReturnValue>(

const IGNORE_INPUTS = ['input', 'textarea', 'cm-editor']; // Inputs in which hotkey events will be ignored

export function useKeyboardHotkeys(): KeyboardHotkeysContextReturnValue {
const useKeyboardHotkeys = (): KeyboardHotkeysContextReturnValue => {
const context = useContext(KeyboardHotkeysContext);
if (!context) {
throw new Error(
@@ -43,45 +42,21 @@ export function useKeyboardHotkeys(): KeyboardHotkeysContextReturnValue {
}

return context;
}
};

/**
* Normalize a set of keys into a stable combo
* { shift, m, e } → "e+m+shift"
*/
function normalizeChord(keys: Set<string>): string {
return Array.from(keys).sort().join('+');
}

/**
* Normalize registration strings
* "shift+m+e" → "e+m+shift"
*/
function normalizeComboString(combo: string): string {
return normalizeChord(new Set(combo.split('+')));
}

export function KeyboardHotkeysProvider({
function KeyboardHotkeysProvider({
children,
}: {
children: JSX.Element;
}): JSX.Element {
const { open: cmdKOpen } = useCmdK();
const shortcuts = useRef<Record<string, () => void>>({});
const pressedKeys = useRef<Set<string>>(new Set());

// A detected valid shortcut waiting to fire
const pendingCombo = useRef<string | null>(null);
const handleKeyPress = (event: KeyboardEvent): void => {
const { key, ctrlKey, altKey, shiftKey, metaKey, target } = event;

// Tracks whether user extended the combo
const wasExtended = useRef(false);

const handleKeyDown = (event: KeyboardEvent): void => {
if (event.repeat) return;

const target = event.target as HTMLElement;
const isCodeMirrorEditor =
(target as HTMLElement).closest('.cm-editor') !== null;

if (
IGNORE_INPUTS.includes((target as HTMLElement).tagName.toLowerCase()) ||
isCodeMirrorEditor
@@ -89,110 +64,61 @@ export function KeyboardHotkeysProvider({
return;
}

const key = event.key?.toLowerCase();
if (!key) return; // Skip if key is undefined
// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/metaKey
const modifiers = { ctrlKey, altKey, shiftKey, metaKey };

// If a pending combo exists and a new key is pressed → extension
if (pendingCombo.current && !pressedKeys.current.has(key)) {
wasExtended.current = true;
}
let shortcutKey = `${key.toLowerCase()}`;

pressedKeys.current.add(key);
const isAltKey = `${modifiers.altKey ? '+alt' : ''}`;
const isShiftKey = `${modifiers.shiftKey ? '+shift' : ''}`;

if (event.shiftKey) pressedKeys.current.add('shift');
if (event.metaKey || event.ctrlKey) pressedKeys.current.add('meta');
if (event.altKey) pressedKeys.current.add('alt');
// ctrl and cmd have the same functionality for mac and windows parity
const isMetaKey = `${modifiers.metaKey || modifiers.ctrlKey ? '+meta' : ''}`;

const combo = normalizeChord(pressedKeys.current);
shortcutKey = shortcutKey + isAltKey + isShiftKey + isMetaKey;

if (shortcuts.current[combo]) {
if (shortcuts.current[shortcutKey]) {
event.preventDefault();
event.stopPropagation();
pendingCombo.current = combo;
wasExtended.current = false;
event.stopImmediatePropagation();

shortcuts.current[shortcutKey]();
}
};

const handleKeyUp = (event: KeyboardEvent): void => {
const key = event.key?.toLowerCase();
if (!key) return; // Skip if key is undefined

pressedKeys.current.delete(key);

if (!event.shiftKey) pressedKeys.current.delete('shift');
if (!event.metaKey && !event.ctrlKey) pressedKeys.current.delete('meta');
if (!event.altKey) pressedKeys.current.delete('alt');

if (!pendingCombo.current) return;

// Fire only if user did NOT extend the combo
if (!wasExtended.current) {
event.preventDefault();
try {
shortcuts.current[pendingCombo.current]?.();
} catch (error) {
console.error('Error executing hotkey callback:', error);
}
}

pendingCombo.current = null;
wasExtended.current = false;
};

useEffect((): (() => void) => {
document.addEventListener('keydown', handleKeyDown);
document.addEventListener('keyup', handleKeyUp);

const reset = (): void => {
pressedKeys.current.clear();
pendingCombo.current = null;
wasExtended.current = false;
};

window.addEventListener('blur', reset);

return (): void => {
document.removeEventListener('keydown', handleKeyDown);
document.removeEventListener('keyup', handleKeyUp);
window.removeEventListener('blur', reset);
};
}, []);

useEffect(() => {
if (!cmdKOpen) {
// Reset when palette closes
pressedKeys.current.clear();
pendingCombo.current = null;
wasExtended.current = false;
}
}, [cmdKOpen]);

const registerShortcut = useCallback(
(keyCombo: string, callback: () => void): void => {
const normalized = normalizeComboString(keyCombo);

if (!shortcuts.current[normalized]) {
shortcuts.current[normalized] = callback;
return;
}

const message = `This shortcut is already present in current scope :- ${keyCombo}`;

if (process.env.NODE_ENV === 'development') {
throw new Error(message);
} else {
console.error(message);
}
},
[],
);

const deregisterShortcut = useCallback((keyCombo: string) => {
const normalized = normalizeComboString(keyCombo);
unset(shortcuts.current, normalized);
document.addEventListener('keydown', handleKeyPress);
return (): void => {
document.removeEventListener('keydown', handleKeyPress);
};
}, []);

const ctxValue = useMemo(
const registerShortcut = useCallback(
(keyCombination: string, callback: () => void): void => {
if (!shortcuts.current[keyCombination]) {
shortcuts.current[keyCombination] = callback;
} else if (process.env.NODE_ENV === 'development') {
throw new Error(
`This shortcut is already present in current scope :- ${keyCombination}`,
);
} else {
console.error(
`This shortcut is already present in current scope :- ${keyCombination}`,
);
}
},
[shortcuts],
);

const deregisterShortcut = useCallback(
(keyCombination: string): void => {
if (shortcuts.current[keyCombination]) {
unset(shortcuts.current, keyCombination);
}
},
[shortcuts],
);

const contextValue = useMemo(
() => ({
registerShortcut,
deregisterShortcut,
@@ -201,8 +127,10 @@ export function KeyboardHotkeysProvider({
);

return (
<KeyboardHotkeysContext.Provider value={ctxValue}>
<KeyboardHotkeysContext.Provider value={contextValue}>
{children}
</KeyboardHotkeysContext.Provider>
);
}

export { KeyboardHotkeysProvider, useKeyboardHotkeys };

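Note on the chord logic removed above: registration strings and live key chords were both reduced to a sorted, '+'-joined canonical form before lookup, so "shift+m+e" and "e+shift+m" addressed the same handler. A minimal, self-contained Go sketch of that normalization (names are illustrative; the real implementation is the TypeScript above):

package main

import (
	"fmt"
	"sort"
	"strings"
)

// normalizeChord reduces a set of currently pressed keys to a stable
// lookup key: {"shift","m","e"} -> "e+m+shift".
func normalizeChord(keys map[string]struct{}) string {
	parts := make([]string, 0, len(keys))
	for key := range keys {
		parts = append(parts, key)
	}
	sort.Strings(parts)
	return strings.Join(parts, "+")
}

// normalizeComboString canonicalizes a registration string, so
// "shift+m+e" and "m+e+shift" register under the same entry and
// duplicate keys collapse via the set.
func normalizeComboString(combo string) string {
	keys := map[string]struct{}{}
	for _, key := range strings.Split(combo, "+") {
		keys[key] = struct{}{}
	}
	return normalizeChord(keys)
}

func main() {
	fmt.Println(normalizeComboString("shift+m+e")) // e+m+shift
	fmt.Println(normalizeChord(map[string]struct{}{
		"m": {}, "e": {}, "shift": {},
	})) // e+m+shift
}

Because registration and the keydown handler pass through the same canonical form, a shortcut fires regardless of the order the keys were declared or pressed; the keyup path then delays execution so that a longer chord (an "extension") can override a shorter one.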
@@ -15,7 +15,7 @@ function NoData(): JSX.Element {
<Typography.Text className="not-found-text-1">
Uh-oh! We cannot show the selected trace.
<span className="not-found-text-2">
This can happen in either of the two scenarios -
This can happen in either of the two scenraios -
</span>
</Typography.Text>
</section>

2
go.mod
@@ -11,7 +11,6 @@ require (
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/bytedance/sonic v1.14.1
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.14.1
github.com/dgraph-io/ristretto/v2 v2.3.0
@@ -90,6 +89,7 @@ require (

require (
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect

@@ -1,43 +0,0 @@
package signozapiserver

import (
"net/http"

"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types/promotetypes"
"github.com/gorilla/mux"
)

func (provider *provider) addPromoteRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.EditAccess(provider.promoteHandler.HandlePromoteAndIndexPaths), handler.OpenAPIDef{
ID: "HandlePromoteAndIndexPaths",
Tags: []string{"logs"},
Summary: "Promote and index paths",
Description: "This endpoints promotes and indexes paths",
Request: new([]*promotetypes.PromotePath),
RequestContentType: "application/json",
Response: nil,
ResponseContentType: "",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest},
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}

if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.ViewAccess(provider.promoteHandler.ListPromotedAndIndexedPaths), handler.OpenAPIDef{
ID: "ListPromotedAndIndexedPaths",
Tags: []string{"logs"},
Summary: "Promote and index paths",
Description: "This endpoints promotes and indexes paths",
Request: nil,
RequestContentType: "",
Response: new([]*promotetypes.PromotePath),
ResponseContentType: "",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}

return nil
}
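For context on what is being removed: per the OpenAPIDef above, the POST route accepted a JSON array of promote requests ([]*promotetypes.PromotePath) and answered 201 Created. A hedged Go client sketch; the struct tags and the localhost URL are assumptions, with field names mirroring the PromotePath/WrappedIndex shapes that appear later in this diff:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// wrappedIndex mirrors promotetypes.WrappedIndex (ColumnType, Type,
// Granularity); the JSON tags are assumed, not confirmed by this diff.
type wrappedIndex struct {
	ColumnType  string `json:"column_type"`
	Type        string `json:"type"`
	Granularity int    `json:"granularity"`
}

// promotePath mirrors promotetypes.PromotePath (Path, Promote, Indexes).
type promotePath struct {
	Path    string         `json:"path"`
	Promote bool           `json:"promote"`
	Indexes []wrappedIndex `json:"indexes,omitempty"`
}

func main() {
	payload, err := json.Marshal([]promotePath{
		{Path: "body.user.id", Promote: true}, // path value is illustrative
	})
	if err != nil {
		panic(err)
	}
	resp, err := http.Post(
		"http://localhost:8080/api/v1/logs/promote_paths", // host is an assumption
		"application/json",
		bytes.NewReader(payload),
	)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect "201 Created" on success
}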
@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -31,7 +30,6 @@ type provider struct {
authDomainHandler authdomain.Handler
preferenceHandler preference.Handler
globalHandler global.Handler
promoteHandler promote.Handler
}

func NewFactory(
@@ -43,10 +41,9 @@ func NewFactory(
authDomainHandler authdomain.Handler,
preferenceHandler preference.Handler,
globalHandler global.Handler,
promoteHandler promote.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler)
})
}

@@ -62,7 +59,6 @@ func newProvider(
authDomainHandler authdomain.Handler,
preferenceHandler preference.Handler,
globalHandler global.Handler,
promoteHandler promote.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -77,7 +73,6 @@ func newProvider(
authDomainHandler: authDomainHandler,
preferenceHandler: preferenceHandler,
globalHandler: globalHandler,
promoteHandler: promoteHandler,
}

provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -118,10 +113,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}

if err := provider.addPromoteRoutes(router); err != nil {
return err
}

return nil
}

@@ -209,11 +209,6 @@ func NewUnexpectedf(code Code, format string, args ...any) *base {
return Newf(TypeInvalidInput, code, format, args...)
}

// NewMethodNotAllowedf is a wrapper around Newf with TypeMethodNotAllowed.
func NewMethodNotAllowedf(code Code, format string, args ...any) *base {
return Newf(TypeMethodNotAllowed, code, format, args...)
}

// WrapTimeoutf is a wrapper around Wrapf with TypeTimeout.
func WrapTimeoutf(cause error, code Code, format string, args ...any) *base {
return Wrapf(cause, TypeTimeout, code, format, args...)

@@ -1,60 +0,0 @@
package implpromote

import (
"net/http"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/binding"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type handler struct {
module promote.Module
}

func NewHandler(module promote.Module) promote.Handler {
return &handler{module: module}
}

func (h *handler) HandlePromoteAndIndexPaths(w http.ResponseWriter, r *http.Request) {
// TODO(Nitya): Use in multi tenant setup
_, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
return
}

var req []*promotetypes.PromotePath
if err := binding.JSON.BindBody(r.Body, &req); err != nil {
render.Error(w, err)
return
}

err = h.module.PromoteAndIndexPaths(r.Context(), req...)
if err != nil {
render.Error(w, err)
return
}

render.Success(w, http.StatusCreated, nil)
}

func (h *handler) ListPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request) {
// TODO(Nitya): Use in multi tenant setup
_, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
return
}

paths, err := h.module.ListPromotedAndIndexedPaths(r.Context())
if err != nil {
render.Error(w, err)
return
}

render.Success(w, http.StatusOK, paths)
}
@@ -1,201 +0,0 @@
package implpromote

import (
"context"
"maps"
"slices"
"strings"

schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/types/promotetypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

var (
CodeFailedToCreateIndex = errors.MustNewCode("failed_to_create_index_promoted_paths")
CodeFailedToQueryPromotedPaths = errors.MustNewCode("failed_to_query_promoted_paths")
)

type module struct {
metadataStore telemetrytypes.MetadataStore
telemetryStore telemetrystore.TelemetryStore
}

func NewModule(metadataStore telemetrytypes.MetadataStore, telemetrystore telemetrystore.TelemetryStore) promote.Module {
return &module{metadataStore: metadataStore, telemetryStore: telemetrystore}
}

func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error) {
logsIndexes, err := m.metadataStore.ListLogsJSONIndexes(ctx)
if err != nil {
return nil, err
}
// Flatten the map values (which are slices) into a single slice
indexes := slices.Concat(slices.Collect(maps.Values(logsIndexes))...)

aggr := map[string][]promotetypes.WrappedIndex{}
for _, index := range indexes {
path, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
if err != nil {
return nil, err
}

// clean backticks from the path
path = strings.ReplaceAll(path, "`", "")

aggr[path] = append(aggr[path], promotetypes.WrappedIndex{
ColumnType: columnType,
Type: index.Type,
Granularity: index.Granularity,
})
}
promotedPaths, err := m.listPromotedPaths(ctx)
if err != nil {
return nil, err
}

response := []promotetypes.PromotePath{}
for _, path := range promotedPaths {
fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
path = telemetrytypes.BodyJSONStringSearchPrefix + path
item := promotetypes.PromotePath{
Path: path,
Promote: true,
}
indexes, ok := aggr[fullPath]
if ok {
item.Indexes = indexes
delete(aggr, fullPath)
}
response = append(response, item)
}

// add the paths that are not promoted but have indexes
for path, indexes := range aggr {
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = telemetrytypes.BodyJSONStringSearchPrefix + path
response = append(response, promotetypes.PromotePath{
Path: path,
Indexes: indexes,
})
}
return response, nil
}

func (m *module) listPromotedPaths(ctx context.Context) ([]string, error) {
paths, err := m.metadataStore.ListPromotedPaths(ctx)
if err != nil {
return nil, err
}
return slices.Collect(maps.Keys(paths)), nil
}

// PromotePaths inserts provided JSON paths into the promoted paths table for logs queries.
func (m *module) PromotePaths(ctx context.Context, paths []string) error {
if len(paths) == 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
}

return m.metadataStore.PromotePaths(ctx, paths...)
}

// createIndexes creates string ngram + token filter indexes on JSON path subcolumns for LIKE queries.
func (m *module) createIndexes(ctx context.Context, indexes []schemamigrator.Index) error {
if len(indexes) == 0 {
return nil
}

for _, index := range indexes {
alterStmt := schemamigrator.AlterTableAddIndex{
Database: telemetrylogs.DBName,
Table: telemetrylogs.LogsV2LocalTableName,
Index: index,
}
op := alterStmt.OnCluster(m.telemetryStore.Cluster())
if err := m.telemetryStore.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
return errors.WrapInternalf(err, CodeFailedToCreateIndex, "failed to create index")
}
}

return nil
}

// PromoteAndIndexPaths handles promoting paths and creating indexes in one call.
func (m *module) PromoteAndIndexPaths(
ctx context.Context,
paths ...*promotetypes.PromotePath,
) error {
if len(paths) == 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
}

pathsStr := []string{}
// validate the paths
for _, path := range paths {
if err := path.ValidateAndSetDefaults(); err != nil {
return err
}
pathsStr = append(pathsStr, path.Path)
}

existingPromotedPaths, err := m.metadataStore.ListPromotedPaths(ctx, pathsStr...)
if err != nil {
return err
}

var toInsert []string
indexes := []schemamigrator.Index{}
for _, it := range paths {
if it.Promote {
if _, promoted := existingPromotedPaths[it.Path]; !promoted {
toInsert = append(toInsert, it.Path)
}
}
if len(it.Indexes) > 0 {
parentColumn := telemetrylogs.LogsV2BodyJSONColumn
// if the path is already promoted or is being promoted, add it to the promoted column
if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
}

for _, index := range it.Indexes {
var typeIndex schemamigrator.IndexType
switch {
case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
typeIndex = schemamigrator.IndexTypeNGramBF
case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
typeIndex = schemamigrator.IndexTypeTokenBF
case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
typeIndex = schemamigrator.IndexTypeMinMax
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
}
indexes = append(indexes, schemamigrator.Index{
Name: schemamigrator.JSONSubColumnIndexName(parentColumn, it.Path, index.JSONDataType.StringValue(), typeIndex),
Expression: schemamigrator.JSONSubColumnIndexExpr(parentColumn, it.Path, index.JSONDataType.StringValue()),
Type: index.Type,
Granularity: index.Granularity,
})
}
}
}

if len(toInsert) > 0 {
err := m.PromotePaths(ctx, toInsert)
if err != nil {
return err
}
}

if len(indexes) > 0 {
if err := m.createIndexes(ctx, indexes); err != nil {
return err
}
}

return nil
}
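The switch in the removed PromoteAndIndexPaths classifies a user-supplied index type string by prefix against the schema migrator's known kinds. A standalone Go sketch of the same dispatch; the literal strings are assumed stand-ins for the schemamigrator.IndexType* constants, matching the usual ClickHouse skip-index names:

package main

import (
	"fmt"
	"strings"
)

// Assumed values behind schemamigrator.IndexTypeNGramBF / IndexTypeTokenBF /
// IndexTypeMinMax; the real constants live in signoz-otel-collector's
// schema_migrator package.
const (
	indexTypeNGramBF = "ngrambf_v1"
	indexTypeTokenBF = "tokenbf_v1"
	indexTypeMinMax  = "minmax"
)

// classifyIndexType reproduces the prefix dispatch: a full type string
// such as "ngrambf_v1(4, 1024, 1, 0)" is matched by its prefix, and
// anything unrecognized is rejected as invalid input.
func classifyIndexType(t string) (string, error) {
	switch {
	case strings.HasPrefix(t, indexTypeNGramBF):
		return indexTypeNGramBF, nil
	case strings.HasPrefix(t, indexTypeTokenBF):
		return indexTypeTokenBF, nil
	case strings.HasPrefix(t, indexTypeMinMax):
		return indexTypeMinMax, nil
	default:
		return "", fmt.Errorf("invalid index type: %s", t)
	}
}

func main() {
	kind, err := classifyIndexType("ngrambf_v1(4, 1024, 1, 0)")
	fmt.Println(kind, err) // ngrambf_v1 <nil>
}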
@@ -1,18 +0,0 @@
package promote

import (
"context"
"net/http"

"github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type Module interface {
ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error)
PromoteAndIndexPaths(ctx context.Context, paths ...*promotetypes.PromotePath) error
}

type Handler interface {
HandlePromoteAndIndexPaths(w http.ResponseWriter, r *http.Request)
ListPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request)
}
@@ -10,11 +10,9 @@ import (

"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)

type builderQuery[T any] struct {
@@ -250,40 +248,6 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
return nil, err
}

// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
switch typedPayload := payload.(type) {
case *qbtypes.RawData:
for _, rr := range typedPayload.Rows {
seeder := func() error {
body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
if !ok {
return nil
}
promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
if !ok {
return nil
}
seed(promoted, body)
str, err := sonic.MarshalString(body)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
}
rr.Data["body"] = str
return nil
}
err := seeder()
if err != nil {
return nil, err
}

delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
}
payload = typedPayload
}
}

return &qbtypes.Result{
Type: q.kind,
Value: payload,
@@ -411,18 +375,3 @@ func decodeCursor(cur string) (int64, error) {
}
return strconv.ParseInt(string(b), 10, 64)
}

func seed(promoted map[string]any, body map[string]any) {
for key, fromValue := range promoted {
if toValue, ok := body[key]; !ok {
body[key] = fromValue
} else {
if fromValue, ok := fromValue.(map[string]any); ok {
if toValue, ok := toValue.(map[string]any); ok {
seed(fromValue, toValue)
body[key] = toValue
}
}
}
}
}

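The seed helper removed above merges promoted sub-columns back into the body map: keys absent from body are copied over, nested maps merge recursively, and scalars already present in body win. A runnable copy demonstrating those semantics:

package main

import "fmt"

// seed is copied from the removed code above: keys missing from body are
// taken from promoted; when both sides hold nested maps, they are merged
// recursively; existing scalars in body are left untouched.
func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

func main() {
	body := map[string]any{"user": map[string]any{"id": 1}, "level": "warn"}
	promoted := map[string]any{"user": map[string]any{"name": "alice"}, "level": "info"}
	seed(promoted, body)
	// "level" keeps the body value; "user" gains the promoted "name".
	fmt.Println(body) // map[level:warn user:map[id:1 name:alice]]
}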
@@ -14,7 +14,6 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)

var (
@@ -52,6 +51,7 @@ func consume(rows driver.Rows, kind qbtypes.RequestType, queryWindow *qbtypes.Ti
}

func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbtypes.Step, queryName string) (*qbtypes.TimeSeriesData, error) {

colTypes := rows.ColumnTypes()
colNames := rows.Columns()

@@ -354,22 +354,10 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
colTypes := rows.ColumnTypes()
colCnt := len(colNames)

// Helper that decides scan target per column based on DB type
makeScanTarget := func(i int) any {
dbt := strings.ToUpper(colTypes[i].DatabaseTypeName())
if strings.HasPrefix(dbt, "JSON") {
// Since the driver fails to decode JSON/Dynamic into native Go values, we read it as raw bytes
// TODO: check in future if fixed in the driver
var v []byte
return &v
}
return reflect.New(colTypes[i].ScanType()).Interface()
}

// Build a template slice of correctly-typed pointers once
scanTpl := make([]any, colCnt)
for i := range colTypes {
scanTpl[i] = makeScanTarget(i)
for i, ct := range colTypes {
scanTpl[i] = reflect.New(ct.ScanType()).Interface()
}

var outRows []*qbtypes.RawRow
@@ -378,7 +366,7 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// fresh copy of the scan slice (otherwise the driver reuses pointers)
scan := make([]any, colCnt)
for i := range scanTpl {
scan[i] = makeScanTarget(i)
scan[i] = reflect.New(colTypes[i].ScanType()).Interface()
}

if err := rows.Scan(scan...); err != nil {
@@ -395,28 +383,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()

// Post-process JSON columns: normalize into structured values
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
case string:
if x != "" {
var v any
if err := sonic.Unmarshal([]byte(x), &v); err == nil {
val = v
}
}
default:
// already a structured type (map[string]any, []any, etc.)
}
}

// special-case: timestamp column
if name == "timestamp" || name == "timestamp_datetime" {
switch t := val.(type) {

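The removed makeScanTarget helper worked around the driver's JSON/Dynamic decoding by scanning such columns as raw bytes and unmarshalling afterwards. A compact sketch of the same pattern; it uses encoding/json in place of sonic, and columnType is a minimal stand-in for the methods clickhouse-go's driver.ColumnType provides:

package scanjson

import (
	"encoding/json"
	"reflect"
	"strings"
)

// columnType is the minimal surface this sketch needs from the driver's
// column metadata.
type columnType interface {
	DatabaseTypeName() string
	ScanType() reflect.Type
}

// makeScanTarget mirrors the removed helper: JSON/Dynamic columns are
// scanned into *[]byte so decoding can happen later; everything else gets
// a pointer of the driver-reported scan type.
func makeScanTarget(ct columnType) any {
	if strings.HasPrefix(strings.ToUpper(ct.DatabaseTypeName()), "JSON") {
		var raw []byte // decode later with decodeJSONCell
		return &raw
	}
	return reflect.New(ct.ScanType()).Interface()
}

// decodeJSONCell turns the raw bytes back into structured data
// (map[string]any, []any, ...); on failure the raw string is kept.
func decodeJSONCell(raw []byte) any {
	var v any
	if err := json.Unmarshal(raw, &v); err != nil {
		return string(raw)
	}
	return v
}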
@@ -78,7 +78,7 @@ func newProvider(
telemetryMetadataStore,
)

traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, "", nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
@@ -102,13 +102,14 @@ func newProvider(

// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper, telemetryMetadataStore)
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
@@ -116,6 +117,7 @@ func newProvider(
telemetrylogs.DefaultFullTextColumn,
logFieldMapper,
logConditionBuilder,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logStmtBuilder := telemetrylogs.NewLogQueryStatementBuilder(
@@ -126,6 +128,7 @@ func newProvider(
logResourceFilterStmtBuilder,
logAggExprRewriter,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)

@@ -43,7 +43,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/traces/tracedetail"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/constants"

chErrors "github.com/SigNoz/signoz/pkg/query-service/errors"
"github.com/SigNoz/signoz/pkg/query-service/metrics"
"github.com/SigNoz/signoz/pkg/query-service/model"

@@ -555,7 +555,6 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/settings/ttl", am.AdminAccess(aH.setCustomRetentionTTL)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/settings/ttl", am.ViewAccess(aH.getCustomRetentionTTL)).Methods(http.MethodGet)

router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.Signoz.Handlers.Apdex.Set)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/apdex", am.ViewAccess(aH.Signoz.Handlers.Apdex.Get)).Methods(http.MethodGet)

@@ -11,16 +11,13 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/opamptypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"

"go.uber.org/zap"
)

@@ -131,40 +128,6 @@ func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
return err
}

func (ic *LogParsingPipelineController) getDefaultPipelines() ([]pipelinetypes.GettablePipeline, error) {
defaultPipelines := []pipelinetypes.GettablePipeline{}
if querybuilder.BodyJSONQueryEnabled {
preprocessingPipeline := pipelinetypes.GettablePipeline{
StoreablePipeline: pipelinetypes.StoreablePipeline{
Name: "Default Pipeline - PreProcessing Body",
Alias: "NormalizeBodyDefault",
Enabled: true,
},
Filter: &v3.FilterSet{
Items: []v3.FilterItem{
{
Key: v3.AttributeKey{
Key: "body",
},
Operator: v3.FilterOperatorExists,
},
},
},
Config: []pipelinetypes.PipelineOperator{
{
ID: uuid.NewString(),
Type: "normalize",
Enabled: true,
If: "body != nil",
},
},
}

defaultPipelines = append(defaultPipelines, preprocessingPipeline)
}
return defaultPipelines, nil
}

// Returns effective list of pipelines including user created
// pipelines and pipelines for installed integrations
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
@@ -295,13 +258,6 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
return nil, "", err
}

// recommend default pipelines along with user created pipelines
defaultPipelines, err := pc.getDefaultPipelines()
if err != nil {
return nil, "", model.InternalError(fmt.Errorf("failed to get default pipelines: %w", err))
}
pipelinesResp.Pipelines = append(pipelinesResp.Pipelines, defaultPipelines...)

updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
if err != nil {
return nil, "", err

@@ -132,7 +132,7 @@ func SignozLogsToPLogs(logs []model.SignozLog) []plog.Logs {
slRecord.SetSeverityText(log.SeverityText)
slRecord.SetSeverityNumber(plog.SeverityNumber(log.SeverityNumber))

slRecord.Body().FromRaw(log.Body)
slRecord.Body().SetStr(log.Body)

slAttribs := slRecord.Attributes()
for k, v := range log.Attributes_int64 {

@@ -5,7 +5,6 @@ import (

"github.com/SigNoz/signoz/pkg/query-service/constants"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/stretchr/testify/assert"
)

func Test_getClickhouseKey(t *testing.T) {
@@ -1211,8 +1210,9 @@ func TestPrepareLogsQuery(t *testing.T) {
t.Errorf("PrepareLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
return
}

assert.Equal(t, tt.want, got)
if got != tt.want {
t.Errorf("PrepareLogsQuery() = %v, want %v", got, tt.want)
}
})
}
}

@@ -20,6 +20,7 @@ type aggExprRewriter struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}

@@ -30,6 +31,7 @@ func NewAggExprRewriter(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *aggExprRewriter {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/agg_rewrite")
@@ -39,6 +41,7 @@ func NewAggExprRewriter(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -78,6 +81,7 @@ func (r *aggExprRewriter) Rewrite(
r.fullTextColumn,
r.fieldMapper,
r.conditionBuilder,
r.jsonBodyPrefix,
r.jsonKeyToKey,
)
// Rewrite the first select item (our expression)
@@ -125,6 +129,7 @@ type exprVisitor struct {
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
Modified bool
chArgs []any
@@ -137,6 +142,7 @@ func newExprVisitor(
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
@@ -145,6 +151,7 @@ func newExprVisitor(
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -183,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -192,7 +199,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonKeyToKey: v.jsonKeyToKey,
}, 0, 0,
}, 0, 0,
)
if err != nil {
return err
@@ -212,7 +219,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -230,7 +237,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonKeyToKey)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return err
}

@@ -24,6 +24,7 @@ func CollisionHandledFinalExpr(
cb qbtypes.ConditionBuilder,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
requiredDataType telemetrytypes.FieldDataType,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) (string, []any, error) {

@@ -44,7 +45,7 @@ func CollisionHandledFinalExpr(

addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}
@@ -57,8 +58,8 @@ func CollisionHandledFinalExpr(
return nil
}

colName, fieldForErr := fm.FieldFor(ctx, field)
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
colName, err := fm.FieldFor(ctx, field)
if errors.Is(err, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
keysForField := keys[field.Name]
@@ -81,10 +82,10 @@ func CollisionHandledFinalExpr(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match, in the error message send the suggestion
return "", nil, errors.WithAdditionalf(fieldForErr, "%s", correction)
return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", nil, errors.WithAdditionalf(fieldForErr, "field `%s` not found", field.Name)
return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
}
} else {
for _, key := range keysForField {
@@ -103,11 +104,10 @@ func CollisionHandledFinalExpr(
return "", nil, err
}

// first if condition covers the older tests and second if condition covers the array conditions
if !BodyJSONQueryEnabled && field.FieldContext == telemetrytypes.FieldContextBody && jsonKeyToKey != nil {
if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
// TODO(nitya): enable group by on body column?
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
} else if strings.Contains(field.Name, telemetrytypes.ArraySep) || strings.Contains(field.Name, telemetrytypes.ArrayAnyIndex) {
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", field.Name)
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else {
colName, _ = DataTypeCollisionHandledFieldName(field, dummyValue, colName, qbtypes.FilterOperatorUnknown)
}
@@ -204,7 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// While we expect user not to send the mixed data types, it inevitably happens
// So we handle the data type collisions here
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
case telemetrytypes.FieldDataTypeString:
switch v := value.(type) {
case float64:
// try to convert the string value to to number
@@ -219,36 +219,8 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
value = fmt.Sprintf("%t", v)
}
case telemetrytypes.FieldDataTypeFloat64,
telemetrytypes.FieldDataTypeArrayFloat64:
switch v := value.(type) {
case float32, float64:
tblFieldName = castFloatHack(tblFieldName)
case string:
// check if it's a number inside a string
isNumber := false
if _, err := strconv.ParseFloat(v, 64); err == nil {
isNumber = true
}

if !operator.IsComparisonOperator() || !isNumber {
// try to convert the number attribute to string
tblFieldName = castString(tblFieldName) // numeric col vs string literal
} else {
tblFieldName = castFloatHack(tblFieldName)
}
case []any:
if allFloats(v) {
tblFieldName = castFloatHack(tblFieldName)
} else if hasString(v) {
tblFieldName, value = castString(tblFieldName), toStrings(v)
}
}

case telemetrytypes.FieldDataTypeInt64,
telemetrytypes.FieldDataTypeArrayInt64,
telemetrytypes.FieldDataTypeNumber,
telemetrytypes.FieldDataTypeArrayNumber:
case telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeNumber:
switch v := value.(type) {
// why? ; CH returns an error for a simple check
// attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200
@@ -286,8 +258,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
}
}

case telemetrytypes.FieldDataTypeBool,
telemetrytypes.FieldDataTypeArrayBool:
case telemetrytypes.FieldDataTypeBool:
switch v := value.(type) {
case string:
tblFieldName = castString(tblFieldName)

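The removed Float64 branch encodes a small decision rule: compare numerically only when the literal parses as a number and the operator is a comparison; otherwise fall back to comparing as strings. A hedged Go sketch of just that rule; castString/castFloat stand in for the repo's castString/castFloatHack helpers, whose emitted SQL is not shown in this diff and is assumed here:

package collision

import "strconv"

// numericColumnCast captures the decision from the removed
// FieldDataTypeFloat64 branch for string literals: a numeric column is
// compared as Float64 only when the literal is numeric AND the operator
// is a comparison; otherwise the column is cast to String.
func numericColumnCast(col string, literal string, isComparison bool) string {
	_, err := strconv.ParseFloat(literal, 64)
	isNumber := err == nil
	if !isComparison || !isNumber {
		return castString(col) // numeric column vs string literal
	}
	return castFloat(col)
}

func castString(col string) string { return "toString(" + col + ")" }

// toFloat64OrNull avoids hard failures on non-numeric rows; assumed to be
// close to what castFloatHack emits.
func castFloat(col string) string { return "toFloat64OrNull(" + col + ")" }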
@@ -43,6 +43,7 @@ type resourceFilterStatementBuilder[T any] struct {
signal telemetrytypes.Signal

fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}

@@ -75,6 +76,7 @@ func NewLogResourceFilterStatementBuilder(
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
@@ -85,6 +87,7 @@ func NewLogResourceFilterStatementBuilder(
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -97,18 +100,12 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
keySelectors = append(keySelectors, whereClauseSelectors...)
}

// exclude out the body related key selectors
filteredKeySelectors := []*telemetrytypes.FieldKeySelector{}
for idx := range keySelectors {
if keySelectors[idx].FieldContext == telemetrytypes.FieldContextBody {
continue
}
keySelectors[idx].Signal = b.signal
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
}

return filteredKeySelectors
return keySelectors
}

// Build builds a SQL query based on the given parameters
@@ -171,7 +168,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
}, start, end)
}, start, end)

if err != nil {
return err

@@ -20,8 +20,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
@@ -67,7 +65,6 @@ type Modules struct {
Services services.Module
SpanPercentile spanpercentile.Module
MetricsExplorer metricsexplorer.Module
Promote promote.Module
}

func NewModules(
@@ -111,6 +108,5 @@ func NewModules(
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
}
}

@@ -14,7 +14,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/preference"
"github.com/SigNoz/signoz/pkg/modules/promote"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
@@ -39,7 +38,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ authdomain.Handler }{},
struct{ preference.Handler }{},
struct{ global.Handler }{},
struct{ promote.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

@@ -24,7 +24,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
"github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
"github.com/SigNoz/signoz/pkg/modules/session/implsession"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
@@ -235,7 +234,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
implauthdomain.NewHandler(modules.AuthDomain),
implpreference.NewHandler(modules.Preference),
signozglobal.NewHandler(global),
implpromote.NewHandler(modules.Promote),
),
)
}

@@ -16,12 +16,11 @@ import (
|
||||
)
|
||||
|
||||
type conditionBuilder struct {
|
||||
fm qbtypes.FieldMapper
|
||||
metadataStore telemetrytypes.MetadataStore
|
||||
fm qbtypes.FieldMapper
|
||||
}
|
||||
|
||||
func NewConditionBuilder(fm qbtypes.FieldMapper, metadataStore telemetrytypes.MetadataStore) *conditionBuilder {
|
||||
return &conditionBuilder{fm: fm, metadataStore: metadataStore}
|
||||
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
|
||||
return &conditionBuilder{fm: fm}
|
||||
}
|
||||
|
||||
func (c *conditionBuilder) conditionFor(
|
||||
@@ -31,34 +30,22 @@ func (c *conditionBuilder) conditionFor(
|
||||
value any,
|
||||
sb *sqlbuilder.SelectBuilder,
|
||||
) (string, error) {
|
||||
|
||||
switch operator {
|
||||
case qbtypes.FilterOperatorContains,
|
||||
qbtypes.FilterOperatorNotContains,
|
||||
qbtypes.FilterOperatorILike,
|
||||
qbtypes.FilterOperatorNotILike,
|
||||
qbtypes.FilterOperatorLike,
|
||||
qbtypes.FilterOperatorNotLike:
|
||||
value = querybuilder.FormatValueForContains(value)
|
||||
}
|
||||
|
||||
column, err := c.fm.ColumnFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// For JSON columns, preserve the original value type (numeric, bool, etc.)
|
||||
// Only format to string for non-JSON columns that need string formatting
|
||||
isJSONColumn := column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled && key.FieldContext == telemetrytypes.FieldContextBody
|
||||
if !isJSONColumn {
|
||||
switch operator {
|
||||
case qbtypes.FilterOperatorContains,
|
||||
qbtypes.FilterOperatorNotContains,
|
||||
qbtypes.FilterOperatorILike,
|
||||
qbtypes.FilterOperatorNotILike,
|
||||
qbtypes.FilterOperatorLike,
|
||||
qbtypes.FilterOperatorNotLike:
|
||||
value = querybuilder.FormatValueForContains(value)
|
||||
}
|
||||
}
|
||||
|
||||
if isJSONColumn {
|
||||
cond, err := c.buildJSONCondition(ctx, key, operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return cond, nil
|
||||
}
|
||||
|
||||
tblFieldName, err := c.fm.FieldFor(ctx, key)
|
||||
if err != nil {
|
||||
return "", err
|
||||
@@ -176,7 +163,9 @@ func (c *conditionBuilder) conditionFor(
|
||||
// in the UI based query builder, `exists` and `not exists` are used for
|
||||
// key membership checks, so depending on the column type, the condition changes
|
||||
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
|
||||
if key.FieldContext == telemetrytypes.FieldContextBody && !querybuilder.BodyJSONQueryEnabled {
|
||||
|
||||
// Check if this is a body JSON search - by FieldContext
|
||||
if key.FieldContext == telemetrytypes.FieldContextBody {
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
|
||||
} else {
|
||||
@@ -258,7 +247,7 @@ func (c *conditionBuilder) ConditionFor(
|
||||
return "", err
|
||||
}
|
||||
|
||||
if !(key.FieldContext == telemetrytypes.FieldContextBody && querybuilder.BodyJSONQueryEnabled) && operator.AddDefaultExistsFilter() {
|
||||
if operator.AddDefaultExistsFilter() {
|
||||
// skip adding exists filter for intrinsic fields
|
||||
// with an exception for body json search
|
||||
field, _ := c.fm.FieldFor(ctx, key)
|
||||
|
||||
@@ -373,8 +373,7 @@ func TestConditionFor(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
mockMetadataStore := buildTestTelemetryMetadataStore()
|
||||
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
@@ -427,8 +426,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
mockMetadataStore := buildTestTelemetryMetadataStore()
|
||||
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
@@ -687,8 +685,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
mockMetadataStore := buildTestTelemetryMetadataStore()
|
||||
conditionBuilder := NewConditionBuilder(fm, mockMetadataStore)
|
||||
conditionBuilder := NewConditionBuilder(fm)
|
||||
|
||||
for _, tc := range testCases {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
|
||||
@@ -2,6 +2,7 @@ package telemetrylogs
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz-otel-collector/constants"
|
||||
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -36,6 +37,8 @@ const (
|
||||
|
||||
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
|
||||
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
|
||||
ArraySep = jsontypeexporter.ArraySeparator
|
||||
ArrayAnyIndex = "[*]."
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -45,7 +48,8 @@ var (
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
}
|
||||
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
|
||||
BodyJSONStringSearchPrefix = `body.`
|
||||
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
|
||||
"body": {
|
||||
Name: "body",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
|
||||
@@ -6,9 +6,7 @@ import (
|
||||
"strings"
|
||||
|
||||
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz-otel-collector/utils"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
@@ -30,11 +28,6 @@ var (
|
||||
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
|
||||
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
|
||||
"body": {Name: "body", Type: schema.ColumnTypeString},
|
||||
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
|
||||
MaxDynamicTypes: utils.ToPointer(uint(32)),
|
||||
MaxDynamicPaths: utils.ToPointer(uint(0)),
|
||||
}},
|
||||
LogsV2BodyPromotedColumn: {Name: LogsV2BodyPromotedColumn, Type: schema.JSONColumnType{}},
|
||||
"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{
|
||||
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString},
|
||||
ValueType: schema.ColumnTypeString,
|
||||
@@ -90,23 +83,13 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
|
||||
return logsV2Columns["attributes_bool"], nil
|
||||
}
|
||||
case telemetrytypes.FieldContextBody:
|
||||
// Body context is for JSON body fields
|
||||
// Use body_json if feature flag is enabled
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
return logsV2Columns[LogsV2BodyJSONColumn], nil
|
||||
}
|
||||
// Fall back to legacy body column
|
||||
// body context fields are stored in the body column
|
||||
return logsV2Columns["body"], nil
|
||||
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
|
||||
col, ok := logsV2Columns[key.Name]
|
||||
if !ok {
|
||||
// check if the key has body JSON search
|
||||
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
|
||||
// Use body_json if feature flag is enabled and we have a body condition builder
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
return logsV2Columns[LogsV2BodyJSONColumn], nil
|
||||
}
|
||||
// Fall back to legacy body column
|
||||
// check if the key has body JSON search (backward compatibility)
|
||||
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
|
||||
return logsV2Columns["body"], nil
|
||||
}
|
||||
return nil, qbtypes.ErrColumnNotFound
|
||||
@@ -126,34 +109,21 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
|
||||
switch column.Type.GetType() {
|
||||
case schema.ColumnTypeEnumJSON:
|
||||
// json is only supported for resource context as of now
|
||||
switch key.FieldContext {
|
||||
case telemetrytypes.FieldContextResource:
|
||||
oldColumn := logsV2Columns["resources_string"]
|
||||
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
|
||||
|
||||
// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
|
||||
// once clickHouse dependency is updated, we need to check if we can remove it.
|
||||
if key.Materialized {
|
||||
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
|
||||
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
|
||||
}
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
|
||||
case telemetrytypes.FieldContextBody:
|
||||
if strings.Contains(key.Name, telemetrytypes.ArraySep) || strings.Contains(key.Name, telemetrytypes.ArrayAnyIndex) {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the Array Paths: %s", key.Name)
|
||||
}
|
||||
fieldExpr := BodyJSONColumnPrefix + fmt.Sprintf("`%s`", key.Name)
|
||||
expr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldExpr, key.JSONDataType.StringValue())
|
||||
if key.Materialized {
|
||||
promotedFieldExpr := BodyPromotedColumnPrefix + fmt.Sprintf("`%s`", key.Name)
|
||||
expr = fmt.Sprintf("coalesce(%s, %s)", expr, fmt.Sprintf("dynamicElement(%s, '%s')", promotedFieldExpr, key.JSONDataType.StringValue()))
|
||||
}
|
||||
// returning qbtypes.ErrColumnNotFound is a hack that will trigger the fallback expr logic to include all the types for the key
|
||||
return expr, qbtypes.ErrColumnNotFound
|
||||
default:
|
||||
if key.FieldContext != telemetrytypes.FieldContextResource {
|
||||
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String)
|
||||
}
|
||||
oldColumn := logsV2Columns["resources_string"]
|
||||
oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name)
|
||||
|
||||
// have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY
|
||||
// once clickHouse dependency is updated, we need to check if we can remove it.
|
||||
if key.Materialized {
|
||||
oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key)
|
||||
oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil
|
||||
} else {
|
||||
return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil
|
||||
}
|
||||
case schema.ColumnTypeEnumLowCardinality:
|
||||
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
|
||||
case schema.ColumnTypeEnumString:
|
||||
|
||||
@@ -11,7 +11,7 @@ import (
|
||||
// TestLikeAndILikeWithoutWildcards_Warns Tests that LIKE/ILIKE without wildcards add warnings and include docs URL
|
||||
func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm, nil)
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
|
||||
@@ -33,7 +33,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
|
||||
|
||||
for _, expr := range tests {
|
||||
t.Run(expr, func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
|
||||
@@ -47,7 +47,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
|
||||
// TestLikeAndILikeWithWildcards_NoWarn Tests that LIKE/ILIKE with wildcards do not add warnings
|
||||
func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm, nil)
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
|
||||
@@ -69,7 +69,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
|
||||
|
||||
for _, expr := range tests {
|
||||
t.Run(expr, func(t *testing.T) {
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clause)
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
@@ -15,7 +14,8 @@ import (
|
||||
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
|
||||
func TestFilterExprLogsBodyJSON(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm, telemetrytypestest.NewMockMetadataStore())
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
// Define a comprehensive set of field keys to support all test cases
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ import (
|
||||
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
|
||||
func TestFilterExprLogs(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm, nil)
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
// Define a comprehensive set of field keys to support all test cases
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
@@ -2423,7 +2423,7 @@ func TestFilterExprLogs(t *testing.T) {
|
||||
// TestFilterExprLogs tests a comprehensive set of query patterns for logs search
|
||||
func TestFilterExprLogsConflictNegation(t *testing.T) {
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm, nil)
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
// Define a comprehensive set of field keys to support all test cases
|
||||
keys := buildCompleteFieldKeyMap()
|
||||
|
||||
@@ -84,6 +84,7 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
|
||||
}
|
||||
|
||||
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
|
||||
|
||||
dataType, value := inferDataType(value, operator, key)
|
||||
|
||||
// for array types, we need to extract the value from the JSON_QUERY
|
||||
@@ -1,149 +0,0 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
var (
|
||||
CodePlanIndexOutOfBounds = errors.MustNewCode("plan_index_out_of_bounds")
|
||||
)
|
||||
|
||||
type JSONAccessPlanBuilder struct {
|
||||
key *telemetrytypes.TelemetryFieldKey
|
||||
value any
|
||||
op qbtypes.FilterOperator
|
||||
parts []string
|
||||
getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)
|
||||
isPromoted bool
|
||||
}
|
||||
|
||||
// buildPlan recursively builds the path plan tree
|
||||
func (pb *JSONAccessPlanBuilder) buildPlan(ctx context.Context, index int, parent *telemetrytypes.JSONAccessNode, isDynArrChild bool) (*telemetrytypes.JSONAccessNode, error) {
|
||||
if index >= len(pb.parts) {
|
||||
return nil, errors.NewInvalidInputf(CodePlanIndexOutOfBounds, "index is out of bounds")
|
||||
}
|
||||
|
||||
part := pb.parts[index]
|
||||
pathSoFar := strings.Join(pb.parts[:index+1], telemetrytypes.ArraySep)
|
||||
isTerminal := index == len(pb.parts)-1
|
||||
|
||||
// Calculate progression parameters based on parent's values
|
||||
var maxTypes, maxPaths int
|
||||
if isDynArrChild {
|
||||
// Child of Dynamic array - reset progression to base values (16, 256)
|
||||
// This happens when we switch from Array(Dynamic) to Array(JSON)
|
||||
maxTypes = 16
|
||||
maxPaths = 256
|
||||
} else if parent != nil {
|
||||
// Child of JSON array - use parent's progression divided by 2 and 4
|
||||
maxTypes = parent.MaxDynamicTypes / 2
|
||||
maxPaths = parent.MaxDynamicPaths / 4
|
||||
if maxTypes < 0 {
|
||||
maxTypes = 0
|
||||
}
|
||||
if maxPaths < 0 {
|
||||
maxPaths = 0
|
||||
}
|
||||
}
|
||||
|
||||
types, err := pb.getTypes(ctx, pathSoFar)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create node for this path segment
|
||||
node := &telemetrytypes.JSONAccessNode{
|
||||
Name: part,
|
||||
IsTerminal: isTerminal,
|
||||
AvailableTypes: types,
|
||||
Branches: make(map[telemetrytypes.JSONAccessBranchType]*telemetrytypes.JSONAccessNode),
|
||||
Parent: parent,
|
||||
MaxDynamicTypes: maxTypes,
|
||||
MaxDynamicPaths: maxPaths,
|
||||
}
|
||||
|
||||
hasJSON := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayJSON)
|
||||
hasDynamic := slices.Contains(node.AvailableTypes, telemetrytypes.ArrayDynamic)
|
||||
|
||||
// Configure terminal if this is the last part
|
||||
if isTerminal {
|
||||
valueType, _ := inferDataType(pb.value, pb.op, pb.key)
|
||||
node.TerminalConfig = &telemetrytypes.TerminalConfig{
|
||||
Key: pb.key,
|
||||
ElemType: *pb.key.JSONDataType,
|
||||
ValueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType],
|
||||
}
|
||||
} else {
|
||||
if hasJSON {
|
||||
node.Branches[telemetrytypes.BranchJSON], err = pb.buildPlan(ctx, index+1, node, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if hasDynamic {
|
||||
node.Branches[telemetrytypes.BranchDynamic], err = pb.buildPlan(ctx, index+1, node, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
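
The budget progression in buildPlan is the easiest part of this hunk to misread, so here is a minimal self-contained sketch of the same rule; childParams is a hypothetical name introduced only for illustration and does not exist in the package.

	// childParams mirrors the rule in buildPlan: children of an Array(Dynamic)
	// hop reset to the base budget (16, 256); children of an Array(JSON) hop
	// inherit the parent's budget divided by 2 (types) and 4 (paths), clamped at zero.
	func childParams(parentTypes, parentPaths int, isDynArrChild bool) (int, int) {
		if isDynArrChild {
			return 16, 256
		}
		t, p := parentTypes/2, parentPaths/4
		if t < 0 {
			t = 0
		}
		if p < 0 {
			p = 0
		}
		return t, p
	}

	// From the body_json root (32, 0), successive JSON hops yield (16, 0), (8, 0), (4, 0), ...
	// From the body_json_promoted root (32, 1024): (16, 256), (8, 64), (4, 16), ...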

// PlanJSON builds a tree structure representing the complete JSON path traversal
// that precomputes all possible branches and their types
func PlanJSON(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, op qbtypes.FilterOperator,
	value any,
	getTypes func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error),
) (telemetrytypes.JSONAccessPlan, error) {
	// if the path is empty, return an error
	if key.Name == "" {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "path is empty")
	}

	// TODO: PlanJSON requires the Start and End of the query to select the correct column
	// between promoted and body_json using the creation time in distributed_promoted_paths
	path := strings.ReplaceAll(key.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
	parts := strings.Split(path, telemetrytypes.ArraySep)

	pb := &JSONAccessPlanBuilder{
		key:        key,
		op:         op,
		value:      value,
		parts:      parts,
		getTypes:   getTypes,
		isPromoted: key.Materialized,
	}
	plans := telemetrytypes.JSONAccessPlan{}

	node, err := pb.buildPlan(ctx, 0,
		telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn,
			32, 0),
		false,
	)
	if err != nil {
		return nil, err
	}
	plans = append(plans, node)

	if pb.isPromoted {
		node, err := pb.buildPlan(ctx, 0,
			telemetrytypes.NewRootJSONAccessNode(LogsV2BodyPromotedColumn,
				32, 1024),
			true,
		)
		if err != nil {
			return nil, err
		}
		plans = append(plans, node)
	}

	return plans, nil
}
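
As a usage sketch, mirroring what the deleted tests below exercise (getTypes is stubbed with a fixed map here; in the package it is backed by the metadata store):

	st := telemetrytypes.String
	key := &telemetrytypes.TelemetryFieldKey{Name: "education[*].name", JSONDataType: &st}
	getTypes := func(_ context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
		return map[string][]telemetrytypes.JSONDataType{
			"education":        {telemetrytypes.ArrayJSON},
			"education[].name": {telemetrytypes.String},
		}[path], nil
	}
	plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
	// plans holds one root per candidate column: body_json always, plus
	// body_json_promoted when key.Materialized is true.
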
@@ -1,877 +0,0 @@
package telemetrylogs

import (
	"context"
	"testing"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

// ============================================================================
// Helper Functions for Test Data Creation
// ============================================================================

// makeKey creates a TelemetryFieldKey for testing
func makeKey(name string, dataType telemetrytypes.JSONDataType, materialized bool) *telemetrytypes.TelemetryFieldKey {
	return &telemetrytypes.TelemetryFieldKey{
		Name:         name,
		JSONDataType: &dataType,
		Materialized: materialized,
	}
}

// makeGetTypes creates a getTypes function from a map of path -> types
func makeGetTypes(typesMap map[string][]telemetrytypes.JSONDataType) func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
	return func(_ context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
		return typesMap[path], nil
	}
}

// ============================================================================
// Helper Functions for Node Validation
// ============================================================================

// jsonAccessTestNode is a test-only, YAML-friendly view of JSONAccessNode.
// It intentionally omits Parent to avoid cycles and only keeps the fields
// that are useful for understanding / asserting the plan structure.
type jsonAccessTestNode struct {
	Name            string                         `yaml:"name"`
	Column          string                         `yaml:"column,omitempty"`
	IsTerminal      bool                           `yaml:"isTerminal,omitempty"`
	MaxDynamicTypes int                            `yaml:"maxDynamicTypes,omitempty"`
	MaxDynamicPaths int                            `yaml:"maxDynamicPaths,omitempty"`
	ElemType        string                         `yaml:"elemType,omitempty"`
	ValueType       string                         `yaml:"valueType,omitempty"`
	AvailableTypes  []string                       `yaml:"availableTypes,omitempty"`
	Branches        map[string]*jsonAccessTestNode `yaml:"branches,omitempty"`
}

// toTestNode converts a JSONAccessNode tree into jsonAccessTestNode so that
// it can be serialized to YAML for easy visual comparison in tests.
func toTestNode(n *telemetrytypes.JSONAccessNode) *jsonAccessTestNode {
	if n == nil {
		return nil
	}

	out := &jsonAccessTestNode{
		Name:            n.Name,
		IsTerminal:      n.IsTerminal,
		MaxDynamicTypes: n.MaxDynamicTypes,
		MaxDynamicPaths: n.MaxDynamicPaths,
	}

	// Column information for top-level plan nodes: their parent is the root,
	// whose parent is nil.
	if n.Parent != nil && n.Parent.Parent == nil {
		out.Column = n.Parent.Name
	}

	// AvailableTypes as strings (using StringValue for a stable representation)
	if len(n.AvailableTypes) > 0 {
		out.AvailableTypes = make([]string, 0, len(n.AvailableTypes))
		for _, t := range n.AvailableTypes {
			out.AvailableTypes = append(out.AvailableTypes, t.StringValue())
		}
	}

	// Terminal config
	if n.TerminalConfig != nil {
		out.ElemType = n.TerminalConfig.ElemType.StringValue()
		out.ValueType = n.TerminalConfig.ValueType.StringValue()
	}

	// Branches
	if len(n.Branches) > 0 {
		out.Branches = make(map[string]*jsonAccessTestNode, len(n.Branches))
		for bt, child := range n.Branches {
			out.Branches[bt.StringValue()] = toTestNode(child)
		}
	}

	return out
}

// plansToYAML converts a slice of JSONAccessNode plans to a YAML string that
// can be compared against a per-test expectedTree.
func plansToYAML(t *testing.T, plans []*telemetrytypes.JSONAccessNode) string {
	t.Helper()

	testNodes := make([]*jsonAccessTestNode, 0, len(plans))
	for _, p := range plans {
		testNodes = append(testNodes, toTestNode(p))
	}

	got, err := yaml.Marshal(testNodes)
	require.NoError(t, err)
	return string(got)
}

// ============================================================================
// Test Cases for Node Methods
// ============================================================================

func TestNode_Alias(t *testing.T) {
	tests := []struct {
		name     string
		node     *telemetrytypes.JSONAccessNode
		expected string
	}{
		{
			name:     "Root node returns name as-is",
			node:     telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			expected: LogsV2BodyJSONColumn,
		},
		{
			name: "Node without parent returns backticked name",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "user",
				Parent: nil,
			},
			expected: "`user`",
		},
		{
			name: "Node with root parent uses dot separator",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "age",
				Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			},
			expected: "`" + LogsV2BodyJSONColumn + ".age`",
		},
		{
			name: "Node with non-root parent uses array separator",
			node: &telemetrytypes.JSONAccessNode{
				Name: "name",
				Parent: &telemetrytypes.JSONAccessNode{
					Name:   "education",
					Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
				},
			},
			expected: "`" + LogsV2BodyJSONColumn + ".education[].name`",
		},
		{
			name: "Nested array path with multiple levels",
			node: &telemetrytypes.JSONAccessNode{
				Name: "type",
				Parent: &telemetrytypes.JSONAccessNode{
					Name: "awards",
					Parent: &telemetrytypes.JSONAccessNode{
						Name:   "education",
						Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
					},
				},
			},
			expected: "`" + LogsV2BodyJSONColumn + ".education[].awards[].type`",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.node.Alias()
			require.Equal(t, tt.expected, result)
		})
	}
}

func TestNode_FieldPath(t *testing.T) {
	tests := []struct {
		name     string
		node     *telemetrytypes.JSONAccessNode
		expected string
	}{
		{
			name: "Simple field path from root",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "user",
				Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			},
			// FieldPath() always wraps the field name in backticks
			expected: LogsV2BodyJSONColumn + ".`user`",
		},
		{
			name: "Field path with backtick-required key",
			node: &telemetrytypes.JSONAccessNode{
				Name:   "user-name", // requires backtick
				Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
			},
			expected: LogsV2BodyJSONColumn + ".`user-name`",
		},
		{
			name: "Nested field path",
			node: &telemetrytypes.JSONAccessNode{
				Name: "age",
				Parent: &telemetrytypes.JSONAccessNode{
					Name:   "user",
					Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
			expected: "`" + LogsV2BodyJSONColumn + ".user`.`age`",
		},
		{
			name: "Array element field path",
			node: &telemetrytypes.JSONAccessNode{
				Name: "name",
				Parent: &telemetrytypes.JSONAccessNode{
					Name:   "education",
					Parent: telemetrytypes.NewRootJSONAccessNode(LogsV2BodyJSONColumn, 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
			expected: "`" + LogsV2BodyJSONColumn + ".education`.`name`",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.node.FieldPath()
			require.Equal(t, tt.expected, result)
		})
	}
}

// ============================================================================
// Test Cases for PlanJSON
// ============================================================================

func TestPlanJSON_BasicStructure(t *testing.T) {
	_, getTypes := testTypeSet()

	tests := []struct {
		name         string
		key          *telemetrytypes.TelemetryFieldKey
		expectErr    bool
		expectedYAML string
	}{
		{
			name: "Simple path not promoted",
			key:  makeKey("user.name", telemetrytypes.String, false),
			expectedYAML: `
- name: user.name
  column: body_json
  availableTypes:
    - String
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
  valueType: String
`,
		},
		{
			name: "Simple path promoted",
			key:  makeKey("user.name", telemetrytypes.String, true),
			expectedYAML: `
- name: user.name
  column: body_json
  availableTypes:
    - String
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
  valueType: String
- name: user.name
  column: body_json_promoted
  availableTypes:
    - String
  maxDynamicTypes: 16
  maxDynamicPaths: 256
  isTerminal: true
  elemType: String
  valueType: String
`,
		},
		{
			name:         "Empty path returns error",
			key:          makeKey("", telemetrytypes.String, false),
			expectErr:    true,
			expectedYAML: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			plans, err := PlanJSON(context.Background(), tt.key, qbtypes.FilterOperatorEqual, "John", getTypes)
			if tt.expectErr {
				require.Error(t, err)
				require.Nil(t, plans)
				return
			}
			require.NoError(t, err)
			got := plansToYAML(t, plans)
			require.YAMLEq(t, tt.expectedYAML, got)
		})
	}
}

func TestPlanJSON_ArrayPaths(t *testing.T) {
	_, getTypes := testTypeSet()

	tests := []struct {
		name         string
		path         string
		expectedYAML string
	}{
		{
			name: "Single array level - JSON branch only",
			path: "education[].name",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: name
      availableTypes:
        - String
      maxDynamicTypes: 8
      isTerminal: true
      elemType: String
      valueType: String
`,
		},
		{
			name: "Single array level - both JSON and Dynamic branches",
			path: "education[].awards[].type",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
`,
		},
		{
			name: "Deeply nested array path",
			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			expectedYAML: `
- name: interests
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: entities
      availableTypes:
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: reviews
          availableTypes:
            - Array(JSON)
          maxDynamicTypes: 4
          branches:
            json:
              name: entries
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 2
              branches:
                json:
                  name: metadata
                  availableTypes:
                    - Array(JSON)
                  maxDynamicTypes: 1
                  branches:
                    json:
                      name: positions
                      availableTypes:
                        - Array(JSON)
                      branches:
                        json:
                          name: name
                          availableTypes:
                            - String
                          isTerminal: true
                          elemType: String
                          valueType: String
`,
		},
		{
			name: "ArrayAnyIndex replacement [*] to []",
			path: "education[*].name",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: name
      availableTypes:
        - String
      maxDynamicTypes: 8
      isTerminal: true
      elemType: String
      valueType: String
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			key := makeKey(tt.path, telemetrytypes.String, false)
			plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
			require.NoError(t, err)
			require.NotNil(t, plans)
			require.Len(t, plans, 1)
			got := plansToYAML(t, plans)
			require.YAMLEq(t, tt.expectedYAML, got)
		})
	}
}

func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
	_, getTypes := testTypeSet()
	path := "education[].awards[].type"
	value := "sports"

	t.Run("Non-promoted plan", func(t *testing.T) {
		key := makeKey(path, telemetrytypes.String, false)
		plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
		require.NoError(t, err)
		require.Len(t, plans, 1)

		expectedYAML := `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
`
		got := plansToYAML(t, plans)
		require.YAMLEq(t, expectedYAML, got)
	})

	t.Run("Promoted plan", func(t *testing.T) {
		key := makeKey(path, telemetrytypes.String, true)
		plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, value, getTypes)
		require.NoError(t, err)
		require.Len(t, plans, 2)

		expectedYAML := `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
- name: education
  column: body_json_promoted
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  maxDynamicPaths: 256
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      maxDynamicPaths: 64
      branches:
        json:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 4
          maxDynamicPaths: 16
          isTerminal: true
          elemType: String
          valueType: String
        dynamic:
          name: type
          availableTypes:
            - String
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          isTerminal: true
          elemType: String
          valueType: String
`
		got := plansToYAML(t, plans)
		require.YAMLEq(t, expectedYAML, got)
	})
}

func TestPlanJSON_EdgeCases(t *testing.T) {
	_, getTypes := testTypeSet()

	tests := []struct {
		name         string
		path         string
		value        any
		expectedYAML string
	}{
		{
			name:  "Path with no available types",
			path:  "unknown.path",
			value: "test",
			expectedYAML: `
- name: unknown.path
  column: body_json
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
  valueType: String
`,
		},
		{
			name:  "Very deep nesting - validates progression doesn't go negative",
			path:  "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			value: "Engineer",
			expectedYAML: `
- name: interests
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: entities
      availableTypes:
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: reviews
          availableTypes:
            - Array(JSON)
          maxDynamicTypes: 4
          branches:
            json:
              name: entries
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 2
              branches:
                json:
                  name: metadata
                  availableTypes:
                    - Array(JSON)
                  maxDynamicTypes: 1
                  branches:
                    json:
                      name: positions
                      availableTypes:
                        - Array(JSON)
                      branches:
                        json:
                          name: name
                          availableTypes:
                            - String
                          isTerminal: true
                          elemType: String
                          valueType: String
`,
		},
		{
			name:  "Path with mixed scalar and array types",
			path:  "education[].type",
			value: "high_school",
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: type
      availableTypes:
        - String
        - Int64
      maxDynamicTypes: 8
      isTerminal: true
      elemType: String
      valueType: String
`,
		},
		{
			name:  "Exists with only array types available",
			path:  "education",
			value: nil,
			expectedYAML: `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  isTerminal: true
  elemType: Array(JSON)
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Choose the key type based on the path; the operator does not affect the tree shape asserted here.
			keyType := telemetrytypes.String
			switch tt.path {
			case "education":
				keyType = telemetrytypes.ArrayJSON
			case "education[].type":
				keyType = telemetrytypes.String
			}
			key := makeKey(tt.path, keyType, false)
			plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, tt.value, getTypes)
			require.NoError(t, err)
			got := plansToYAML(t, plans)
			require.YAMLEq(t, tt.expectedYAML, got)
		})
	}
}

func TestPlanJSON_TreeStructure(t *testing.T) {
	_, getTypes := testTypeSet()
	path := "education[].awards[].participated[].team[].branch"
	key := makeKey(path, telemetrytypes.String, false)
	plans, err := PlanJSON(context.Background(), key, qbtypes.FilterOperatorEqual, "John", getTypes)
	require.NoError(t, err)
	require.Len(t, plans, 1)

	expectedYAML := `
- name: education
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  branches:
    json:
      name: awards
      availableTypes:
        - Array(Dynamic)
        - Array(JSON)
      maxDynamicTypes: 8
      branches:
        json:
          name: participated
          availableTypes:
            - Array(Dynamic)
            - Array(JSON)
          maxDynamicTypes: 4
          branches:
            json:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 2
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 1
                  isTerminal: true
                  elemType: String
                  valueType: String
            dynamic:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 16
              maxDynamicPaths: 256
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 8
                  maxDynamicPaths: 64
                  isTerminal: true
                  elemType: String
                  valueType: String
        dynamic:
          name: participated
          availableTypes:
            - Array(Dynamic)
            - Array(JSON)
          maxDynamicTypes: 16
          maxDynamicPaths: 256
          branches:
            json:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 8
              maxDynamicPaths: 64
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 4
                  maxDynamicPaths: 16
                  isTerminal: true
                  elemType: String
                  valueType: String
            dynamic:
              name: team
              availableTypes:
                - Array(JSON)
              maxDynamicTypes: 16
              maxDynamicPaths: 256
              branches:
                json:
                  name: branch
                  availableTypes:
                    - String
                  maxDynamicTypes: 8
                  maxDynamicPaths: 64
                  isTerminal: true
                  elemType: String
                  valueType: String
`

	got := plansToYAML(t, plans)
	require.YAMLEq(t, expectedYAML, got)
}

// ============================================================================
// Test Data Setup
// ============================================================================

// testTypeSet returns a map of path->types and a getTypes function for testing
// This represents the type information available in the test JSON structure
func testTypeSet() (map[string][]telemetrytypes.JSONDataType, func(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error)) {
	types := map[string][]telemetrytypes.JSONDataType{
		"user.name":   {telemetrytypes.String},
		"user.age":    {telemetrytypes.Int64, telemetrytypes.String},
		"user.height": {telemetrytypes.Float64},
		"education":   {telemetrytypes.ArrayJSON},
		"education[].name":              {telemetrytypes.String},
		"education[].type":              {telemetrytypes.String, telemetrytypes.Int64},
		"education[].internal_type":     {telemetrytypes.String},
		"education[].metadata.location": {telemetrytypes.String},
		"education[].parameters":        {telemetrytypes.ArrayFloat64, telemetrytypes.ArrayDynamic},
		"education[].duration":          {telemetrytypes.String},
		"education[].mode":              {telemetrytypes.String},
		"education[].year":              {telemetrytypes.Int64},
		"education[].field":             {telemetrytypes.String},
		"education[].awards":            {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
		"education[].awards[].name":     {telemetrytypes.String},
		"education[].awards[].rank":     {telemetrytypes.Int64},
		"education[].awards[].medal":    {telemetrytypes.String},
		"education[].awards[].type":     {telemetrytypes.String},
		"education[].awards[].semester": {telemetrytypes.Int64},
		"education[].awards[].participated":                {telemetrytypes.ArrayDynamic, telemetrytypes.ArrayJSON},
		"education[].awards[].participated[].type":         {telemetrytypes.String},
		"education[].awards[].participated[].field":        {telemetrytypes.String},
		"education[].awards[].participated[].project_type": {telemetrytypes.String},
		"education[].awards[].participated[].project_name": {telemetrytypes.String},
		"education[].awards[].participated[].race_type":    {telemetrytypes.String},
		"education[].awards[].participated[].team_based":   {telemetrytypes.Bool},
		"education[].awards[].participated[].team_name":    {telemetrytypes.String},
		"education[].awards[].participated[].team":         {telemetrytypes.ArrayJSON},
		"education[].awards[].participated[].team[].name":     {telemetrytypes.String},
		"education[].awards[].participated[].team[].branch":   {telemetrytypes.String},
		"education[].awards[].participated[].team[].semester": {telemetrytypes.Int64},
		"interests":                            {telemetrytypes.ArrayJSON},
		"interests[].type":                     {telemetrytypes.String},
		"interests[].entities":                 {telemetrytypes.ArrayJSON},
		"interests[].entities.application_date": {telemetrytypes.String},
		"interests[].entities[].reviews":                  {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].given_by":       {telemetrytypes.String},
		"interests[].entities[].reviews[].remarks":        {telemetrytypes.String},
		"interests[].entities[].reviews[].weight":         {telemetrytypes.Float64},
		"interests[].entities[].reviews[].passed":         {telemetrytypes.Bool},
		"interests[].entities[].reviews[].type":           {telemetrytypes.String},
		"interests[].entities[].reviews[].analysis_type":  {telemetrytypes.Int64},
		"interests[].entities[].reviews[].entries":        {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].entries[].subject": {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].status":  {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata": {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].entries[].metadata[].company":    {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].experience": {telemetrytypes.Int64},
		"interests[].entities[].reviews[].entries[].metadata[].unit":       {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].positions":  {telemetrytypes.ArrayJSON},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].name":     {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].duration": {telemetrytypes.Int64, telemetrytypes.Float64},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].unit":     {telemetrytypes.String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings":  {telemetrytypes.ArrayInt64, telemetrytypes.ArrayString},
		"message": {telemetrytypes.String},
	}

	return types, makeGetTypes(types)
}
@@ -1,455 +0,0 @@
package telemetrylogs

import (
	"context"
	"fmt"
	"slices"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)

var (
	CodeCurrentNodeNil           = errors.MustNewCode("current_node_nil")
	CodeNextNodeNil              = errors.MustNewCode("next_node_nil")
	CodeNestedExpressionsEmpty   = errors.MustNewCode("nested_expressions_empty")
	CodeGroupByPlanEmpty         = errors.MustNewCode("group_by_plan_empty")
	CodeArrayMapExpressionsEmpty = errors.MustNewCode("array_map_expressions_empty")
	CodePromotedPlanMissing      = errors.MustNewCode("promoted_plan_missing")
	CodeArrayNavigationFailed    = errors.MustNewCode("array_navigation_failed")
)

func (c *conditionBuilder) getTypes(ctx context.Context, path string) ([]telemetrytypes.JSONDataType, error) {
	keys, _, err := c.metadataStore.GetKeys(ctx, &telemetrytypes.FieldKeySelector{
		Name:              path,
		SelectorMatchType: telemetrytypes.FieldSelectorMatchTypeExact,
		Signal:            telemetrytypes.SignalLogs,
		Limit:             1,
	})
	if err != nil {
		return nil, err
	}
	types := []telemetrytypes.JSONDataType{}
	for _, key := range keys[path] {
		if key.JSONDataType != nil {
			types = append(types, *key.JSONDataType)
		}
	}
	return types, nil
}

// buildJSONCondition builds the full WHERE condition for body_json JSON paths
func (c *conditionBuilder) buildJSONCondition(ctx context.Context, key *telemetrytypes.TelemetryFieldKey,
	operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {

	plan, err := PlanJSON(ctx, key, operator, value, c.getTypes)
	if err != nil {
		return "", err
	}

	conditions := []string{}
	for _, plan := range plan {
		condition, err := c.emitPlannedCondition(plan, operator, value, sb)
		if err != nil {
			return "", err
		}
		conditions = append(conditions, condition)
	}
	return sb.Or(conditions...), nil
}

// emitPlannedCondition handles paths with array traversal
func (c *conditionBuilder) emitPlannedCondition(plan *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	// Build traversal + terminal recursively per hop
	compiled, err := c.recurseArrayHops(plan, operator, value, sb)
	if err != nil {
		return "", err
	}

	// sb.AddWhereClause(sqlbuilder.NewWhereClause().AddWhereExpr(sb.Args, compiled))
	return compiled, nil
}

// buildTerminalCondition creates the innermost condition
func (c *conditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	// Use the parent's alias + the current field name for the full path
	fieldPath := node.FieldPath()

	if node.TerminalConfig.ElemType.IsArray {
		// switch the operator for array membership checks
		switch operator {
		case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
			operator = qbtypes.FilterOperatorEqual
		case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
			operator = qbtypes.FilterOperatorNotEqual
		}
		arrayCond, err := c.buildArrayMembershipCondition(node, operator, value, sb)
		if err != nil {
			return "", err
		}
		return arrayCond, nil
	}
	conditions := []string{}

	elemType := node.TerminalConfig.ElemType
	fieldExpr := fmt.Sprintf("dynamicElement(%s, '%s')", fieldPath, elemType.StringValue())
	fieldExpr, value = querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, fieldExpr, operator)

	indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
		return index.Type == elemType && index.ColumnExpression == fieldPath
	})
	if elemType.IndexSupported && indexed {
		indexedExpr := assumeNotNull(fieldPath, elemType)
		emptyValue := func() any {
			switch elemType {
			case telemetrytypes.String:
				return ""
			case telemetrytypes.Int64, telemetrytypes.Float64, telemetrytypes.Bool:
				return 0
			default:
				return nil
			}
		}()

		// switch the operator and value for exists and not exists
		switch operator {
		case qbtypes.FilterOperatorExists:
			operator = qbtypes.FilterOperatorNotEqual
			value = emptyValue
		case qbtypes.FilterOperatorNotExists:
			operator = qbtypes.FilterOperatorEqual
			value = emptyValue
		default:
			// do nothing
		}

		cond, err := c.applyOperator(sb, indexedExpr, operator, value)
		if err != nil {
			return "", err
		}
		conditions = append(conditions, cond)
		// Switch the operator to EXISTS
		operator = qbtypes.FilterOperatorExists
	}

	cond, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}
	conditions = append(conditions, cond)
	if len(conditions) > 1 {
		return sb.And(conditions...), nil
	}
	return cond, nil
}

// buildArrayMembershipCondition handles array membership checks
func (c *conditionBuilder) buildArrayMembershipCondition(node *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	arrayPath := node.FieldPath()

	// create a typed array out of a dynamic array
	filteredDynamicExpr := func() string {
		baseArrayDynamicExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", arrayPath)
		return fmt.Sprintf("arrayMap(x->dynamicElement(x, '%s'), arrayFilter(x->(dynamicType(x) = '%s'), %s))",
			node.TerminalConfig.ValueType.StringValue(),
			node.TerminalConfig.ValueType.StringValue(),
			baseArrayDynamicExpr)
	}
	typedArrayExpr := func() string {
		return fmt.Sprintf("dynamicElement(%s, '%s')", arrayPath, node.TerminalConfig.ElemType.StringValue())
	}

	var arrayExpr string
	if node.TerminalConfig.ElemType == telemetrytypes.ArrayDynamic {
		arrayExpr = filteredDynamicExpr()
	} else {
		arrayExpr = typedArrayExpr()
	}

	fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(node.TerminalConfig.Key, value, "x", operator)
	op, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}

	return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
}
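
To make the generated SQL shape concrete, here is an illustrative expansion for a membership check against an Array(Dynamic) path; the path and value are hypothetical, backtick-quoting of the path is omitted, but the composition mirrors filteredDynamicExpr above.

	// Illustrative only: rough shape of the condition for body.ratings[] containing 'gold'
	// when ratings is stored as Array(Dynamic) and the comparison value is a String.
	const exampleArrayMembership = `arrayExists(x -> x = 'gold',
	    arrayMap(x -> dynamicElement(x, 'String'),
	        arrayFilter(x -> (dynamicType(x) = 'String'),
	            dynamicElement(body_json.ratings, 'Array(Dynamic)'))))`
	// For a concretely typed array (e.g. Array(String)), typedArrayExpr is used
	// directly and the inner arrayMap/arrayFilter narrowing is skipped.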
|
||||
|
||||
// recurseArrayHops recursively builds array traversal conditions
func (c *conditionBuilder) recurseArrayHops(current *telemetrytypes.JSONAccessNode, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
    if current == nil {
        return "", errors.NewInternalf(CodeArrayNavigationFailed, "navigation failed, current node is nil")
    }

    if current.IsTerminal {
        terminalCond, err := c.buildTerminalCondition(current, operator, value, sb)
        if err != nil {
            return "", err
        }
        return terminalCond, nil
    }

    currAlias := current.Alias()
    fieldPath := current.FieldPath()
    // Determine availability of Array(JSON) and Array(Dynamic) at this hop
    hasArrayJSON := current.Branches[telemetrytypes.BranchJSON] != nil
    hasArrayDynamic := current.Branches[telemetrytypes.BranchDynamic] != nil

    // Then, at this hop, compute the child condition per branch and wrap it
    branches := make([]string, 0, 2)
    if hasArrayJSON {
        jsonArrayExpr := fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')", fieldPath, current.MaxDynamicTypes, current.MaxDynamicPaths)
        childGroupJSON, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchJSON], operator, value, sb)
        if err != nil {
            return "", err
        }
        branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupJSON, jsonArrayExpr))
    }
    if hasArrayDynamic {
        dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", fieldPath)
        dynFilteredExpr := fmt.Sprintf("arrayMap(x->dynamicElement(x, 'JSON'), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)

        // Build the query for the Dynamic array
        childGroupDyn, err := c.recurseArrayHops(current.Branches[telemetrytypes.BranchDynamic], operator, value, sb)
        if err != nil {
            return "", err
        }
        branches = append(branches, fmt.Sprintf("arrayExists(%s-> %s, %s)", currAlias, childGroupDyn, dynFilteredExpr))
    }

    if len(branches) == 1 {
        return branches[0], nil
    }
    return fmt.Sprintf("(%s)", strings.Join(branches, " OR ")), nil
}
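// Illustrative sketch (not part of the diff): when a hop stores both
// Array(JSON) and Array(Dynamic) variants, the recursion above ORs the two
// branch conditions; roughly (path, alias, and parameters are hypothetical):
//
//     (arrayExists(`items`-> <child cond>,
//          dynamicElement(body_json.`items`,
//              'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))
//      OR arrayExists(`items`-> <child cond>,
//          arrayMap(x->dynamicElement(x, 'JSON'),
//              arrayFilter(x->(dynamicType(x) = 'JSON'),
//                  dynamicElement(body_json.`items`, 'Array(Dynamic)')))))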
func (c *conditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, fieldExpr string, operator qbtypes.FilterOperator, value any) (string, error) {
    switch operator {
    case qbtypes.FilterOperatorEqual:
        return sb.E(fieldExpr, value), nil
    case qbtypes.FilterOperatorNotEqual:
        return sb.NE(fieldExpr, value), nil
    case qbtypes.FilterOperatorGreaterThan:
        return sb.G(fieldExpr, value), nil
    case qbtypes.FilterOperatorGreaterThanOrEq:
        return sb.GE(fieldExpr, value), nil
    case qbtypes.FilterOperatorLessThan:
        return sb.LT(fieldExpr, value), nil
    case qbtypes.FilterOperatorLessThanOrEq:
        return sb.LE(fieldExpr, value), nil
    case qbtypes.FilterOperatorLike:
        return sb.Like(fieldExpr, value), nil
    case qbtypes.FilterOperatorNotLike:
        return sb.NotLike(fieldExpr, value), nil
    case qbtypes.FilterOperatorILike:
        return sb.ILike(fieldExpr, value), nil
    case qbtypes.FilterOperatorNotILike:
        return sb.NotILike(fieldExpr, value), nil
    case qbtypes.FilterOperatorRegexp:
        return fmt.Sprintf("match(%s, %s)", fieldExpr, sb.Var(value)), nil
    case qbtypes.FilterOperatorNotRegexp:
        return fmt.Sprintf("NOT match(%s, %s)", fieldExpr, sb.Var(value)), nil
    case qbtypes.FilterOperatorContains:
        return sb.ILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
    case qbtypes.FilterOperatorNotContains:
        return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
    case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
        // emulate IN/NOT IN with OR/AND over equality checks to leverage indexes consistently
        values, ok := value.([]any)
        if !ok {
            values = []any{value}
        }
        conds := []string{}
        for _, v := range values {
            if operator == qbtypes.FilterOperatorIn {
                conds = append(conds, sb.E(fieldExpr, v))
            } else {
                conds = append(conds, sb.NE(fieldExpr, v))
            }
        }
        if operator == qbtypes.FilterOperatorIn {
            return sb.Or(conds...), nil
        }
        return sb.And(conds...), nil
    case qbtypes.FilterOperatorExists:
        return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
    case qbtypes.FilterOperatorNotExists:
        return fmt.Sprintf("%s IS NULL", fieldExpr), nil
    default:
        return "", qbtypes.ErrUnsupportedOperator
    }
}
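// Illustrative sketch (not part of the diff): because IN/NOT IN are expanded
// into equality checks, a filter such as level IN ('warn', 'error') becomes
// roughly the following, with sqlbuilder binding each value as an argument
// (the field name is hypothetical):
//
//     cond, _ := c.applyOperator(sb, "level", qbtypes.FilterOperatorIn, []any{"warn", "error"})
//     // cond renders to something like "(level = ? OR level = ?)"
//     // with args ["warn", "error"] under the ClickHouse flavor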
// GroupByArrayJoinInfo contains information about array joins needed for GroupBy
type GroupByArrayJoinInfo struct {
    ArrayJoinClauses []string // ARRAY JOIN clauses to add to the FROM clause
    TerminalExpr     string   // Terminal field expression for SELECT/GROUP BY
}

// BuildGroupBy builds GroupBy information for body JSON fields using the arrayConcat pattern
func (c *conditionBuilder) BuildGroupBy(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*GroupByArrayJoinInfo, error) {
    path := strings.TrimPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix)

    plan, err := PlanJSON(ctx, key, qbtypes.FilterOperatorExists, nil, c.getTypes)
    if err != nil {
        return nil, err
    }

    if len(plan) == 0 {
        return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
            "Could not find any valid paths for: %s", path)
    }

    if plan[0].IsTerminal {
        node := plan[0]

        expr := fmt.Sprintf("dynamicElement(%s, '%s')", node.FieldPath(), node.TerminalConfig.ElemType.StringValue())
        if key.Materialized {
            if len(plan) < 2 {
                return nil, errors.Newf(errors.TypeUnexpected, CodePromotedPlanMissing,
                    "plan length is less than 2 for promoted path: %s", path)
            }

            // promoted column first, then the body_json column
            // TODO(Piyush): Change this in the future for better performance
            expr = fmt.Sprintf("coalesce(%s, %s)",
                fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
                expr,
            )
        }

        return &GroupByArrayJoinInfo{
            ArrayJoinClauses: []string{},
            TerminalExpr:     expr,
        }, nil
    }

    // Build the arrayConcat pattern directly from the tree structure
    arrayConcatExpr, err := c.buildArrayConcat(plan)
    if err != nil {
        return nil, err
    }

    // Create a single ARRAY JOIN clause with arrayFlatten
    arrayJoinClause := fmt.Sprintf("ARRAY JOIN %s AS `%s`", arrayConcatExpr, key.Name)

    return &GroupByArrayJoinInfo{
        ArrayJoinClauses: []string{arrayJoinClause},
        TerminalExpr:     fmt.Sprintf("`%s`", key.Name),
    }, nil
}
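// Illustrative sketch (not part of the diff): grouping by a nested array path,
// say body.items[*].status, would yield output along these lines (the path is
// hypothetical):
//
//     info, _ := c.BuildGroupBy(ctx, key)
//     // info.ArrayJoinClauses[0]:
//     //   ARRAY JOIN arrayFlatten(arrayConcat(arrayMap(...))) AS `body.items[*].status`
//     // info.TerminalExpr:
//     //   `body.items[*].status`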
// buildArrayConcat builds the arrayConcat pattern directly from the tree structure
func (c *conditionBuilder) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (string, error) {
    if len(plan) == 0 {
        return "", errors.Newf(errors.TypeInternal, CodeGroupByPlanEmpty, "group by plan is empty while building arrayConcat")
    }

    // Build arrayMap expressions for ALL available branches at the root level
    var arrayMapExpressions []string
    for _, node := range plan {
        hasJSON := node.Branches[telemetrytypes.BranchJSON] != nil
        hasDynamic := node.Branches[telemetrytypes.BranchDynamic] != nil

        if hasJSON {
            jsonExpr, err := c.buildArrayMap(node, telemetrytypes.BranchJSON)
            if err != nil {
                return "", err
            }
            arrayMapExpressions = append(arrayMapExpressions, jsonExpr)
        }

        if hasDynamic {
            dynamicExpr, err := c.buildArrayMap(node, telemetrytypes.BranchDynamic)
            if err != nil {
                return "", err
            }
            arrayMapExpressions = append(arrayMapExpressions, dynamicExpr)
        }
    }
    if len(arrayMapExpressions) == 0 {
        return "", errors.Newf(errors.TypeInternal, CodeArrayMapExpressionsEmpty, "array map expressions are empty while building arrayConcat")
    }

    // Build the arrayConcat expression
    arrayConcatExpr := fmt.Sprintf("arrayConcat(%s)", strings.Join(arrayMapExpressions, ", "))

    // Wrap with arrayFlatten
    arrayFlattenExpr := fmt.Sprintf("arrayFlatten(%s)", arrayConcatExpr)

    return arrayFlattenExpr, nil
}

// buildArrayMap builds the arrayMap expression for a specific branch, handling all sub-branches
func (c *conditionBuilder) buildArrayMap(currentNode *telemetrytypes.JSONAccessNode, branchType telemetrytypes.JSONAccessBranchType) (string, error) {
    if currentNode == nil {
        return "", errors.Newf(errors.TypeInternal, CodeCurrentNodeNil, "current node is nil while building arrayMap")
    }

    nextNode := currentNode.Branches[branchType]
    if nextNode == nil {
        return "", errors.Newf(errors.TypeInternal, CodeNextNodeNil, "next node is nil while building arrayMap")
    }

    // Build the array expression for this level
    var arrayExpr string
    if branchType == telemetrytypes.BranchJSON {
        // Array(JSON) branch
        arrayExpr = fmt.Sprintf("dynamicElement(%s, 'Array(JSON(max_dynamic_types=%d, max_dynamic_paths=%d))')",
            currentNode.FieldPath(), currentNode.MaxDynamicTypes, currentNode.MaxDynamicPaths)
    } else {
        // Array(Dynamic) branch - filter for JSON objects
        dynBaseExpr := fmt.Sprintf("dynamicElement(%s, 'Array(Dynamic)')", currentNode.FieldPath())
        arrayExpr = fmt.Sprintf("arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), %s))", dynBaseExpr)
    }

    // If this is the terminal level, return the simple arrayMap
    if nextNode.IsTerminal {
        dynamicElementExpr := fmt.Sprintf("dynamicElement(%s, '%s')", nextNode.FieldPath(),
            nextNode.TerminalConfig.ElemType.StringValue(),
        )
        return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), dynamicElementExpr, arrayExpr), nil
    }

    // For non-terminal nodes, we need to handle ALL possible branches at the next level
    var nestedExpressions []string
    hasJSON := nextNode.Branches[telemetrytypes.BranchJSON] != nil
    hasDynamic := nextNode.Branches[telemetrytypes.BranchDynamic] != nil

    if hasJSON {
        jsonNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchJSON)
        if err != nil {
            return "", err
        }
        nestedExpressions = append(nestedExpressions, jsonNested)
    }

    if hasDynamic {
        dynamicNested, err := c.buildArrayMap(nextNode, telemetrytypes.BranchDynamic)
        if err != nil {
            return "", err
        }
        nestedExpressions = append(nestedExpressions, dynamicNested)
    }

    // If we have multiple nested expressions, we need to concat them
    var nestedExpr string
    if len(nestedExpressions) == 1 {
        nestedExpr = nestedExpressions[0]
    } else if len(nestedExpressions) > 1 {
        // This shouldn't happen in our current tree structure, but handle it just in case
        nestedExpr = fmt.Sprintf("arrayConcat(%s)", strings.Join(nestedExpressions, ", "))
    } else {
        return "", errors.Newf(errors.TypeInternal, CodeNestedExpressionsEmpty, "nested expressions are empty while building arrayMap")
    }

    return fmt.Sprintf("arrayMap(%s->%s, %s)", currentNode.Alias(), nestedExpr, arrayExpr), nil
}

func assumeNotNull(column string, elemType telemetrytypes.JSONDataType) string {
    return fmt.Sprintf("assumeNotNull(dynamicElement(%s, '%s'))", column, elemType.StringValue())
}
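// Illustrative sketch (not part of the diff): for a hypothetical two-hop path
// body.a[*].b[*].c of type String, buildArrayConcat would emit roughly:
//
//     arrayFlatten(arrayConcat(
//         arrayMap(`a` -> arrayMap(`a[]b` -> dynamicElement(`a[]b`.`c`, 'String'),
//                                  dynamicElement(`a`.`b`, 'Array(JSON(...))')),
//                  dynamicElement(body_json.`a`, 'Array(JSON(...))'))))
//
// i.e. one arrayMap per hop, a single arrayConcat at the root, and one flatten.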
@@ -23,6 +23,7 @@ type logQueryStatementBuilder struct {
    aggExprRewriter qbtypes.AggExprRewriter

    fullTextColumn *telemetrytypes.TelemetryFieldKey
    jsonBodyPrefix string
    jsonKeyToKey   qbtypes.JsonKeyToFieldFunc
}

@@ -36,6 +37,7 @@ func NewLogQueryStatementBuilder(
    resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
    aggExprRewriter qbtypes.AggExprRewriter,
    fullTextColumn *telemetrytypes.TelemetryFieldKey,
    jsonBodyPrefix string,
    jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *logQueryStatementBuilder {
    logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
@@ -48,6 +50,7 @@ func NewLogQueryStatementBuilder(
    resourceFilterStmtBuilder: resourceFilterStmtBuilder,
    aggExprRewriter:           aggExprRewriter,
    fullTextColumn:            fullTextColumn,
    jsonBodyPrefix:            jsonBodyPrefix,
    jsonKeyToKey:              jsonKeyToKey,
    }
}
@@ -168,25 +171,6 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
    overallMatch = overallMatch || findMatch(IntrinsicFields)
    }

    if strings.Contains(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
        k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
        fieldKeys, found := keys[k.Name]
        if found && len(fieldKeys) > 0 {
            k.FieldContext = fieldKeys[0].FieldContext
            k.FieldDataType = fieldKeys[0].FieldDataType
            k.Materialized = fieldKeys[0].Materialized
            k.JSONDataType = fieldKeys[0].JSONDataType
            k.Indexes = fieldKeys[0].Indexes

            overallMatch = true // because we found a match
        } else {
            b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
            k.FieldContext = telemetrytypes.FieldContextBody
            k.FieldDataType = telemetrytypes.FieldDataTypeString
            k.JSONDataType = &telemetrytypes.String
        }
    }

    if !overallMatch {
        // check if all the keys for the given field have been materialized; if so,
        // set the key to materialized
@@ -237,9 +221,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
    cteArgs = append(cteArgs, args)
    }

    // Collect array join info for body JSON fields
    var arrayJoinClauses []string

    // Select timestamp and id by default
    sb.Select(LogsV2TimestampColumn)
    sb.SelectMore(LogsV2IDColumn)
@@ -253,10 +234,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
    sb.SelectMore(LogsV2ScopeNameColumn)
    sb.SelectMore(LogsV2ScopeVersionColumn)
    sb.SelectMore(LogsV2BodyColumn)
    if querybuilder.BodyJSONQueryEnabled {
        sb.SelectMore(LogsV2BodyJSONColumn)
        sb.SelectMore(LogsV2BodyPromotedColumn)
    }
    sb.SelectMore(LogsV2AttributesStringColumn)
    sb.SelectMore(LogsV2AttributesNumberColumn)
    sb.SelectMore(LogsV2AttributesBoolColumn)
@@ -269,7 +246,6 @@ func (b *logQueryStatementBuilder) buildListQuery(
    if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
        continue
    }

    // get the column expression for the field - use the array index directly to avoid a pointer to the loop variable
    colExpr, err := b.fm.ColumnExpressionFor(ctx, &query.SelectFields[index], keys)
    if err != nil {
@@ -279,12 +255,8 @@ func (b *logQueryStatementBuilder) buildListQuery(
    }
    }

    // From table (inject ARRAY JOINs if collected)
    fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
    if len(arrayJoinClauses) > 0 {
        fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
    }
    sb.From(fromBase)
    // From table
    sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

    // Add filter conditions
    preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -358,17 +330,13 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(

    var allGroupByArgs []any

    // Collect array join info for body JSON fields
    var arrayJoinClauses []string

    // Keep the original column expressions so we can build the tuple
    fieldNames := make([]string, 0, len(query.GroupBy))
    for _, gb := range query.GroupBy {
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
        if err != nil {
            return nil, err
        }

        colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
        allGroupByArgs = append(allGroupByArgs, args...)
        sb.SelectMore(colExpr)
@@ -390,13 +358,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
    sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
    }

    // Add FROM clause
    fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
    if len(arrayJoinClauses) > 0 {
        fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
    }
    sb.From(fromBase)

    sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
    preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)

    if err != nil {
@@ -442,6 +404,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
    }

    combinedArgs := append(allGroupByArgs, allAggChArgs...)

    mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)

    // Stitch it all together: WITH … SELECT …
@@ -468,6 +431,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
    }

    combinedArgs := append(allGroupByArgs, allAggChArgs...)

    mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)

    // Stitch it all together: WITH … SELECT …
@@ -514,15 +478,11 @@ func (b *logQueryStatementBuilder) buildScalarQuery(

    var allGroupByArgs []any

    // Collect array join info for body JSON fields
    var arrayJoinClauses []string

    for _, gb := range query.GroupBy {
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
        if err != nil {
            return nil, err
        }

        colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
        allGroupByArgs = append(allGroupByArgs, args...)
        sb.SelectMore(colExpr)
@@ -548,12 +508,8 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
    }
    }

    // From table (inject ARRAY JOINs if collected)
    fromBase := fmt.Sprintf("%s.%s", DBName, LogsV2TableName)
    if len(arrayJoinClauses) > 0 {
        fromBase = fromBase + " " + strings.Join(arrayJoinClauses, " ")
    }
    sb.From(fromBase)
    // From table
    sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))

    // Add filter conditions
    preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
@@ -698,6 +654,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
    start, end uint64,
    variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {

    return b.resourceFilterStmtBuilder.Build(
        ctx,
        start,

@@ -32,6 +32,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
    cb,
    mockMetadataStore,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
    )
}
@@ -196,11 +197,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
    }

    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    cb := NewConditionBuilder(fm, mockMetadataStore)

    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -212,6 +213,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
    resourceFilterStmtBuilder,
    aggExprRewriter,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
    )

@@ -316,11 +318,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
    }

    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    cb := NewConditionBuilder(fm, mockMetadataStore)

    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -332,6 +334,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
    resourceFilterStmtBuilder,
    aggExprRewriter,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
    )

@@ -424,11 +427,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
    }

    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    cb := NewConditionBuilder(fm, mockMetadataStore)

    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -440,11 +443,10 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
    resourceFilterStmtBuilder,
    aggExprRewriter,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
    )

    //

    for _, c := range cases {
        t.Run(c.name, func(t *testing.T) {

@@ -489,8 +491,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
    GroupBy: []qbtypes.GroupByKey{
        {
            TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
                Name:         "status",
                FieldContext: telemetrytypes.FieldContextBody,
                Name: "body.status",
            },
        },
    },
@@ -500,11 +501,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
    }

    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    cb := NewConditionBuilder(fm, mockMetadataStore)

    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -516,6 +517,7 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
    resourceFilterStmtBuilder,
    aggExprRewriter,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
    )

@@ -595,11 +597,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
    }

    fm := NewFieldMapper()
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
    cb := NewConditionBuilder(fm, mockMetadataStore)

    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -611,6 +613,7 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
    resourceFilterStmtBuilder,
    aggExprRewriter,
    DefaultFullTextColumn,
    BodyJSONStringSearchPrefix,
    GetBodyJSONKey,
    )


@@ -7,6 +7,7 @@ import (
    "strings"
    "time"

    "github.com/ClickHouse/clickhouse-go/v2"
    "github.com/ClickHouse/clickhouse-go/v2/lib/chcol"
    schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    "github.com/SigNoz/signoz-otel-collector/constants"
@@ -14,6 +15,7 @@ import (
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/querybuilder"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
    "github.com/huandu/go-sqlbuilder"
)
@@ -32,10 +34,6 @@ var (
    CodeFailScanVariant         = errors.MustNewCode("fail_scan_variant")
    CodeFailBuildJSONPathsQuery = errors.MustNewCode("fail_build_json_paths_query")
    CodeNoPathsToQueryIndexes   = errors.MustNewCode("no_paths_to_query_indexes_provided")

    CodeFailedToPrepareBatch = errors.MustNewCode("failed_to_prepare_batch_promoted_paths")
    CodeFailedToSendBatch    = errors.MustNewCode("failed_to_send_batch_promoted_paths")
    CodeFailedToAppendPath   = errors.MustNewCode("failed_to_append_path_promoted_paths")
)

// GetBodyJSONPaths extracts body JSON paths from the path_types table
@@ -47,7 +45,10 @@ var (
//
// searchOperator: LIKE for pattern matching, EQUAL for exact match
// Returns: (paths, error)
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.TelemetryStore,
    fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {

    query, args, limit, err := buildGetBodyJSONPathsQuery(fieldKeySelectors)
@@ -55,7 +56,7 @@ func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
    return nil, false, err
    }

    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
    rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return nil, false, errors.WrapInternalf(err, CodeFailExtractBodyJSONKeys, "failed to extract body JSON keys")
    }
@@ -95,12 +96,12 @@ func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
    return nil, false, errors.WrapInternalf(rows.Err(), CodeFailIterateBodyJSONKeys, "error iterating body JSON keys")
    }

    promoted, err := t.GetPromotedPaths(ctx, paths...)
    promoted, err := GetPromotedPaths(ctx, telemetryStore.ClickhouseDB(), paths...)
    if err != nil {
        return nil, false, err
    }

    indexes, err := t.getJSONPathIndexes(ctx, paths...)
    indexes, err := getJSONPathIndexes(ctx, telemetryStore, paths...)
    if err != nil {
        return nil, false, err
    }
@@ -132,7 +133,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
    orClauses := []string{}
    for _, fieldKeySelector := range fieldKeySelectors {
        // replace [*] with []
        fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrytypes.ArrayAnyIndex, telemetrytypes.ArraySep)
        fieldKeySelector.Name = strings.ReplaceAll(fieldKeySelector.Name, telemetrylogs.ArrayAnyIndex, telemetrylogs.ArraySep)
        // Extract search text for body JSON keys
        keyName := CleanPathPrefixes(fieldKeySelector.Name)
        if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
@@ -159,11 +160,13 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
    return query, args, limit, nil
}


func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
    filteredPaths := []string{}
    for _, path := range paths {
        if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
        if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
            continue
        }
        filteredPaths = append(filteredPaths, path)
@@ -173,7 +176,7 @@ func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...st
    }

    // list indexes for the paths
    indexesMap, err := t.ListLogsJSONIndexes(ctx, filteredPaths...)
    indexesMap, err := ListLogsJSONIndexes(ctx, telemetryStore, filteredPaths...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to list JSON path indexes")
    }
@@ -212,6 +215,7 @@ func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...st
}

func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, []any) {
    // This aggregates all types per path and gets the max last_seen, then applies LIMIT
    sb := sqlbuilder.Select(
        "name", "type_full", "expr", "granularity",
    ).From(fmt.Sprintf("clusterAllReplicas('%s', %s)", cluster, SkipIndexTableName))
@@ -232,15 +236,15 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
    return sb.BuildWithFlavor(sqlbuilder.ClickHouse)
}

func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error) {
    query, args := buildListLogsJSONIndexesQuery(t.telemetrystore.Cluster(), filters...)
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
func ListLogsJSONIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, filters ...string) (map[string][]schemamigrator.Index, error) {
    query, args := buildListLogsJSONIndexesQuery(telemetryStore.Cluster(), filters...)
    rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to load string indexed columns")
    }
    defer rows.Close()

    indexes := make(map[string][]schemamigrator.Index)
    indexesMap := make(map[string][]schemamigrator.Index)
    for rows.Next() {
        var name string
        var typeFull string
@@ -249,7 +253,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
    if err := rows.Scan(&name, &typeFull, &expr, &granularity); err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to scan string indexed column")
    }
    indexes[name] = append(indexes[name], schemamigrator.Index{
    indexesMap[name] = append(indexesMap[name], schemamigrator.Index{
        Name:       name,
        Type:       typeFull,
        Expression: expr,
@@ -257,19 +261,12 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
    })
    }

    return indexes, nil
    return indexesMap, nil
}

func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
    sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
    pathConditions := []string{}
    for _, path := range paths {
        pathConditions = append(pathConditions, sb.Equal("path", path))
    }
    sb.Where(sb.Or(pathConditions...))
    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
func ListPromotedPaths(ctx context.Context, conn clickhouse.Conn) (map[string]struct{}, error) {
    query := fmt.Sprintf("SELECT path FROM %s.%s", DBName, PromotedPathsTableName)
    rows, err := conn.Query(ctx, query)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to load promoted paths")
    }
@@ -288,14 +285,14 @@ func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...str
}

// TODO(Piyush): Remove this if not used in the future
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
func ListJSONValues(ctx context.Context, conn clickhouse.Conn, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
    path = CleanPathPrefixes(path)

    if strings.Contains(path, telemetrytypes.ArraySep) || strings.Contains(path, telemetrytypes.ArrayAnyIndex) {
    if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
        return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
    }

    promoted, err := t.IsPathPromoted(ctx, path)
    promoted, err := IsPathPromoted(ctx, conn, path)
    if err != nil {
        return nil, false, err
    }
@@ -328,7 +325,7 @@ func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, li
    contextWithTimeout, cancel := context.WithTimeout(ctx, 5*time.Second)
    defer cancel()
    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    rows, err := t.telemetrystore.ClickhouseDB().Query(contextWithTimeout, query, args...)
    rows, err := conn.Query(contextWithTimeout, query, args...)
    if err != nil {
        if errors.Is(err, context.DeadlineExceeded) {
            return nil, false, errors.WrapTimeoutf(err, errors.CodeTimeout, "query timed out").WithAdditional("failed to list JSON values")
@@ -450,10 +447,10 @@ func derefValue(v any) any {
}

// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
    split := strings.Split(path, telemetrytypes.ArraySep)
func IsPathPromoted(ctx context.Context, conn clickhouse.Conn, path string) (bool, error) {
    split := strings.Split(path, telemetrylogs.ArraySep)
    query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
    rows, err := conn.Query(ctx, query, split[0])
    if err != nil {
        return false, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to check if path %s is promoted", path)
    }
@@ -463,7 +460,7 @@ func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (b
}

// GetPromotedPaths returns the subset of the given paths that are promoted
func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...string) (*utils.ConcurrentSet[string], error) {
func GetPromotedPaths(ctx context.Context, conn clickhouse.Conn, paths ...string) (*utils.ConcurrentSet[string], error) {
    sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
    pathConditions := []string{}
    for _, path := range paths {
@@ -472,7 +469,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
    sb.Where(sb.Or(pathConditions...))

    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
    rows, err := conn.Query(ctx, query, args...)
    if err != nil {
        return nil, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to get promoted paths")
    }
@@ -497,29 +494,3 @@ func CleanPathPrefixes(path string) string {
    path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
    return path
}

func (t *telemetryMetaStore) PromotePaths(ctx context.Context, paths ...string) error {
    batch, err := t.telemetrystore.ClickhouseDB().PrepareBatch(ctx,
        fmt.Sprintf("INSERT INTO %s.%s (path, created_at) VALUES", DBName,
            PromotedPathsTableName))
    if err != nil {
        return errors.WrapInternalf(err, CodeFailedToPrepareBatch, "failed to prepare batch")
    }

    nowMs := uint64(time.Now().UnixMilli())
    for _, p := range paths {
        trimmed := strings.TrimSpace(p)
        if trimmed == "" {
            continue
        }
        if err := batch.Append(trimmed, nowMs); err != nil {
            _ = batch.Abort()
            return errors.WrapInternalf(err, CodeFailedToAppendPath, "failed to append path")
        }
    }

    if err := batch.Send(); err != nil {
        return errors.WrapInternalf(err, CodeFailedToSendBatch, "failed to send batch")
    }
    return nil
}
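// Illustrative usage (not part of the diff): several helpers above now take a
// clickhouse.Conn (or a telemetrystore.TelemetryStore) instead of being
// methods on telemetryMetaStore, so other packages can call them directly.
// A minimal sketch, assuming a telemetryStore is already constructed and the
// paths are hypothetical:
//
//     conn := telemetryStore.ClickhouseDB()
//     promoted, err := GetPromotedPaths(ctx, conn, "status", "user.id")
//     if err != nil {
//         // handle error
//     }
//     ok, err := IsPathPromoted(ctx, conn, "status")
//     _, _ = promoted, ok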
@@ -572,14 +572,6 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
    }
    }

    if querybuilder.BodyJSONQueryEnabled {
        bodyJSONPaths, finished, err := t.getBodyJSONPaths(ctx, fieldKeySelectors) // LIKE for pattern matching
        if err != nil {
            t.logger.ErrorContext(ctx, "failed to extract body JSON paths", "error", err)
        }
        keys = append(keys, bodyJSONPaths...)
        complete = complete && finished
    }
    return keys, complete, nil
}

@@ -986,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
    FieldMapper:      t.fm,
    ConditionBuilder: t.conditionBuilder,
    FieldKeys:        keys,
    }, 0, 0)
    }, 0, 0)
    if err == nil {
        sb.AddWhereClause(whereClause.WhereClause)
    } else {
@@ -1010,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel

    // search on attributes
    key.FieldContext = telemetrytypes.FieldContextAttribute
    cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
    cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
    if err == nil {
        conds = append(conds, cond)
    }

    // search on resource
    key.FieldContext = telemetrytypes.FieldContextResource
    cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
    cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
    if err == nil {
        conds = append(conds, cond)
    }
    key.FieldContext = origContext
    } else {
        cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
        cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
        if err == nil {
            conds = append(conds, cond)
        }
@@ -1172,10 +1164,6 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
    limit = 50
    }

    if strings.HasPrefix(fieldValueSelector.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
        return t.ListJSONValues(ctx, fieldValueSelector.Name, limit)
    }

    sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)

    if fieldValueSelector.Name != "" {

@@ -32,8 +32,6 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
    options.MaxIdleConns = config.Connection.MaxIdleConns
    options.MaxOpenConns = config.Connection.MaxOpenConns
    options.DialTimeout = config.Connection.DialTimeout
    // This is to avoid driver decoding issues with JSON columns
    options.Settings["output_format_native_write_json_as_string"] = 1

    chConn, err := clickhouse.Open(options)
    if err != nil {

@@ -495,7 +495,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
    // Keep the original column expressions so we can build the tuple
    fieldNames := make([]string, 0, len(query.GroupBy))
    for _, gb := range query.GroupBy {
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
        if err != nil {
            return nil, err
        }
@@ -637,7 +637,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

    var allGroupByArgs []any
    for _, gb := range query.GroupBy {
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, nil)
        expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
        if err != nil {
            return nil, err
        }
@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
    FieldKeys:          keys,
    SkipResourceFilter: true,
    Variables:          variables,
    }, start, end)
    }, start, end)

    if err != nil {
        return nil, err

@@ -357,7 +357,7 @@ func TestStatementBuilder(t *testing.T) {
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -525,7 +525,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()

@@ -681,7 +681,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()


@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
    ConditionBuilder:   b.stmtBuilder.cb,
    FieldKeys:          keys,
    SkipResourceFilter: true,
    }, b.start, b.end,
    }, b.start, b.end,
    )
    if err != nil {
        b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
@@ -552,6 +552,7 @@ func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(ctx context.Context, sele
    b.stmtBuilder.cb,
    keys,
    telemetrytypes.FieldDataTypeString,
    "",
    nil,
    )
    if err != nil {
@@ -661,6 +662,7 @@ func (b *traceOperatorCTEBuilder) buildTraceQuery(ctx context.Context, selectFro
    b.stmtBuilder.cb,
    keys,
    telemetrytypes.FieldDataTypeString,
    "",
    nil,
    )
    if err != nil {
@@ -800,6 +802,7 @@ func (b *traceOperatorCTEBuilder) buildScalarQuery(ctx context.Context, selectFr
    b.stmtBuilder.cb,
    keys,
    telemetrytypes.FieldDataTypeString,
    "",
    nil,
    )
    if err != nil {

@@ -390,7 +390,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()
    traceStmtBuilder := NewTraceQueryStatementBuilder(
@@ -506,7 +506,7 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
    cb := NewConditionBuilder(fm)
    mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
    mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    resourceFilterStmtBuilder := resourceFilterStmtBuilder()
    traceStmtBuilder := NewTraceQueryStatementBuilder(

@@ -44,7 +44,7 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
    mockMetadataStore,
    )

    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
    aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

    statementBuilder := NewTraceQueryStatementBuilder(
        instrumentationtest.New().ToProviderSettings(),

@@ -349,7 +349,7 @@ func (dashboard *Dashboard) GetWidgetQuery(startTime, endTime uint64, widgetInde
    return nil, errors.Wrapf(err, errors.TypeInvalidInput, ErrCodeDashboardInvalidData, "invalid dashboard data")
    }

    if widgetIndex < 0 || int(widgetIndex) >= len(data.Widgets) {
    if len(data.Widgets) < int(widgetIndex)+1 {
        return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeDashboardInvalidInput, "widget with index %v doesn't exist", widgetIndex)
    }

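// Note (not part of the diff): the rewritten guard also rejects negative
// indexes. Under the old check, widgetIndex = -1 gives int(widgetIndex)+1 == 0,
// so len(data.Widgets) < 0 is always false, the guard passes, and a later
// data.Widgets[widgetIndex] access would panic; the new form fails fast instead.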
@@ -1,75 +0,0 @@
|
||||
package promotetypes
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/constants"
|
||||
"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
type WrappedIndex struct {
|
||||
JSONDataType telemetrytypes.JSONDataType `json:"-"`
|
||||
ColumnType string `json:"column_type"`
|
||||
Type string `json:"type"`
|
||||
Granularity int `json:"granularity"`
|
||||
}
|
||||
|
||||
type PromotePath struct {
|
||||
Path string `json:"path"`
|
||||
Promote bool `json:"promote,omitempty"`
|
||||
|
||||
Indexes []WrappedIndex `json:"indexes,omitempty"`
|
||||
}
|
||||
|
||||
func (i *PromotePath) ValidateAndSetDefaults() error {
|
||||
if i.Path == "" {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path is required")
|
||||
}
|
||||
|
||||
if strings.Contains(i.Path, " ") {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
|
||||
}
|
||||
|
||||
if strings.Contains(i.Path, telemetrytypes.ArraySep) || strings.Contains(i.Path, telemetrytypes.ArrayAnyIndex) {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
|
||||
}
|
||||
|
||||
if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
|
||||
}
|
||||
|
||||
// remove the "body." prefix from the path
|
||||
i.Path = strings.TrimPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix)
|
||||
|
||||
isCardinal := keycheck.IsCardinal(i.Path)
|
||||
if isCardinal {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cardinal paths can not be promoted or indexed")
|
||||
}
|
||||
|
||||
for idx, index := range i.Indexes {
|
||||
if index.Type == "" {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index type is required")
|
||||
}
|
||||
if index.Granularity <= 0 {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index granularity must be greater than 0")
|
||||
}
|
||||
|
||||
jsonDataType, ok := telemetrytypes.MappingStringToJSONDataType[index.ColumnType]
|
||||
if !ok {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid column type: %s", index.ColumnType)
|
||||
}
|
||||
if !jsonDataType.IndexSupported {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index is not supported for column type: %s", index.ColumnType)
|
||||
}
|
||||
|
||||
i.Indexes[idx].JSONDataType = jsonDataType
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
@@ -18,13 +17,9 @@ var (
|
||||
FieldSelectorMatchTypeFuzzy = FieldSelectorMatchType{valuer.NewString("fuzzy")}
|
||||
)
|
||||
|
||||
const (
|
||||
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
|
||||
// e.g., "body.status" where "body." is the prefix
|
||||
BodyJSONStringSearchPrefix = "body."
|
||||
ArraySep = jsontypeexporter.ArraySeparator
|
||||
ArrayAnyIndex = "[*]."
|
||||
)
|
||||
// BodyJSONStringSearchPrefix is the prefix used for body JSON search queries
|
||||
// e.g., "body.status" where "body." is the prefix
|
||||
const BodyJSONStringSearchPrefix = `body.`
|
||||
|
||||
type TelemetryFieldKey struct {
|
||||
Name string `json:"name"`
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
package telemetrytypes
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/exporter/jsontypeexporter"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type JSONAccessBranchType struct {
|
||||
valuer.String
|
||||
}
|
||||
|
||||
var (
|
||||
BranchJSON = JSONAccessBranchType{valuer.NewString("json")}
|
||||
BranchDynamic = JSONAccessBranchType{valuer.NewString("dynamic")}
|
||||
)
|
||||
|
||||
type JSONAccessPlan = []*JSONAccessNode
|
||||
|
||||
type TerminalConfig struct {
|
||||
Key *TelemetryFieldKey
|
||||
ElemType JSONDataType
|
||||
ValueType JSONDataType
|
||||
}
|
||||
|
||||
// Node is now a tree structure representing the complete JSON path traversal
|
||||
// that precomputes all possible branches and their types
|
||||
type JSONAccessNode struct {
|
||||
// Node information
|
||||
Name string
|
||||
IsTerminal bool
|
||||
isRoot bool // marked true for only body_json and body_json_promoted
|
||||
|
||||
// Precomputed type information (single source of truth)
|
||||
AvailableTypes []JSONDataType
|
||||
|
||||
// Array type branches (Array(JSON) vs Array(Dynamic))
|
||||
Branches map[JSONAccessBranchType]*JSONAccessNode
|
||||
|
||||
// Terminal configuration
|
||||
TerminalConfig *TerminalConfig
|
||||
|
||||
// Parent reference for traversal
|
||||
Parent *JSONAccessNode
|
||||
|
||||
// JSON progression parameters (precomputed during planning)
|
||||
MaxDynamicTypes int
|
||||
MaxDynamicPaths int
|
||||
}
|
||||
|
||||
func NewRootJSONAccessNode(name string, maxDynamicTypes, maxDynamicPaths int) *JSONAccessNode {
|
||||
return &JSONAccessNode{
|
||||
Name: name,
|
||||
isRoot: true,
|
||||
MaxDynamicTypes: maxDynamicTypes,
|
||||
MaxDynamicPaths: maxDynamicPaths,
|
||||
}
|
||||
}
|
||||
|
||||
func (n *JSONAccessNode) Alias() string {
|
||||
if n.isRoot {
|
||||
return n.Name
|
||||
} else if n.Parent == nil {
|
||||
return fmt.Sprintf("`%s`", n.Name)
|
||||
}
|
||||
|
||||
parentAlias := strings.TrimLeft(n.Parent.Alias(), "`")
|
||||
parentAlias = strings.TrimRight(parentAlias, "`")
|
||||
|
||||
sep := jsontypeexporter.ArraySeparator
|
||||
if n.Parent.isRoot {
|
||||
sep = "."
|
||||
}
|
||||
return fmt.Sprintf("`%s%s%s`", parentAlias, sep, n.Name)
|
||||
}
|
||||
|
||||
func (n *JSONAccessNode) FieldPath() string {
|
||||
key := "`" + n.Name + "`"
|
||||
return n.Parent.Alias() + "." + key
|
||||
}
|
||||
@@ -3,7 +3,6 @@ package telemetrytypes
|
||||
import (
|
||||
"context"
|
||||
|
||||
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
)
|
||||
|
||||
@@ -31,13 +30,4 @@ type MetadataStore interface {
|
||||
|
||||
// FetchTemporalityMulti fetches the temporality for multiple metrics
|
||||
FetchTemporalityMulti(ctx context.Context, metricNames ...string) (map[string]metrictypes.Temporality, error)
|
||||
|
||||
// ListLogsJSONIndexes lists the JSON indexes for the logs table.
|
||||
ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error)
|
||||
|
||||
// ListPromotedPaths lists the promoted paths.
|
||||
ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error)
|
||||
|
||||
// PromotePaths promotes the paths.
|
||||
PromotePaths(ctx context.Context, paths ...string) error
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -12,23 +11,19 @@ import (
|
||||
// MockMetadataStore implements the MetadataStore interface for testing purposes
|
||||
type MockMetadataStore struct {
|
||||
// Maps to store test data
|
||||
KeysMap map[string][]*telemetrytypes.TelemetryFieldKey
|
||||
RelatedValuesMap map[string][]string
|
||||
AllValuesMap map[string]*telemetrytypes.TelemetryFieldValues
|
||||
TemporalityMap map[string]metrictypes.Temporality
|
||||
PromotedPathsMap map[string]struct{}
|
||||
LogsJSONIndexesMap map[string][]schemamigrator.Index
|
||||
KeysMap map[string][]*telemetrytypes.TelemetryFieldKey
|
||||
RelatedValuesMap map[string][]string
|
||||
AllValuesMap map[string]*telemetrytypes.TelemetryFieldValues
|
||||
TemporalityMap map[string]metrictypes.Temporality
|
||||
}
|
||||
|
||||
// NewMockMetadataStore creates a new instance of MockMetadataStore with initialized maps
|
||||
func NewMockMetadataStore() *MockMetadataStore {
|
||||
return &MockMetadataStore{
|
||||
KeysMap: make(map[string][]*telemetrytypes.TelemetryFieldKey),
|
||||
RelatedValuesMap: make(map[string][]string),
|
||||
AllValuesMap: make(map[string]*telemetrytypes.TelemetryFieldValues),
|
||||
TemporalityMap: make(map[string]metrictypes.Temporality),
|
||||
PromotedPathsMap: make(map[string]struct{}),
|
||||
LogsJSONIndexesMap: make(map[string][]schemamigrator.Index),
|
||||
KeysMap: make(map[string][]*telemetrytypes.TelemetryFieldKey),
|
||||
RelatedValuesMap: make(map[string][]string),
|
||||
AllValuesMap: make(map[string]*telemetrytypes.TelemetryFieldValues),
|
||||
TemporalityMap: make(map[string]metrictypes.Temporality),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -289,21 +284,3 @@ func (m *MockMetadataStore) FetchTemporalityMulti(ctx context.Context, metricNam
 func (m *MockMetadataStore) SetTemporality(metricName string, temporality metrictypes.Temporality) {
 	m.TemporalityMap[metricName] = temporality
 }
-
-// PromotePaths promotes the paths.
-func (m *MockMetadataStore) PromotePaths(ctx context.Context, paths ...string) error {
-	for _, path := range paths {
-		m.PromotedPathsMap[path] = struct{}{}
-	}
-	return nil
-}
-
-// ListPromotedPaths lists the promoted paths.
-func (m *MockMetadataStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
-	return m.PromotedPathsMap, nil
-}
-
-// ListLogsJSONIndexes lists the JSON indexes for the logs table.
-func (m *MockMetadataStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error) {
-	return m.LogsJSONIndexesMap, nil
-}

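Seeding and reading the mock stays symmetric after this removal: SetTemporality (kept above) writes into TemporalityMap, and FetchTemporalityMulti is expected to read it back. A hedged test sketch; the package name and the metrictypes.Delta constant are assumed for illustration, and the mock is assumed to serve FetchTemporalityMulti from TemporalityMap:

package telemetrytypestest // assumed name; the mock's real package is not shown in this diff

import (
	"context"
	"testing"

	"github.com/SigNoz/signoz/pkg/types/metrictypes"
)

func TestFetchTemporalityMulti(t *testing.T) {
	store := NewMockMetadataStore()
	// SetTemporality writes straight into TemporalityMap.
	store.SetTemporality("container.cpu.time", metrictypes.Delta) // Delta is an assumed constant name

	got, err := store.FetchTemporalityMulti(context.Background(), "container.cpu.time")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if got["container.cpu.time"] != metrictypes.Delta {
		t.Fatalf("want delta temporality, got %v", got["container.cpu.time"])
	}
}
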
@@ -101,145 +101,3 @@ def test_create_and_get_public_dashboard(
     )
     tuple_row = tuple_result.fetchone()
     assert tuple_row is not None
-
-
-def test_public_dashboard_widget_query_range(
-    signoz: SigNoz,
-    create_user_admin: Operation,  # pylint: disable=unused-argument
-    get_token: Callable[[str, str], str],
-):
-    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
-
-    dashboard_req = {
-        "title": "Test Widget Query Range Dashboard",
-        "description": "For testing widget query range",
-        "version": "v5",
-        "widgets": [
-            {
-                "id": "6990c9d8-57ad-492f-8c63-039081e30d02",
-                "panelTypes": "graph",
-                "query": {
-                    "builder": {
-                        "queryData": [
-                            {
-                                "aggregations": [
-                                    {
-                                        "metricName": "container.cpu.time",
-                                        "reduceTo": "avg",
-                                        "spaceAggregation": "sum",
-                                        "temporality": "",
-                                        "timeAggregation": "rate",
-                                    }
-                                ],
-                                "dataSource": "metrics",
-                                "disabled": False,
-                                "expression": "A",
-                                "filter": {
-                                    "expression": ""
-                                },
-                                "functions": [],
-                                "groupBy": [],
-                                "having": {
-                                    "expression": ""
-                                },
-                                "legend": "",
-                                "limit": 10,
-                                "orderBy": [],
-                                "queryName": "A",
-                                "source": "",
-                                "stepInterval": 10
-                            }
-                        ],
-                        "queryFormulas": [],
-                        "queryTraceOperator": []
-                    },
-                    "clickhouse_sql": [
-                        {
-                            "disabled": False,
-                            "legend": "",
-                            "name": "A",
-                            "query": ""
-                        }
-                    ],
-                    "id": "80f12506-ef72-4013-8282-2713c8114c9e",
-                    "promql": [
-                        {
-                            "disabled": False,
-                            "legend": "",
-                            "name": "A",
-                            "query": ""
-                        }
-                    ],
-                    "queryType": "builder"
-                },
-            }
-        ],
-    }
-    create_response = requests.post(
-        signoz.self.host_configs["8080"].get("/api/v1/dashboards"),
-        json=dashboard_req,
-        headers={"Authorization": f"Bearer {admin_token}"},
-        timeout=2,
-    )
-    assert create_response.status_code == HTTPStatus.CREATED
-    data = create_response.json()["data"]
-    dashboard_id = data["id"]
-
-
-    # create public dashboard
-    response = requests.post(
-        signoz.self.host_configs["8080"].get(
-            f"/api/v1/dashboards/{dashboard_id}/public"
-        ),
-        json={
-            "timeRangeEnabled": False,
-            "defaultTimeRange": "10s",
-        },
-        headers={"Authorization": f"Bearer {admin_token}"},
-        timeout=2,
-    )
-
-    assert response.status_code == HTTPStatus.CREATED
-    assert "id" in response.json()["data"]
-
-    response = requests.get(
-        signoz.self.host_configs["8080"].get(
-            f"/api/v1/dashboards/{dashboard_id}/public"
-        ),
-        headers={"Authorization": f"Bearer {admin_token}"},
-        timeout=2,
-    )
-
-    assert response.status_code == HTTPStatus.OK
-    assert response.json()["status"] == "success"
-    public_path = response.json()["data"]["publicPath"]
-    public_dashboard_id = public_path.split("/public/dashboard/")[-1]
-
-
-
-    resp = requests.get(
-        signoz.self.host_configs["8080"].get(
-            f"/api/v1/public/dashboards/{public_dashboard_id}/widgets/0/query_range"
-        ),
-        timeout=2,
-    )
-    print(resp.json())
-    assert resp.status_code == HTTPStatus.OK
-    assert resp.json().get("status") == "success"
-
-
-    resp = requests.get(
-        signoz.self.host_configs["8080"].get(
-            f"/api/v1/public/dashboards/{public_dashboard_id}/widgets/-1/query_range"
-        ),
-        timeout=2,
-    )
-    assert resp.status_code == HTTPStatus.BAD_REQUEST
-
-    resp = requests.get(
-        signoz.self.host_configs["8080"].get(
-            f"/api/v1/public/dashboards/{public_dashboard_id}/widgets/1/query_range"
-        ),
-        timeout=2,
-    )
-    assert resp.status_code == HTTPStatus.BAD_REQUEST

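The removed test's core assertion was that a public dashboard's widget endpoint answers without an Authorization header, and that out-of-range widget indexes return 400. A minimal Go sketch of the happy-path unauthenticated call; the host, port, and dashboard ID are placeholders, not values from this change:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	// publicDashboardID is whatever the publicPath returned by
	// POST /api/v1/dashboards/{id}/public ends with; placeholder here.
	publicDashboardID := "00000000-0000-0000-0000-000000000000"
	url := fmt.Sprintf(
		"http://localhost:8080/api/v1/public/dashboards/%s/widgets/0/query_range",
		publicDashboardID,
	)

	client := &http.Client{Timeout: 2 * time.Second}
	resp, err := client.Get(url) // note: no Authorization header
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// The removed test expected 200 for widget index 0 and 400 for -1 and 1.
	fmt.Println(resp.StatusCode, string(body))
}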