Compare commits

...

20 Commits

Author SHA1 Message Date
SagarRajput-7
5b65be0567 feat: downgraded version to v3.4 as v4 doesn't recommend sass 2025-03-29 17:44:27 +05:30
SagarRajput-7
d5aae260df feat: tailwindcss setup in SigNoz, running with signoz tokens and existing scss 2025-03-29 16:19:50 +05:30
Nageshbansal
e637487984 Fix the hyperlink for otel-demo-docs in contributing guide (#7462)
Co-authored-by: CheetoDa <31571545+Calm-Rock@users.noreply.github.com>
Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
2025-03-28 14:58:11 +00:00
Nityananda Gohain
8fc43a00f8 fix: collector connection to opamp without orgID (#7474) 2025-03-28 20:19:37 +05:30
Srikanth Chekuri
031d62ca44 Revert "fix: added default value to time,space aggregation to fix query_range…" (#7464)
This reverts commit 8c4c357351.
2025-03-28 12:43:31 +05:30
SagarRajput-7
8c4c357351 fix: added default value to time,space aggregation to fix query_range getting 500 for metric (#7414)
* fix: added default value to time,space aggregation to fix query_range getting 500 for metric

* fix: added all available operators as default when no attribute type is present

* fix: changed operator, time and space values to avg when empty attribute type
2025-03-28 06:36:09 +00:00
SagarRajput-7
d8d8191a32 feat: allow width customisation and persist it across users and view (#7273)
* feat: removed ellipsis prop

* feat: prevent unnecessary save calls

* feat: fix dashboard detail resize icon

* feat: adjusted resizable header - set minConstraint

* feat: fixed dashboard vanishing issue

* feat: removed dependency causing maximum callstack warning

* feat: corrected the list edit view render issue and resize handler fix

* feat: style fix

* feat: removed comments

* fix: updated test cases

* feat: updated the test cases

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-03-28 06:06:40 +00:00
SagarRajput-7
a876c0a744 chore: added doc title to messaging queues (#7460) 2025-03-28 11:25:41 +05:30
Vishal Sharma
c36f913a90 fix: telemetry version function call (#7453) 2025-03-27 20:07:09 +05:30
SagarRajput-7
ed597f00c0 fix: fixed copy to clipboard popup getting flooded on every click (#7448) 2025-03-27 05:54:34 +00:00
Vibhu Pandey
4957d3ae93 feat(sqlstore): move postgres to enterprise codebase (#7445) 2025-03-27 11:16:43 +05:30
Srikanth Chekuri
8835e3493d chore: skip logfield/spanfield type in the suggestions (#7433) 2025-03-27 10:36:27 +05:30
Vibhu Pandey
027a1631ef feat(httpclient): add an extensible http client (#7446) 2025-03-26 19:33:52 +00:00
Shaheer Kochai
d7a6607a25 fix: use search v2 component for traces data source & minor improvements to search v2 component (#7404) 2025-03-26 18:00:54 +00:00
Sahil Khan
7a58bc58c9 fix: stage and run query button same url navigation enabled (#7415) 2025-03-26 23:25:01 +05:30
Srikanth Chekuri
88be23c3e3 chore: pass through substitutions for CH query (#7389) 2025-03-26 12:58:55 +00:00
Srikanth Chekuri
8f095dfbc9 fix: handle expected value less than zero (#7410) 2025-03-26 12:50:46 +00:00
aniketio-ctrl
72207691a3 fix(metrics-explorer): added time filter in inner sub queries of list and samples (#7436) 2025-03-26 09:57:21 +00:00
Raj Kamal Singh
8998ca652e chore: aws integration: bump recommended agent version (#7434) 2025-03-26 09:14:05 +00:00
Piyush Singariya
f4ae5f19ff feat: AWS Managed Streaming Kafka service integration (#7350)
* feat: msk integration

* feat: logs not available in msk

* fix: minor suggestions made by ellipsis

* fix: changes based on review, added Variables, Units, Legends, SVG

* fix: update in global variables, and query operators

* fix: update in rx tx panel, region variable query update

---------

Co-authored-by: Raj Kamal Singh <1133322+raj-k-singh@users.noreply.github.com>
2025-03-26 12:57:39 +05:30
68 changed files with 4207 additions and 135 deletions

.gitignore vendored
View File

@@ -54,6 +54,7 @@ ee/query-service/tests/test-deploy/data/
bin/
.local/
*/query-service/queries.active
ee/query-service/db
# e2e

View File

@@ -77,4 +77,4 @@ Need assistance? Join our Slack community:
## Where do I go from here?
- Set up your [development environment](docs/contributing/development.md)
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo/otel-demo-docs.md)
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)

View File

@@ -313,6 +313,9 @@ func (p *BaseSeasonalProvider) getScore(
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
) float64 {
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
if expectedValue < 0 {
expectedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
}
return (value - expectedValue) / p.getStdDev(weekSeries)
}
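For context on the hunk above (PR #7410): the score stays a z-score-style quantity, but a negative seasonal expectation is now treated as unreliable and replaced by a moving average of the previous series before dividing by the weekly standard deviation. A minimal TypeScript paraphrase of the Go logic, purely as an illustration (parameter names mirror the diff; this is not the SigNoz implementation):

// Illustration only: the original is Go (BaseSeasonalProvider.getScore).
function getScore(
	value: number,
	seasonalExpected: number, // p.getExpectedValue(...)
	movingAvgFallback: number, // p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
	weekStdDev: number, // p.getStdDev(weekSeries)
): number {
	// A negative seasonal expectation falls back to the moving average.
	const expected = seasonalExpected < 0 ? movingAvgFallback : seasonalExpected;
	return (value - expected) / weekStdDev;
}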

View File

@@ -7,12 +7,14 @@ import (
"time"
"github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/config/envprovider"
"github.com/SigNoz/signoz/pkg/config/fileprovider"
"github.com/SigNoz/signoz/pkg/query-service/auth"
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
@@ -94,12 +96,17 @@ func main() {
version.Info.PrettyPrint(config.Version)
sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
}
signoz, err := signoz.New(
context.Background(),
config,
signoz.NewCacheProviderFactories(),
signoz.NewWebProviderFactories(),
signoz.NewSQLStoreProviderFactories(),
sqlStoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
)
if err != nil {

View File

@@ -17,6 +17,7 @@ module.exports = {
'plugin:import/errors',
'plugin:import/warnings',
'plugin:react/jsx-runtime',
// 'plugin:tailwindcss/recommended',
],
parser: '@typescript-eslint/parser',
parserOptions: {

View File

@@ -224,6 +224,7 @@
"npm-run-all": "latest",
"portfinder-sync": "^0.0.2",
"postcss": "8.4.38",
"postcss-loader": "8.1.1",
"prettier": "2.2.1",
"prop-types": "15.8.1",
"raw-loader": "4.0.2",
@@ -233,6 +234,7 @@
"redux-mock-store": "1.5.4",
"sass": "1.66.1",
"sass-loader": "13.3.2",
"tailwindcss": "3.4.17",
"ts-jest": "^27.1.5",
"ts-node": "^10.2.1",
"typescript-plugin-css-modules": "5.0.1",

View File

@@ -0,0 +1,6 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
};
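For orientation: this new postcss.config.js is what the newly added postcss-loader (see the webpack config hunks below) picks up, and webpack applies loaders bottom-up, so Sass output flows through Tailwind and autoprefixer before reaching css-loader and style-loader. A minimal sketch of the resulting rule shape, with placeholder loader names standing in for the project's existing styleLoader/cssLoader/sassLoader helpers:

// Illustrative only — not the full SigNoz webpack.config.js.
const styleLoader = 'style-loader';
const cssLoader = 'css-loader';
const sassLoader = 'sass-loader';

const scssRule = {
	test: /\.scss$/,
	// Executed bottom-up: sass -> postcss (tailwind + autoprefixer) -> css -> style.
	use: [styleLoader, cssLoader, 'postcss-loader', sassLoader],
};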

View File

@@ -60,7 +60,10 @@
"INTEGRATIONS": "SigNoz | Integrations",
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
"MESSAGING_QUEUES": "SigNoz | Messaging Queues",
"MESSAGING_QUEUES_OVERVIEW": "SigNoz | Messaging Queues",
"MESSAGING_QUEUES_KAFKA": "SigNoz | Messaging Queues | Kafka",
"MESSAGING_QUEUES_KAFKA_DETAIL": "SigNoz | Messaging Queues | Kafka",
"MESSAGING_QUEUES_CELERY_TASK": "SigNoz | Messaging Queues | Celery",
"INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
"METRICS_EXPLORER": "SigNoz | Metrics Explorer",

View File

@@ -521,7 +521,7 @@ export default function CeleryOverviewTable({
locale={{
emptyText: isLoading ? null : <Typography.Text>No data</Typography.Text>,
}}
scroll={{ x: true }}
scroll={{ x: 'max-content' }}
showSorterTooltip
onDragColumn={handleDragColumn}
onRow={(record): { onClick: () => void; className: string } => ({

View File

@@ -18,6 +18,7 @@ function CopyClipboardHOC({
notifications.success({
message: notificationMessage,
key: notificationMessage,
});
}
}, [value, notifications, entityKey]);
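The fix above leans on antd's notification de-duplication: calling notifications.success with the same key updates the existing toast in place instead of stacking a new one, so rapid copy clicks no longer flood the screen. A minimal standalone sketch of that behaviour (hypothetical helper, using antd's static notification API rather than the project's useNotifications hook):

import { notification } from 'antd';

// Hypothetical helper: reusing the message as the key makes antd update the
// existing toast rather than open another one on every click.
function notifyCopied(notificationMessage: string): void {
	notification.success({
		message: notificationMessage,
		key: notificationMessage,
	});
}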

View File

@@ -1,3 +1,5 @@
import './ResizeTable.styles.scss';
import { SyntheticEvent, useMemo } from 'react';
import { Resizable, ResizeCallbackData } from 'react-resizable';
@@ -10,8 +12,8 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
const handle = useMemo(
() => (
<SpanStyle
className="react-resizable-handle"
onClick={(e): void => e.stopPropagation()}
className="resize-handle"
/>
),
[],
@@ -19,7 +21,7 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
if (!width) {
// eslint-disable-next-line react/jsx-props-no-spreading
return <th {...restProps} />;
return <th {...restProps} className="resizable-header" />;
}
return (
@@ -29,9 +31,10 @@ function ResizableHeader(props: ResizableHeaderProps): JSX.Element {
handle={handle}
onResize={onResize}
draggableOpts={enableUserSelectHack}
minConstraints={[150, 0]}
>
{/* eslint-disable-next-line react/jsx-props-no-spreading */}
<th {...restProps} />
<th {...restProps} className="resizable-header" />
</Resizable>
);
}

View File

@@ -0,0 +1,53 @@
.resizable-header {
user-select: none;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
position: relative;
.ant-table-column-title {
white-space: normal;
overflow: hidden;
text-overflow: ellipsis;
}
}
.resize-main-table {
.ant-table-body {
.ant-table-tbody {
.ant-table-row {
.ant-table-cell {
.ant-typography {
white-space: unset;
}
}
}
}
}
}
.logs-table,
.traces-table {
.resize-table {
.resize-handle {
position: absolute;
top: 0;
bottom: 0;
inset-inline-end: -5px;
width: 10px;
cursor: col-resize;
&::after {
content: '';
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 1px;
height: 1.6em;
background-color: var(--bg-slate-200);
transition: background-color 0.2s;
}
}
}
}

View File

@@ -2,35 +2,63 @@
import { Table } from 'antd';
import { ColumnsType } from 'antd/lib/table';
import cx from 'classnames';
import { dragColumnParams } from 'hooks/useDragColumns/configs';
import { set } from 'lodash-es';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { debounce, set } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
SyntheticEvent,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
import ReactDragListView from 'react-drag-listview';
import { ResizeCallbackData } from 'react-resizable';
import { Widgets } from 'types/api/dashboard/getAll';
import ResizableHeader from './ResizableHeader';
import { DragSpanStyle } from './styles';
import { ResizeTableProps } from './types';
// eslint-disable-next-line sonarjs/cognitive-complexity
function ResizeTable({
columns,
onDragColumn,
pagination,
widgetId,
shouldPersistColumnWidths = false,
...restProps
}: ResizeTableProps): JSX.Element {
const [columnsData, setColumns] = useState<ColumnsType>([]);
const { setColumnWidths, selectedDashboard } = useDashboard();
const columnWidths = shouldPersistColumnWidths
? (selectedDashboard?.data?.widgets?.find(
(widget) => widget.id === widgetId,
) as Widgets)?.columnWidths
: undefined;
const updateAllColumnWidths = useRef(
debounce((widthsConfig: Record<string, number>) => {
if (!widgetId || !shouldPersistColumnWidths) return;
setColumnWidths?.((prev) => ({
...prev,
[widgetId]: widthsConfig,
}));
}, 1000),
).current;
const handleResize = useCallback(
(index: number) => (
_e: SyntheticEvent<Element>,
e: SyntheticEvent<Element>,
{ size }: ResizeCallbackData,
): void => {
e.preventDefault();
e.stopPropagation();
const newColumns = [...columnsData];
newColumns[index] = {
...newColumns[index],
@@ -65,6 +93,7 @@ function ResizeTable({
...restProps,
components: { header: { cell: ResizableHeader } },
columns: mergedColumns,
className: cx('resize-main-table', restProps.className),
};
set(
@@ -78,9 +107,39 @@ function ResizeTable({
useEffect(() => {
if (columns) {
setColumns(columns);
// Apply stored column widths from widget configuration
const columnsWithStoredWidths = columns.map((col) => {
const dataIndex = (col as RowData).dataIndex as string;
if (dataIndex && columnWidths && columnWidths[dataIndex]) {
return {
...col,
width: columnWidths[dataIndex], // Apply stored width
};
}
return col;
});
setColumns(columnsWithStoredWidths);
}
}, [columns]);
}, [columns, columnWidths]);
useEffect(() => {
if (!shouldPersistColumnWidths) return;
// Collect all column widths in a single object
const newColumnWidths: Record<string, number> = {};
mergedColumns.forEach((col) => {
if (col.width && (col as RowData).dataIndex) {
const dataIndex = (col as RowData).dataIndex as string;
newColumnWidths[dataIndex] = col.width as number;
}
});
// Only update if there are actual widths to set
if (Object.keys(newColumnWidths).length > 0) {
updateAllColumnWidths(newColumnWidths);
}
}, [mergedColumns, updateAllColumnWidths, shouldPersistColumnWidths]);
return onDragColumn ? (
<ReactDragListView.DragColumn {...dragColumnParams} onDragEnd={onDragColumn}>
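The width-persistence pattern in the hunk above: column resizes fire continuously, so the per-widget widths are pushed into the dashboard context through a debounced callback held in a ref, and only the settled values reach setColumnWidths (and, later, the dashboard save). A stripped-down sketch of that pattern, assuming the same lodash-es debounce and a setter with the shape used in the diff (illustrative, not the full component):

import { debounce } from 'lodash-es';
import { useRef } from 'react';
import type { Dispatch, SetStateAction } from 'react';

type WidgetColumnWidths = Record<string, Record<string, number>>;

// Illustrative hook: coalesce rapid width updates into one state write per second.
function useDebouncedColumnWidths(
	widgetId: string,
	setColumnWidths: Dispatch<SetStateAction<WidgetColumnWidths>>,
): (widths: Record<string, number>) => void {
	return useRef(
		debounce((widths: Record<string, number>) => {
			setColumnWidths((prev) => ({ ...prev, [widgetId]: widths }));
		}, 1000),
	).current;
}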

View File

@@ -8,6 +8,8 @@ export const SpanStyle = styled.span`
width: 0.625rem;
height: 100%;
cursor: col-resize;
margin-left: 4px;
margin-right: 4px;
`;
export const DragSpanStyle = styled.span`

View File

@@ -9,6 +9,8 @@ import { TableDataSource } from './contants';
export interface ResizeTableProps extends TableProps<any> {
onDragColumn?: (fromIndex: number, toIndex: number) => void;
widgetId?: string;
shouldPersistColumnWidths?: boolean;
}
export interface DynamicColumnTableProps extends TableProps<any> {
tablesource: typeof TableDataSource[keyof typeof TableDataSource];

View File

@@ -1,3 +1,4 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -20,6 +21,13 @@ jest.mock('hooks/useNotifications', () => ({
})),
}));
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
}),
}));
describe('Alert Channels Settings List page', () => {
beforeEach(() => {
render(<AlertChannels />);

View File

@@ -1,3 +1,4 @@
import ROUTES from 'constants/routes';
import AlertChannels from 'container/AllAlertChannels';
import { allAlertChannels } from 'mocks-server/__mockdata__/alerts';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -25,6 +26,13 @@ jest.mock('hooks/useComponentPermission', () => ({
default: jest.fn().mockImplementation(() => [false]),
}));
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.ALL_CHANNELS}`,
}),
}));
describe('Alert Channels Settings List page (Normal User)', () => {
beforeEach(() => {
render(<AlertChannels />);

View File

@@ -44,7 +44,10 @@ import { EditMenuAction, ViewMenuAction } from './config';
import DashboardEmptyState from './DashboardEmptyState/DashboardEmptyState';
import GridCard from './GridCard';
import { Card, CardContainer, ReactGridLayout } from './styles';
import { removeUndefinedValuesFromLayout } from './utils';
import {
hasColumnWidthsChanged,
removeUndefinedValuesFromLayout,
} from './utils';
import { MenuItemKeys } from './WidgetHeader/contants';
import { WidgetRowHeader } from './WidgetRow';
@@ -68,6 +71,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
setDashboardQueryRangeCalled,
setSelectedRowWidgetId,
isDashboardFetching,
columnWidths,
} = useDashboard();
const { data } = selectedDashboard || {};
const { pathname } = useLocation();
@@ -162,6 +166,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
logEventCalledRef.current = true;
}
}, [data]);
const onSaveHandler = (): void => {
if (!selectedDashboard) return;
@@ -171,6 +176,15 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
...selectedDashboard.data,
panelMap: { ...currentPanelMap },
layout: dashboardLayout.filter((e) => e.i !== PANEL_TYPES.EMPTY_WIDGET),
widgets: selectedDashboard?.data?.widgets?.map((widget) => {
if (columnWidths?.[widget.id]) {
return {
...widget,
columnWidths: columnWidths[widget.id],
};
}
return widget;
}),
},
uuid: selectedDashboard.uuid,
};
@@ -227,20 +241,31 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
useEffect(() => {
if (
isDashboardLocked ||
!saveLayoutPermission ||
updateDashboardMutation.isLoading ||
isDashboardFetching
) {
return;
}
const shouldSaveLayout =
dashboardLayout &&
Array.isArray(dashboardLayout) &&
dashboardLayout.length > 0 &&
!isEqual(layouts, dashboardLayout) &&
!isDashboardLocked &&
saveLayoutPermission &&
!updateDashboardMutation.isLoading &&
!isDashboardFetching
) {
!isEqual(layouts, dashboardLayout);
const shouldSaveColumnWidths =
dashboardLayout &&
Array.isArray(dashboardLayout) &&
dashboardLayout.length > 0 &&
hasColumnWidthsChanged(columnWidths, selectedDashboard);
if (shouldSaveLayout || shouldSaveColumnWidths) {
onSaveHandler();
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dashboardLayout]);
}, [dashboardLayout, columnWidths]);
const onSettingsModalSubmit = (): void => {
const newTitle = form.getFieldValue('title');

View File

@@ -1,5 +1,7 @@
import { FORMULA_REGEXP } from 'constants/regExp';
import { isEmpty, isEqual } from 'lodash-es';
import { Layout } from 'react-grid-layout';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] =>
layout.map((obj) =>
@@ -25,3 +27,27 @@ export function extractQueryNamesFromExpression(expression: string): string[] {
// Extract matches and deduplicate
return [...new Set(expression.match(queryNameRegex) || [])];
}
export const hasColumnWidthsChanged = (
columnWidths: Record<string, Record<string, number>>,
selectedDashboard?: Dashboard,
): boolean => {
// If no column widths stored, no changes
if (isEmpty(columnWidths) || !selectedDashboard) return false;
// Check each widget's column widths
return Object.keys(columnWidths).some((widgetId) => {
const dashboardWidget = selectedDashboard?.data?.widgets?.find(
(widget) => widget.id === widgetId,
) as Widgets;
const newWidths = columnWidths[widgetId];
const existingWidths = dashboardWidget?.columnWidths;
// If both are empty/undefined, no change
if (isEmpty(newWidths) || isEmpty(existingWidths)) return false;
// Compare stored column widths with dashboard widget's column widths
return !isEqual(newWidths, existingWidths);
});
};

View File

@@ -43,6 +43,7 @@ function GridTableComponent({
sticky,
openTracesButton,
onOpenTraceBtnClick,
widgetId,
...props
}: GridTableComponentProps): JSX.Element {
const { t } = useTranslation(['valueGraph']);
@@ -229,6 +230,7 @@ function GridTableComponent({
columns={openTracesButton ? columnDataWithOpenTracesButton : newColumnData}
dataSource={dataSource}
sticky={sticky}
widgetId={widgetId}
onRow={
openTracesButton
? (record): React.HTMLAttributes<HTMLElement> => ({

View File

@@ -17,6 +17,7 @@ export type GridTableComponentProps = {
searchTerm?: string;
openTracesButton?: boolean;
onOpenTraceBtnClick?: (record: RowData) => void;
widgetId?: string;
} & Pick<LogsExplorerTableProps, 'data'> &
Omit<TableProps<RowData>, 'columns' | 'dataSource'>;

View File

@@ -1,9 +1,9 @@
import './LogsPanelComponent.styles.scss';
import { Table } from 'antd';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { ResizeTable } from 'components/ResizeTable';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { PANEL_TYPES } from 'constants/queryBuilder';
import Controls from 'container/Controls';
@@ -79,9 +79,14 @@ function LogsPanelComponent({
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns = getLogPanelColumnsList(
widget.selectedLogFields,
formatTimezoneAdjustedTimestamp,
const columns = useMemo(
() =>
getLogPanelColumnsList(
widget.selectedLogFields,
formatTimezoneAdjustedTimestamp,
),
// eslint-disable-next-line react-hooks/exhaustive-deps
[widget.selectedLogFields],
);
const dataLength =
@@ -216,16 +221,18 @@ function LogsPanelComponent({
<div className="logs-table">
<div className="resize-table">
<OverlayScrollbar>
<Table
<ResizeTable
pagination={false}
tableLayout="fixed"
scroll={{ x: `calc(50vw - 10px)` }}
scroll={{ x: `max-content` }}
sticky
loading={queryResponse.isFetching}
style={tableStyles}
dataSource={flattenLogData}
columns={columns}
onRow={handleRow}
widgetId={widget.id}
shouldPersistColumnWidths
/>
</OverlayScrollbar>
</div>

View File

@@ -74,6 +74,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
setToScrollWidgetId,
selectedRowWidgetId,
setSelectedRowWidgetId,
columnWidths,
} = useDashboard();
const { t } = useTranslation(['dashboard']);
@@ -238,8 +239,10 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
selectedLogFields,
selectedTracesFields,
isLogScale,
columnWidths: columnWidths?.[selectedWidget?.id],
};
});
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
columnUnits,
currentQuery,
@@ -260,6 +263,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element {
combineHistogram,
stackedBarChart,
isLogScale,
columnWidths,
]);
const closeModal = (): void => {

View File

@@ -26,6 +26,7 @@ function TablePanelWrapper({
searchTerm={searchTerm}
openTracesButton={openTracesButton}
onOpenTraceBtnClick={onOpenTraceBtnClick}
widgetId={widget.id}
// eslint-disable-next-line react/jsx-props-no-spreading
{...GRID_TABLE_CONFIG}
/>

View File

@@ -9,6 +9,8 @@ exports[`Table panel wrappper tests table should render fine with the query resp
width: 0.625rem;
height: 100%;
cursor: col-resize;
margin-left: 4px;
margin-right: 4px;
}
.c0 {
@@ -54,7 +56,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
class="query-table"
>
<div
class="ant-table-wrapper css-dev-only-do-not-override-2i2tap"
class="ant-table-wrapper resize-main-table css-dev-only-do-not-override-2i2tap"
>
<div
class="ant-spin-nested-loading css-dev-only-do-not-override-2i2tap"
@@ -82,7 +84,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
<tr>
<th
aria-label="service_name"
class="ant-table-cell ant-table-column-has-sorters react-resizable"
class="resizable-header react-resizable"
scope="col"
tabindex="0"
>
@@ -143,12 +145,12 @@ exports[`Table panel wrappper tests table should render fine with the query resp
</span>
</div>
<span
class="c1 react-resizable-handle"
class="c1 resize-handle"
/>
</th>
<th
aria-label="latency-per-service"
class="ant-table-cell ant-table-column-has-sorters react-resizable"
class="resizable-header react-resizable"
scope="col"
tabindex="0"
>
@@ -209,7 +211,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
</span>
</div>
<span
class="c1 react-resizable-handle"
class="c1 resize-handle"
/>
</th>
</tr>
@@ -221,7 +223,7 @@ exports[`Table panel wrappper tests table should render fine with the query resp
style="overflow-x: auto; overflow-y: hidden;"
>
<table
style="width: auto; min-width: 100%; table-layout: fixed;"
style="min-width: 100%; table-layout: fixed;"
>
<colgroup>
<col

View File

@@ -453,7 +453,7 @@ export const Query = memo(function Query({
</Col>
)}
<Col flex="1" className="qb-search-container">
{query.dataSource === DataSource.LOGS ? (
{[DataSource.LOGS, DataSource.TRACES].includes(query.dataSource) ? (
<QueryBuilderSearchV2
query={query}
onChange={handleChangeTagFilters}

View File

@@ -2,6 +2,7 @@
import './QueryBuilderSearchV2.styles.scss';
import { Typography } from 'antd';
import cx from 'classnames';
import {
ArrowDown,
ArrowUp,
@@ -25,6 +26,7 @@ interface ICustomDropdownProps {
exampleQueries: TagFilter[];
onChange: (value: TagFilter) => void;
currentFilterItem?: ITag;
isLogsDataSource: boolean;
}
export default function QueryBuilderSearchDropdown(
@@ -38,11 +40,14 @@ export default function QueryBuilderSearchDropdown(
exampleQueries,
options,
onChange,
isLogsDataSource,
} = props;
const userOs = getUserOperatingSystem();
return (
<>
<div className="content">
<div
className={cx('content', { 'non-logs-data-source': !isLogsDataSource })}
>
{!currentFilterItem?.key ? (
<div className="suggested-filters">Suggested Filters</div>
) : !currentFilterItem?.op ? (

View File

@@ -11,6 +11,11 @@
.rc-virtual-list-holder {
height: 115px;
}
&.non-logs-data-source {
.rc-virtual-list-holder {
height: 256px;
}
}
}
}

View File

@@ -689,12 +689,29 @@ function QueryBuilderSearchV2(
})),
);
} else {
setDropdownOptions(
data?.payload?.attributeKeys?.map((key) => ({
setDropdownOptions([
// Add user typed option if it doesn't exist in the payload
...(!isEmpty(tagKey) &&
!data?.payload?.attributeKeys?.some((val) => isEqual(val.key, tagKey))
? [
{
label: tagKey,
value: {
key: tagKey,
dataType: DataTypes.EMPTY,
type: '',
isColumn: false,
isJSON: false,
},
},
]
: []),
// Map existing attribute keys from payload
...(data?.payload?.attributeKeys?.map((key) => ({
label: key.key,
value: key,
})) || [],
);
})) || []),
]);
}
}
if (currentState === DropdownState.OPERATOR) {
@@ -964,6 +981,7 @@ function QueryBuilderSearchV2(
exampleQueries={suggestionsData?.payload?.example_queries || []}
tags={tags}
currentFilterItem={currentFilterItem}
isLogsDataSource={isLogsDataSource}
/>
)}
>

View File

@@ -20,4 +20,5 @@ export type QueryTableProps = Omit<
dataSource?: RowData[];
sticky?: TableProps<RowData>['sticky'];
searchTerm?: string;
widgetId?: string;
};

View File

@@ -24,6 +24,7 @@ export function QueryTable({
dataSource,
sticky,
searchTerm,
widgetId,
...props
}: QueryTableProps): JSX.Element {
const { isDownloadEnabled = false, fileName = '' } = downloadOption || {};
@@ -95,8 +96,10 @@ export function QueryTable({
columns={tableColumns}
tableLayout="fixed"
dataSource={filterTable === null ? newDataSource : filterTable}
scroll={{ x: true }}
scroll={{ x: 'max-content' }}
pagination={paginationConfig}
widgetId={widgetId}
shouldPersistColumnWidths
sticky={sticky}
// eslint-disable-next-line react/jsx-props-no-spreading
{...props}

View File

@@ -1,7 +1,7 @@
import './TracesTableComponent.styles.scss';
import { Table } from 'antd';
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
import { ResizeTable } from 'components/ResizeTable';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import Controls from 'container/Controls';
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
@@ -54,9 +54,14 @@ function TracesTableComponent({
const { formatTimezoneAdjustedTimestamp } = useTimezone();
const columns = getListColumns(
widget.selectedTracesFields || [],
formatTimezoneAdjustedTimestamp,
const columns = useMemo(
() =>
getListColumns(
widget.selectedTracesFields || [],
formatTimezoneAdjustedTimestamp,
),
// eslint-disable-next-line react-hooks/exhaustive-deps
[widget.selectedTracesFields],
);
const dataLength =
@@ -116,16 +121,18 @@ function TracesTableComponent({
<div className="traces-table">
<div className="resize-table">
<OverlayScrollbar>
<Table
<ResizeTable
pagination={false}
tableLayout="fixed"
scroll={{ x: true }}
scroll={{ x: 'max-content' }}
loading={queryResponse.isFetching}
style={tableStyles}
dataSource={transformedQueryTableData}
columns={columns}
onRow={handleRow}
sticky
widgetId={widget.id}
shouldPersistColumnWidths
/>
</OverlayScrollbar>
</div>

View File

@@ -170,11 +170,7 @@ export const useOptions = (
(option, index, self) =>
index ===
self.findIndex(
(o) =>
// to remove duplicate & empty options from list
o.label === option.label &&
o.value === option.value &&
o.dataType?.toLowerCase() === option.dataType?.toLowerCase(), // handle case sensitivity
(o) => o.label === option.label && o.value === option.value, // to remove duplicate & empty options from list
) && option.value !== '',
) || []
).map((option) => {

View File

@@ -40,7 +40,11 @@ import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as generateUUID } from 'uuid';
import { DashboardSortOrder, IDashboardContext } from './types';
import {
DashboardSortOrder,
IDashboardContext,
WidgetColumnWidths,
} from './types';
import { sortLayout } from './util';
const DashboardContext = createContext<IDashboardContext>({
@@ -74,6 +78,8 @@ const DashboardContext = createContext<IDashboardContext>({
selectedRowWidgetId: '',
setSelectedRowWidgetId: () => {},
isDashboardFetching: false,
columnWidths: {},
setColumnWidths: () => {},
});
interface Props {
@@ -408,6 +414,8 @@ export function DashboardProvider({
}
};
const [columnWidths, setColumnWidths] = useState<WidgetColumnWidths>({});
const value: IDashboardContext = useMemo(
() => ({
toScrollWidgetId,
@@ -435,6 +443,8 @@ export function DashboardProvider({
selectedRowWidgetId,
setSelectedRowWidgetId,
isDashboardFetching,
columnWidths,
setColumnWidths,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
[
@@ -457,6 +467,8 @@ export function DashboardProvider({
selectedRowWidgetId,
setSelectedRowWidgetId,
isDashboardFetching,
columnWidths,
setColumnWidths,
],
);

View File

@@ -10,6 +10,10 @@ export interface DashboardSortOrder {
search: string;
}
export type WidgetColumnWidths = {
[widgetId: string]: Record<string, number>;
};
export interface IDashboardContext {
isDashboardSliderOpen: boolean;
isDashboardLocked: boolean;
@@ -48,4 +52,6 @@ export interface IDashboardContext {
selectedRowWidgetId: string | null;
setSelectedRowWidgetId: React.Dispatch<React.SetStateAction<string | null>>;
isDashboardFetching: boolean;
columnWidths: WidgetColumnWidths;
setColumnWidths: React.Dispatch<React.SetStateAction<WidgetColumnWidths>>;
}

View File

@@ -764,10 +764,7 @@ export function QueryBuilderProvider({
);
const { safeNavigate } = useSafeNavigate({
preventSameUrlNavigation: !(
initialDataSource === DataSource.LOGS ||
initialDataSource === DataSource.TRACES
),
preventSameUrlNavigation: false,
});
const redirectWithQueryBuilderData = useCallback(

View File

@@ -2,6 +2,9 @@
@import 'overlayscrollbars/overlayscrollbars.css';
@import './periscope.scss';
@tailwind base;
@tailwind components;
@tailwind utilities;
#root,
html,

View File

@@ -109,6 +109,7 @@ export interface IBaseWidget {
selectedLogFields: IField[] | null;
selectedTracesFields: BaseAutocompleteData[] | null;
isLogScale?: boolean;
columnWidths?: Record<string, number>;
}
export interface Widgets extends IBaseWidget {
query: Query;

View File

@@ -0,0 +1,31 @@
/* eslint-disable @typescript-eslint/explicit-function-return-type */
/* eslint-disable @typescript-eslint/no-var-requires */
const {
ColorTailwind,
spacingTokens,
typographyTokens,
} = require('@signozhq/design-tokens');
// Helper function to extract token values
const mapTokenValues = (tokens) =>
Object.fromEntries(
Object.entries(tokens).map(([key, token]) => [key, token.value]),
);
module.exports = {
content: ['./src/**/*.{js,jsx,ts,tsx}', './public/index.html'],
theme: {
extend: {
colors: {
...ColorTailwind,
},
spacing: {
...mapTokenValues(spacingTokens.padding),
...mapTokenValues(spacingTokens.margin),
},
fontSize: mapTokenValues(typographyTokens.fontSize),
fontWeight: mapTokenValues(typographyTokens.fontWeight),
},
},
plugins: [],
};
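With the config above, values from @signozhq/design-tokens become Tailwind utilities that can be mixed with the existing SCSS classes. A hypothetical usage sketch — the actual class names depend on which token keys the package exports, so treat these as placeholders:

// Hypothetical: token-derived color/spacing/typography utilities next to an
// existing SCSS class. Class names are placeholders, not verified tokens.
function WidgetTitle({ title }: { title: string }): JSX.Element {
	return <div className="widget-header p-2 text-sm font-medium">{title}</div>;
}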

View File

@@ -49,6 +49,8 @@
"./webpack.config.prod.js",
"./jest.setup.ts",
"./tests/**.ts",
"./**/*.d.ts"
"./**/*.d.ts",
"tailwind.config.js",
"postcss.config.js"
]
}

View File

@@ -171,6 +171,7 @@ const config = {
styleLoader,
// Translates CSS into CommonJS
cssLoader,
'postcss-loader',
// Compiles Sass to CSS
sassLoader,
],

View File

@@ -139,6 +139,7 @@ const config = {
styleLoader,
// Translates CSS into CommonJS
cssLoader,
'postcss-loader',
// Compiles Sass to CSS
sassLoader,
],

View File

@@ -38,6 +38,11 @@
resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.3.2.tgz#a6abc715fb6884851fca9dad37fc34739a04fd11"
integrity sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw==
"@alloc/quick-lru@^5.2.0":
version "5.2.0"
resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30"
integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==
"@ampproject/remapping@^2.2.0":
version "2.2.1"
resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630"
@@ -2682,6 +2687,18 @@
resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz"
integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
"@isaacs/cliui@^8.0.2":
version "8.0.2"
resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550"
integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==
dependencies:
string-width "^5.1.2"
string-width-cjs "npm:string-width@^4.2.0"
strip-ansi "^7.0.1"
strip-ansi-cjs "npm:strip-ansi@^6.0.1"
wrap-ansi "^8.1.0"
wrap-ansi-cjs "npm:wrap-ansi@^7.0.0"
"@istanbuljs/load-nyc-config@^1.0.0":
version "1.1.0"
resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz"
@@ -3166,6 +3183,11 @@
resolved "https://registry.npmjs.org/@petamoriken/float16/-/float16-3.8.0.tgz"
integrity sha512-AhVAm6SQ+zgxIiOzwVdUcDmKlu/qU39FiYD2UD6kQQaVenrn0dGZewIghWAENGQsvC+1avLCuT+T2/3Gsp/W3w==
"@pkgjs/parseargs@^0.11.0":
version "0.11.0"
resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33"
integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==
"@playwright/test@^1.22.0":
version "1.33.0"
resolved "https://registry.npmjs.org/@playwright/test/-/test-1.33.0.tgz"
@@ -5241,7 +5263,7 @@ ansi-styles@^5.0.0:
resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz"
integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==
ansi-styles@^6.0.0:
ansi-styles@^6.0.0, ansi-styles@^6.1.0:
version "6.2.1"
resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz"
integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==
@@ -5316,6 +5338,11 @@ antd@5.11.0:
scroll-into-view-if-needed "^3.1.0"
throttle-debounce "^5.0.0"
any-promise@^1.0.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f"
integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==
anymatch@^3.0.3, anymatch@~3.1.2:
version "3.1.3"
resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz"
@@ -5341,6 +5368,11 @@ arg@^4.1.0:
resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz"
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
arg@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c"
integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz"
@@ -5348,6 +5380,11 @@ argparse@^1.0.7:
dependencies:
sprintf-js "~1.0.2"
argparse@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
aria-query@^5.0.0, aria-query@^5.1.3:
version "5.1.3"
resolved "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz"
@@ -6165,7 +6202,7 @@ browser-process-hrtime@^1.0.0:
resolved "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz"
integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==
browserslist@^4.0.0, browserslist@^4.21.10, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5, browserslist@^4.21.9, browserslist@^4.22.2, browserslist@^4.23.0:
browserslist@^4.0.0, browserslist@^4.21.10, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5, browserslist@^4.21.9, browserslist@^4.22.2:
version "4.23.0"
resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.0.tgz#8f3acc2bbe73af7213399430890f86c63a5674ab"
integrity sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==
@@ -6175,6 +6212,16 @@ browserslist@^4.0.0, browserslist@^4.21.10, browserslist@^4.21.3, browserslist@^
node-releases "^2.0.14"
update-browserslist-db "^1.0.13"
browserslist@^4.23.0:
version "4.24.4"
resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.24.4.tgz#c6b2865a3f08bcb860a0e827389003b9fe686e4b"
integrity sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==
dependencies:
caniuse-lite "^1.0.30001688"
electron-to-chromium "^1.5.73"
node-releases "^2.0.19"
update-browserslist-db "^1.1.1"
bs-logger@0.x:
version "0.2.6"
resolved "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz"
@@ -6298,6 +6345,11 @@ camel-case@^4.1.2:
pascal-case "^3.1.2"
tslib "^2.0.3"
camelcase-css@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5"
integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==
camelcase-keys@^6.2.2:
version "6.2.2"
resolved "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz"
@@ -6314,7 +6366,7 @@ camelcase@^5.3.1:
camelcase@^6.1.0, camelcase@^6.2.0:
version "6.3.0"
resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
camelize@^1.0.0:
@@ -6337,11 +6389,16 @@ caniuse-lite@^1.0.0:
resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001481.tgz"
integrity sha512-KCqHwRnaa1InZBtqXzP98LPg0ajCVujMKjqKDhZEthIpAsJl/YEIa3YvXjGXPVqzZVguccuu7ga9KOE1J9rKPQ==
caniuse-lite@^1.0.30001587, caniuse-lite@^1.0.30001599:
caniuse-lite@^1.0.30001587:
version "1.0.30001621"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001621.tgz#4adcb443c8b9c8303e04498318f987616b8fea2e"
integrity sha512-+NLXZiviFFKX0fk8Piwv3PfLPGtRqJeq2TiNoUff/qB5KJgwecJTvCXDpmlyP/eCI/GUEmp/h/y5j0yckiiZrA==
caniuse-lite@^1.0.30001599, caniuse-lite@^1.0.30001688:
version "1.0.30001707"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001707.tgz#c5e104d199e6f4355a898fcd995a066c7eb9bf41"
integrity sha512-3qtRjw/HQSMlDWf+X79N206fepf4SOOU6SQLMaq/0KkZLmSjPxAkBOQQ+FxbHKfHmYLZFfdWsO3KA90ceHPSnw==
canvas-color-tracker@1:
version "1.2.1"
resolved "https://registry.npmjs.org/canvas-color-tracker/-/canvas-color-tracker-1.2.1.tgz"
@@ -6480,6 +6537,21 @@ chartjs-plugin-annotation@^1.4.0:
optionalDependencies:
fsevents "~2.3.2"
chokidar@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b"
integrity sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==
dependencies:
anymatch "~3.1.2"
braces "~3.0.2"
glob-parent "~5.1.2"
is-binary-path "~2.1.0"
is-glob "~4.0.1"
normalize-path "~3.0.0"
readdirp "~3.6.0"
optionalDependencies:
fsevents "~2.3.2"
chrome-trace-event@^1.0.2:
version "1.0.3"
resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz"
@@ -6685,6 +6757,11 @@ commander@2, commander@^2.20.0, commander@^2.20.3, commander@^2.8.1:
resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz"
integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
commander@^4.0.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068"
integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
commander@^7.0.0, commander@^7.2.0:
version "7.2.0"
resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz"
@@ -6913,6 +6990,16 @@ cosmiconfig@^7, cosmiconfig@^7.0.0:
path-type "^4.0.0"
yaml "^1.10.0"
cosmiconfig@^9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-9.0.0.tgz#34c3fc58287b915f3ae905ab6dc3de258b55ad9d"
integrity sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==
dependencies:
env-paths "^2.2.1"
import-fresh "^3.3.0"
js-yaml "^4.1.0"
parse-json "^5.2.0"
create-require@^1.1.0:
version "1.1.1"
resolved "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz"
@@ -6954,7 +7041,7 @@ cross-fetch@3.1.5:
dependencies:
node-fetch "2.6.7"
cross-spawn@7.0.5, cross-spawn@^5.0.1, cross-spawn@^6.0.0, cross-spawn@^6.0.5, cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
cross-spawn@7.0.5, cross-spawn@^5.0.1, cross-spawn@^6.0.0, cross-spawn@^6.0.5, cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3, cross-spawn@^7.0.6:
version "7.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.5.tgz#910aac880ff5243da96b728bc6521a5f6c2f2f82"
integrity sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==
@@ -7666,6 +7753,11 @@ devlop@^1.0.0:
dependencies:
dequal "^2.0.0"
didyoumean@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037"
integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==
diff-sequences@^27.5.1:
version "27.5.1"
resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz"
@@ -7698,6 +7790,11 @@ direction@^2.0.0:
resolved "https://registry.yarnpkg.com/direction/-/direction-2.0.1.tgz#71800dd3c4fa102406502905d3866e65bdebb985"
integrity sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA==
dlv@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79"
integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==
dnd-core@^16.0.1:
version "16.0.1"
resolved "https://registry.yarnpkg.com/dnd-core/-/dnd-core-16.0.1.tgz#a1c213ed08961f6bd1959a28bb76f1a868360d19"
@@ -7919,6 +8016,11 @@ electron-to-chromium@^1.4.668:
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.777.tgz#f846fbba23fd11b3c6f97848cdda94896fdb8baf"
integrity sha512-n02NCwLJ3wexLfK/yQeqfywCblZqLcXphzmid5e8yVPdtEcida7li0A5WQKghHNG0FeOMCzeFOzEbtAh5riXFw==
electron-to-chromium@^1.5.73:
version "1.5.128"
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.128.tgz#8ea537b369c32527b3cc47df7973bffe5d3c2980"
integrity sha512-bo1A4HH/NS522Ws0QNFIzyPcyUUNV/yyy70Ho1xqfGYzPUme2F/xr4tlEOuM6/A538U1vDA7a4XfCd1CKRegKQ==
emitter-component@^1.1.1:
version "1.1.1"
resolved "https://registry.npmjs.org/emitter-component/-/emitter-component-1.1.1.tgz"
@@ -7986,6 +8088,11 @@ entities@^4.2.0, entities@^4.4.0:
resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48"
integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==
env-paths@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2"
integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==
envinfo@^7.7.3:
version "7.8.1"
resolved "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz"
@@ -8159,6 +8266,11 @@ escalade@^3.1.1:
resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz"
integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
escalade@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5"
integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==
escape-html@~1.0.3:
version "1.0.3"
resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz"
@@ -8740,7 +8852,7 @@ fast-diff@^1.1.2:
resolved "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz"
integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==
fast-glob@^3.0.3:
fast-glob@^3.0.3, fast-glob@^3.3.2:
version "3.3.3"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.3.tgz#d06d585ce8dba90a16b0505c543c3ccfb3aeb818"
integrity sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==
@@ -9048,6 +9160,14 @@ force-graph@1:
kapsule "^1.14"
lodash-es "4"
foreground-child@^3.1.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.3.1.tgz#32e8e9ed1b68a3497befb9ac2b6adf92a638576f"
integrity sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==
dependencies:
cross-spawn "^7.0.6"
signal-exit "^4.0.1"
form-data@^3.0.0:
version "3.0.1"
resolved "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz"
@@ -9314,11 +9434,30 @@ glob-parent@^5.1.1, glob-parent@^5.1.2, glob-parent@~5.1.2:
dependencies:
is-glob "^4.0.1"
glob-parent@^6.0.2:
version "6.0.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
dependencies:
is-glob "^4.0.3"
glob-to-regexp@^0.4.1:
version "0.4.1"
resolved "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz"
integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
glob@^10.3.10:
version "10.4.5"
resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956"
integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==
dependencies:
foreground-child "^3.1.0"
jackspeak "^3.1.2"
minimatch "^9.0.4"
minipass "^7.1.2"
package-json-from-dist "^1.0.0"
path-scurry "^1.11.1"
glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6:
version "7.2.3"
resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz"
@@ -9550,6 +9689,13 @@ hasown@^2.0.0:
dependencies:
function-bind "^1.1.2"
hasown@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==
dependencies:
function-bind "^1.1.2"
hast-util-from-parse5@^7.0.0:
version "7.1.2"
resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz#aecfef73e3ceafdfa4550716443e4eb7b02e22b0"
@@ -10169,6 +10315,14 @@ import-fresh@^3.0.0, import-fresh@^3.2.1:
parent-module "^1.0.0"
resolve-from "^4.0.0"
import-fresh@^3.3.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.1.tgz#9cecb56503c0ada1f2741dbbd6546e4b13b57ccf"
integrity sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==
dependencies:
parent-module "^1.0.0"
resolve-from "^4.0.0"
import-lazy@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-3.1.0.tgz#891279202c8a2280fdbd6674dbd8da1a1dfc67cc"
@@ -10416,6 +10570,13 @@ is-core-module@^2.13.0:
dependencies:
has "^1.0.3"
is-core-module@^2.16.0:
version "2.16.1"
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4"
integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==
dependencies:
hasown "^2.0.2"
is-cwebp-readable@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/is-cwebp-readable/-/is-cwebp-readable-3.0.0.tgz#0554aaa400977a2fc4de366d8c0244f13cde58cb"
@@ -10795,6 +10956,15 @@ isurl@^1.0.0-alpha5:
has-to-string-tag-x "^1.2.0"
is-object "^1.0.1"
jackspeak@^3.1.2:
version "3.4.3"
resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a"
integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==
dependencies:
"@isaacs/cliui" "^8.0.2"
optionalDependencies:
"@pkgjs/parseargs" "^0.11.0"
jerrypick@^1.1.1:
version "1.1.1"
resolved "https://registry.npmjs.org/jerrypick/-/jerrypick-1.1.1.tgz"
@@ -11310,6 +11480,11 @@ jest@^27.5.1:
import-local "^3.0.2"
jest-cli "^27.5.1"
jiti@^1.20.0, jiti@^1.21.6:
version "1.21.7"
resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.7.tgz#9dd81043424a3d28458b193d965f0d18a2300ba9"
integrity sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==
js-base64@^3.7.2:
version "3.7.5"
resolved "https://registry.npmjs.org/js-base64/-/js-base64-3.7.5.tgz"
@@ -11343,6 +11518,13 @@ js-yaml@^3.13.1:
argparse "^1.0.7"
esprima "^4.0.0"
js-yaml@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
dependencies:
argparse "^2.0.1"
jsdom@^16.6.0:
version "16.7.0"
resolved "https://registry.npmjs.org/jsdom/-/jsdom-16.7.0.tgz"
@@ -11624,6 +11806,11 @@ lilconfig@^2.0.5, lilconfig@^2.1.0:
resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz"
integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==
lilconfig@^3.0.0, lilconfig@^3.1.3:
version "3.1.3"
resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.3.tgz#a1bcfd6257f9585bf5ae14ceeebb7b559025e4c4"
integrity sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==
lines-and-columns@^1.1.6:
version "1.2.4"
resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz"
@@ -11837,6 +12024,11 @@ lowlight@^1.17.0:
fault "^1.0.0"
highlight.js "~10.7.0"
lru-cache@^10.2.0:
version "10.4.3"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119"
integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==
lru-cache@^5.1.1:
version "5.1.1"
resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz"
@@ -12724,6 +12916,13 @@ minimatch@^8.0.2:
dependencies:
brace-expansion "^2.0.1"
minimatch@^9.0.4:
version "9.0.5"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5"
integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==
dependencies:
brace-expansion "^2.0.1"
minimist-options@4.1.0:
version "4.1.0"
resolved "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz"
@@ -12748,6 +12947,11 @@ minipass@^4.2.4:
resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c"
integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==
minipass@^7.1.2:
version "7.1.2"
resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707"
integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==
mkdirp@^0.5.6:
version "0.5.6"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
@@ -12858,6 +13062,15 @@ mute-stream@0.0.8:
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d"
integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==
mz@^2.7.0:
version "2.7.0"
resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32"
integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==
dependencies:
any-promise "^1.0.0"
object-assign "^4.0.1"
thenify-all "^1.0.0"
nano-css@^5.3.1:
version "5.3.5"
resolved "https://registry.npmjs.org/nano-css/-/nano-css-5.3.5.tgz"
@@ -12903,6 +13116,11 @@ nanoid@^3.3.7:
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
nanoid@^3.3.8:
version "3.3.11"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.11.tgz#4f4f112cefbe303202f2199838128936266d185b"
integrity sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==
natural-compare@^1.4.0:
version "1.4.0"
resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz"
@@ -13010,6 +13228,11 @@ node-releases@^2.0.14:
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b"
integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==
node-releases@^2.0.19:
version "2.0.19"
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.19.tgz#9e445a52950951ec4d177d843af370b411caf314"
integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==
normalize-package-data@^2.3.2, normalize-package-data@^2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
@@ -13113,6 +13336,11 @@ object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
object-hash@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9"
integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==
object-inspect@^1.12.2, object-inspect@^1.12.3, object-inspect@^1.9.0:
version "1.12.3"
resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz"
@@ -13487,6 +13715,11 @@ p-try@^2.0.0:
resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
package-json-from-dist@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505"
integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==
pako@^2.0.4:
version "2.1.0"
resolved "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz"
@@ -13656,6 +13889,14 @@ path-parse@^1.0.7:
resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz"
integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==
path-scurry@^1.11.1:
version "1.11.1"
resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2"
integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==
dependencies:
lru-cache "^10.2.0"
minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
path-scurry@^1.6.1:
version "1.10.1"
resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698"
@@ -13732,6 +13973,11 @@ picocolors@^1.0.0:
resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz"
integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
picocolors@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b"
integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==
picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.0, picomatch@^2.3.1:
version "2.3.1"
resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz"
@@ -13774,6 +14020,11 @@ pinkie@^2.0.0:
resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870"
integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==
pirates@^4.0.1:
version "4.0.7"
resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.7.tgz#643b4a18c4257c8a65104b73f3049ce9a0a15e22"
integrity sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==
pirates@^4.0.4:
version "4.0.5"
resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz"
@@ -13877,6 +14128,22 @@ postcss-discard-overridden@^6.0.0:
resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-6.0.0.tgz#49c5262db14e975e349692d9024442de7cd8e234"
integrity sha512-4VELwssYXDFigPYAZ8vL4yX4mUepF/oCBeeIT4OXsJPYOtvJumyz9WflmJWTfDwCUcpDR+z0zvCWBXgTx35SVw==
postcss-import@^15.1.0:
version "15.1.0"
resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70"
integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==
dependencies:
postcss-value-parser "^4.0.0"
read-cache "^1.0.0"
resolve "^1.1.7"
postcss-js@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2"
integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==
dependencies:
camelcase-css "^2.0.1"
postcss-load-config@^3.1.4:
version "3.1.4"
resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855"
@@ -13885,6 +14152,23 @@ postcss-load-config@^3.1.4:
lilconfig "^2.0.5"
yaml "^1.10.2"
postcss-load-config@^4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.2.tgz#7159dcf626118d33e299f485d6afe4aff7c4a3e3"
integrity sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==
dependencies:
lilconfig "^3.0.0"
yaml "^2.3.4"
postcss-loader@8.1.1:
version "8.1.1"
resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-8.1.1.tgz#2822589e7522927344954acb55bbf26e8b195dfe"
integrity sha512-0IeqyAsG6tYiDRCYKQJLAmgQr47DX6N7sFSWvQxt6AcupX8DIdmykuk/o/tx0Lze3ErGHJEp5OSRxrelC6+NdQ==
dependencies:
cosmiconfig "^9.0.0"
jiti "^1.20.0"
semver "^7.5.4"
postcss-merge-longhand@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-6.0.0.tgz#6f627b27db939bce316eaa97e22400267e798d69"
@@ -13963,6 +14247,13 @@ postcss-modules-values@^4.0.0:
dependencies:
icss-utils "^5.0.0"
postcss-nested@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.2.0.tgz#4c2d22ab5f20b9cb61e2c5c5915950784d068131"
integrity sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==
dependencies:
postcss-selector-parser "^6.1.1"
postcss-normalize-charset@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-6.0.0.tgz#36cc12457259064969fb96f84df491652a4b0975"
@@ -14064,6 +14355,14 @@ postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector
cssesc "^3.0.0"
util-deprecate "^1.0.2"
postcss-selector-parser@^6.1.1, postcss-selector-parser@^6.1.2:
version "6.1.2"
resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz#27ecb41fb0e3b6ba7a1ec84fff347f734c7929de"
integrity sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==
dependencies:
cssesc "^3.0.0"
util-deprecate "^1.0.2"
postcss-svgo@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-6.0.0.tgz#7b18742d38d4505a0455bbe70d52b49f00eaf69d"
@@ -14079,12 +14378,12 @@ postcss-unique-selectors@^6.0.0:
dependencies:
postcss-selector-parser "^6.0.5"
postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0:
postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0:
version "4.2.0"
resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz"
integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==
postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21, postcss@^8.4.24:
postcss@8.4.38, postcss@^8.0.0, postcss@^8.3.7, postcss@^8.4.21, postcss@^8.4.24:
version "8.4.38"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.38.tgz#b387d533baf2054288e337066d81c6bee9db9e0e"
integrity sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==
@@ -14093,6 +14392,15 @@ postcss@8.4.38, postcss@^8.0.0, postcss@^8.1.1, postcss@^8.3.7, postcss@^8.4.21,
picocolors "^1.0.0"
source-map-js "^1.2.0"
postcss@^8.1.1, postcss@^8.4.47:
version "8.5.3"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.5.3.tgz#1463b6f1c7fb16fe258736cba29a2de35237eafb"
integrity sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==
dependencies:
nanoid "^3.3.8"
picocolors "^1.1.1"
source-map-js "^1.2.1"
posthog-js@1.215.5:
version "1.215.5"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.215.5.tgz#0512cfdb919da960b809c5f686ca147f9c2922ba"
@@ -15138,6 +15446,13 @@ react@18.2.0:
dependencies:
loose-envify "^1.1.0"
read-cache@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==
dependencies:
pify "^2.3.0"
read-pkg-up@^7.0.1:
version "7.0.1"
resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz"
@@ -15574,6 +15889,15 @@ resolve.exports@^1.1.0:
resolved "https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.1.tgz"
integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==
resolve@^1.1.7, resolve@^1.22.8:
version "1.22.10"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39"
integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==
dependencies:
is-core-module "^2.16.0"
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
resolve@^1.10.0, resolve@^1.10.1, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.9.0:
version "1.22.2"
resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz"
@@ -15750,7 +16074,7 @@ scheduler@^0.23.0:
schema-utils@^2.7.0, schema-utils@^2.7.1:
version "2.7.1"
resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz"
resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7"
integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==
dependencies:
"@types/json-schema" "^7.0.5"
@@ -15819,7 +16143,7 @@ semver-truncate@^1.1.2:
dependencies:
semver "^5.3.0"
"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.3.0, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7:
"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.3.0, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.4:
version "7.5.4"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==
@@ -15972,6 +16296,11 @@ signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7:
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"
integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==
signal-exit@^4.0.1:
version "4.1.0"
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04"
integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==
simple-concat@^1.0.0:
version "1.0.1"
resolved "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz"
@@ -16101,6 +16430,11 @@ source-map-js@^1.2.0:
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.0.tgz#16b809c162517b5b8c3e7dcd315a2a5c2612b2af"
integrity sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==
source-map-js@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46"
integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==
source-map-support@^0.5.6, source-map-support@~0.5.20:
version "0.5.21"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
@@ -16310,6 +16644,15 @@ string-length@^4.0.1:
char-regex "^1.0.2"
strip-ansi "^6.0.0"
"string-width-cjs@npm:string-width@^4.2.0":
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
dependencies:
emoji-regex "^8.0.0"
is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.1"
string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
version "4.2.3"
resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"
@@ -16319,7 +16662,7 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.1"
string-width@^5.0.0:
string-width@^5.0.0, string-width@^5.0.1, string-width@^5.1.2:
version "5.1.2"
resolved "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz"
integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==
@@ -16400,6 +16743,13 @@ stringify-entities@^4.0.0:
character-entities-html4 "^2.0.0"
character-entities-legacy "^3.0.0"
"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
dependencies:
ansi-regex "^5.0.1"
strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"
@@ -16467,7 +16817,7 @@ strnum@^1.1.1:
style-loader@1.3.0:
version "1.3.0"
resolved "https://registry.npmjs.org/style-loader/-/style-loader-1.3.0.tgz"
resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.3.0.tgz#828b4a3b3b7e7aa5847ce7bae9e874512114249e"
integrity sha512-V7TCORko8rs9rIqkSrlMfkqA63DfoGBBJmK1kKGCcSi+BWb4cqz0SRsnp4l6rU5iwOEd0/2ePv68SV22VXon4Q==
dependencies:
loader-utils "^2.0.0"
@@ -16530,6 +16880,19 @@ stylus@^0.59.0:
sax "~1.2.4"
source-map "^0.7.3"
sucrase@^3.35.0:
version "3.35.0"
resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.35.0.tgz#57f17a3d7e19b36d8995f06679d121be914ae263"
integrity sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==
dependencies:
"@jridgewell/gen-mapping" "^0.3.2"
commander "^4.0.0"
glob "^10.3.10"
lines-and-columns "^1.1.6"
mz "^2.7.0"
pirates "^4.0.1"
ts-interface-checker "^0.1.9"
super-animejs@^3.1.0:
version "3.1.0"
resolved "https://registry.npmjs.org/super-animejs/-/super-animejs-3.1.0.tgz"
@@ -16635,6 +16998,34 @@ table@^6.0.9:
string-width "^4.2.3"
strip-ansi "^6.0.1"
tailwindcss@3.4.17:
version "3.4.17"
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.4.17.tgz#ae8406c0f96696a631c790768ff319d46d5e5a63"
integrity sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==
dependencies:
"@alloc/quick-lru" "^5.2.0"
arg "^5.0.2"
chokidar "^3.6.0"
didyoumean "^1.2.2"
dlv "^1.1.3"
fast-glob "^3.3.2"
glob-parent "^6.0.2"
is-glob "^4.0.3"
jiti "^1.21.6"
lilconfig "^3.1.3"
micromatch "^4.0.8"
normalize-path "^3.0.0"
object-hash "^3.0.0"
picocolors "^1.1.1"
postcss "^8.4.47"
postcss-import "^15.1.0"
postcss-js "^4.0.1"
postcss-load-config "^4.0.2"
postcss-nested "^6.2.0"
postcss-selector-parser "^6.1.2"
resolve "^1.22.8"
sucrase "^3.35.0"
tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0:
version "2.2.1"
resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz"
@@ -16724,6 +17115,20 @@ text-table@^0.2.0:
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==
thenify-all@^1.0.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726"
integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==
dependencies:
thenify ">= 3.1.0 < 4"
"thenify@>= 3.1.0 < 4":
version "3.3.1"
resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f"
integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==
dependencies:
any-promise "^1.0.0"
three-bmfont-text@dmarcos/three-bmfont-text#21d017046216e318362c48abd1a48bddfb6e0733:
version "2.4.0"
resolved "https://codeload.github.com/dmarcos/three-bmfont-text/tar.gz/21d017046216e318362c48abd1a48bddfb6e0733"
@@ -16935,6 +17340,11 @@ ts-easing@^0.2.0:
resolved "https://registry.npmjs.org/ts-easing/-/ts-easing-0.2.0.tgz"
integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==
ts-interface-checker@^0.1.9:
version "0.1.13"
resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699"
integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==
ts-jest@^27.1.5:
version "27.1.5"
resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-27.1.5.tgz#0ddf1b163fbaae3d5b7504a1e65c914a95cff297"
@@ -17387,6 +17797,14 @@ update-browserslist-db@^1.0.13:
escalade "^3.1.1"
picocolors "^1.0.0"
update-browserslist-db@^1.1.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz#348377dd245216f9e7060ff50b15a1b740b75420"
integrity sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==
dependencies:
escalade "^3.2.0"
picocolors "^1.1.1"
uplot@1.6.31:
version "1.6.31"
resolved "https://registry.yarnpkg.com/uplot/-/uplot-1.6.31.tgz#092a4b586590e9794b679e1df885a15584b03698"
@@ -17986,6 +18404,15 @@ word-wrapper@^1.0.7:
resolved "https://registry.npmjs.org/word-wrapper/-/word-wrapper-1.0.7.tgz"
integrity sha512-VOPBFCm9b6FyYKQYfn9AVn2dQvdR/YOVFV6IBRA1TBMJWKffvhEX1af6FMGrttILs2Q9ikCRhLqkbY2weW6dOQ==
"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
dependencies:
ansi-styles "^4.0.0"
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrap-ansi@^6.0.1, wrap-ansi@^6.2.0:
version "6.2.0"
resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz"
@@ -18004,6 +18431,15 @@ wrap-ansi@^7.0.0:
string-width "^4.1.0"
strip-ansi "^6.0.0"
wrap-ansi@^8.1.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"
integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==
dependencies:
ansi-styles "^6.1.0"
string-width "^5.0.1"
strip-ansi "^7.0.1"
wrappy@1:
version "1.0.2"
resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"
@@ -18131,6 +18567,11 @@ yaml@^1.10.0, yaml@^1.10.2:
resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz"
integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==
yaml@^2.3.4:
version "2.7.0"
resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.7.0.tgz#aef9bb617a64c937a9a748803786ad8d3ffe1e98"
integrity sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==
yargs-parser@20.x, yargs-parser@^20.2.2, yargs-parser@^20.2.3:
version "20.2.9"
resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz"

go.mod (6 changed lines)

@@ -6,6 +6,7 @@ toolchain go1.22.7
require (
dario.cat/mergo v1.0.1
github.com/AfterShip/clickhouse-sql-parser v0.4.4
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
@@ -21,6 +22,7 @@ require (
github.com/go-redis/redis/v8 v8.11.5
github.com/go-redis/redismock/v8 v8.11.5
github.com/go-viper/mapstructure/v2 v2.1.0
github.com/gojek/heimdall/v7 v7.0.3
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/google/uuid v1.6.0
github.com/gorilla/handlers v1.5.1
@@ -61,6 +63,7 @@ require (
go.opentelemetry.io/collector/pdata v1.17.0
go.opentelemetry.io/collector/processor v0.111.0
go.opentelemetry.io/contrib/config v0.10.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0
go.opentelemetry.io/otel v1.34.0
go.opentelemetry.io/otel/metric v1.34.0
go.opentelemetry.io/otel/sdk v1.34.0
@@ -128,6 +131,7 @@ require (
github.com/goccy/go-json v0.10.3 // indirect
github.com/gofrs/uuid v4.4.0+incompatible // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
@@ -201,6 +205,7 @@ require (
github.com/smarty/assertions v1.15.0 // indirect
github.com/spf13/cobra v1.8.1 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
github.com/tklauser/go-sysconf v0.3.13 // indirect
@@ -245,7 +250,6 @@ require (
go.opentelemetry.io/collector/receiver/receiverprofiles v0.111.0 // indirect
go.opentelemetry.io/collector/semconv v0.111.0 // indirect
go.opentelemetry.io/collector/service v0.111.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.30.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.30.0 // indirect

go.sum (19 changed lines)

@@ -64,6 +64,8 @@ dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/AfterShip/clickhouse-sql-parser v0.4.4 h1:iLRwjzz1mWmUEf5UNrSYOceQ+PX9SdBJ8Xw0DNrL114=
github.com/AfterShip/clickhouse-sql-parser v0.4.4/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0 h1:nyQWyZvwGTvunIMxi1Y9uXkcyr+I7TeNrr/foo4Kpk8=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0/go.mod h1:l38EPgmsp71HHLq9j7De57JcKOWPyhrsW1Awm1JS6K0=
@@ -88,6 +90,7 @@ github.com/Code-Hex/go-generics-cache v1.5.1/go.mod h1:qxcC9kRVrct9rHeiYpFWSoW1v
github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/DataDog/datadog-go v3.7.1+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@@ -97,6 +100,7 @@ github.com/SigNoz/prometheus v1.13.0 h1:hsUql1zd83ifXtswO9Qk1rpCgVjE/ItQvgdNocBS
github.com/SigNoz/prometheus v1.13.0/go.mod h1:4PC0dxmx6y3kNI2d9oOTvEFTPkH6QnxDxERyqeL1hvI=
github.com/SigNoz/signoz-otel-collector v0.111.16 h1:535uKH5Oux+35EsI+L3C6pnAP/Ye0PTCbVizXoL+VqE=
github.com/SigNoz/signoz-otel-collector v0.111.16/go.mod h1:HJ4m0LY1MPsuZmuRF7Ixb+bY8rxgRzI0VXzOedESsjg=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -142,6 +146,7 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
@@ -326,6 +331,10 @@ github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRx
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/gojek/heimdall/v7 v7.0.3 h1:+5sAhl8S0m+qRRL8IVeHCJudFh/XkG3wyO++nvOg+gc=
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
@@ -431,6 +440,7 @@ github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gophercloud/gophercloud v1.14.0 h1:Bt9zQDhPrbd4qX7EILGmy+i7GP35cc+AAL2+wIJpUE8=
github.com/gophercloud/gophercloud v1.14.0/go.mod h1:aAVqcocTSXh2vYFZ1JTvx4EQmfgzxRcNupUfxZbBNDM=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4=
@@ -800,6 +810,7 @@ github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0leargg
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
@@ -827,10 +838,14 @@ github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30 h1:yoKAVkEVwAqbGbR8n87rHQ1dul
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30/go.mod h1:sH0u6fq6x4R5M7WxkoQFY/o7UaiItec0o1LinLCJNq8=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/sebdah/goldie/v2 v2.5.3 h1:9ES/mNN+HNUbNWpVAlrzuZ7jE+Nrczbj8uFRjM7624Y=
github.com/sebdah/goldie/v2 v2.5.3/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N++y4=
github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sethvargo/go-password v0.2.0 h1:BTDl4CC/gjf/axHMaDQtw507ogrXLci6XRiLc7i/UHI=
github.com/sethvargo/go-password v0.2.0/go.mod h1:Ym4Mr9JXLBycr02MFuVQ/0JHidNetSgbzutTr3zsYXE=
github.com/shirou/gopsutil/v4 v4.24.9 h1:KIV+/HaHD5ka5f570RZq+2SaeFsb/pq+fp2DGNWYoOI=
@@ -848,6 +863,8 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY=
github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sSznIX1xY=
github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60=
github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js=
@@ -865,6 +882,7 @@ github.com/srikanthccv/ClickHouse-go-mock v0.11.0 h1:hKY9l7SbhI4IPPs7hjKAL1iDgKc
github.com/srikanthccv/ClickHouse-go-mock v0.11.0/go.mod h1:CzFC21J4tLn7cEYdU5k6hg7yyf052xtZXUY2e3UF6+I=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
@@ -1353,6 +1371,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=

pkg/http/client/http.go (new file, 58 lines)

@@ -0,0 +1,58 @@
package client
import (
"log/slog"
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/http/client/plugin"
"github.com/gojek/heimdall/v7/httpclient"
"go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/trace"
)
type Client struct {
c *httpclient.Client
netc *http.Client
}
func New(logger *slog.Logger, tracerProvider trace.TracerProvider, meterProvider metric.MeterProvider, opts ...Option) (*Client, error) {
clientOpts := options{
retryCount: 3,
requestResponseLog: false,
timeout: 5 * time.Second,
}
for _, opt := range opts {
opt(&clientOpts)
}
netc := &http.Client{
Timeout: clientOpts.timeout,
Transport: otelhttp.NewTransport(http.DefaultTransport, otelhttp.WithTracerProvider(tracerProvider), otelhttp.WithMeterProvider(meterProvider)),
}
c := httpclient.NewClient(
httpclient.WithHTTPClient(netc),
httpclient.WithRetrier(clientOpts.retriable),
httpclient.WithRetryCount(clientOpts.retryCount),
)
if clientOpts.requestResponseLog {
c.AddPlugin(plugin.NewLog(logger))
}
return &Client{
netc: netc,
c: c,
}, nil
}
func (c *Client) Do(request *http.Request) (*http.Response, error) {
return c.c.Do(request)
}
func (c *Client) Client() *http.Client {
return c.netc
}

pkg/http/client/option.go (new file, 42 lines)

@@ -0,0 +1,42 @@
package client
import (
"time"
"github.com/gojek/heimdall/v7"
)
type Retriable = heimdall.Retriable
type options struct {
retryCount int
requestResponseLog bool
timeout time.Duration
retriable Retriable
}
type Option func(*options)
func WithRetryCount(i int) Option {
return func(o *options) {
o.retryCount = i
}
}
func WithTimeout(i time.Duration) Option {
return func(o *options) {
o.timeout = i
}
}
func WithRequestResponseLog(b bool) Option {
return func(o *options) {
o.requestResponseLog = b
}
}
func WithRetriable(retriable Retriable) Option {
return func(o *options) {
o.retriable = retriable
}
}

View File

@@ -0,0 +1,77 @@
package plugin
import (
"bytes"
"io"
"log/slog"
"net"
"net/http"
"github.com/gojek/heimdall/v7"
semconv "go.opentelemetry.io/otel/semconv/v1.27.0"
)
type reqResLog struct {
logger *slog.Logger
}
func NewLog(logger *slog.Logger) heimdall.Plugin {
return &reqResLog{
logger: logger,
}
}
func (plugin *reqResLog) OnRequestStart(request *http.Request) {
host, port, _ := net.SplitHostPort(request.Host)
fields := []any{
string(semconv.HTTPRequestMethodKey), request.Method,
string(semconv.URLPathKey), request.URL.Path,
string(semconv.URLSchemeKey), request.URL.Scheme,
string(semconv.UserAgentOriginalKey), request.UserAgent(),
string(semconv.ServerAddressKey), host,
string(semconv.ServerPortKey), port,
string(semconv.HTTPRequestSizeKey), request.ContentLength,
"http.request.headers", request.Header,
}
plugin.logger.InfoContext(request.Context(), "::SENT-REQUEST::", fields...)
}
func (plugin *reqResLog) OnRequestEnd(request *http.Request, response *http.Response) {
fields := []any{
string(semconv.HTTPResponseStatusCodeKey), response.StatusCode,
string(semconv.HTTPResponseBodySizeKey), response.ContentLength,
}
bodybytes, err := io.ReadAll(response.Body)
if err != nil {
plugin.logger.DebugContext(request.Context(), "::UNABLE-TO-LOG-RESPONSE-BODY::", "error", err)
} else {
_ = response.Body.Close()
response.Body = io.NopCloser(bytes.NewBuffer(bodybytes))
if len(bodybytes) > 0 {
fields = append(fields, "http.response.body", string(bodybytes))
} else {
fields = append(fields, "http.response.body", "(empty)")
}
}
plugin.logger.InfoContext(request.Context(), "::RECEIVED-RESPONSE::", fields...)
}
func (plugin *reqResLog) OnError(request *http.Request, err error) {
host, port, _ := net.SplitHostPort(request.Host)
fields := []any{
err,
string(semconv.HTTPRequestMethodKey), request.Method,
string(semconv.URLPathKey), request.URL.Path,
string(semconv.URLSchemeKey), request.URL.Scheme,
string(semconv.UserAgentOriginalKey), request.UserAgent(),
string(semconv.ServerAddressKey), host,
string(semconv.ServerPortKey), port,
string(semconv.HTTPRequestSizeKey), request.ContentLength,
}
plugin.logger.ErrorContext(request.Context(), "::UNABLE-TO-SEND-REQUEST::", fields...)
}
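For context, a minimal usage sketch of the new pkg/http/client package shown above. This is illustrative and not part of the diff: the endpoint, backoff values, and use of the global OpenTelemetry providers are assumptions; only the constructor, options, and log plugin come from the code above.

package main

import (
	"log/slog"
	"net/http"
	"os"
	"time"

	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/gojek/heimdall/v7"
	"go.opentelemetry.io/otel"
)

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))

	// Build the client with retries, a timeout, and request/response logging.
	// The tracer/meter providers would normally come from the application's
	// telemetry setup; the otel globals are used here only for illustration.
	c, err := client.New(
		logger,
		otel.GetTracerProvider(),
		otel.GetMeterProvider(),
		client.WithRetryCount(3),
		client.WithTimeout(10*time.Second),
		client.WithRequestResponseLog(true),
		client.WithRetriable(heimdall.NewRetrier(
			heimdall.NewConstantBackoff(100*time.Millisecond, 50*time.Millisecond),
		)),
	)
	if err != nil {
		logger.Error("failed to build http client", "error", err)
		return
	}

	// Hypothetical request; any *http.Request works.
	req, err := http.NewRequest(http.MethodGet, "https://example.com/health", nil)
	if err != nil {
		logger.Error("failed to build request", "error", err)
		return
	}

	resp, err := c.Do(req)
	if err != nil {
		logger.Error("request failed", "error", err)
		return
	}
	defer resp.Body.Close()
	logger.Info("request completed", "status", resp.StatusCode)
}

With WithRequestResponseLog(true), the reqResLog plugin above emits ::SENT-REQUEST:: and ::RECEIVED-RESPONSE:: entries keyed by OpenTelemetry semantic-convention attributes.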

View File

@@ -174,7 +174,7 @@ func (r *ClickHouseReader) getValuesForLogAttributes(
from %s.%s
where tag_key = $%d and (
string_value != '' or number_value is not null
)
) and tag_type != 'logfield'
limit %d
)`, r.logsDB, r.logsTagAttributeTableV2, idx+1, limit))

View File

@@ -1143,7 +1143,7 @@ func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetU
func (r *ClickHouseReader) SearchTracesV2(ctx context.Context, params *model.SearchTracesParams,
smartTraceAlgorithm func(payload []model.SearchSpanResponseItem, targetSpanId string,
levelUp int, levelDown int, spanLimit int) ([]model.SearchSpansResult, error)) (*[]model.SearchSpansResult, error) {
levelUp int, levelDown int, spanLimit int) ([]model.SearchSpansResult, error)) (*[]model.SearchSpansResult, error) {
searchSpansResult := []model.SearchSpansResult{
{
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError", "StatusMessage", "StatusCodeString", "SpanKind"},
@@ -1291,7 +1291,7 @@ func (r *ClickHouseReader) SearchTracesV2(ctx context.Context, params *model.Sea
func (r *ClickHouseReader) SearchTraces(ctx context.Context, params *model.SearchTracesParams,
smartTraceAlgorithm func(payload []model.SearchSpanResponseItem, targetSpanId string,
levelUp int, levelDown int, spanLimit int) ([]model.SearchSpansResult, error)) (*[]model.SearchSpansResult, error) {
levelUp int, levelDown int, spanLimit int) ([]model.SearchSpansResult, error)) (*[]model.SearchSpansResult, error) {
if r.useTraceNewSchema {
return r.SearchTracesV2(ctx, params, smartTraceAlgorithm)
@@ -4065,7 +4065,7 @@ func (r *ClickHouseReader) GetLogAggregateAttributes(ctx context.Context, req *v
return nil, fmt.Errorf("unsupported aggregate operator")
}
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type from %s.%s WHERE %s limit $2", r.logsDB, r.logsTagAttributeTableV2, where)
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type from %s.%s WHERE %s and tag_type != 'logfield' limit $2", r.logsDB, r.logsTagAttributeTableV2, where)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
if err != nil {
zap.L().Error("Error while executing query", zap.Error(err))
@@ -4114,10 +4114,10 @@ func (r *ClickHouseReader) GetLogAttributeKeys(ctx context.Context, req *v3.Filt
var response v3.FilterAttributeKeyResponse
if len(req.SearchText) != 0 {
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2)
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' and tag_key ILIKE $1 limit $2", r.logsDB, r.logsTagAttributeTableV2)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
} else {
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s limit $1", r.logsDB, r.logsTagAttributeTableV2)
query = fmt.Sprintf("select distinct tag_key, tag_type, tag_data_type from %s.%s where tag_type != 'logfield' limit $1", r.logsDB, r.logsTagAttributeTableV2)
rows, err = r.db.Query(ctx, query, req.Limit)
}
@@ -4838,7 +4838,7 @@ func (r *ClickHouseReader) GetTraceAggregateAttributes(ctx context.Context, req
default:
return nil, fmt.Errorf("unsupported aggregate operator")
}
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE %s", r.TraceDB, r.spanAttributeTableV2, where)
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE %s and tag_type != 'spanfield'", r.TraceDB, r.spanAttributeTableV2, where)
if req.Limit != 0 {
query = query + fmt.Sprintf(" LIMIT %d;", req.Limit)
}
@@ -4900,7 +4900,7 @@ func (r *ClickHouseReader) GetTraceAttributeKeys(ctx context.Context, req *v3.Fi
var rows driver.Rows
var response v3.FilterAttributeKeyResponse
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 LIMIT $2", r.TraceDB, r.spanAttributeTableV2)
query = fmt.Sprintf("SELECT DISTINCT(tag_key), tag_type, tag_data_type FROM %s.%s WHERE tag_key ILIKE $1 and tag_type != 'spanfield' LIMIT $2", r.TraceDB, r.spanAttributeTableV2)
rows, err = r.db.Query(ctx, query, fmt.Sprintf("%%%s%%", req.SearchText), req.Limit)
@@ -5982,10 +5982,10 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
}
firstQueryLimit := req.Limit
dataPointsOrder := false
samplesOrder := false
var orderByClauseFirstQuery string
if req.OrderBy.ColumnName == "samples" {
dataPointsOrder = true
samplesOrder = true
orderByClauseFirstQuery = fmt.Sprintf("ORDER BY timeseries %s", req.OrderBy.Order)
if req.Limit < 50 {
firstQueryLimit = 50
@@ -5995,8 +5995,8 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
}
// Determine which tables to use
start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.EndD)
sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.EndD)
start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.End)
sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.End)
metricsQuery := fmt.Sprintf(
`SELECT
@@ -6018,7 +6018,10 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
args = append(args, start, end)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
begin := time.Now()
rows, err := r.db.Query(valueCtx, metricsQuery, args...)
queryDuration := time.Since(begin)
zap.L().Info("Time taken to execute metrics query to fetch metrics with high time series", zap.String("query", metricsQuery), zap.Any("args", args), zap.Duration("duration", queryDuration))
if err != nil {
zap.L().Error("Error executing metrics query", zap.Error(err))
return &metrics_explorer.SummaryListMetricsResponse{}, &model.ApiError{Typ: "ClickHouseError", Err: err}
@@ -6049,12 +6052,12 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
// Build a comma-separated list of quoted metric names.
metricsList := "'" + strings.Join(metricNames, "', '") + "'"
// If samples are being sorted by datapoints, update the ORDER clause.
if dataPointsOrder {
if samplesOrder {
orderByClauseFirstQuery = fmt.Sprintf("ORDER BY s.samples %s", req.OrderBy.Order)
} else {
orderByClauseFirstQuery = ""
}
args = make([]interface{}, 0)
var sampleQuery string
var sb strings.Builder
@@ -6062,13 +6065,11 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
sb.WriteString(fmt.Sprintf(
`SELECT
s.samples,
s.metric_name,
s.lastReceived
s.metric_name
FROM (
SELECT
dm.metric_name,
%s AS samples,
MAX(dm.unix_milli) AS lastReceived
%s AS samples
FROM %s.%s AS dm
WHERE dm.metric_name IN (%s)
AND dm.fingerprint IN (
@@ -6076,6 +6077,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
FROM %s.%s
WHERE metric_name IN (%s)
AND __normalized = true
AND unix_milli BETWEEN ? AND ?
%s
GROUP BY fingerprint
)
@@ -6089,18 +6091,18 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
metricsList,
whereClause,
))
args = append(args, start, end)
args = append(args, req.Start, req.End)
} else {
// If no filters, it is a simpler query.
sb.WriteString(fmt.Sprintf(
`SELECT
s.samples,
s.metric_name,
s.lastReceived
s.metric_name
FROM (
SELECT
metric_name,
%s AS samples,
MAX(unix_milli) AS lastReceived
%s AS samples
FROM %s.%s
WHERE metric_name IN (%s)
AND unix_milli BETWEEN ? AND ?
@@ -6109,6 +6111,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
countExp,
signozMetricDBName, sampleTable,
metricsList))
args = append(args, req.Start, req.End)
}
// Append ORDER BY clause if provided.
@@ -6119,10 +6122,10 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
// Append LIMIT clause.
sb.WriteString(fmt.Sprintf("LIMIT %d;", req.Limit))
sampleQuery = sb.String()
// Append the time boundaries for sampleQuery.
args = append(args, start, end)
begin = time.Now()
rows, err = r.db.Query(valueCtx, sampleQuery, args...)
queryDuration = time.Since(begin)
zap.L().Info("Time taken to execute list summary query", zap.String("query", sampleQuery), zap.Any("args", args), zap.Duration("duration", queryDuration))
if err != nil {
zap.L().Error("Error executing samples query", zap.Error(err))
return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
@@ -6130,18 +6133,15 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
defer rows.Close()
samplesMap := make(map[string]uint64)
lastReceivedMap := make(map[string]int64)
for rows.Next() {
var samples uint64
var metricName string
var lastReceived int64
if err := rows.Scan(&samples, &metricName, &lastReceived); err != nil {
if err := rows.Scan(&samples, &metricName); err != nil {
zap.L().Error("Error scanning sample row", zap.Error(err))
return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
samplesMap[metricName] = samples
lastReceivedMap[metricName] = lastReceived
}
if err := rows.Err(); err != nil {
zap.L().Error("Error iterating over sample rows", zap.Error(err))
@@ -6167,16 +6167,13 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
}
if samples, exists := samplesMap[response.Metrics[i].MetricName]; exists {
response.Metrics[i].Samples = samples
if lastReceived, exists := lastReceivedMap[response.Metrics[i].MetricName]; exists {
response.Metrics[i].LastReceived = lastReceived
}
filteredMetrics = append(filteredMetrics, response.Metrics[i])
}
}
response.Metrics = filteredMetrics
// If ordering by samples, sort in-memory.
if dataPointsOrder {
if samplesOrder {
sort.Slice(response.Metrics, func(i, j int) bool {
return response.Metrics[i].Samples > response.Metrics[j].Samples
})
@@ -6194,7 +6191,7 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
if len(conditions) > 0 {
whereClause = "AND " + strings.Join(conditions, " AND ")
}
start, end, tsTable, _ := utils.WhichTSTableToUse(req.Start, req.EndD)
start, end, tsTable, _ := utils.WhichTSTableToUse(req.Start, req.End)
// Construct the query without backticks
query := fmt.Sprintf(`
@@ -6224,26 +6221,29 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
)
args = append(args,
start, end, // For total_cardinality subquery
start, end, // For total_time_series subquery
start, end, // For main query
)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
begin := time.Now()
rows, err := r.db.Query(valueCtx, query, args...)
duration := time.Since(begin)
zap.L().Info("Time taken to execute time series percentage query", zap.String("query", query), zap.Any("args", args), zap.Duration("duration", duration))
if err != nil {
zap.L().Error("Error executing cardinality query", zap.Error(err), zap.String("query", query))
zap.L().Error("Error executing time series percentage query", zap.Error(err), zap.String("query", query))
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
defer rows.Close()
var heatmap []metrics_explorer.TreeMapResponseItem
var treemap []metrics_explorer.TreeMapResponseItem
for rows.Next() {
var item metrics_explorer.TreeMapResponseItem
if err := rows.Scan(&item.MetricName, &item.TotalValue, &item.Percentage); err != nil {
zap.L().Error("Error scanning row", zap.Error(err))
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
heatmap = append(heatmap, item)
treemap = append(treemap, item)
}
if err := rows.Err(); err != nil {
@@ -6251,11 +6251,10 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
return &heatmap, nil
return &treemap, nil
}
func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError) {
var args []interface{}
conditions, _ := utils.BuildFilterConditions(&req.Filters, "ts")
whereClause := ""
@@ -6264,8 +6263,8 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
}
// Determine time range and tables to use
start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.EndD)
sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.EndD)
start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.End)
sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.End)
queryLimit := 50 + req.Limit
metricsQuery := fmt.Sprintf(
@@ -6284,9 +6283,12 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
)
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
begin := time.Now()
rows, err := r.db.Query(valueCtx, metricsQuery, start, end)
duration := time.Since(begin)
zap.L().Info("Time taken to execute samples percentage metric name query to reduce search space", zap.String("query", metricsQuery), zap.Any("start", start), zap.Any("end", end), zap.Duration("duration", duration))
if err != nil {
zap.L().Error("Error executing metrics query", zap.Error(err))
zap.L().Error("Error executing samples percentage query", zap.Error(err))
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
defer rows.Close()
@@ -6317,7 +6319,6 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
// Build optimized query with JOIN but `unix_milli` filter only on the sample table
var sb strings.Builder
sb.WriteString(fmt.Sprintf(
`WITH TotalSamples AS (
SELECT %s AS total_samples
@@ -6338,8 +6339,14 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
countExp, signozMetricDBName, sampleTable, // Inner select samples
))
var args []interface{}
args = append(args,
req.Start, req.End, // For total_samples subquery
)
// Apply `unix_milli` filter **only** on the sample table (`dm`)
sb.WriteString(` WHERE dm.unix_milli BETWEEN ? AND ?`)
args = append(args, req.Start, req.End)
// Use JOIN instead of IN (subquery) when additional filters exist
if whereClause != "" {
@@ -6348,12 +6355,14 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
SELECT ts.fingerprint
FROM %s.%s AS ts
WHERE ts.metric_name IN (%s)
AND unix_milli BETWEEN ? AND ?
AND __normalized = true
%s
GROUP BY ts.fingerprint
)`,
signozMetricDBName, localTsTable, metricsList, whereClause,
))
args = append(args, start, end)
}
// Apply metric filtering after all conditions
@@ -6366,14 +6375,16 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
LIMIT ?;`,
metricsList,
))
args = append(args, req.Limit)
sampleQuery := sb.String()
// Add start and end time to args (only for sample table)
args = append(args, start, end, start, end, req.Limit)
begin = time.Now()
// Execute the sample percentage query
rows, err = r.db.Query(valueCtx, sampleQuery, args...)
duration = time.Since(begin)
zap.L().Info("Time taken to execute samples percentage query", zap.String("query", sampleQuery), zap.Any("args", args), zap.Duration("duration", duration))
if err != nil {
zap.L().Error("Error executing samples query", zap.Error(err))
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
@@ -6381,21 +6392,21 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
defer rows.Close()
// Process the results into a response slice
var heatmap []metrics_explorer.TreeMapResponseItem
var treemap []metrics_explorer.TreeMapResponseItem
for rows.Next() {
var item metrics_explorer.TreeMapResponseItem
if err := rows.Scan(&item.TotalValue, &item.MetricName, &item.Percentage); err != nil {
zap.L().Error("Error scanning row", zap.Error(err))
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
heatmap = append(heatmap, item)
treemap = append(treemap, item)
}
if err := rows.Err(); err != nil {
zap.L().Error("Error iterating over sample rows", zap.Error(err))
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
return &heatmap, nil
return &treemap, nil
}
func (r *ClickHouseReader) GetNameSimilarity(ctx context.Context, req *metrics_explorer.RelatedMetricsRequest) (map[string]metrics_explorer.RelatedMetricsScore, *model.ApiError) {

View File

@@ -124,7 +124,7 @@ func (c *Controller) GenerateConnectionUrl(
}
// TODO(Raj): parameterized this in follow up changes
agentVersion := "0.0.2"
agentVersion := "0.0.3"
connectionUrl := fmt.Sprintf(
"https://%s.console.aws.amazon.com/cloudformation/home?region=%s#/stacks/quickcreate?",

File diff suppressed because one or more lines are too long

Binary file not shown.

New image added (371 KiB).

File diff suppressed because one or more lines are too long

New image added (6.0 KiB).

View File

@@ -0,0 +1,3 @@
### Monitor Managed Streaming Kafka with SigNoz
Collect key MSK metrics and view them with an out-of-the-box dashboard.

View File

@@ -145,11 +145,12 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
// todo(nitya): remove this once we fix agents in multitenancy
defaultOrgID, err := ic.GetDefaultOrgID(ctx)
if err != nil {
return nil, model.WrapApiError(err, "failed to get default org ID")
// we don't want to fail the request if we can't get the default org ID
// we will just return an empty list of pipelines
zap.L().Warn("failed to get default org ID", zap.Error(err))
return result, nil
}
fmt.Println("defaultOrgID", defaultOrgID)
if version >= 0 {
savedPipelines, errors := ic.getPipelinesByVersion(ctx, defaultOrgID, version)
if errors != nil {

View File

@@ -225,21 +225,21 @@ func (receiver *SummaryService) GetMetricsTreemap(ctx context.Context, params *m
var response metrics_explorer.TreeMap
switch params.Treemap {
case metrics_explorer.TimeSeriesTeeMap:
cardinality, apiError := receiver.reader.GetMetricsTimeSeriesPercentage(ctx, params)
ts, apiError := receiver.reader.GetMetricsTimeSeriesPercentage(ctx, params)
if apiError != nil {
return nil, apiError
}
if cardinality != nil {
response.TimeSeries = *cardinality
if ts != nil {
response.TimeSeries = *ts
}
return &response, nil
case metrics_explorer.SamplesTreeMap:
dataPoints, apiError := receiver.reader.GetMetricsSamplesPercentage(ctx, params)
samples, apiError := receiver.reader.GetMetricsSamplesPercentage(ctx, params)
if apiError != nil {
return nil, apiError
}
if dataPoints != nil {
response.Samples = *dataPoints
if samples != nil {
response.Samples = *samples
}
return &response, nil
default:

View File

@@ -21,6 +21,7 @@ import (
"github.com/gorilla/mux"
promModel "github.com/prometheus/common/model"
"go.uber.org/multierr"
"go.uber.org/zap"
"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -34,6 +35,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/utils"
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
"github.com/SigNoz/signoz/pkg/types"
chVariables "github.com/SigNoz/signoz/pkg/variables/clickhouse"
)
var allowedFunctions = []string{"count", "ratePerSec", "sum", "avg", "min", "max", "p50", "p90", "p95", "p99"}
@@ -841,6 +843,29 @@ func validateExpressions(expressions []string, funcs map[string]govaluate.Expres
return errs
}
// chTransformQuery transforms the clickhouse query with the given variables
// it is used to check what the query would be if variables are selected as __all__.
// for now, this is just a pass through, but in the future, it will be used to
// rewrite queries based on the selected dashboard variables
// TODO(srikanthccv): version based query replacement
func chTransformQuery(query string, variables map[string]interface{}) {
varsForTransform := make([]chVariables.VariableValue, 0, len(variables))
for name := range variables {
varsForTransform = append(varsForTransform, chVariables.VariableValue{
Name: name,
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
})
}
transformer := chVariables.NewQueryTransformer(query, varsForTransform)
transformedQuery, err := transformer.Transform()
if err != nil {
zap.L().Warn("failed to transform clickhouse query", zap.Error(err))
}
zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery), zap.String("originalQuery", query))
}
func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiError) {
var queryRangeParams *v3.QueryRangeParamsV3
@@ -979,6 +1004,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
continue
}
chTransformQuery(chQuery.Query, queryRangeParams.Variables)
for name, value := range queryRangeParams.Variables {
chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1)
chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1)

View File

@@ -89,13 +89,13 @@ func (aH *APIHandler) GetTreeMap(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
params, apiError := explorer.ParseTreeMapMetricsParams(r)
if apiError != nil {
zap.L().Error("error parsing heatmap metric params", zap.Error(apiError.Err))
zap.L().Error("error parsing tree map metric params", zap.Error(apiError.Err))
RespondError(w, apiError, nil)
return
}
result, apiError := aH.SummaryService.GetMetricsTreemap(ctx, params)
if apiError != nil {
zap.L().Error("error getting heatmap data", zap.Error(apiError.Err))
zap.L().Error("error getting tree map data", zap.Error(apiError.Err))
RespondError(w, apiError, nil)
return
}

View File

@@ -9,7 +9,7 @@ type SummaryListMetricsRequest struct {
Limit int `json:"limit"`
OrderBy v3.OrderBy `json:"orderBy"`
Start int64 `json:"start"`
EndD int64 `json:"end"`
End int64 `json:"end"`
Filters v3.FilterSet `json:"filters"`
}
@@ -24,7 +24,7 @@ type TreeMapMetricsRequest struct {
Limit int `json:"limit"`
Treemap TreeMapType `json:"treemap"`
Start int64 `json:"start"`
EndD int64 `json:"end"`
End int64 `json:"end"`
Filters v3.FilterSet `json:"filters"`
}

View File

@@ -740,7 +740,7 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, userEma
// zap.L().Info(data)
properties := analytics.NewProperties()
properties.Set("version", version.Info.Version)
properties.Set("version", version.Info.Version())
properties.Set("deploymentType", getDeploymentType())
properties.Set("companyDomain", a.getCompanyDomain())

View File

@@ -10,7 +10,6 @@ import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlmigration"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlitesqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
"github.com/SigNoz/signoz/pkg/telemetrystore"
@@ -39,7 +38,6 @@ func NewSQLStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[sql
hook := sqlstorehook.NewLoggingFactory()
return factory.MustNewNamedMap(
sqlitesqlstore.NewFactory(hook),
postgressqlstore.NewFactory(hook),
)
}

View File

@@ -0,0 +1,511 @@
package clickhouse
import (
"fmt"
"strings"
"github.com/AfterShip/clickhouse-sql-parser/parser"
)
// FilterAction represents what to do with a filter containing a variable
type FilterAction int
const (
// KeepFilter maintains the original filter
KeepFilter FilterAction = iota
// RemoveFilter completely removes the filter
RemoveFilter
// ReplaceWithExistsCheck replaces filter with an EXISTS check
ReplaceWithExistsCheck
)
// FilterTransformer defines the callback function that decides
// what to do with a filter containing a variable
type FilterTransformer func(variableName string, expr parser.Expr) FilterAction
// QueryProcessor handles ClickHouse query modifications
type QueryProcessor struct {
}
// NewQueryProcessor creates a new processor
func NewQueryProcessor() *QueryProcessor {
return &QueryProcessor{}
}
// ProcessQuery finds variables in WHERE clauses and modifies them according to the transformer function
func (qp *QueryProcessor) ProcessQuery(query string, transformer FilterTransformer) (string, error) {
p := parser.NewParser(query)
stmts, err := p.ParseStmts()
if err != nil {
return "", fmt.Errorf("failed to parse query: %w", err)
}
if len(stmts) == 0 {
return query, nil
}
// Look for SELECT statements
modified := false
for i, stmt := range stmts {
selectQuery, ok := stmt.(*parser.SelectQuery)
if !ok {
continue
}
whereModified, err := qp.processWhereClause(selectQuery, transformer)
if err != nil {
return "", err
}
if whereModified {
modified = true
stmts[i] = selectQuery
}
}
if !modified {
return query, nil
}
// Reconstruct the query
var resultBuilder strings.Builder
for _, stmt := range stmts {
resultBuilder.WriteString(stmt.String())
resultBuilder.WriteString(";")
}
return resultBuilder.String(), nil
}
// processWhereClause processes the WHERE clause in a SELECT statement
func (qp *QueryProcessor) processWhereClause(selectQuery *parser.SelectQuery, transformer FilterTransformer) (bool, error) {
// First, process any subqueries in the FROM clause
if selectQuery.From != nil {
subQueryModified, err := qp.processFromClauseSubqueries(selectQuery.From, transformer)
if err != nil {
return false, err
}
if subQueryModified {
// Mark as modified if any subqueries were modified
return true, nil
}
}
// Then process the main WHERE clause
if selectQuery.Where == nil {
return false, nil
}
// Process the WHERE expression, which may include subqueries
modified := false
newExpr, hasChanged, err := qp.transformExpr(selectQuery.Where.Expr, transformer)
if err != nil {
return false, err
}
if hasChanged {
modified = true
if newExpr == nil {
// If the entire WHERE clause is removed
selectQuery.Where = nil
} else {
selectQuery.Where.Expr = newExpr
}
}
return modified, nil
}
// processFromClauseSubqueries recursively processes subqueries in the FROM clause
func (qp *QueryProcessor) processFromClauseSubqueries(fromClause *parser.FromClause, transformer FilterTransformer) (bool, error) {
if fromClause == nil {
return false, nil
}
return qp.processExprSubqueries(fromClause.Expr, transformer)
}
// processExprSubqueries processes subqueries found in expressions
func (qp *QueryProcessor) processExprSubqueries(expr parser.Expr, transformer FilterTransformer) (bool, error) {
if expr == nil {
return false, nil
}
modified := false
switch e := expr.(type) {
case *parser.SubQuery:
// Process the subquery's SELECT statement
if e.Select != nil {
subQueryModified, err := qp.processWhereClause(e.Select, transformer)
if err != nil {
return false, err
}
if subQueryModified {
modified = true
}
}
case *parser.BinaryOperation:
// Check left and right expressions for subqueries
leftModified, err := qp.processExprSubqueries(e.LeftExpr, transformer)
if err != nil {
return false, err
}
rightModified, err := qp.processExprSubqueries(e.RightExpr, transformer)
if err != nil {
return false, err
}
if leftModified || rightModified {
modified = true
}
case *parser.JoinExpr:
// Process both sides of the join
leftModified, err := qp.processExprSubqueries(e.Left, transformer)
if err != nil {
return false, err
}
rightModified, err := qp.processExprSubqueries(e.Right, transformer)
if err != nil {
return false, err
}
// Process join constraints if any
constraintsModified, err := qp.processExprSubqueries(e.Constraints, transformer)
if err != nil {
return false, err
}
if leftModified || rightModified || constraintsModified {
modified = true
}
case *parser.TableExpr:
// Process any subqueries in the table expression
return qp.processExprSubqueries(e.Expr, transformer)
case *parser.AliasExpr:
// Check if the aliased expression contains a subquery
return qp.processExprSubqueries(e.Expr, transformer)
case *parser.FunctionExpr:
// Check function parameters for subqueries
if e.Params != nil && e.Params.Items != nil {
for _, item := range e.Params.Items.Items {
itemModified, err := qp.processExprSubqueries(item, transformer)
if err != nil {
return false, err
}
if itemModified {
modified = true
}
}
}
}
return modified, nil
}
// transformExpr recursively processes expressions in the WHERE clause
func (qp *QueryProcessor) transformExpr(expr parser.Expr, transformer FilterTransformer) (parser.Expr, bool, error) {
if expr == nil {
return nil, false, nil
}
// Handle different expression types
switch e := expr.(type) {
case *parser.SubQuery:
// Handle subqueries like "column IN (SELECT...)"
if e.Select != nil {
modified, err := qp.processWhereClause(e.Select, transformer)
if err != nil {
return nil, false, err
}
return expr, modified, nil
}
case *parser.BinaryOperation:
// Handle IN with a subquery on the right
if e.Operation == "IN" || e.Operation == "NOT IN" {
_, rightIsSubQuery := e.RightExpr.(*parser.SubQuery)
if rightIsSubQuery {
// If right side is a subquery, check if left side has variables
leftVars := qp.findVariables(e.LeftExpr)
if len(leftVars) > 0 {
// Apply action to the entire IN clause
action := transformer(leftVars[0], expr)
switch action {
case RemoveFilter:
return nil, true, nil
case ReplaceWithExistsCheck:
return qp.createExistsCheck(expr, leftVars[0])
}
}
// Process the subquery separately (regardless of whether we modified based on left side)
newRight, rightChanged, err := qp.transformExpr(e.RightExpr, transformer)
if err != nil {
return nil, false, err
}
if rightChanged {
return &parser.BinaryOperation{
LeftExpr: e.LeftExpr,
Operation: e.Operation,
RightExpr: newRight,
HasGlobal: e.HasGlobal,
HasNot: e.HasNot,
}, true, nil
}
// If no changes, return the original
return expr, false, nil
}
}
// Check if this specific binary operation directly contains a variable
leftVars := qp.findVariables(e.LeftExpr)
rightVars := qp.findVariables(e.RightExpr)
// If this is a direct filter with a variable (e.g., "column = $var")
// and not a complex expression, handle it directly
if len(leftVars) > 0 && len(rightVars) == 0 &&
!qp.isComplexExpression(e.LeftExpr) && !qp.isComplexExpression(e.RightExpr) {
action := transformer(leftVars[0], expr)
switch action {
case RemoveFilter:
return nil, true, nil
case ReplaceWithExistsCheck:
return qp.createExistsCheck(expr, leftVars[0])
}
} else if len(rightVars) > 0 && len(leftVars) == 0 &&
!qp.isComplexExpression(e.LeftExpr) && !qp.isComplexExpression(e.RightExpr) {
action := transformer(rightVars[0], expr)
switch action {
case RemoveFilter:
return nil, true, nil
case ReplaceWithExistsCheck:
return qp.createExistsCheck(expr, rightVars[0])
}
}
// Otherwise, recursively process left and right sides
newLeft, leftChanged, err := qp.transformExpr(e.LeftExpr, transformer)
if err != nil {
return nil, false, err
}
newRight, rightChanged, err := qp.transformExpr(e.RightExpr, transformer)
if err != nil {
return nil, false, err
}
if leftChanged || rightChanged {
if e.Operation == "AND" {
// For AND operations, if either side is nil (removed), we can simplify
if newLeft == nil {
return newRight, true, nil
}
if newRight == nil {
return newLeft, true, nil
}
} else if (newLeft == nil || newRight == nil) &&
(e.Operation == "=" || e.Operation == "IN" ||
e.Operation == "<" || e.Operation == ">" ||
e.Operation == "<=" || e.Operation == ">=") {
// For direct comparison operations, if one side is removed, remove the entire expression
return nil, true, nil
}
// Create a new binary operation with the modified sides
return &parser.BinaryOperation{
LeftExpr: newLeft,
Operation: e.Operation,
RightExpr: newRight,
HasGlobal: e.HasGlobal,
HasNot: e.HasNot,
}, true, nil
}
}
// For other expression types that may contain variables
variables := qp.findVariables(expr)
if len(variables) > 0 && !qp.isComplexExpression(expr) {
action := transformer(variables[0], expr)
switch action {
case RemoveFilter:
return nil, true, nil
case ReplaceWithExistsCheck:
return qp.createExistsCheck(expr, variables[0])
}
}
return expr, false, nil
}
// isComplexExpression checks if an expression contains nested operations
// that should not be treated as a simple variable reference
func (qp *QueryProcessor) isComplexExpression(expr parser.Expr) bool {
switch e := expr.(type) {
case *parser.BinaryOperation:
// If it's a binary operation, it's complex
return true
case *parser.FunctionExpr:
// If it's a function, examine its parameters
if e.Params != nil && e.Params.Items != nil {
for _, item := range e.Params.Items.Items {
if qp.isComplexExpression(item) {
return true
}
}
}
}
return false
}
// findVariables finds all variables in an expression
func (qp *QueryProcessor) findVariables(expr parser.Expr) []string {
var variables []string
if expr == nil {
return variables
}
switch e := expr.(type) {
case *parser.Ident:
// Variables can appear in any of these forms (whitespace just inside
// the delimiters should be tolerated):
//   {{.variable_name}}, {{ .variable_name }}, {{ .variable_name}}
//   $variable_name
//   [[variable_name]], [[ variable_name]], [[ variable_name ]]
//   {{variable_name}}, {{ variable_name }}, {{variable_name }}
// NewQueryTransformer rewrites the common templated forms to
// $variable_name before parsing, so only the $ prefix is matched here.
if strings.HasPrefix(e.Name, "$") {
variables = append(variables, e.Name[1:]) // Remove the $ prefix
}
case *parser.BinaryOperation:
variables = append(variables, qp.findVariables(e.LeftExpr)...)
variables = append(variables, qp.findVariables(e.RightExpr)...)
case *parser.FunctionExpr:
if e.Params != nil && e.Params.Items != nil {
for _, item := range e.Params.Items.Items {
variables = append(variables, qp.findVariables(item)...)
}
}
case *parser.ColumnExpr:
variables = append(variables, qp.findVariables(e.Expr)...)
case *parser.ParamExprList:
if e.Items != nil {
for _, item := range e.Items.Items {
variables = append(variables, qp.findVariables(item)...)
}
}
case *parser.SelectItem:
variables = append(variables, qp.findVariables(e.Expr)...)
case *parser.IndexOperation:
variables = append(variables, qp.findVariables(e.Object)...)
variables = append(variables, qp.findVariables(e.Index)...)
}
return variables
}
// createExistsCheck creates an EXISTS check for a column/map field
func (qp *QueryProcessor) createExistsCheck(expr parser.Expr, _ string) (parser.Expr, bool, error) {
switch e := expr.(type) {
case *parser.BinaryOperation:
// Handle map field access like "attributes['http.method'] = $http_method"
if indexOp, ok := e.LeftExpr.(*parser.IndexOperation); ok {
// Create a "has" function check for maps
functionName := &parser.Ident{
Name: "has",
}
// Create function parameters with the map and the key
params := &parser.ParamExprList{
Items: &parser.ColumnExprList{
Items: []parser.Expr{
indexOp.Object, // The map name (e.g., "attributes")
indexOp.Index, // The key (e.g., "'http.method'")
},
},
}
return &parser.FunctionExpr{
Name: functionName,
Params: params,
}, true, nil
}
// Handle direct field comparisons like "field = $variable"
if ident, ok := e.LeftExpr.(*parser.Ident); ok && !strings.HasPrefix(ident.Name, "$") {
// For regular columns, we might want to check if the column exists or has a non-null value
functionName := &parser.Ident{
Name: "isNotNull",
}
// Create function parameters
params := &parser.ParamExprList{
Items: &parser.ColumnExprList{
Items: []parser.Expr{
ident, // The field name
},
},
}
return &parser.FunctionExpr{
Name: functionName,
Params: params,
}, true, nil
} else if ident, ok := e.RightExpr.(*parser.Ident); ok && !strings.HasPrefix(ident.Name, "$") {
// For regular columns, but variable is on the left
functionName := &parser.Ident{
Name: "isNotNull",
}
params := &parser.ParamExprList{
Items: &parser.ColumnExprList{
Items: []parser.Expr{
ident, // The field name
},
},
}
return &parser.FunctionExpr{
Name: functionName,
Params: params,
}, true, nil
}
// Handle IN clauses like "field IN ($variables)"
if e.Operation == "IN" || e.Operation == "NOT IN" {
if ident, ok := e.LeftExpr.(*parser.Ident); ok && !strings.HasPrefix(ident.Name, "$") {
// For IN clauses, we might just check if the field exists
functionName := &parser.Ident{
Name: "isNotNull",
}
params := &parser.ParamExprList{
Items: &parser.ColumnExprList{
Items: []parser.Expr{
ident, // The field name
},
},
}
return &parser.FunctionExpr{
Name: functionName,
Params: params,
}, true, nil
}
}
}
// If we couldn't transform it to an EXISTS check, keep the original
return expr, false, nil
}

View File
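A minimal usage sketch for the lower-level API above, written as if it lived in the same clickhouse package (an example test file would be the natural home). The table and column names are hypothetical, and the exact whitespace of the reconstructed query depends on the parser's String() output:

package clickhouse

import (
	"fmt"

	"github.com/AfterShip/clickhouse-sql-parser/parser"
)

// ExampleQueryProcessor_ProcessQuery removes every filter that references
// the $service_name variable and keeps all other filters.
func ExampleQueryProcessor_ProcessQuery() {
	qp := NewQueryProcessor()
	query := `SELECT trace_id, name
		FROM signoz_traces.signoz_index_v3
		WHERE service_name = $service_name AND duration >= 100`

	transformed, err := qp.ProcessQuery(query, func(variableName string, _ parser.Expr) FilterAction {
		if variableName == "service_name" {
			return RemoveFilter
		}
		return KeepFilter
	})
	if err != nil {
		fmt.Println("transform failed:", err)
		return
	}
	fmt.Println(transformed)
}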

@@ -0,0 +1,210 @@
package clickhouse
import (
"testing"
)
func TestTransform(t *testing.T) {
testCases := []struct {
name string
sql string
variables []VariableValue
expected string
}{
{
name: "Example 1: Only service_name is __all__",
sql: `SELECT trace_id, name, kind, status
FROM signoz_traces.signoz_index_v3
WHERE service_name = $service_name
AND operation_name IN ($operation_names)
AND duration >= $min_duration
AND attributes['http.method'] = $http_method
AND op IN (SELECT op FROM signoz_traces.operations WHERE service_name = $service_name AND kind = 'server')
AND timestamp BETWEEN $start_time AND $end_time`,
// Define our variables and their values/metadata
variables: []VariableValue{
{
Name: "service_name",
Values: []string{"__all__"}, // User selected "__all__"
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "operation_names",
Values: []string{"op1", "op2", "op3"},
IsSelectAll: false, // User selected specific values
FieldType: "array",
},
{
Name: "min_duration",
Values: []string{"100"},
IsSelectAll: false,
FieldType: "scalar",
},
{
Name: "http_method",
Values: []string{"GET", "POST"},
IsSelectAll: false,
FieldType: "map", // This is a map lookup
},
{
Name: "start_time",
Values: []string{"2023-01-01 00:00:00"},
IsSelectAll: false,
FieldType: "scalar",
},
{
Name: "end_time",
Values: []string{"2023-01-02 00:00:00"},
IsSelectAll: false,
FieldType: "scalar",
},
},
expected: `SELECT trace_id, name, kind, status FROM signoz_traces.signoz_index_v3 WHERE operation_name IN ($operation_names) AND duration >= $min_duration AND attributes['http.method'] = $http_method AND op IN (SELECT op FROM signoz_traces.operations WHERE kind = 'server') AND timestamp BETWEEN $start_time AND $end_time;`,
},
{
name: "Example 2: Multiple __all__ selections and map lookups",
sql: `SELECT trace_id, name, kind, status
FROM signoz_traces.signoz_index_v3
WHERE service_name = $service_name
AND operation_name IN ($operation_names)
AND duration >= $min_duration
AND attributes['http.method'] = $http_method
AND op IN (SELECT op FROM signoz_traces.operations WHERE service_name = $service_name AND kind = 'server')
AND timestamp BETWEEN $start_time AND $end_time`,
variables: []VariableValue{
{
Name: "service_name",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "operation_names",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "array",
},
{
Name: "min_duration",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "http_method",
Values: []string{"GET", "POST"},
IsSelectAll: false,
FieldType: "map",
},
{
Name: "start_time",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "end_time",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
},
expected: `SELECT trace_id, name, kind, status FROM signoz_traces.signoz_index_v3 WHERE attributes['http.method'] = $http_method AND op IN (SELECT op FROM signoz_traces.operations WHERE kind = 'server') AND timestamp BETWEEN $start_time AND $end_time;`,
},
{
name: "Example 3: Multiple __all__ selections and map lookups",
sql: `SELECT trace_id, name, kind, status
FROM signoz_traces.signoz_index_v3
WHERE service_name = $service_name
AND operation_name IN ($operation_names)
AND duration >= $min_duration
AND attributes['http.method'] = $http_method
AND op IN (SELECT op FROM signoz_traces.operations WHERE service_name = $service_name AND kind = 'server')
AND timestamp BETWEEN $start_time AND $end_time`,
variables: []VariableValue{
{
Name: "service_name",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "operation_names",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "array",
},
{
Name: "min_duration",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "http_method",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "map",
},
},
expected: `SELECT trace_id, name, kind, status FROM signoz_traces.signoz_index_v3 WHERE op IN (SELECT op FROM signoz_traces.operations WHERE kind = 'server') AND timestamp BETWEEN $start_time AND $end_time;`,
},
{
name: "Example 3: Multiple __all__ selections and map lookups",
sql: `SELECT trace_id, name, kind, status
FROM signoz_traces.signoz_index_v3
WHERE service_name = {{service_name}}
AND operation_name IN {{.operation_names}}
AND duration >= {{min_duration}}
AND attributes['http.method'] = $http_method
AND op IN (SELECT op FROM signoz_traces.operations WHERE service_name = {{service_name}} AND kind = 'server')`,
variables: []VariableValue{
{
Name: "service_name",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "operation_names",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "array",
},
{
Name: "min_duration",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "scalar",
},
{
Name: "http_method",
Values: []string{"__all__"},
IsSelectAll: true,
FieldType: "map",
},
},
expected: `SELECT trace_id, name, kind, status FROM signoz_traces.signoz_index_v3 WHERE op IN (SELECT op FROM signoz_traces.operations WHERE kind = 'server');`,
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
transformer := NewQueryTransformer(testCase.sql, testCase.variables)
modifiedQuery, err := transformer.Transform()
if err != nil {
t.Fatalf("Error transforming query: %v", err)
}
if modifiedQuery != testCase.expected {
t.Errorf("Expected transformed query to be:\n%s\nBut got:\n%s", testCase.expected, modifiedQuery)
}
})
}
}

View File

@@ -0,0 +1,74 @@
package clickhouse
import (
"fmt"
"strings"
"github.com/AfterShip/clickhouse-sql-parser/parser"
)
// VariableValue represents a variable's assigned value
type VariableValue struct {
Name string
Values []string
IsSelectAll bool
FieldType string // "scalar", "array", "map", etc.
}
// QueryTransformer handles the transformation of queries based on variable values
type QueryTransformer struct {
processor *QueryProcessor
variables map[string]VariableValue
originalSQL string
}
// NewQueryTransformer creates a new transformer with the given SQL and variables
func NewQueryTransformer(sql string, variables []VariableValue) *QueryTransformer {
varMap := make(map[string]VariableValue)
for _, v := range variables {
varMap[v.Name] = v
}
// for each variable, replace the `{{variable_name}}`, [[variable_name]], {{ .variable_name }}, {{.variable_name}}
// with $variable_name
for name := range varMap {
sql = strings.Replace(sql, fmt.Sprintf("{{%s}}", name), fmt.Sprintf("$%s", name), -1)
sql = strings.Replace(sql, fmt.Sprintf("[[%s]]", name), fmt.Sprintf("$%s", name), -1)
sql = strings.Replace(sql, fmt.Sprintf("{{ .%s }}", name), fmt.Sprintf("$%s", name), -1)
sql = strings.Replace(sql, fmt.Sprintf("{{.%s}}", name), fmt.Sprintf("$%s", name), -1)
}
return &QueryTransformer{
processor: NewQueryProcessor(),
variables: varMap,
originalSQL: sql,
}
}
// Transform processes the query and returns a transformed version
func (t *QueryTransformer) Transform() (string, error) {
return t.processor.ProcessQuery(t.originalSQL, t.transformFilter)
}
// transformFilter is the callback function that decides what to do with each filter
func (t *QueryTransformer) transformFilter(variableName string, expr parser.Expr) FilterAction {
// Check if we have info about this variable
varInfo, exists := t.variables[variableName]
if !exists {
// If we don't have info, keep the filter as is
return KeepFilter
}
// If the user selected "__all__", we should remove the filter
if varInfo.IsSelectAll {
return RemoveFilter
}
// For maps, we might want to check for existence rather than equality
if varInfo.FieldType == "map" {
return ReplaceWithExistsCheck
}
// Otherwise keep the filter as is (it will be filled with the actual values)
return KeepFilter
}
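
Closing with an end-to-end sketch of the higher-level API, again written as an in-package example. The query, variable names, and values are hypothetical; the filter on the select-all variable is dropped, while the filter whose variable has concrete values is kept for later substitution:

package clickhouse

import "fmt"

// ExampleQueryTransformer_Transform shows the {{...}} delimiter being
// normalized to $deployment_env before parsing, the select-all filter
// being removed, and the concrete-valued filter being kept.
func ExampleQueryTransformer_Transform() {
	sql := `SELECT trace_id, name
		FROM signoz_traces.signoz_index_v3
		WHERE deployment_env = {{deployment_env}}
		AND service_name = $service_name`

	transformer := NewQueryTransformer(sql, []VariableValue{
		{Name: "deployment_env", Values: []string{"__all__"}, IsSelectAll: true, FieldType: "scalar"},
		{Name: "service_name", Values: []string{"frontend"}, IsSelectAll: false, FieldType: "scalar"},
	})

	transformed, err := transformer.Transform()
	if err != nil {
		fmt.Println("transform failed:", err)
		return
	}
	fmt.Println(transformed)
	// Expected shape: the deployment_env filter is gone and
	// "service_name = $service_name" remains.
}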