Compare commits

1 commit

v0.93.0-cl ... not-fix-ex

| Author | SHA1 | Date |
|---|---|---|
|  | 2c8ce2a749 |  |
.github/CODEOWNERS (vendored, 4 changes)

@@ -42,7 +42,3 @@
/pkg/telemetrymetadata/ @srikanthccv
/pkg/telemetrymetrics/ @srikanthccv
/pkg/telemetrytraces/ @srikanthccv

# AuthN / AuthZ Owners

/pkg/authz/ @vikrantgupta25 @grandwizard28
.github/workflows/build-community.yaml (vendored, 2 changes)

@@ -62,7 +62,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
GO_NAME: signoz-community
GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
.github/workflows/build-enterprise.yaml (vendored, 2 changes)

@@ -93,7 +93,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./cmd/enterprise
.github/workflows/build-staging.yaml (vendored, 2 changes)

@@ -92,7 +92,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
GO_INPUT_ARTIFACT_PATH: frontend/build
GO_BUILD_CONTEXT: ./cmd/enterprise
.github/workflows/goci.yaml (vendored, 10 changes)

@@ -18,7 +18,7 @@ jobs:
with:
PRIMUS_REF: main
GO_TEST_CONTEXT: ./...
GO_VERSION: 1.24
GO_VERSION: 1.23
fmt:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||

@@ -27,7 +27,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
lint:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||

@@ -36,7 +36,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
deps:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||

@@ -45,7 +45,7 @@ jobs:
secrets: inherit
with:
PRIMUS_REF: main
GO_VERSION: 1.24
GO_VERSION: 1.23
build:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||

@@ -57,7 +57,7 @@ jobs:
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
- name: qemu-install
uses: docker/setup-qemu-action@v3
- name: aarch64-install
.github/workflows/gor-signoz-community.yaml (vendored, 4 changes)

@@ -58,7 +58,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |

@@ -122,7 +122,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"

# copy the caches from build
- name: get-sha
.github/workflows/gor-signoz.yaml (vendored, 4 changes)

@@ -72,7 +72,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"
- name: cross-compilation-tools
if: matrix.os == 'ubuntu-latest'
run: |

@@ -135,7 +135,7 @@ jobs:
- name: setup-go
uses: actions/setup-go@v5
with:
go-version: "1.24"
go-version: "1.23"

# copy the caches from build
- name: get-sha
.gitignore (vendored, 2 changes)

@@ -86,8 +86,6 @@ queries.active
.devenv/**/tmp/**
.qodo

.dev

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
@@ -5,7 +5,7 @@ COPY ./frontend/ ./
RUN CI=1 yarn install
RUN CI=1 yarn build

FROM golang:1.24-bullseye
FROM golang:1.23-bullseye

ARG OS="linux"
ARG TARGETARCH
@@ -1,7 +1,7 @@
package model

import (
"errors"
"fmt"

basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)

@@ -57,7 +57,7 @@ func Unauthorized(err error) *ApiError {
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: errors.New(s),
Err: fmt.Errorf(s),
}
}

@@ -73,7 +73,7 @@ func InternalError(err error) *ApiError {
func InternalErrorStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: errors.New(s),
Err: fmt.Errorf(s),
}
}
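The two ApiError hunks above swap between `errors.New(s)` and `fmt.Errorf(s)` for a caller-supplied string. A minimal standalone sketch of why the two are not interchangeable (helper names are illustrative, not taken from the repository): `errors.New` stores the message verbatim, while `fmt.Errorf` treats it as a format string, so a stray `%` is parsed as a formatting verb, and recent `go vet` releases warn about non-constant format strings.

```go
package main

import (
	"errors"
	"fmt"
)

// withErrorsNew stores the message verbatim.
func withErrorsNew(s string) error { return errors.New(s) }

// withFmtErrorf interprets the message as a format string,
// so '%' characters inside s are treated as verbs.
func withFmtErrorf(s string) error { return fmt.Errorf(s) }

func main() {
	s := "invalid value: 100%"
	fmt.Println(withErrorsNew(s))  // invalid value: 100%
	fmt.Println(withFmtErrorf(s)) // invalid value: 100%!(NOVERB)
}
```

Using `fmt.Errorf("%s", s)` would also avoid the verb expansion; which of the two forms each compared ref keeps is whatever the hunks above record.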
@@ -174,31 +174,6 @@
cursor: pointer;
}

.time-input-prefix {
.live-dot-icon {
width: 6px;
height: 6px;
border-radius: 50%;
background-color: var(--bg-forest-500);
animation: ripple 1s infinite;

margin-right: 4px;
margin-left: 4px;
}
}

@keyframes ripple {
0% {
box-shadow: 0 0 0 0 rgba(245, 158, 11, 0.4);
}
70% {
box-shadow: 0 0 0 6px rgba(245, 158, 11, 0);
}
100% {
box-shadow: 0 0 0 0 rgba(245, 158, 11, 0);
}
}

.time-input-suffix-icon-badge {
display: flex;
align-items: center;
@@ -59,9 +59,7 @@ interface CustomTimePickerProps {
customDateTimeVisible?: boolean;
setCustomDTPickerVisible?: Dispatch<SetStateAction<boolean>>;
onCustomDateHandler?: (dateTimeRange: DateTimeRangeType) => void;
showLiveLogs?: boolean;
onGoLive?: () => void;
onExitLiveLogs?: () => void;
handleGoLive?: () => void;
}

function CustomTimePicker({

@@ -78,9 +76,7 @@ function CustomTimePicker({
customDateTimeVisible,
setCustomDTPickerVisible,
onCustomDateHandler,
onGoLive,
onExitLiveLogs,
showLiveLogs,
handleGoLive,
}: CustomTimePickerProps): JSX.Element {
const [
selectedTimePlaceholderValue,

@@ -169,13 +165,9 @@ function CustomTimePicker({
};

useEffect(() => {
if (showLiveLogs) {
setSelectedTimePlaceholderValue('Live');
} else {
const value = getSelectedTimeRangeLabel(selectedTime, selectedValue);
setSelectedTimePlaceholderValue(value);
}
}, [selectedTime, selectedValue, showLiveLogs]);
const value = getSelectedTimeRangeLabel(selectedTime, selectedValue);
setSelectedTimePlaceholderValue(value);
}, [selectedTime, selectedValue]);

const hide = (): void => {
setOpen(false);

@@ -346,28 +338,6 @@ function CustomTimePicker({
return '';
};

const getInputPrefix = (): JSX.Element => {
if (showLiveLogs) {
return (
<div className="time-input-prefix">
<div className="live-dot-icon" />
</div>
);
}

return (
<div className="time-input-prefix">
{inputValue && inputStatus === 'success' ? (
<CheckCircle size={14} color="#51E7A8" />
) : (
<Tooltip title="Enter time in format (e.g., 1m, 2h, 3d, 4w)">
<Clock size={14} className="cursor-pointer" />
</Tooltip>
)}
</div>
);
};

return (
<div className="custom-time-picker">
<Tooltip title={getTooltipTitle()} placement="top">

@@ -387,8 +357,7 @@ function CustomTimePicker({
setCustomDTPickerVisible={defaultTo(setCustomDTPickerVisible, noop)}
onCustomDateHandler={defaultTo(onCustomDateHandler, noop)}
onSelectHandler={handleSelect}
onGoLive={defaultTo(onGoLive, noop)}
onExitLiveLogs={defaultTo(onExitLiveLogs, noop)}
handleGoLive={defaultTo(handleGoLive, noop)}
options={items}
selectedTime={selectedTime}
activeView={activeView}

@@ -423,7 +392,17 @@ function CustomTimePicker({
onBlur={handleBlur}
onChange={handleInputChange}
data-1p-ignore
prefix={getInputPrefix()}
prefix={
<div className="time-input-prefix">
{inputValue && inputStatus === 'success' ? (
<CheckCircle size={14} color="#51E7A8" />
) : (
<Tooltip title="Enter time in format (e.g., 1m, 2h, 3d, 4w)">
<Clock size={14} className="cursor-pointer" />
</Tooltip>
)}
</div>
}
suffix={
<div className="time-input-suffix">
{!!isTimezoneOverridden && activeTimezoneOffset && (

@@ -460,8 +439,6 @@ CustomTimePicker.defaultProps = {
customDateTimeVisible: false,
setCustomDTPickerVisible: noop,
onCustomDateHandler: noop,
onGoLive: noop,
handleGoLive: noop,
onCustomTimeStatusUpdate: noop,
onExitLiveLogs: noop,
showLiveLogs: false,
};
@@ -6,7 +6,6 @@ import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
import DatePickerV2 from 'components/DatePickerV2/DatePickerV2';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
|
||||
import {
|
||||
@@ -17,14 +16,7 @@ import {
|
||||
import dayjs from 'dayjs';
|
||||
import { Clock, PenLine } from 'lucide-react';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import {
|
||||
Dispatch,
|
||||
SetStateAction,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { Dispatch, SetStateAction, useEffect, useMemo, useState } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { getCustomTimeRanges } from 'utils/customTimeRangeUtils';
|
||||
|
||||
@@ -40,13 +32,12 @@ interface CustomTimePickerPopoverContentProps {
|
||||
lexicalContext?: LexicalContext,
|
||||
) => void;
|
||||
onSelectHandler: (label: string, value: string) => void;
|
||||
onGoLive: () => void;
|
||||
handleGoLive: () => void;
|
||||
selectedTime: string;
|
||||
activeView: 'datetime' | 'timezone';
|
||||
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
|
||||
isOpenedFromFooter: boolean;
|
||||
setIsOpenedFromFooter: Dispatch<SetStateAction<boolean>>;
|
||||
onExitLiveLogs: () => void;
|
||||
}
|
||||
|
||||
interface RecentlyUsedDateTimeRange {
|
||||
@@ -65,13 +56,12 @@ function CustomTimePickerPopoverContent({
|
||||
setCustomDTPickerVisible,
|
||||
onCustomDateHandler,
|
||||
onSelectHandler,
|
||||
onGoLive,
|
||||
handleGoLive,
|
||||
selectedTime,
|
||||
activeView,
|
||||
setActiveView,
|
||||
isOpenedFromFooter,
|
||||
setIsOpenedFromFooter,
|
||||
onExitLiveLogs,
|
||||
}: CustomTimePickerPopoverContentProps): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
@@ -79,19 +69,6 @@ function CustomTimePickerPopoverContent({
|
||||
pathname,
|
||||
]);
|
||||
|
||||
const url = new URLSearchParams(window.location.search);
|
||||
|
||||
let panelTypeFromURL = url.get(QueryParams.panelTypes);
|
||||
|
||||
try {
|
||||
panelTypeFromURL = JSON.parse(panelTypeFromURL as string);
|
||||
} catch {
|
||||
// fallback → leave as-is
|
||||
}
|
||||
|
||||
const isLogsListView =
|
||||
panelTypeFromURL !== 'table' && panelTypeFromURL !== 'graph'; // we do not select list view in the url
|
||||
|
||||
const { timezone } = useTimezone();
|
||||
const activeTimezoneOffset = timezone.offset;
|
||||
|
||||
@@ -99,12 +76,6 @@ function CustomTimePickerPopoverContent({
|
||||
RecentlyUsedDateTimeRange[]
|
||||
>([]);
|
||||
|
||||
const handleExitLiveLogs = useCallback((): void => {
|
||||
if (isLogsExplorerPage) {
|
||||
onExitLiveLogs();
|
||||
}
|
||||
}, [isLogsExplorerPage, onExitLiveLogs]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!customDateTimeVisible) {
|
||||
const customTimeRanges = getCustomTimeRanges();
|
||||
@@ -136,7 +107,6 @@ function CustomTimePickerPopoverContent({
|
||||
className="time-btns"
|
||||
key={option.label + option.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onSelectHandler(option.label, option.value);
|
||||
}}
|
||||
>
|
||||
@@ -170,17 +140,12 @@ function CustomTimePickerPopoverContent({
|
||||
);
|
||||
}
|
||||
|
||||
const handleGoLive = (): void => {
|
||||
onGoLive();
|
||||
setIsOpen(false);
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="date-time-popover">
|
||||
{!customDateTimeVisible && (
|
||||
<div className="date-time-options">
|
||||
{isLogsExplorerPage && isLogsListView && (
|
||||
{isLogsExplorerPage && (
|
||||
<Button className="data-time-live" type="text" onClick={handleGoLive}>
|
||||
Live
|
||||
</Button>
|
||||
@@ -190,7 +155,6 @@ function CustomTimePickerPopoverContent({
|
||||
type="text"
|
||||
key={option.label + option.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onSelectHandler(option.label, option.value);
|
||||
}}
|
||||
className={cx(
|
||||
@@ -205,6 +169,7 @@ function CustomTimePickerPopoverContent({
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div
|
||||
className={cx(
|
||||
'relative-date-time',
|
||||
@@ -234,14 +199,12 @@ function CustomTimePickerPopoverContent({
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}
|
||||
}}
|
||||
key={range.value}
|
||||
onClick={(): void => {
|
||||
handleExitLiveLogs();
|
||||
onCustomDateHandler([dayjs(range.from), dayjs(range.to)]);
|
||||
setIsOpen(false);
|
||||
}}
|
||||
|
||||
@@ -13,7 +13,7 @@ import { useCallback } from 'react';
|
||||
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
import { popupContainer } from 'utils/selectPopupContainer';
|
||||
|
||||
import { SpinnerWrapper } from './styles';
|
||||
import { SpinnerWrapper, Wrapper } from './styles';
|
||||
|
||||
function ListViewPanel(): JSX.Element {
|
||||
const { config } = useOptionsMenu({
|
||||
@@ -42,7 +42,7 @@ function ListViewPanel(): JSX.Element {
|
||||
}, [config]);
|
||||
|
||||
return (
|
||||
<div className="live-logs-settings-panel">
|
||||
<Wrapper>
|
||||
<Select
|
||||
getPopupContainer={popupContainer}
|
||||
style={defaultSelectStyle}
|
||||
@@ -68,7 +68,7 @@ function ListViewPanel(): JSX.Element {
|
||||
<Spinner style={{ height: 'auto' }} />
|
||||
</SpinnerWrapper>
|
||||
)}
|
||||
</div>
|
||||
</Wrapper>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
.live-logs-chart-container {
|
||||
height: 200px;
|
||||
min-height: 200px;
|
||||
border-left: none;
|
||||
border-right: none;
|
||||
}
|
||||
|
||||
.live-logs-settings-panel {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
padding: 8px;
|
||||
border-bottom: 1px solid var(--bg-ink-300);
|
||||
|
||||
.live-logs-frequency-chart-view-controller {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.live-logs-settings-panel {
|
||||
border-bottom: 1px solid var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
@@ -1,89 +1,46 @@
|
||||
import './LiveLogsContainer.styles.scss';
|
||||
|
||||
import { Button, Switch, Typography } from 'antd';
|
||||
import LogsFormatOptionsMenu from 'components/LogsFormatOptionsMenu/LogsFormatOptionsMenu';
|
||||
import { Col } from 'antd';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { MAX_LOGS_LIST_SIZE } from 'constants/liveTail';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { themeColors } from 'constants/theme';
|
||||
import GoToTop from 'container/GoToTop';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import FiltersInput from 'container/LiveLogs/FiltersInput';
|
||||
import LiveLogsTopNav from 'container/LiveLogsTopNav';
|
||||
import { useGetCompositeQueryParam } from 'hooks/queryBuilder/useGetCompositeQueryParam';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import useClickOutside from 'hooks/useClickOutside';
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import { useEventSourceEvent } from 'hooks/useEventSourceEvent';
|
||||
import { Sliders } from 'lucide-react';
|
||||
import { prepareQueryRangePayload } from 'lib/dashboard/prepareQueryRangePayload';
|
||||
import { useEventSource } from 'providers/EventSource';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
import { validateQuery } from 'utils/queryValidationUtils';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
import { idObject } from '../constants';
|
||||
import ListViewPanel from '../ListViewPanel';
|
||||
import LiveLogsList from '../LiveLogsList';
|
||||
import { ILiveLogsLog } from '../LiveLogsList/types';
|
||||
import LiveLogsListChart from '../LiveLogsListChart';
|
||||
import { QueryHistoryState } from '../types';
|
||||
import { prepareQueryByFilter } from '../utils';
|
||||
import { ContentWrapper, LiveLogsChart, Wrapper } from './styles';
|
||||
|
||||
function LiveLogsContainer(): JSX.Element {
|
||||
const location = useLocation();
|
||||
const [logs, setLogs] = useState<ILiveLogsLog[]>([]);
|
||||
const { currentQuery, stagedQuery } = useQueryBuilder();
|
||||
const [showLiveLogsFrequencyChart, setShowLiveLogsFrequencyChart] = useState(
|
||||
true,
|
||||
);
|
||||
const [logs, setLogs] = useState<ILog[]>([]);
|
||||
|
||||
const listQuery = useMemo(() => {
|
||||
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
|
||||
|
||||
return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
|
||||
}, [stagedQuery]);
|
||||
const { stagedQuery } = useQueryBuilder();
|
||||
|
||||
const queryLocationState = location.state as QueryHistoryState;
|
||||
|
||||
const batchedEventsRef = useRef<ILiveLogsLog[]>([]);
|
||||
const batchedEventsRef = useRef<ILog[]>([]);
|
||||
|
||||
const [showFormatMenuItems, setShowFormatMenuItems] = useState(false);
|
||||
const menuRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const prevFilterExpressionRef = useRef<string | null>(null);
|
||||
|
||||
const { options, config } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
|
||||
});
|
||||
|
||||
const formatItems = [
|
||||
{
|
||||
key: 'raw',
|
||||
label: 'Raw',
|
||||
data: {
|
||||
title: 'max lines per row',
|
||||
},
|
||||
},
|
||||
{
|
||||
key: 'list',
|
||||
label: 'Default',
|
||||
},
|
||||
{
|
||||
key: 'table',
|
||||
label: 'Column',
|
||||
data: {
|
||||
title: 'columns',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const handleToggleShowFormatOptions = (): void =>
|
||||
setShowFormatMenuItems(!showFormatMenuItems);
|
||||
|
||||
useClickOutside({
|
||||
ref: menuRef,
|
||||
onClickOutside: () => {
|
||||
if (showFormatMenuItems) {
|
||||
setShowFormatMenuItems(false);
|
||||
}
|
||||
},
|
||||
});
|
||||
const { selectedTime: globalSelectedTime } = useSelector<
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
|
||||
const {
|
||||
handleStartOpenConnection,
|
||||
@@ -96,7 +53,7 @@ function LiveLogsContainer(): JSX.Element {
|
||||
|
||||
const compositeQuery = useGetCompositeQueryParam();
|
||||
|
||||
const updateLogs = useCallback((newLogs: ILiveLogsLog[]) => {
|
||||
const updateLogs = useCallback((newLogs: ILog[]) => {
|
||||
setLogs((prevState) =>
|
||||
[...newLogs, ...prevState].slice(0, MAX_LOGS_LIST_SIZE),
|
||||
);
|
||||
@@ -110,7 +67,7 @@ function LiveLogsContainer(): JSX.Element {
|
||||
}, 500);
|
||||
|
||||
const batchLiveLog = useCallback(
|
||||
(log: ILiveLogsLog): void => {
|
||||
(log: ILog): void => {
|
||||
batchedEventsRef.current.push(log);
|
||||
|
||||
debouncedUpdateLogs();
|
||||
@@ -120,7 +77,7 @@ function LiveLogsContainer(): JSX.Element {
|
||||
|
||||
const handleGetLiveLogs = useCallback(
|
||||
(event: MessageEvent<string>) => {
|
||||
const data: ILiveLogsLog = JSON.parse(event?.data);
|
||||
const data: ILog = JSON.parse(event.data);
|
||||
|
||||
batchLiveLog(data);
|
||||
},
|
||||
@@ -134,65 +91,72 @@ function LiveLogsContainer(): JSX.Element {
|
||||
useEventSourceEvent('message', handleGetLiveLogs);
|
||||
useEventSourceEvent('error', handleError);
|
||||
|
||||
const openConnection = useCallback(
|
||||
(filterExpression?: string | null) => {
|
||||
handleStartOpenConnection(filterExpression || '');
|
||||
const getPreparedQuery = useCallback(
|
||||
(query: Query): Query => {
|
||||
const firstLogId: string | null = logs.length ? logs[0].id : null;
|
||||
|
||||
const preparedQuery: Query = prepareQueryByFilter(
|
||||
query,
|
||||
idObject,
|
||||
firstLogId,
|
||||
);
|
||||
|
||||
return preparedQuery;
|
||||
},
|
||||
[handleStartOpenConnection],
|
||||
[logs],
|
||||
);
|
||||
|
||||
const openConnection = useCallback(
|
||||
(query: Query) => {
|
||||
const { queryPayload } = prepareQueryRangePayload({
|
||||
query,
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
globalSelectedInterval: globalSelectedTime,
|
||||
});
|
||||
|
||||
const encodedQueryPayload = encodeURIComponent(JSON.stringify(queryPayload));
|
||||
const queryString = `q=${encodedQueryPayload}`;
|
||||
|
||||
handleStartOpenConnection({ queryString });
|
||||
},
|
||||
[globalSelectedTime, handleStartOpenConnection],
|
||||
);
|
||||
|
||||
const handleStartNewConnection = useCallback(
|
||||
(filterExpression?: string | null) => {
|
||||
(query: Query) => {
|
||||
handleCloseConnection();
|
||||
|
||||
openConnection(filterExpression);
|
||||
const preparedQuery = getPreparedQuery(query);
|
||||
|
||||
openConnection(preparedQuery);
|
||||
},
|
||||
[handleCloseConnection, openConnection],
|
||||
[getPreparedQuery, handleCloseConnection, openConnection],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
const currentFilterExpression =
|
||||
currentQuery?.builder.queryData[0]?.filter?.expression?.trim() || '';
|
||||
if (!compositeQuery) return;
|
||||
|
||||
// Check if filterExpression has actually changed
|
||||
if (
|
||||
!prevFilterExpressionRef.current ||
|
||||
prevFilterExpressionRef.current !== currentFilterExpression
|
||||
(initialLoading && !isConnectionLoading) ||
|
||||
compositeQuery.id !== stagedQuery?.id
|
||||
) {
|
||||
const validationResult = validateQuery(currentFilterExpression || '');
|
||||
|
||||
if (validationResult.isValid) {
|
||||
setLogs([]);
|
||||
batchedEventsRef.current = [];
|
||||
handleStartNewConnection(currentFilterExpression);
|
||||
}
|
||||
|
||||
prevFilterExpressionRef.current = currentFilterExpression || null;
|
||||
handleStartNewConnection(compositeQuery);
|
||||
}
|
||||
}, [currentQuery, handleStartNewConnection]);
|
||||
|
||||
useEffect(() => {
|
||||
if (initialLoading && !isConnectionLoading) {
|
||||
const currentFilterExpression =
|
||||
currentQuery?.builder.queryData[0]?.filter?.expression?.trim() || '';
|
||||
|
||||
const validationResult = validateQuery(currentFilterExpression || '');
|
||||
|
||||
if (validationResult.isValid) {
|
||||
handleStartNewConnection(currentFilterExpression);
|
||||
prevFilterExpressionRef.current = currentFilterExpression || null;
|
||||
} else {
|
||||
handleStartNewConnection(null);
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [initialLoading, isConnectionLoading, handleStartNewConnection]);
|
||||
}, [
|
||||
compositeQuery,
|
||||
initialLoading,
|
||||
stagedQuery,
|
||||
isConnectionLoading,
|
||||
openConnection,
|
||||
handleStartNewConnection,
|
||||
]);
|
||||
|
||||
useEffect((): (() => void) | undefined => {
|
||||
if (isConnectionError && reconnectDueToError) {
|
||||
if (isConnectionError && reconnectDueToError && compositeQuery) {
|
||||
// Small delay to prevent immediate reconnection attempts
|
||||
const reconnectTimer = setTimeout(() => {
|
||||
handleStartNewConnection();
|
||||
handleStartNewConnection(compositeQuery);
|
||||
}, 1000);
|
||||
|
||||
return (): void => clearTimeout(reconnectTimer);
|
||||
@@ -205,70 +169,50 @@ function LiveLogsContainer(): JSX.Element {
|
||||
handleStartNewConnection,
|
||||
]);
|
||||
|
||||
// clean up the connection when the component unmounts
|
||||
useEffect(
|
||||
() => (): void => {
|
||||
handleCloseConnection();
|
||||
},
|
||||
[handleCloseConnection],
|
||||
);
|
||||
useEffect(() => {
|
||||
const prefetchedList = queryLocationState?.listQueryPayload[0]?.list;
|
||||
|
||||
const handleToggleFrequencyChart = useCallback(() => {
|
||||
setShowLiveLogsFrequencyChart(!showLiveLogsFrequencyChart);
|
||||
}, [showLiveLogsFrequencyChart]);
|
||||
if (prefetchedList) {
|
||||
const prefetchedLogs: ILog[] = prefetchedList
|
||||
.map((item) => ({
|
||||
...item.data,
|
||||
timestamp: item.timestamp,
|
||||
}))
|
||||
.reverse();
|
||||
|
||||
updateLogs(prefetchedLogs);
|
||||
}
|
||||
}, [queryLocationState, updateLogs]);
|
||||
|
||||
return (
|
||||
<div className="live-logs-container">
|
||||
<div className="live-logs-content">
|
||||
<div className="live-logs-settings-panel">
|
||||
<div className="live-logs-frequency-chart-view-controller">
|
||||
<Typography>Frequency chart</Typography>
|
||||
<Switch
|
||||
size="small"
|
||||
checked={showLiveLogsFrequencyChart}
|
||||
defaultChecked
|
||||
onChange={handleToggleFrequencyChart}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="format-options-container" ref={menuRef}>
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
onClick={handleToggleShowFormatOptions}
|
||||
icon={<Sliders size={14} />}
|
||||
/>
|
||||
|
||||
{showFormatMenuItems && (
|
||||
<LogsFormatOptionsMenu
|
||||
title="FORMAT"
|
||||
items={formatItems}
|
||||
selectedOptionFormat={options.format}
|
||||
config={config}
|
||||
<Wrapper>
|
||||
<LiveLogsTopNav />
|
||||
<ContentWrapper gutter={[0, 20]} style={{ color: themeColors.lightWhite }}>
|
||||
<Col span={24}>
|
||||
<FiltersInput />
|
||||
</Col>
|
||||
{initialLoading && logs.length === 0 ? (
|
||||
<Col span={24}>
|
||||
<Spinner style={{ height: 'auto' }} tip="Fetching Logs" />
|
||||
</Col>
|
||||
) : (
|
||||
<>
|
||||
<Col span={24}>
|
||||
<LiveLogsChart
|
||||
initialData={queryLocationState?.graphQueryPayload || null}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{showLiveLogsFrequencyChart && (
|
||||
<div className="live-logs-chart-container">
|
||||
<LiveLogsListChart
|
||||
initialData={queryLocationState?.graphQueryPayload || null}
|
||||
className="live-logs-chart"
|
||||
isShowingLiveLogs
|
||||
/>
|
||||
</div>
|
||||
</Col>
|
||||
<Col span={24}>
|
||||
<ListViewPanel />
|
||||
</Col>
|
||||
<Col span={24}>
|
||||
<LiveLogsList logs={logs} />
|
||||
</Col>
|
||||
</>
|
||||
)}
|
||||
|
||||
<div className="live-logs-list-container">
|
||||
<LiveLogsList
|
||||
logs={logs}
|
||||
isLoading={initialLoading && logs.length === 0}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<GoToTop />
|
||||
</div>
|
||||
<GoToTop />
|
||||
</ContentWrapper>
|
||||
</Wrapper>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
.live-logs-container {
|
||||
.live-logs-content {
|
||||
.live-logs-chart-container {
|
||||
padding: 0px 8px;
|
||||
|
||||
.logs-frequency-chart {
|
||||
.ant-card-body {
|
||||
height: 140px;
|
||||
min-height: 140px;
|
||||
padding: 0 16px 22px 16px;
|
||||
font-family: 'Geist Mono';
|
||||
}
|
||||
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.live-logs-list {
|
||||
.live-logs-list-loading {
|
||||
padding: 16px;
|
||||
text-align: center;
|
||||
color: var(--text-vanilla-100);
|
||||
}
|
||||
|
||||
.live-logs-list-loading {
|
||||
height: 100%;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
|
||||
.loading-live-logs-content {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
flex-direction: column;
|
||||
|
||||
.loading-gif {
|
||||
height: 72px;
|
||||
margin-left: -24px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.live-logs-list-loading {
|
||||
.loading-live-logs-content {
|
||||
.ant-typography {
|
||||
color: var(--text-ink-500);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,23 +1,24 @@
|
||||
import './LiveLogsList.styles.scss';
|
||||
|
||||
import { Card, Typography } from 'antd';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
import ListLogView from 'components/Logs/ListLogView';
|
||||
import RawLogView from 'components/Logs/RawLogView';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import Spinner from 'components/Spinner';
|
||||
import { CARD_BODY_STYLE } from 'constants/card';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { OptionFormatTypes } from 'constants/optionsFormatTypes';
|
||||
import InfinityTableView from 'container/LogsExplorerList/InfinityTableView';
|
||||
import { InfinityWrapperStyled } from 'container/LogsExplorerList/styles';
|
||||
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
|
||||
import { Heading } from 'container/LogsTable/styles';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import { useEventSource } from 'providers/EventSource';
|
||||
import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
|
||||
// interfaces
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
@@ -25,9 +26,11 @@ import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
|
||||
import { LiveLogsListProps } from './types';
|
||||
|
||||
function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
|
||||
function LiveLogsList({ logs }: LiveLogsListProps): JSX.Element {
|
||||
const ref = useRef<VirtuosoHandle>(null);
|
||||
|
||||
const { t } = useTranslation(['logs']);
|
||||
|
||||
const { isConnectionLoading } = useEventSource();
|
||||
|
||||
const { activeLogId } = useCopyLogLink();
|
||||
@@ -40,12 +43,6 @@ function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
|
||||
onSetActiveLog,
|
||||
} = useActiveLog();
|
||||
|
||||
// get only data from the logs object
|
||||
const formattedLogs: ILog[] = useMemo(
|
||||
() => logs.map((log) => log?.data).flat(),
|
||||
[logs],
|
||||
);
|
||||
|
||||
const { options } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
|
||||
dataSource: DataSource.LOGS,
|
||||
@@ -53,8 +50,8 @@ function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
|
||||
});
|
||||
|
||||
const activeLogIndex = useMemo(
|
||||
() => formattedLogs.findIndex(({ id }) => id === activeLogId),
|
||||
[formattedLogs, activeLogId],
|
||||
() => logs.findIndex(({ id }) => id === activeLogId),
|
||||
[logs, activeLogId],
|
||||
);
|
||||
|
||||
const selectedFields = convertKeysToColumnFields([
|
||||
@@ -108,39 +105,30 @@ function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
|
||||
});
|
||||
}, [activeLogId, activeLogIndex]);
|
||||
|
||||
const isLoadingList = isConnectionLoading && formattedLogs.length === 0;
|
||||
const isLoadingList = isConnectionLoading && logs.length === 0;
|
||||
|
||||
const renderLoading = useCallback(
|
||||
() => (
|
||||
<div className="live-logs-list-loading">
|
||||
<div className="loading-live-logs-content">
|
||||
<img
|
||||
className="loading-gif"
|
||||
src="/Icons/loading-plane.gif"
|
||||
alt="wait-icon"
|
||||
/>
|
||||
|
||||
<Typography>Fetching live logs...</Typography>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[],
|
||||
);
|
||||
if (isLoadingList) {
|
||||
return <Spinner style={{ height: 'auto' }} tip="Fetching Logs" />;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="live-logs-list">
|
||||
{(formattedLogs.length === 0 || isLoading || isLoadingList) &&
|
||||
renderLoading()}
|
||||
<>
|
||||
{options.format !== OptionFormatTypes.TABLE && (
|
||||
<Heading>
|
||||
<Typography.Text>Event</Typography.Text>
|
||||
</Heading>
|
||||
)}
|
||||
|
||||
{formattedLogs.length !== 0 && (
|
||||
{logs.length === 0 && <Typography>{t('fetching_log_lines')}</Typography>}
|
||||
|
||||
{logs.length !== 0 && (
|
||||
<InfinityWrapperStyled>
|
||||
{options.format === OptionFormatTypes.TABLE ? (
|
||||
<InfinityTableView
|
||||
ref={ref}
|
||||
isLoading={false}
|
||||
tableViewProps={{
|
||||
logs: formattedLogs,
|
||||
logs,
|
||||
fields: selectedFields,
|
||||
linesPerRow: options.maxLines,
|
||||
fontSize: options.fontSize,
|
||||
@@ -154,8 +142,8 @@ function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
|
||||
<Virtuoso
|
||||
ref={ref}
|
||||
initialTopMostItemIndex={activeLogIndex !== -1 ? activeLogIndex : 0}
|
||||
data={formattedLogs}
|
||||
totalCount={formattedLogs.length}
|
||||
data={logs}
|
||||
totalCount={logs.length}
|
||||
itemContent={getItemContent}
|
||||
/>
|
||||
</OverlayScrollbar>
|
||||
@@ -163,18 +151,15 @@ function LiveLogsList({ logs, isLoading }: LiveLogsListProps): JSX.Element {
|
||||
)}
|
||||
</InfinityWrapperStyled>
|
||||
)}
|
||||
|
||||
{activeLog && (
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onGroupByAttribute={onGroupByAttribute}
|
||||
onClickActionItem={onAddToQuery}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onGroupByAttribute={onGroupByAttribute}
|
||||
onClickActionItem={onAddToQuery}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,5 @@
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
export interface ILiveLogsLog {
|
||||
data: ILog[];
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
export type LiveLogsListProps = {
|
||||
logs: ILiveLogsLog[];
|
||||
isLoading: boolean;
|
||||
logs: ILog[];
|
||||
};
|
||||
|
||||
@@ -9,33 +9,24 @@ import { useMemo } from 'react';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { QueryData } from 'types/api/widgets/getQuery';
|
||||
import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder';
|
||||
import { validateQuery } from 'utils/queryValidationUtils';
|
||||
|
||||
import { LiveLogsListChartProps } from './types';
|
||||
|
||||
function LiveLogsListChart({
|
||||
className,
|
||||
initialData,
|
||||
isShowingLiveLogs = false,
|
||||
}: LiveLogsListChartProps): JSX.Element {
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
const { stagedQuery } = useQueryBuilder();
|
||||
const { isConnectionOpen } = useEventSource();
|
||||
|
||||
const listChartQuery: Query | null = useMemo(() => {
|
||||
if (!currentQuery) return null;
|
||||
|
||||
const currentFilterExpression =
|
||||
currentQuery?.builder.queryData[0]?.filter?.expression?.trim() || '';
|
||||
|
||||
const validationResult = validateQuery(currentFilterExpression || '');
|
||||
|
||||
if (!validationResult.isValid) return null;
|
||||
if (!stagedQuery) return null;
|
||||
|
||||
return {
|
||||
...currentQuery,
|
||||
...stagedQuery,
|
||||
builder: {
|
||||
...currentQuery.builder,
|
||||
queryData: currentQuery.builder.queryData.map((item) => ({
|
||||
...stagedQuery.builder,
|
||||
queryData: stagedQuery.builder.queryData.map((item) => ({
|
||||
...item,
|
||||
disabled: false,
|
||||
aggregateOperator: LogsAggregatorOperator.COUNT,
|
||||
@@ -48,7 +39,7 @@ function LiveLogsListChart({
|
||||
})),
|
||||
},
|
||||
};
|
||||
}, [currentQuery]);
|
||||
}, [stagedQuery]);
|
||||
|
||||
const { data, isFetching } = useGetExplorerQueryRange(
|
||||
listChartQuery,
|
||||
@@ -71,15 +62,12 @@ function LiveLogsListChart({
|
||||
}, [data, initialData]);
|
||||
|
||||
return (
|
||||
<div className="live-logs-chart-container">
|
||||
<LogsExplorerChart
|
||||
isLoading={initialData ? false : isFetching}
|
||||
data={chartData}
|
||||
isLabelEnabled={false}
|
||||
className={className}
|
||||
isLogsExplorerViews={isShowingLiveLogs}
|
||||
/>
|
||||
</div>
|
||||
<LogsExplorerChart
|
||||
isLoading={initialData ? false : isFetching}
|
||||
data={chartData}
|
||||
isLabelEnabled={false}
|
||||
className={className}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -3,5 +3,4 @@ import { QueryData } from 'types/api/widgets/getQuery';
|
||||
export type LiveLogsListChartProps = {
|
||||
className?: string;
|
||||
initialData: QueryData[] | null;
|
||||
isShowingLiveLogs: boolean;
|
||||
};
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
import { PauseCircleFilled, PlayCircleFilled } from '@ant-design/icons';
|
||||
import { Button } from 'antd';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useEventSource } from 'providers/EventSource';
|
||||
import { useCallback, useEffect } from 'react';
|
||||
import { validateQuery } from 'utils/queryValidationUtils';
|
||||
|
||||
function LiveLogsPauseResume(): JSX.Element {
|
||||
const {
|
||||
isConnectionOpen,
|
||||
isConnectionLoading,
|
||||
initialLoading,
|
||||
handleCloseConnection,
|
||||
handleStartOpenConnection,
|
||||
handleSetInitialLoading,
|
||||
} = useEventSource();
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
|
||||
const isPlaying = isConnectionOpen || isConnectionLoading || initialLoading;
|
||||
|
||||
const openConnection = useCallback(
|
||||
(filterExpression?: string | null) => {
|
||||
handleStartOpenConnection(filterExpression || '');
|
||||
},
|
||||
[handleStartOpenConnection],
|
||||
);
|
||||
|
||||
const handleStartNewConnection = useCallback(
|
||||
(filterExpression?: string | null) => {
|
||||
handleCloseConnection();
|
||||
|
||||
openConnection(filterExpression);
|
||||
},
|
||||
[handleCloseConnection, openConnection],
|
||||
);
|
||||
|
||||
const onLiveButtonClick = useCallback(() => {
|
||||
if (initialLoading) {
|
||||
handleSetInitialLoading(false);
|
||||
}
|
||||
|
||||
if ((!isConnectionOpen && isConnectionLoading) || isConnectionOpen) {
|
||||
handleCloseConnection();
|
||||
} else {
|
||||
const currentFilterExpression =
|
||||
currentQuery?.builder.queryData[0]?.filter?.expression?.trim() || '';
|
||||
|
||||
const validationResult = validateQuery(currentFilterExpression || '');
|
||||
|
||||
if (validationResult.isValid) {
|
||||
handleStartNewConnection(currentFilterExpression);
|
||||
} else {
|
||||
handleStartNewConnection(null);
|
||||
}
|
||||
}
|
||||
}, [
|
||||
initialLoading,
|
||||
isConnectionOpen,
|
||||
isConnectionLoading,
|
||||
currentQuery,
|
||||
handleSetInitialLoading,
|
||||
handleCloseConnection,
|
||||
handleStartNewConnection,
|
||||
]);
|
||||
|
||||
// clean up the connection when the component unmounts
|
||||
useEffect(
|
||||
() => (): void => {
|
||||
handleCloseConnection();
|
||||
},
|
||||
[handleCloseConnection],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="live-logs-pause-resume">
|
||||
<Button
|
||||
icon={isPlaying ? <PauseCircleFilled /> : <PlayCircleFilled />}
|
||||
danger={isPlaying}
|
||||
onClick={onLiveButtonClick}
|
||||
type="primary"
|
||||
className={`periscope-btn ${isPlaying ? 'warning' : 'success'}`}
|
||||
>
|
||||
{isPlaying ? 'Pause' : 'Resume'}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default LiveLogsPauseResume;
|
||||
@@ -6,5 +6,4 @@ export type LogsExplorerChartProps = {
|
||||
isLogsExplorerViews?: boolean;
|
||||
isLabelEnabled?: boolean;
|
||||
className?: string;
|
||||
isShowingLiveLogs?: boolean;
|
||||
};
|
||||
|
||||
@@ -25,7 +25,6 @@ function LogsExplorerChart({
|
||||
isLabelEnabled = true,
|
||||
className,
|
||||
isLogsExplorerViews = false,
|
||||
isShowingLiveLogs = false,
|
||||
}: LogsExplorerChartProps): JSX.Element {
|
||||
const dispatch = useDispatch();
|
||||
const urlQuery = useUrlQuery();
|
||||
@@ -56,11 +55,6 @@ function LogsExplorerChart({
|
||||
|
||||
const onDragSelect = useCallback(
|
||||
(start: number, end: number): void => {
|
||||
// Do not allow dragging on live logs chart
|
||||
if (isShowingLiveLogs) {
|
||||
return;
|
||||
}
|
||||
|
||||
const startTimestamp = Math.trunc(start);
|
||||
const endTimestamp = Math.trunc(end);
|
||||
|
||||
@@ -81,7 +75,7 @@ function LogsExplorerChart({
|
||||
const generatedUrl = `${location.pathname}?${urlQuery.toString()}`;
|
||||
safeNavigate(generatedUrl);
|
||||
},
|
||||
[dispatch, location.pathname, safeNavigate, urlQuery, isShowingLiveLogs],
|
||||
[dispatch, location.pathname, safeNavigate, urlQuery],
|
||||
);
|
||||
|
||||
const graphData = useMemo(
|
||||
|
||||
@@ -17,7 +17,7 @@ import { getDraggedColumns } from 'hooks/useDragColumns/utils';
|
||||
import useUrlQueryData from 'hooks/useUrlQueryData';
|
||||
import { isEmpty, isEqual } from 'lodash-es';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useCallback, useEffect, useMemo, useRef } from 'react';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
interface ColumnViewProps {
|
||||
@@ -51,8 +51,6 @@ function ColumnView({
|
||||
onGroupByAttribute: handleGroupByAttribute,
|
||||
} = useActiveLog();
|
||||
|
||||
const [showActiveLog, setShowActiveLog] = useState<boolean>(false);
|
||||
|
||||
const { queryData: activeLogId } = useUrlQueryData<string | null>(
|
||||
QueryParams.activeLogId,
|
||||
null,
|
||||
@@ -74,10 +72,9 @@ function ColumnView({
|
||||
|
||||
if (log) {
|
||||
handleSetActiveLog(log);
|
||||
setShowActiveLog(true);
|
||||
}
|
||||
}
|
||||
}, []);
|
||||
}, [activeLogId, logs, handleSetActiveLog]);
|
||||
|
||||
const tableViewProps = {
|
||||
logs,
|
||||
@@ -91,6 +88,7 @@ function ColumnView({
|
||||
const { dataSource, columns } = useTableView({
|
||||
...tableViewProps,
|
||||
onClickExpand: handleSetActiveLog,
|
||||
onOpenLogsContext: handleClearActiveLog,
|
||||
});
|
||||
|
||||
const { draggedColumns, onColumnOrderChange } = useDragColumns<
|
||||
@@ -224,22 +222,9 @@ function ColumnView({
|
||||
const handleRowClick = (row: Row<Record<string, unknown>>): void => {
|
||||
const currentLog = logs.find(({ id }) => id === row.original.id);
|
||||
|
||||
setShowActiveLog(true);
|
||||
handleSetActiveLog(currentLog as ILog);
|
||||
};
|
||||
|
||||
const removeQueryParam = (key: string): void => {
|
||||
const url = new URL(window.location.href);
|
||||
url.searchParams.delete(key);
|
||||
window.history.replaceState({}, '', url);
|
||||
};
|
||||
|
||||
const handleLogDetailClose = (): void => {
|
||||
removeQueryParam(QueryParams.activeLogId);
|
||||
handleClearActiveLog();
|
||||
setShowActiveLog(false);
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`logs-list-table-view-container ${
|
||||
@@ -261,11 +246,11 @@ function ColumnView({
|
||||
scrollToIndexRef={scrollToIndexRef}
|
||||
/>
|
||||
|
||||
{showActiveLog && activeLog && (
|
||||
{activeLog && (
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={handleLogDetailClose}
|
||||
onClose={handleClearActiveLog}
|
||||
onAddToQuery={handleAddToQuery}
|
||||
onClickActionItem={handleAddToQuery}
|
||||
onGroupByAttribute={handleGroupByAttribute}
|
||||
|
||||
@@ -141,7 +141,6 @@ describe('LogsExplorerList - empty states', () => {
|
||||
listQueryKeyRef={{ current: {} }}
|
||||
chartQueryKeyRef={{ current: {} }}
|
||||
setWarning={(): void => {}}
|
||||
showLiveLogs={false}
|
||||
/>
|
||||
</PreferenceContextProvider>
|
||||
</QueryBuilderContext.Provider>,
|
||||
@@ -206,7 +205,6 @@ describe('LogsExplorerList - empty states', () => {
|
||||
listQueryKeyRef={{ current: {} }}
|
||||
chartQueryKeyRef={{ current: {} }}
|
||||
setWarning={(): void => {}}
|
||||
showLiveLogs={false}
|
||||
/>
|
||||
</PreferenceContextProvider>
|
||||
</QueryBuilderContext.Provider>,
|
||||
|
||||
@@ -1,174 +0,0 @@
|
||||
import { Button, Switch, Typography } from 'antd';
|
||||
import { WsDataEvent } from 'api/common/getQueryStats';
|
||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||
import LogsFormatOptionsMenu from 'components/LogsFormatOptionsMenu/LogsFormatOptionsMenu';
|
||||
import ListViewOrderBy from 'components/OrderBy/ListViewOrderBy';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import Download from 'container/DownloadV2/DownloadV2';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import useClickOutside from 'hooks/useClickOutside';
|
||||
import { ArrowUp10, Minus, Sliders } from 'lucide-react';
|
||||
import { useRef, useState } from 'react';
|
||||
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||
|
||||
import QueryStatus from './QueryStatus';
|
||||
|
||||
function LogsActionsContainer({
|
||||
listQuery,
|
||||
queryStats,
|
||||
selectedPanelType,
|
||||
showFrequencyChart,
|
||||
handleToggleFrequencyChart,
|
||||
orderBy,
|
||||
setOrderBy,
|
||||
flattenLogData,
|
||||
isFetching,
|
||||
isLoading,
|
||||
isError,
|
||||
isSuccess,
|
||||
}: {
|
||||
listQuery: any;
|
||||
selectedPanelType: PANEL_TYPES;
|
||||
showFrequencyChart: boolean;
|
||||
handleToggleFrequencyChart: () => void;
|
||||
orderBy: string;
|
||||
setOrderBy: (value: string) => void;
|
||||
flattenLogData: any;
|
||||
isFetching: boolean;
|
||||
isLoading: boolean;
|
||||
isError: boolean;
|
||||
isSuccess: boolean;
|
||||
queryStats: WsDataEvent | undefined;
|
||||
}): JSX.Element {
|
||||
const [showFormatMenuItems, setShowFormatMenuItems] = useState(false);
|
||||
const menuRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const { options, config } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
|
||||
});
|
||||
|
||||
const formatItems = [
|
||||
{
|
||||
key: 'raw',
|
||||
label: 'Raw',
|
||||
data: {
|
||||
title: 'max lines per row',
|
||||
},
|
||||
},
|
||||
{
|
||||
key: 'list',
|
||||
label: 'Default',
|
||||
},
|
||||
{
|
||||
key: 'table',
|
||||
label: 'Column',
|
||||
data: {
|
||||
title: 'columns',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const handleToggleShowFormatOptions = (): void =>
|
||||
setShowFormatMenuItems(!showFormatMenuItems);
|
||||
|
||||
useClickOutside({
|
||||
ref: menuRef,
|
||||
onClickOutside: () => {
|
||||
if (showFormatMenuItems) {
|
||||
setShowFormatMenuItems(false);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="logs-actions-container">
|
||||
<div className="tab-options">
|
||||
<div className="tab-options-left">
|
||||
{selectedPanelType === PANEL_TYPES.LIST && (
|
||||
<div className="frequency-chart-view-controller">
|
||||
<Typography>Frequency chart</Typography>
|
||||
<Switch
|
||||
size="small"
|
||||
checked={showFrequencyChart}
|
||||
defaultChecked
|
||||
onChange={handleToggleFrequencyChart}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="tab-options-right">
|
||||
{selectedPanelType === PANEL_TYPES.LIST && (
|
||||
<>
|
||||
<div className="order-by-container">
|
||||
<div className="order-by-label">
|
||||
Order by <Minus size={14} /> <ArrowUp10 size={14} />
|
||||
</div>
|
||||
|
||||
<ListViewOrderBy
|
||||
value={orderBy}
|
||||
onChange={(value): void => setOrderBy(value)}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>
|
||||
</div>
|
||||
<Download
|
||||
data={flattenLogData}
|
||||
isLoading={isFetching}
|
||||
fileName="log_data"
|
||||
/>
|
||||
<div className="format-options-container" ref={menuRef}>
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
onClick={handleToggleShowFormatOptions}
|
||||
icon={<Sliders size={14} />}
|
||||
data-testid="periscope-btn"
|
||||
/>
|
||||
|
||||
{showFormatMenuItems && (
|
||||
<LogsFormatOptionsMenu
|
||||
title="FORMAT"
|
||||
items={formatItems}
|
||||
selectedOptionFormat={options.format}
|
||||
config={config}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{(selectedPanelType === PANEL_TYPES.TIME_SERIES ||
|
||||
selectedPanelType === PANEL_TYPES.TABLE) && (
|
||||
<div className="query-stats">
|
||||
<QueryStatus
|
||||
loading={isLoading || isFetching}
|
||||
error={isError}
|
||||
success={isSuccess}
|
||||
/>
|
||||
|
||||
{queryStats?.read_rows && (
|
||||
<Typography.Text className="rows">
|
||||
{getYAxisFormattedValue(queryStats.read_rows?.toString(), 'short')}{' '}
|
||||
rows
|
||||
</Typography.Text>
|
||||
)}
|
||||
|
||||
{queryStats?.elapsed_ms && (
|
||||
<>
|
||||
<div className="divider" />
|
||||
<Typography.Text className="time">
|
||||
{getYAxisFormattedValue(queryStats?.elapsed_ms?.toString(), 'ms')}
|
||||
</Typography.Text>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default LogsActionsContainer;
|
||||
@@ -1,10 +1,14 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import './LogsExplorerViews.styles.scss';
|
||||
|
||||
import { Button, Switch, Typography } from 'antd';
|
||||
import getFromLocalstorage from 'api/browser/localstorage/get';
|
||||
import setToLocalstorage from 'api/browser/localstorage/set';
|
||||
import { getQueryStats, WsDataEvent } from 'api/common/getQueryStats';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
|
||||
import LogsFormatOptionsMenu from 'components/LogsFormatOptionsMenu/LogsFormatOptionsMenu';
|
||||
import ListViewOrderBy from 'components/OrderBy/ListViewOrderBy';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
@@ -18,18 +22,20 @@ import {
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
|
||||
import Download from 'container/DownloadV2/DownloadV2';
|
||||
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
|
||||
import GoToTop from 'container/GoToTop';
|
||||
import {} from 'container/LiveLogs/constants';
|
||||
import LogsExplorerChart from 'container/LogsExplorerChart';
|
||||
import LogsExplorerList from 'container/LogsExplorerList';
|
||||
import LogsExplorerTable from 'container/LogsExplorerTable';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
||||
import dayjs from 'dayjs';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange';
|
||||
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import useClickOutside from 'hooks/useClickOutside';
|
||||
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import useUrlQueryData from 'hooks/useUrlQueryData';
|
||||
@@ -43,7 +49,7 @@ import {
|
||||
omit,
|
||||
set,
|
||||
} from 'lodash-es';
|
||||
import LiveLogs from 'pages/LiveLogs';
|
||||
import { ArrowUp10, Minus, Sliders } from 'lucide-react';
|
||||
import { ExplorerViews } from 'pages/LogsExplorer/utils';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import {
|
||||
@@ -71,12 +77,16 @@ import {
|
||||
TagFilter,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { QueryDataV3 } from 'types/api/widgets/getQuery';
|
||||
import { DataSource, LogsAggregatorOperator } from 'types/common/queryBuilder';
|
||||
import {
|
||||
DataSource,
|
||||
LogsAggregatorOperator,
|
||||
StringOperators,
|
||||
} from 'types/common/queryBuilder';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink';
|
||||
import { v4 } from 'uuid';
|
||||
|
||||
import LogsActionsContainer from './LogsActionsContainer';
|
||||
import QueryStatus from './QueryStatus';
|
||||
|
||||
function LogsExplorerViewsContainer({
|
||||
selectedView,
|
||||
@@ -84,7 +94,6 @@ function LogsExplorerViewsContainer({
|
||||
listQueryKeyRef,
|
||||
chartQueryKeyRef,
|
||||
setWarning,
|
||||
showLiveLogs,
|
||||
}: {
|
||||
selectedView: ExplorerViews;
|
||||
setIsLoadingQueries: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
@@ -93,7 +102,6 @@ function LogsExplorerViewsContainer({
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
chartQueryKeyRef: MutableRefObject<any>;
|
||||
setWarning: Dispatch<SetStateAction<Warning | undefined>>;
|
||||
showLiveLogs: boolean;
|
||||
}): JSX.Element {
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
const dispatch = useDispatch();
|
||||
@@ -141,6 +149,7 @@ function LogsExplorerViewsContainer({
|
||||
const [page, setPage] = useState<number>(1);
|
||||
const [logs, setLogs] = useState<ILog[]>([]);
|
||||
const [requestData, setRequestData] = useState<Query | null>(null);
|
||||
const [showFormatMenuItems, setShowFormatMenuItems] = useState(false);
|
||||
const [queryId, setQueryId] = useState<string>(v4());
|
||||
const [queryStats, setQueryStats] = useState<WsDataEvent>();
|
||||
const [listChartQuery, setListChartQuery] = useState<Query | null>(null);
|
||||
@@ -153,6 +162,12 @@ function LogsExplorerViewsContainer({
|
||||
return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
|
||||
}, [stagedQuery]);
|
||||
|
||||
const { options, config } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
|
||||
});
|
||||
|
||||
const isMultipleQueries = useMemo(
|
||||
() =>
|
||||
currentQuery?.builder?.queryData?.length > 1 ||
|
||||
@@ -588,6 +603,41 @@ function LogsExplorerViewsContainer({
|
||||
return isGroupByExist ? data.payload.data.result : firstPayloadQueryArray;
|
||||
}, [stagedQuery, panelType, data, listChartData, listQuery]);
|
||||
|
||||
const formatItems = [
|
||||
{
|
||||
key: 'raw',
|
||||
label: 'Raw',
|
||||
data: {
|
||||
title: 'max lines per row',
|
||||
},
|
||||
},
|
||||
{
|
||||
key: 'list',
|
||||
label: 'Default',
|
||||
},
|
||||
{
|
||||
key: 'table',
|
||||
label: 'Column',
|
||||
data: {
|
||||
title: 'columns',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const handleToggleShowFormatOptions = (): void =>
|
||||
setShowFormatMenuItems(!showFormatMenuItems);
|
||||
|
||||
const menuRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
useClickOutside({
|
||||
ref: menuRef,
|
||||
onClickOutside: () => {
|
||||
if (showFormatMenuItems) {
|
||||
setShowFormatMenuItems(false);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
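
The formatItems, toggle handler, and useClickOutside wiring above implement a button-toggled menu that closes on any outside click. A generic sketch of that pattern is below; the hook's { ref, onClickOutside } shape is taken from the diff, everything else is illustrative.

// --- illustrative sketch, not part of this diff ---
import { useRef, useState, RefObject } from 'react';
import useClickOutside from 'hooks/useClickOutside';

// Button-toggled menu state that closes whenever a click lands outside `ref`.
function useToggleMenu(): {
	ref: RefObject<HTMLDivElement>;
	open: boolean;
	toggle: () => void;
} {
	const ref = useRef<HTMLDivElement>(null);
	const [open, setOpen] = useState(false);

	useClickOutside({
		ref,
		onClickOutside: (): void => {
			if (open) setOpen(false);
		},
	});

	return { ref, open, toggle: (): void => setOpen((prev) => !prev) };
}
// --- end sketch ---
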
useEffect(() => {
|
||||
if (
|
||||
isLoading ||
|
||||
@@ -645,40 +695,104 @@ function LogsExplorerViewsContainer({
|
||||
return (
|
||||
<div className="logs-explorer-views-container">
|
||||
<div className="logs-explorer-views-types">
|
||||
{!showLiveLogs && (
|
||||
<LogsActionsContainer
|
||||
listQuery={listQuery}
|
||||
queryStats={queryStats}
|
||||
selectedPanelType={selectedPanelType}
|
||||
showFrequencyChart={showFrequencyChart}
|
||||
handleToggleFrequencyChart={handleToggleFrequencyChart}
|
||||
orderBy={orderBy}
|
||||
setOrderBy={setOrderBy}
|
||||
flattenLogData={flattenLogData}
|
||||
isFetching={isFetching}
|
||||
isLoading={isLoading}
|
||||
isError={isError}
|
||||
isSuccess={isSuccess}
|
||||
/>
|
||||
<div className="logs-actions-container">
|
||||
<div className="tab-options">
|
||||
<div className="tab-options-left">
|
||||
{selectedPanelType === PANEL_TYPES.LIST && (
|
||||
<div className="frequency-chart-view-controller">
|
||||
<Typography>Frequency chart</Typography>
|
||||
<Switch
|
||||
size="small"
|
||||
checked={showFrequencyChart}
|
||||
defaultChecked
|
||||
onChange={handleToggleFrequencyChart}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="tab-options-right">
|
||||
{selectedPanelType === PANEL_TYPES.LIST && (
|
||||
<>
|
||||
<div className="order-by-container">
|
||||
<div className="order-by-label">
|
||||
Order by <Minus size={14} /> <ArrowUp10 size={14} />
|
||||
</div>
|
||||
|
||||
<ListViewOrderBy
|
||||
value={orderBy}
|
||||
onChange={(value): void => setOrderBy(value)}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>
|
||||
</div>
|
||||
<Download
|
||||
data={flattenLogData}
|
||||
isLoading={isFetching}
|
||||
fileName="log_data"
|
||||
/>
|
||||
<div className="format-options-container" ref={menuRef}>
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
onClick={handleToggleShowFormatOptions}
|
||||
icon={<Sliders size={14} />}
|
||||
data-testid="periscope-btn"
|
||||
/>
|
||||
|
||||
{showFormatMenuItems && (
|
||||
<LogsFormatOptionsMenu
|
||||
title="FORMAT"
|
||||
items={formatItems}
|
||||
selectedOptionFormat={options.format}
|
||||
config={config}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{(selectedPanelType === PANEL_TYPES.TIME_SERIES ||
|
||||
selectedPanelType === PANEL_TYPES.TABLE) && (
|
||||
<div className="query-stats">
|
||||
<QueryStatus
|
||||
loading={isLoading || isFetching}
|
||||
error={isError}
|
||||
success={isSuccess}
|
||||
/>
|
||||
|
||||
{queryStats?.read_rows && (
|
||||
<Typography.Text className="rows">
|
||||
{getYAxisFormattedValue(queryStats.read_rows?.toString(), 'short')}{' '}
|
||||
rows
|
||||
</Typography.Text>
|
||||
)}
|
||||
|
||||
{queryStats?.elapsed_ms && (
|
||||
<>
|
||||
<div className="divider" />
|
||||
<Typography.Text className="time">
|
||||
{getYAxisFormattedValue(queryStats?.elapsed_ms?.toString(), 'ms')}
|
||||
</Typography.Text>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{selectedPanelType === PANEL_TYPES.LIST && showFrequencyChart && (
|
||||
<div className="logs-frequency-chart-container">
|
||||
<LogsExplorerChart
|
||||
className="logs-frequency-chart"
|
||||
isLoading={isFetchingListChartData || isLoadingListChartData}
|
||||
data={chartData}
|
||||
isLogsExplorerViews={panelType === PANEL_TYPES.LIST}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{selectedPanelType === PANEL_TYPES.LIST &&
|
||||
showFrequencyChart &&
|
||||
!showLiveLogs && (
|
||||
<div className="logs-frequency-chart-container">
|
||||
<LogsExplorerChart
|
||||
className="logs-frequency-chart"
|
||||
isLoading={isFetchingListChartData || isLoadingListChartData}
|
||||
data={chartData}
|
||||
isLogsExplorerViews={panelType === PANEL_TYPES.LIST}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="logs-explorer-views-type-content">
|
||||
{showLiveLogs && <LiveLogs />}
|
||||
|
||||
{selectedPanelType === PANEL_TYPES.LIST && !showLiveLogs && (
|
||||
{selectedPanelType === PANEL_TYPES.LIST && (
|
||||
<LogsExplorerList
|
||||
isLoading={isLoading}
|
||||
isFetching={isFetching}
|
||||
@@ -691,8 +805,7 @@ function LogsExplorerViewsContainer({
|
||||
isFilterApplied={!isEmpty(listQuery?.filters?.items)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedPanelType === PANEL_TYPES.TIME_SERIES && !showLiveLogs && (
|
||||
{selectedPanelType === PANEL_TYPES.TIME_SERIES && (
|
||||
<TimeSeriesView
|
||||
isLoading={isLoading || isFetching}
|
||||
data={data}
|
||||
@@ -704,7 +817,7 @@ function LogsExplorerViewsContainer({
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedPanelType === PANEL_TYPES.TABLE && !showLiveLogs && (
|
||||
{selectedPanelType === PANEL_TYPES.TABLE && (
|
||||
<LogsExplorerTable
|
||||
data={
|
||||
(data?.payload?.data?.newResult?.data?.result ||
|
||||
|
||||
@@ -174,7 +174,6 @@ const renderer = (): RenderResult =>
|
||||
listQueryKeyRef={{ current: {} }}
|
||||
chartQueryKeyRef={{ current: {} }}
|
||||
setWarning={(): void => {}}
|
||||
showLiveLogs={false}
|
||||
/>
|
||||
</PreferenceContextProvider>
|
||||
</VirtuosoMockContext.Provider>,
|
||||
@@ -236,7 +235,6 @@ describe('LogsExplorerViews -', () => {
|
||||
listQueryKeyRef={{ current: {} }}
|
||||
chartQueryKeyRef={{ current: {} }}
|
||||
setWarning={(): void => {}}
|
||||
showLiveLogs={false}
|
||||
/>
|
||||
</PreferenceContextProvider>
|
||||
</QueryBuilderContext.Provider>,
|
||||
|
||||
@@ -1,19 +1,22 @@
|
||||
import './BreakDown.styles.scss';
|
||||
|
||||
import { Alert, Typography } from 'antd';
|
||||
// import useFilterConfig from 'components/QuickFilters/hooks/useFilterConfig';
|
||||
// import { SignalType } from 'components/QuickFilters/types';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Card, CardContainer } from 'container/GridCardLayout/styles';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
// import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { useCallback } from 'react';
|
||||
import { useDispatch } from 'react-redux';
|
||||
import { useHistory, useLocation } from 'react-router-dom';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
// import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
import {
|
||||
@@ -106,11 +109,65 @@ function Section(section: MetricSection): JSX.Element {
|
||||
);
|
||||
}
|
||||
|
||||
// function FilterDropdown({ attrKey }: { attrKey: string }): JSX.Element {
|
||||
// const {
|
||||
// data: keyValueSuggestions,
|
||||
// isLoading: isLoadingKeyValueSuggestions,
|
||||
// } = useGetQueryKeyValueSuggestions({
|
||||
// key: attrKey,
|
||||
// signal: DataSource.METRICS,
|
||||
// signalSource: 'meter',
|
||||
// options: {
|
||||
// keepPreviousData: true,
|
||||
// },
|
||||
// });
|
||||
|
||||
// const responseData = keyValueSuggestions?.data as any;
|
||||
// const values = responseData?.data?.values || {};
|
||||
// const stringValues = values.stringValues || [];
|
||||
// const numberValues = values.numberValues || [];
|
||||
|
||||
// const stringOptions = stringValues.filter(
|
||||
// (value: string | null | undefined): value is string =>
|
||||
// value !== null && value !== undefined && value !== '',
|
||||
// );
|
||||
|
||||
// const numberOptions = numberValues
|
||||
// .filter(
|
||||
// (value: number | null | undefined): value is number =>
|
||||
// value !== null && value !== undefined,
|
||||
// )
|
||||
// .map((value: number) => value.toString());
|
||||
|
||||
// const vals = [...stringOptions, ...numberOptions];
|
||||
|
||||
// return (
|
||||
// <div className="filter-dropdown">
|
||||
// <Typography.Text>{attrKey}</Typography.Text>
|
||||
// <Select
|
||||
// loading={isLoadingKeyValueSuggestions}
|
||||
// options={vals?.map((suggestion: any) => ({
|
||||
// label: suggestion,
|
||||
// value: suggestion,
|
||||
// }))}
|
||||
// placeholder={`Select ${attrKey}`}
|
||||
// />
|
||||
// </div>
|
||||
// );
|
||||
// }
|
||||
|
||||
function BreakDown(): JSX.Element {
|
||||
const { isCloudUser } = useGetTenantLicense();
|
||||
// const { customFilters } = useFilterConfig({
|
||||
// signal: SignalType.METER_EXPLORER,
|
||||
// config: [],
|
||||
// });
|
||||
|
||||
return (
|
||||
<div className="meter-explorer-breakdown">
|
||||
<section className="meter-explorer-date-time">
|
||||
{/* {customFilters.map((filter) => (
|
||||
<FilterDropdown key={filter.key} attrKey={filter.key} />
|
||||
))} */}
|
||||
<DateTimeSelectionV2 showAutoRefresh={false} />
|
||||
</section>
|
||||
<section className="meter-explorer-graphs">
|
||||
@@ -121,13 +178,11 @@ function BreakDown(): JSX.Element {
|
||||
message="Billing is calculated in UTC. To match your meter data with billing, select full-day ranges in UTC time (00:00 – 23:59 UTC).
|
||||
For example, if you’re in IST, for the billing of Jan 1, select your time range as Jan 1, 5:30 AM – Jan 2, 5:29 AM IST."
|
||||
/>
|
||||
{isCloudUser && (
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="Meter module data is accurate only from 22nd August 2025, 00:00 UTC onwards. Data before this time was collected during the beta phase and may be inaccurate."
|
||||
/>
|
||||
)}
|
||||
<Alert
|
||||
type="warning"
|
||||
showIcon
|
||||
message="Meter module data is accurate only from 22nd August 2025, 00:00 UTC onwards. Data before this time was collected during the beta phase and may be inaccurate."
|
||||
/>
|
||||
</section>
|
||||
<section className="total">
|
||||
<Section
|
||||
|
||||
@@ -20,6 +20,7 @@ const activeTab = 'active-tab';
export default function LeftToolbarActions({
items,
selectedView,

onChangeSelectedView,
showFilter,
handleFilterVisibilityChange,

@@ -12,7 +12,6 @@ interface RightToolbarActionsProps {
isLoadingQueries?: boolean;
listQueryKeyRef?: MutableRefObject<any>;
chartQueryKeyRef?: MutableRefObject<any>;
showLiveLogs?: boolean;
}

export default function RightToolbarActions({
@@ -20,25 +19,19 @@ export default function RightToolbarActions({
isLoadingQueries,
listQueryKeyRef,
chartQueryKeyRef,
showLiveLogs,
}: RightToolbarActionsProps): JSX.Element {
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();

const queryClient = useQueryClient();

useEffect(() => {
if (showLiveLogs) return;

registerShortcut(LogsExplorerShortcuts.StageAndRunQuery, onStageRunQuery);

return (): void => {
deregisterShortcut(LogsExplorerShortcuts.StageAndRunQuery);
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [onStageRunQuery, showLiveLogs]);

if (showLiveLogs) return <div />;

}, [onStageRunQuery]);
return (
<div>
{isLoadingQueries ? (
@@ -78,5 +71,4 @@ RightToolbarActions.defaultProps = {
isLoadingQueries: false,
listQueryKeyRef: null,
chartQueryKeyRef: null,
showLiveLogs: false,
};
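
In the hunk above, the Stage & Run keyboard shortcut is only registered while live logs are not shown, and the component bails out with an empty placeholder in live mode. A minimal sketch of that lifecycle, pulled out of the component, is below; the hook signature and the string key are illustrative assumptions, only the guard-and-cleanup shape comes from the diff.

// --- illustrative sketch, not part of this diff ---
import { useEffect } from 'react';

// Register a keyboard shortcut only while live logs are inactive, and make
// sure a previously registered handler is cleaned up when the mode flips.
function useStageAndRunShortcut(
	onStageRunQuery: () => void,
	showLiveLogs: boolean,
	registerShortcut: (key: string, handler: () => void) => void,
	deregisterShortcut: (key: string) => void,
): void {
	useEffect(() => {
		if (showLiveLogs) return undefined; // live tail: no shortcut

		registerShortcut('stage-and-run-query', onStageRunQuery);
		return (): void => deregisterShortcut('stage-and-run-query');
	}, [onStageRunQuery, showLiveLogs, registerShortcut, deregisterShortcut]);
}
// --- end sketch ---
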
@@ -19,7 +19,7 @@
|
||||
.timeRange {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
gap: 16px;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import './Toolbar.styles.scss';
|
||||
|
||||
import ROUTES from 'constants/routes';
|
||||
import LiveLogsPauseResume from 'container/LiveLogs/LiveLogsPauseResume/LiveLogsPauseResume';
|
||||
import NewExplorerCTA from 'container/NewExplorerCTA';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import { noop } from 'lodash-es';
|
||||
import { useMemo } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
|
||||
@@ -14,9 +12,6 @@ interface ToolbarProps {
|
||||
rightActions?: JSX.Element;
|
||||
showOldCTA?: boolean;
|
||||
warningElement?: JSX.Element;
|
||||
onGoLive?: () => void;
|
||||
onExitLiveLogs?: () => void;
|
||||
showLiveLogs?: boolean;
|
||||
}
|
||||
|
||||
export default function Toolbar({
|
||||
@@ -25,9 +20,6 @@ export default function Toolbar({
|
||||
rightActions,
|
||||
showOldCTA,
|
||||
warningElement,
|
||||
showLiveLogs,
|
||||
onGoLive,
|
||||
onExitLiveLogs,
|
||||
}: ToolbarProps): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
@@ -47,11 +39,7 @@ export default function Toolbar({
|
||||
<div className="timeRange">
|
||||
{warningElement}
|
||||
{showOldCTA && <NewExplorerCTA />}
|
||||
{showLiveLogs && <LiveLogsPauseResume />}
|
||||
<DateTimeSelectionV2
|
||||
showLiveLogs={showLiveLogs}
|
||||
onExitLiveLogs={onExitLiveLogs}
|
||||
onGoLive={onGoLive}
|
||||
showAutoRefresh={showAutoRefresh}
|
||||
showRefreshText={!isLogsExplorerPage && !isApiMonitoringPage}
|
||||
hideShareModal
|
||||
@@ -69,7 +57,4 @@ Toolbar.defaultProps = {
|
||||
rightActions: <div />,
|
||||
showOldCTA: false,
|
||||
warningElement: <div />,
|
||||
showLiveLogs: false,
|
||||
onGoLive: (): void => noop(),
|
||||
onExitLiveLogs: (): void => {},
|
||||
};
|
||||
|
||||
@@ -9,7 +9,17 @@ import CustomTimePicker from 'components/CustomTimePicker/CustomTimePicker';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import {
|
||||
initialQueryBuilderFormValuesMap,
|
||||
PANEL_TYPES,
|
||||
} from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import ROUTES from 'constants/routes';
|
||||
import {
|
||||
constructCompositeQuery,
|
||||
defaultLiveQueryDataConfig,
|
||||
} from 'container/LiveLogs/constants';
|
||||
import { QueryHistoryState } from 'container/LiveLogs/types';
|
||||
import NewExplorerCTA from 'container/NewExplorerCTA';
|
||||
import dayjs, { Dayjs } from 'dayjs';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
@@ -21,6 +31,7 @@ import { cloneDeep, isObject } from 'lodash-es';
|
||||
import { Check, Copy, Info, Send, Undo } from 'lucide-react';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useQueryClient } from 'react-query';
|
||||
import { connect, useDispatch, useSelector } from 'react-redux';
|
||||
import { RouteComponentProps, withRouter } from 'react-router-dom';
|
||||
import { useNavigationType, useSearchParams } from 'react-router-dom-v5-compat';
|
||||
@@ -30,6 +41,8 @@ import { ThunkDispatch } from 'redux-thunk';
|
||||
import { GlobalTimeLoading, UpdateTimeInterval } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import AppActions from 'types/actions';
|
||||
import { ErrorResponse, SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { normalizeTimeToMs } from 'utils/timeUtils';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
@@ -65,9 +78,6 @@ function DateTimeSelection({
|
||||
modalSelectedInterval,
|
||||
modalInitialStartTime,
|
||||
modalInitialEndTime,
|
||||
onGoLive,
|
||||
onExitLiveLogs,
|
||||
showLiveLogs,
|
||||
}: Props): JSX.Element {
|
||||
const [formSelector] = Form.useForm();
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
@@ -81,6 +91,7 @@ function DateTimeSelection({
|
||||
const searchStartTime = urlQuery.get('startTime');
|
||||
const searchEndTime = urlQuery.get('endTime');
|
||||
const relativeTimeFromUrl = urlQuery.get(QueryParams.relativeTime);
|
||||
const queryClient = useQueryClient();
|
||||
const [enableAbsoluteTime, setEnableAbsoluteTime] = useState(false);
|
||||
const [isValidteRelativeTime, setIsValidteRelativeTime] = useState(false);
|
||||
const [, handleCopyToClipboard] = useCopyToClipboard();
|
||||
@@ -177,7 +188,54 @@ function DateTimeSelection({
|
||||
false,
|
||||
);
|
||||
|
||||
const { stagedQuery, currentQuery, initQueryBuilderData } = useQueryBuilder();
const {
stagedQuery,
currentQuery,
initQueryBuilderData,
panelType,
} = useQueryBuilder();

const handleGoLive = useCallback(() => {
if (!stagedQuery) return;

setIsOpen(false);
let queryHistoryState: QueryHistoryState | null = null;

const compositeQuery = constructCompositeQuery({
query: stagedQuery,
initialQueryData: initialQueryBuilderFormValuesMap.logs,
customQueryData: defaultLiveQueryDataConfig,
});

const isListView =
panelType === PANEL_TYPES.LIST && stagedQuery.builder.queryData[0];

if (isListView) {
const [graphQuery, listQuery] = queryClient.getQueriesData<
SuccessResponse<MetricRangePayloadProps> | ErrorResponse
>({
queryKey: REACT_QUERY_KEY.GET_QUERY_RANGE,
active: true,
});

queryHistoryState = {
graphQueryPayload:
graphQuery && graphQuery[1]
? graphQuery[1].payload?.data.result || []
: [],
listQueryPayload:
listQuery && listQuery[1]
? listQuery[1].payload?.data?.newResult?.data?.result || []
: [],
};
}

const JSONCompositeQuery = encodeURIComponent(JSON.stringify(compositeQuery));

const path = `${ROUTES.LIVE_LOGS}?${QueryParams.compositeQuery}=${JSONCompositeQuery}`;

safeNavigate(path, { state: queryHistoryState });
}, [panelType, queryClient, safeNavigate, stagedQuery]);
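
handleGoLive above hands the staged builder query to the live-logs route by JSON-stringifying the composite query, URI-encoding it into the compositeQuery query param, and carrying any cached list/graph payloads along as router state. A small sketch of the encode/decode round trip is below; the param name comes from the diff, while the read helper is hypothetical.

// --- illustrative sketch, not part of this diff ---
// Encode side mirrors the JSONCompositeQuery/path construction above;
// the decode side is a hypothetical helper a receiving page could use.
function buildLiveLogsPath(baseRoute: string, compositeQuery: unknown): string {
	const encoded = encodeURIComponent(JSON.stringify(compositeQuery));
	return `${baseRoute}?compositeQuery=${encoded}`;
}

function readCompositeQueryFromSearch<T>(search: string): T | null {
	// URLSearchParams already URI-decodes the value, so only JSON.parse is left.
	const raw = new URLSearchParams(search).get('compositeQuery');
	return raw ? (JSON.parse(raw) as T) : null;
}
// --- end sketch ---
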
const { maxTime, minTime, selectedTime } = useSelector<
|
||||
AppState,
|
||||
@@ -745,12 +803,8 @@ function DateTimeSelection({
|
||||
|
||||
const { timezone } = useTimezone();
|
||||
|
||||
const getSelectedValue = (): string => {
|
||||
if (showLiveLogs) {
|
||||
return 'live';
|
||||
}
|
||||
|
||||
return getInputLabel(
|
||||
const getSelectedValue = (): string =>
|
||||
getInputLabel(
|
||||
dayjs(isModalTimeSelection ? modalStartTime : minTime / 1000000).tz(
|
||||
timezone.value,
|
||||
),
|
||||
@@ -759,7 +813,6 @@ function DateTimeSelection({
|
||||
),
|
||||
isModalTimeSelection ? modalSelectedInterval : selectedTime,
|
||||
);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="date-time-selector">
|
||||
@@ -820,13 +873,11 @@ function DateTimeSelection({
|
||||
selectedValue={getSelectedValue()}
|
||||
data-testid="dropDown"
|
||||
items={options}
|
||||
showLiveLogs={showLiveLogs}
|
||||
newPopover
|
||||
onGoLive={onGoLive}
|
||||
handleGoLive={handleGoLive}
|
||||
onCustomDateHandler={onCustomDateHandler}
|
||||
customDateTimeVisible={customDateTimeVisible}
|
||||
setCustomDTPickerVisible={setCustomDTPickerVisible}
|
||||
onExitLiveLogs={onExitLiveLogs}
|
||||
/>
|
||||
|
||||
{showAutoRefresh && selectedTime !== 'custom' && (
|
||||
@@ -882,9 +933,6 @@ interface DateTimeSelectionV2Props {
|
||||
modalSelectedInterval?: Time;
|
||||
modalInitialStartTime?: number;
|
||||
modalInitialEndTime?: number;
|
||||
showLiveLogs?: boolean;
|
||||
onGoLive?: () => void;
|
||||
onExitLiveLogs?: () => void;
|
||||
}
|
||||
|
||||
DateTimeSelection.defaultProps = {
|
||||
@@ -898,9 +946,6 @@ DateTimeSelection.defaultProps = {
|
||||
modalSelectedInterval: RelativeTimeMap['5m'] as Time,
|
||||
modalInitialStartTime: undefined,
|
||||
modalInitialEndTime: undefined,
|
||||
onGoLive: (): void => {},
|
||||
onExitLiveLogs: (): void => {},
|
||||
showLiveLogs: false,
|
||||
};
|
||||
interface DispatchProps {
|
||||
updateTimeInterval: (
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
import { liveLogsCompositeQuery } from 'container/LiveLogs/constants';
import LiveLogsContainer from 'container/LiveLogs/LiveLogsContainer';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { DataSource } from 'types/common/queryBuilder';

function LiveLogs(): JSX.Element {
useShareBuilderUrl({ defaultValue: liveLogsCompositeQuery });
const { handleSetConfig } = useQueryBuilder();

useEffect(() => {
handleSetConfig(PANEL_TYPES.LIST, DataSource.LOGS);
}, [handleSetConfig]);

return (
<PreferenceContextProvider>
<LiveLogsContainer />
</PreferenceContextProvider>
);
}

export default LiveLogs;
@@ -1,3 +1,28 @@
import LiveLogs from './LiveLogs';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { liveLogsCompositeQuery } from 'container/LiveLogs/constants';
import LiveLogsContainer from 'container/LiveLogs/LiveLogsContainer';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { EventSourceProvider } from 'providers/EventSource';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { DataSource } from 'types/common/queryBuilder';

function LiveLogs(): JSX.Element {
useShareBuilderUrl({ defaultValue: liveLogsCompositeQuery });
const { handleSetConfig } = useQueryBuilder();

useEffect(() => {
handleSetConfig(PANEL_TYPES.LIST, DataSource.LOGS);
}, [handleSetConfig]);

return (
<EventSourceProvider>
<PreferenceContextProvider>
<LiveLogsContainer />
</PreferenceContextProvider>
</EventSourceProvider>
);
}

export default LiveLogs;
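
With the hunks above, the LiveLogs page now mounts its own EventSourceProvider, so the live-tail connection state lives exactly as long as this page. Anything rendered inside it (LiveLogsContainer here) can pick the handlers off the context; a hedged sketch is below, where the useEventSource accessor name is an assumption and only handleCloseConnection comes from the context interface shown later in this diff.

// --- illustrative sketch, not part of this diff ---
import { useEffect } from 'react';
import { useEventSource } from 'providers/EventSource'; // accessor name assumed

// Close the live-tail stream when the consuming component unmounts.
function useCloseLiveTailOnUnmount(): void {
	const { handleCloseConnection } = useEventSource();

	useEffect(() => (): void => handleCloseConnection(), [handleCloseConnection]);
}
// --- end sketch ---
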
@@ -30,7 +30,6 @@ import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
|
||||
import useUrlQueryData from 'hooks/useUrlQueryData';
|
||||
import { isEmpty, isEqual, isNull } from 'lodash-es';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import { EventSourceProvider } from 'providers/EventSource';
|
||||
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useSearchParams } from 'react-router-dom-v5-compat';
|
||||
@@ -46,7 +45,6 @@ import { ExplorerViews } from './utils';
|
||||
|
||||
function LogsExplorer(): JSX.Element {
|
||||
const [searchParams] = useSearchParams();
|
||||
const [showLiveLogs, setShowLiveLogs] = useState<boolean>(false);
|
||||
|
||||
// Get panel type from URL
|
||||
const panelTypesFromUrl = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);
|
||||
@@ -146,11 +144,6 @@ function LogsExplorer(): JSX.Element {
|
||||
}
|
||||
|
||||
setSelectedView(view);
|
||||
|
||||
if (view !== ExplorerViews.LIST) {
|
||||
setShowLiveLogs(false);
|
||||
}
|
||||
|
||||
handleExplorerTabChange(
|
||||
view === ExplorerViews.TIMESERIES ? PANEL_TYPES.TIME_SERIES : view,
|
||||
);
|
||||
@@ -342,79 +335,62 @@ function LogsExplorer(): JSX.Element {
|
||||
[],
|
||||
);
|
||||
|
||||
const handleShowLiveLogs = useCallback(() => {
|
||||
setShowLiveLogs(true);
|
||||
}, []);
|
||||
|
||||
const handleExitLiveLogs = useCallback(() => {
|
||||
setShowLiveLogs(false);
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<EventSourceProvider>
|
||||
<div
|
||||
className={cx('logs-module-page', showFilters ? 'filter-visible' : '')}
|
||||
>
|
||||
{showFilters && (
|
||||
<section className={cx('log-quick-filter-left-section')}>
|
||||
<QuickFilters
|
||||
className="qf-logs-explorer"
|
||||
signal={SignalType.LOGS}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
handleFilterVisibilityChange={handleFilterVisibilityChange}
|
||||
/>
|
||||
</section>
|
||||
)}
|
||||
<section className={cx('log-module-right-section')}>
|
||||
<Toolbar
|
||||
showAutoRefresh={false}
|
||||
leftActions={
|
||||
<LeftToolbarActions
|
||||
showFilter={showFilters}
|
||||
handleFilterVisibilityChange={handleFilterVisibilityChange}
|
||||
items={toolbarViews}
|
||||
selectedView={selectedView}
|
||||
onChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
}
|
||||
warningElement={
|
||||
!isEmpty(warning) ? <WarningPopover warningData={warning} /> : <div />
|
||||
}
|
||||
rightActions={
|
||||
<RightToolbarActions
|
||||
onStageRunQuery={(): void => handleRunQuery()}
|
||||
listQueryKeyRef={listQueryKeyRef}
|
||||
chartQueryKeyRef={chartQueryKeyRef}
|
||||
isLoadingQueries={isLoadingQueries}
|
||||
showLiveLogs={showLiveLogs}
|
||||
/>
|
||||
}
|
||||
showLiveLogs={showLiveLogs}
|
||||
onGoLive={handleShowLiveLogs}
|
||||
onExitLiveLogs={handleExitLiveLogs}
|
||||
<div className={cx('logs-module-page', showFilters ? 'filter-visible' : '')}>
|
||||
{showFilters && (
|
||||
<section className={cx('log-quick-filter-left-section')}>
|
||||
<QuickFilters
|
||||
className="qf-logs-explorer"
|
||||
signal={SignalType.LOGS}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
handleFilterVisibilityChange={handleFilterVisibilityChange}
|
||||
/>
|
||||
|
||||
<div className="log-explorer-query-container">
|
||||
<div>
|
||||
<ExplorerCard sourcepage={DataSource.LOGS}>
|
||||
<LogExplorerQuerySection selectedView={selectedView} />
|
||||
</ExplorerCard>
|
||||
</div>
|
||||
<div className="logs-explorer-views">
|
||||
<LogsExplorerViewsContainer
|
||||
selectedView={selectedView}
|
||||
listQueryKeyRef={listQueryKeyRef}
|
||||
chartQueryKeyRef={chartQueryKeyRef}
|
||||
setIsLoadingQueries={setIsLoadingQueries}
|
||||
setWarning={setWarning}
|
||||
showLiveLogs={showLiveLogs}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</EventSourceProvider>
|
||||
)}
|
||||
<section className={cx('log-module-right-section')}>
|
||||
<Toolbar
|
||||
showAutoRefresh={false}
|
||||
leftActions={
|
||||
<LeftToolbarActions
|
||||
showFilter={showFilters}
|
||||
handleFilterVisibilityChange={handleFilterVisibilityChange}
|
||||
items={toolbarViews}
|
||||
selectedView={selectedView}
|
||||
onChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
}
|
||||
warningElement={
|
||||
!isEmpty(warning) ? <WarningPopover warningData={warning} /> : <div />
|
||||
}
|
||||
rightActions={
|
||||
<RightToolbarActions
|
||||
onStageRunQuery={(): void => handleRunQuery()}
|
||||
listQueryKeyRef={listQueryKeyRef}
|
||||
chartQueryKeyRef={chartQueryKeyRef}
|
||||
isLoadingQueries={isLoadingQueries}
|
||||
/>
|
||||
}
|
||||
/>
|
||||
|
||||
<div className="log-explorer-query-container">
|
||||
<div>
|
||||
<ExplorerCard sourcepage={DataSource.LOGS}>
|
||||
<LogExplorerQuerySection selectedView={selectedView} />
|
||||
</ExplorerCard>
|
||||
</div>
|
||||
<div className="logs-explorer-views">
|
||||
<LogsExplorerViewsContainer
|
||||
selectedView={selectedView}
|
||||
listQueryKeyRef={listQueryKeyRef}
|
||||
chartQueryKeyRef={chartQueryKeyRef}
|
||||
setIsLoadingQueries={setIsLoadingQueries}
|
||||
setWarning={setWarning}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -81,23 +81,6 @@
|
||||
border-radius: 2px;
|
||||
border: 1px solid rgba(37, 225, 146, 0.1);
|
||||
background: rgba(37, 225, 146, 0.1) !important;
|
||||
box-shadow: none !important;
|
||||
}
|
||||
|
||||
&.warning {
|
||||
color: var(--bg-amber-500) !important;
|
||||
border-radius: 2px;
|
||||
border: 1px solid rgba(255, 184, 0, 0.1);
|
||||
background: rgba(255, 184, 0, 0.1) !important;
|
||||
box-shadow: none !important;
|
||||
}
|
||||
|
||||
&.danger {
|
||||
color: var(--bg-cherry-500) !important;
|
||||
border-radius: 2px;
|
||||
border: 1px solid rgba(255, 184, 0, 0.1);
|
||||
background: rgba(255, 184, 0, 0.1) !important;
|
||||
box-shadow: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -27,7 +27,10 @@ interface IEventSourceContext {
|
||||
isConnectionError: boolean;
initialLoading: boolean;
reconnectDueToError: boolean;
handleStartOpenConnection: (filterExpression?: string) => void;
handleStartOpenConnection: (urlProps: {
url?: string;
queryString: string;
}) => void;
handleCloseConnection: () => void;
handleSetInitialLoading: (value: boolean) => void;
}
@@ -120,10 +123,12 @@ export function EventSourceProvider({
}, [destroyEventSourceSession]);

const handleStartOpenConnection = useCallback(
(filterExpression?: string): void => {
const eventSourceUrl = `${
ENVIRONMENT.baseURL
}${apiV3}logs/livetail?filter=${encodeURIComponent(filterExpression || '')}`;
(urlProps: { url?: string; queryString: string }): void => {
const { url, queryString } = urlProps;

const eventSourceUrl = url
? `${url}/?${queryString}`
: `${ENVIRONMENT.baseURL}${apiV3}logs/livetail?${queryString}`;

eventSourceRef.current = new EventSourcePolyfill(eventSourceUrl, {
headers: {
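
The provider hunk above changes handleStartOpenConnection from taking a bare filter expression to taking an object with an optional url and a prebuilt queryString, with the default livetail endpoint used when no url is given. A caller-side sketch of the new shape is below; the query-string helper is hypothetical, only the urlProps type mirrors the diff.

// --- illustrative sketch, not part of this diff ---
type StartConnection = (urlProps: { url?: string; queryString: string }) => void;

// Callers now build the query string themselves before opening the stream.
function openLiveTail(start: StartConnection, filterExpression: string): void {
	const queryString = new URLSearchParams({ filter: filterExpression }).toString();
	start({ queryString }); // previously: start(filterExpression)
}
// --- end sketch ---
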
||||
88
go.mod
88
go.mod
@@ -1,10 +1,10 @@
module github.com/SigNoz/signoz

go 1.24.0
go 1.23.0

require (
dario.cat/mergo v1.0.1
github.com/AfterShip/clickhouse-sql-parser v0.4.11
github.com/AfterShip/clickhouse-sql-parser v0.4.7
github.com/ClickHouse/clickhouse-go/v2 v2.36.0
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
@@ -21,13 +21,13 @@ require (
|
||||
github.com/go-redis/redismock/v8 v8.11.5
|
||||
github.com/go-viper/mapstructure/v2 v2.3.0
|
||||
github.com/gojek/heimdall/v7 v7.0.3
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/handlers v1.5.1
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/huandu/go-sqlbuilder v1.35.0
|
||||
github.com/jackc/pgx/v5 v5.7.5
|
||||
github.com/jackc/pgx/v5 v5.7.2
|
||||
github.com/json-iterator/go v1.1.12
|
||||
github.com/knadh/koanf v1.5.0
|
||||
github.com/knadh/koanf/v2 v2.2.0
|
||||
@@ -35,14 +35,12 @@ require (
|
||||
github.com/mattn/go-sqlite3 v1.14.24
|
||||
github.com/open-telemetry/opamp-go v0.19.0
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
|
||||
github.com/openfga/api/proto v0.0.0-20250127102726-f9709139a369
|
||||
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
|
||||
github.com/opentracing/opentracing-go v1.2.0
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/prometheus/alertmanager v0.28.1
|
||||
github.com/prometheus/client_golang v1.23.0
|
||||
github.com/prometheus/common v0.65.0
|
||||
github.com/prometheus/client_golang v1.22.0
|
||||
github.com/prometheus/common v0.64.0
|
||||
github.com/prometheus/prometheus v0.304.1
|
||||
github.com/rs/cors v1.11.1
|
||||
github.com/russellhaering/gosaml2 v0.9.0
|
||||
@@ -64,37 +62,33 @@ require (
|
||||
go.opentelemetry.io/collector/pdata v1.34.0
|
||||
go.opentelemetry.io/contrib/config v0.10.0
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0
|
||||
go.opentelemetry.io/otel v1.37.0
|
||||
go.opentelemetry.io/otel/metric v1.37.0
|
||||
go.opentelemetry.io/otel v1.36.0
|
||||
go.opentelemetry.io/otel/metric v1.36.0
|
||||
go.opentelemetry.io/otel/sdk v1.36.0
|
||||
go.opentelemetry.io/otel/trace v1.37.0
|
||||
go.opentelemetry.io/otel/trace v1.36.0
|
||||
go.uber.org/multierr v1.11.0
|
||||
go.uber.org/zap v1.27.0
|
||||
golang.org/x/crypto v0.39.0
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
|
||||
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
|
||||
golang.org/x/net v0.41.0
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
golang.org/x/sync v0.16.0
|
||||
golang.org/x/sync v0.15.0
|
||||
golang.org/x/text v0.26.0
|
||||
google.golang.org/protobuf v1.36.6
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
k8s.io/apimachinery v0.33.0
|
||||
k8s.io/apimachinery v0.32.3
|
||||
)
|
||||
|
||||
require (
|
||||
cel.dev/expr v0.24.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.1 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
|
||||
github.com/ClickHouse/ch-go v0.66.0 // indirect
|
||||
github.com/Masterminds/squirrel v1.5.4 // indirect
|
||||
github.com/Yiling-J/theine-go v0.6.1 // indirect
|
||||
github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b // indirect
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/armon/go-metrics v0.4.1 // indirect
|
||||
@@ -115,8 +109,6 @@ require (
|
||||
github.com/ebitengine/purego v0.8.4 // indirect
|
||||
github.com/edsrzf/mmap-go v1.2.0 // indirect
|
||||
github.com/elastic/lunes v0.1.0 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
||||
github.com/expr-lang/expr v1.17.5 // indirect
|
||||
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
@@ -124,7 +116,7 @@ require (
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.0.5 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/logr v1.4.2 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-openapi/analysis v0.23.0 // indirect
|
||||
@@ -135,7 +127,6 @@ require (
|
||||
github.com/go-openapi/spec v0.21.0 // indirect
|
||||
github.com/go-openapi/swag v0.23.0 // indirect
|
||||
github.com/go-openapi/validate v0.24.0 // indirect
|
||||
github.com/go-sql-driver/mysql v1.9.3 // indirect
|
||||
github.com/gobwas/glob v0.2.3 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/gofrs/uuid v4.4.0+incompatible // indirect
|
||||
@@ -143,17 +134,14 @@ require (
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/btree v1.1.3 // indirect
|
||||
github.com/google/cel-go v0.26.0 // indirect
|
||||
github.com/google/btree v1.0.1 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gopherjs/gopherjs v1.17.2 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
|
||||
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
|
||||
@@ -176,20 +164,15 @@ require (
|
||||
github.com/jpillora/backoff v1.0.0 // indirect
|
||||
github.com/jtolds/gls v4.20.0+incompatible // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
|
||||
github.com/leodido/go-syslog/v4 v4.2.0 // indirect
|
||||
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
|
||||
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
|
||||
github.com/magefile/mage v1.15.0 // indirect
|
||||
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
|
||||
github.com/mdlayher/socket v0.4.1 // indirect
|
||||
github.com/mdlayher/vsock v1.2.1 // indirect
|
||||
github.com/mfridman/interpolate v0.0.2 // indirect
|
||||
github.com/miekg/dns v1.1.65 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
|
||||
@@ -198,49 +181,35 @@ require (
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
|
||||
github.com/natefinch/wrap v0.2.0 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/oklog/run v1.1.0 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/oklog/ulid/v2 v2.1.1 // indirect
|
||||
github.com/oklog/ulid/v2 v2.1.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
|
||||
github.com/openfga/openfga v1.9.5
|
||||
github.com/paulmach/orb v0.11.1 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
|
||||
github.com/pressly/goose/v3 v3.24.3 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
|
||||
github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
|
||||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/prometheus/sigv4 v0.1.2 // indirect
|
||||
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
||||
github.com/sagikazarmark/locafero v0.7.0 // indirect
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect
|
||||
github.com/segmentio/asm v1.2.0 // indirect
|
||||
github.com/segmentio/backo-go v1.0.1 // indirect
|
||||
github.com/sethvargo/go-retry v0.3.0 // indirect
|
||||
github.com/shirou/gopsutil/v4 v4.25.5 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
|
||||
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
|
||||
github.com/smarty/assertions v1.15.0 // indirect
|
||||
github.com/sourcegraph/conc v0.3.0 // indirect
|
||||
github.com/spf13/afero v1.12.0 // indirect
|
||||
github.com/spf13/cast v1.9.2 // indirect
|
||||
github.com/spf13/pflag v1.0.7 // indirect
|
||||
github.com/spf13/viper v1.20.1 // indirect
|
||||
github.com/stoewer/go-strcase v1.3.0 // indirect
|
||||
github.com/spf13/pflag v1.0.6 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.15 // indirect
|
||||
@@ -252,7 +221,6 @@ require (
|
||||
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||
github.com/zeebo/xxh3 v1.0.2 // indirect
|
||||
go.mongodb.org/mongo-driver v1.17.1 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/collector/component v1.34.0 // indirect
|
||||
@@ -315,24 +283,18 @@ require (
|
||||
go.opentelemetry.io/proto/otlp v1.6.0 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/goleak v1.3.0 // indirect
|
||||
go.uber.org/mock v0.5.2 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.3 // indirect
|
||||
golang.org/x/mod v0.25.0 // indirect
|
||||
golang.org/x/sys v0.34.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/time v0.11.0 // indirect
|
||||
golang.org/x/tools v0.34.0 // indirect
|
||||
golang.org/x/tools v0.33.0 // indirect
|
||||
gonum.org/v1/gonum v0.16.0 // indirect
|
||||
google.golang.org/api v0.236.0 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.74.2 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250519155744-55703ea1f237 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250528174236-200df99c418a // indirect
|
||||
google.golang.org/grpc v1.72.2 // indirect
|
||||
gopkg.in/telebot.v3 v3.3.8 // indirect
|
||||
k8s.io/client-go v0.33.0 // indirect
|
||||
k8s.io/client-go v0.32.3 // indirect
|
||||
k8s.io/klog/v2 v2.130.1 // indirect
|
||||
k8s.io/utils v0.0.0-20250321185631-1f6e0b77f77e // indirect
|
||||
modernc.org/libc v1.66.3 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
modernc.org/sqlite v1.38.2 // indirect
|
||||
sigs.k8s.io/yaml v1.6.0 // indirect
|
||||
sigs.k8s.io/yaml v1.4.0 // indirect
|
||||
)
|
||||
|
||||
242
go.sum
242
go.sum
@@ -1,5 +1,3 @@
|
||||
cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY=
|
||||
cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw=
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
|
||||
@@ -64,10 +62,8 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f
|
||||
dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s=
|
||||
dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.11 h1:fZMKAjRmgzW44+hEhF6ywi4VjFZQjJ8QrFBbgBsjmF4=
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.11/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.7 h1:zbdC0UooWLwBvOi5CeyCA42AWm6lMYuBVy6XnMzmF+c=
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.7/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
|
||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
@@ -97,8 +93,6 @@ github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7Oputl
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
|
||||
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/DataDog/datadog-go v3.7.1+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/Masterminds/squirrel v1.5.4 h1:uUcX/aBc8O7Fg9kaISIUsHXdKuqehiXAMQTYX8afzqM=
|
||||
github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
@@ -106,8 +100,6 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
|
||||
github.com/SigNoz/signoz-otel-collector v0.128.1 h1:D0bKMrRNgcKreKKYoakCr5jTWj1srupbNwGIvpHMihw=
|
||||
github.com/SigNoz/signoz-otel-collector v0.128.1/go.mod h1:vFQLsJFzQwVkO1ltIMH+z9KKuTZTn/P0lKu2mNYDBpE=
|
||||
github.com/Yiling-J/theine-go v0.6.1 h1:njE/rBBviU/Sq2G7PJKdLdwXg8j1azvZQulIjmshD+o=
|
||||
github.com/Yiling-J/theine-go v0.6.1/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU=
|
||||
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
@@ -148,7 +140,6 @@ github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 h1:6df1vn4bBlDDo
|
||||
github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3/go.mod h1:CIWtjkly68+yqLPbvwwR/fjNJA/idrtULjZWh2v1ys0=
|
||||
github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
|
||||
github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
|
||||
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
|
||||
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
|
||||
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
|
||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
@@ -182,8 +173,8 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH
|
||||
github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls=
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk=
|
||||
github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/coder/quartz v0.1.2 h1:PVhc9sJimTdKd3VbygXtS4826EOCpB1fXoRlLnCrE+s=
|
||||
github.com/coder/quartz v0.1.2/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA=
|
||||
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||
@@ -210,8 +201,8 @@ github.com/digitalocean/godo v1.144.0 h1:rDCsmpwcDe5egFQ3Ae45HTde685/GzX037mWRMP
|
||||
github.com/digitalocean/godo v1.144.0/go.mod h1:tYeiWY5ZXVpU48YaFv0M5irUFHXGorZpDNm7zzdWMzM=
|
||||
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI=
|
||||
github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v28.2.2+incompatible h1:CjwRSksz8Yo4+RmQ339Dp/D2tGO5JxwYeqtMOEe0LDw=
|
||||
github.com/docker/docker v28.2.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
|
||||
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
@@ -231,8 +222,6 @@ github.com/emersion/go-smtp v0.21.3 h1:7uVwagE8iPYE48WhNsng3RRpCUpFvNl39JGNSIyGV
|
||||
github.com/emersion/go-smtp v0.21.3/go.mod h1:qm27SGYgoIPRot6ubfQ/GpiPy/g3PaZAVRxiO/sDUgQ=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc=
|
||||
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
|
||||
@@ -265,8 +254,6 @@ github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSw
|
||||
github.com/foxboron/go-tpm-keyfiles v0.0.0-20250323135004-b31fac66206e h1:2jjYsGgM13xId2Ku+UGDQTO5It50LhT6lljiVJvBj1Y=
|
||||
github.com/foxboron/go-tpm-keyfiles v0.0.0-20250323135004-b31fac66206e/go.mod h1:uAyTlAUxchYuiFjTHmuIEJ4nGSm7iOPaGcAyA81fJ80=
|
||||
github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
|
||||
@@ -294,8 +281,8 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9
|
||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
|
||||
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||
@@ -331,8 +318,6 @@ github.com/go-redis/redismock/v8 v8.11.5 h1:RJFIiua58hrBrSpXhnGX3on79AU3S271H4Zh
|
||||
github.com/go-redis/redismock/v8 v8.11.5/go.mod h1:UaAU9dEe1C+eGr+FHV5prCWIt0hafyPWbGMEWE0UWdA=
|
||||
github.com/go-resty/resty/v2 v2.16.5 h1:hBKqmWrr7uRc3euHVqmh1HTHcKn99Smr7o5spptdhTM=
|
||||
github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ07xAwp/fiA=
|
||||
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
|
||||
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
|
||||
github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
@@ -355,8 +340,8 @@ github.com/gojek/heimdall/v7 v7.0.3 h1:+5sAhl8S0m+qRRL8IVeHCJudFh/XkG3wyO++nvOg+
|
||||
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
@@ -396,12 +381,10 @@ github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs=
|
||||
github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg=
|
||||
github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
|
||||
github.com/google/cel-go v0.26.0 h1:DPGjXackMpJWH680oGY4lZhYjIameYmR+/6RBdDGmaI=
|
||||
github.com/google/cel-go v0.26.0/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PUIaryMM=
|
||||
github.com/google/gnostic-models v0.6.9 h1:MU/8wDLif2qCXZmzncUQ/BOfxWfthHi63KqpoNbWqVw=
|
||||
github.com/google/gnostic-models v0.6.9/go.mod h1:CiWsm0s6BSQd1hRn8/QmxqB6BesYcbSZxsz9b0KuDBw=
|
||||
github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4=
|
||||
github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
|
||||
github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I=
|
||||
github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
@@ -416,6 +399,7 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
|
||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
|
||||
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
|
||||
@@ -423,6 +407,8 @@ github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17
|
||||
github.com/google/go-tpm v0.9.5 h1:ocUmnDebX54dnW+MQWGQRbdaAcJELsa6PqZhJ48KwVU=
|
||||
github.com/google/go-tpm v0.9.5/go.mod h1:h9jEsEECg7gtLis0upRBQU+GhYVH6jMjrFxI8u6bVUY=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
|
||||
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
|
||||
github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
|
||||
@@ -443,8 +429,6 @@ github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLe
|
||||
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
|
||||
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
|
||||
@@ -472,18 +456,14 @@ github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH
|
||||
github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
|
||||
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrRoT6yV5+wkrOpcszoIsO4+4ds248=
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 h1:sGm2vDRFUrQJO/Veii4h4zG2vvqG6uWNkBHSTqXOZk0=
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2/go.mod h1:wd1YpapPLivG6nQgbf7ZkG1hhSOXDhhn4MLTknx2aAc=
|
||||
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI=
|
||||
github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0=
|
||||
github.com/hashicorp/consul/api v1.13.0/go.mod h1:ZlVrynguJKcYr54zGaDbaL3fOvKC9m72FhPvA8T35KQ=
|
||||
github.com/hashicorp/consul/api v1.32.0 h1:5wp5u780Gri7c4OedGEPzmlUEzi0g2KyiPphSr6zjVg=
|
||||
@@ -517,8 +497,8 @@ github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9
|
||||
github.com/hashicorp/go-plugin v1.0.1/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
|
||||
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||
github.com/hashicorp/go-retryablehttp v0.5.4/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.8 h1:ylXZWnqa7Lhqpk0L1P1LzDtGcCR0rPVUrx/c8Unxc48=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.8/go.mod h1:rjiScheydd+CxvumBsIrFKlx3iS0jrZ7LvzFGFmuKbw=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
|
||||
github.com/hashicorp/go-rootcerts v1.0.1/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
|
||||
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||
@@ -579,8 +559,8 @@ github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsI
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs=
|
||||
github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
|
||||
github.com/jackc/pgx/v5 v5.7.2 h1:mLoDLV6sonKlvjIEsV56SkWNCnuNv531l94GaIzO+XI=
|
||||
github.com/jackc/pgx/v5 v5.7.2/go.mod h1:ncY89UGWxg82EykZUwSpUKEfccBGGYq1xjrOpsbsfGQ=
|
||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||
github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4=
|
||||
@@ -591,9 +571,8 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y
|
||||
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
||||
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
|
||||
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
|
||||
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
|
||||
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
|
||||
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||
github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8=
|
||||
github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I=
|
||||
github.com/jonboulle/clockwork v0.5.0/go.mod h1:3mZlmanh0g2NDKO5TWZVJAfofYk64M7XN3SzBPjZF60=
|
||||
@@ -621,8 +600,6 @@ github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCy
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
|
||||
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/knadh/koanf v1.5.0 h1:q2TSd/3Pyc/5yP9ldIrSdIz26MCcyNQzW0pEAugLPNs=
|
||||
github.com/knadh/koanf v1.5.0/go.mod h1:Hgyjp4y8v44hpZtPzs7JZfRAW5AhN7KfZcwv1RYggDs=
|
||||
github.com/knadh/koanf/v2 v2.2.0 h1:FZFwd9bUjpb8DyCWARUBy5ovuhDs1lI87dOEn2K8UVU=
|
||||
@@ -645,10 +622,6 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
|
||||
github.com/leodido/go-syslog/v4 v4.2.0 h1:A7vpbYxsO4e2E8udaurkLlxP5LDpDbmPMsGnuhb7jVk=
|
||||
github.com/leodido/go-syslog/v4 v4.2.0/go.mod h1:eJ8rUfDN5OS6dOkCOBYlg2a+hbAg6pJa99QXXgMrd98=
|
||||
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
|
||||
@@ -690,8 +663,6 @@ github.com/mdlayher/socket v0.4.1 h1:eM9y2/jlbs1M615oshPQOHZzj6R6wMT7bX5NPiQvn2U
|
||||
github.com/mdlayher/socket v0.4.1/go.mod h1:cAqeGjoufqdxWkD7DkpyS+wcefOtmu5OQ8KuoJGIReA=
|
||||
github.com/mdlayher/vsock v1.2.1 h1:pC1mTJTvjo1r9n9fbm7S1j04rCgCzhCOS5DY0zqHlnQ=
|
||||
github.com/mdlayher/vsock v1.2.1/go.mod h1:NRfCibel++DgeMD8z/hP+PPTjlNJsdPOmxcnENvE+SE=
|
||||
github.com/mfridman/interpolate v0.0.2 h1:pnuTK7MQIxxFz1Gr+rjSIx9u7qVjf5VOoM/u6BbAxPY=
|
||||
github.com/mfridman/interpolate v0.0.2/go.mod h1:p+7uk6oE07mpE/Ik1b8EckO0O4ZXiGAfshKBWLUM9Xg=
|
||||
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
|
||||
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
|
||||
github.com/miekg/dns v1.1.65 h1:0+tIPHzUW0GCge7IiK3guGP57VAw7hoPDfApjkMD1Fc=
|
||||
@@ -730,10 +701,6 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8m
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/natefinch/wrap v0.2.0 h1:IXzc/pw5KqxJv55gV0lSOcKHYuEZPGbQrOOXr/bamRk=
|
||||
github.com/natefinch/wrap v0.2.0/go.mod h1:6gMHlAl12DwYEfKP3TkuykYUfLSEAvHw67itm4/KAS8=
|
||||
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk=
|
||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||
@@ -743,8 +710,8 @@ github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA=
|
||||
github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/oklog/ulid/v2 v2.1.1 h1:suPZ4ARWLOJLegGFiZZ1dFAkqzhMjL3J1TzI+5wHz8s=
|
||||
github.com/oklog/ulid/v2 v2.1.1/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ=
|
||||
github.com/oklog/ulid/v2 v2.1.0 h1:+9lhoxAP56we25tyYETBBY1YLA2SaoLvUFgrP2miPJU=
|
||||
github.com/oklog/ulid/v2 v2.1.0/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
||||
github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0=
|
||||
@@ -781,13 +748,6 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8
|
||||
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
||||
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
|
||||
github.com/openfga/api/proto v0.0.0-20250127102726-f9709139a369 h1:wEsCZ4oBuu8LfEJ3VXbveXO8uEhCthrxA40WSvxO044=
|
||||
github.com/openfga/api/proto v0.0.0-20250127102726-f9709139a369/go.mod h1:m74TNgnAAIJ03gfHcx+xaRWnr+IbQy3y/AVNwwCFrC0=
|
||||
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe h1:X1g0rBUMvvzMudsak/jmoEZ1NhSsp6yR0VGxWHnGMzs=
|
||||
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe/go.mod h1:5Z0pbTT7Jz/oQFLfadb+C5t5NwHrduAO7j7L07Ec1GM=
|
||||
github.com/openfga/openfga v1.9.5 h1:CtlyxP8D6cG1/EC5GLgJHSZYz4Wcbzh9b3jF10oMuw4=
|
||||
github.com/openfga/openfga v1.9.5/go.mod h1:prbb9r4bAp24mYi/DQK97Q3fqiLivlH+8maopVNnxdE=
|
||||
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
|
||||
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
|
||||
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
|
||||
github.com/ovh/go-ovh v1.7.0 h1:V14nF7FwDjQrZt9g7jzcvAAQ3HN6DNShRFRMC3jLoPw=
|
||||
@@ -805,8 +765,6 @@ github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAv
|
||||
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
|
||||
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU=
|
||||
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
@@ -827,8 +785,6 @@ github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndr
|
||||
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU=
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/pressly/goose/v3 v3.24.3 h1:DSWWNwwggVUsYZ0X2VitiAa9sKuqtBfe+Jr9zFGwWlM=
|
||||
github.com/pressly/goose/v3 v3.24.3/go.mod h1:v9zYL4xdViLHCUUJh/mhjnm6JrK7Eul8AS93IxiZM4E=
|
||||
github.com/prometheus/alertmanager v0.28.1 h1:BK5pCoAtaKg01BYRUJhEDV1tqJMEtYBGzPw8QdvnnvA=
|
||||
github.com/prometheus/alertmanager v0.28.1/go.mod h1:0StpPUDDHi1VXeM7p2yYfeZgLVi/PPlt39vo9LQUHxM=
|
||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||
@@ -836,8 +792,8 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn
|
||||
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
|
||||
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
|
||||
github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
|
||||
github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc=
|
||||
github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE=
|
||||
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
|
||||
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
|
||||
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
|
||||
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
@@ -848,8 +804,8 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
|
||||
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
|
||||
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
|
||||
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
|
||||
github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE=
|
||||
github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8=
|
||||
github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4=
|
||||
github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8=
|
||||
github.com/prometheus/exporter-toolkit v0.14.0 h1:NMlswfibpcZZ+H0sZBiTjrA3/aBFHkNZqE+iCj5EmRg=
|
||||
github.com/prometheus/exporter-toolkit v0.14.0/go.mod h1:Gu5LnVvt7Nr/oqTBUC23WILZepW0nffNo10XdhQcwWA=
|
||||
github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 h1:YaJ1JqyKGIUFIMUpMeT22yewZMXiTt5sLgWG1D/m4Yc=
|
||||
@@ -870,8 +826,6 @@ github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPK
|
||||
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||
github.com/redis/go-redis/v9 v9.9.0 h1:URbPQ4xVQSQhZ27WMQVmZSo3uT3pL+4IdHVcYq2nVfM=
|
||||
github.com/redis/go-redis/v9 v9.9.0/go.mod h1:huWgSWd8mW6+m0VPhJjSSQ+d6Nh1VICQ6Q5lHuCH/Iw=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
@@ -880,8 +834,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
|
||||
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
|
||||
github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
||||
github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA=
|
||||
github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU=
|
||||
github.com/russellhaering/gosaml2 v0.9.0 h1:CNMnH42z/GirrKjdmNrSS6bAAs47F9bPdl4PfRmVOIk=
|
||||
@@ -893,8 +847,6 @@ github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb
|
||||
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
|
||||
github.com/sagikazarmark/crypt v0.6.0/go.mod h1:U8+INwJo3nBv1m6A/8OBXAq7Jnpspk5AxSgDyEQcea8=
|
||||
github.com/sagikazarmark/locafero v0.7.0 h1:5MqpDsTGNDhY8sGp0Aowyf0qKsPrhewaLSsFaodPcyo=
|
||||
github.com/sagikazarmark/locafero v0.7.0/go.mod h1:2za3Cg5rMaTMoG/2Ulr9AwtFaIppKXTRYnozin4aB5k=
|
||||
github.com/samber/lo v1.47.0 h1:z7RynLwP5nbyRscyvcD043DWYoOcYRv3mV8lBeqOCLc=
|
||||
github.com/samber/lo v1.47.0/go.mod h1:RmDH9Ct32Qy3gduHQuKJ3gW1fMHAnE/fAzQuf6He5cU=
|
||||
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33 h1:KhF0WejiUTDbL5X55nXowP7zNopwpowa6qaMAWyIE+0=
|
||||
@@ -913,8 +865,6 @@ github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/sethvargo/go-password v0.2.0 h1:BTDl4CC/gjf/axHMaDQtw507ogrXLci6XRiLc7i/UHI=
|
||||
github.com/sethvargo/go-password v0.2.0/go.mod h1:Ym4Mr9JXLBycr02MFuVQ/0JHidNetSgbzutTr3zsYXE=
|
||||
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
|
||||
github.com/sethvargo/go-retry v0.3.0/go.mod h1:mNX17F0C/HguQMyMyJxcnU471gOZGxCLyYaFyAZraas=
|
||||
github.com/shirou/gopsutil/v4 v4.25.5 h1:rtd9piuSMGeU8g1RMXjZs9y9luK5BwtnG7dZaQUJAsc=
|
||||
github.com/shirou/gopsutil/v4 v4.25.5/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
@@ -934,29 +884,18 @@ github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sS
|
||||
github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60=
|
||||
github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js=
|
||||
github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0=
|
||||
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
||||
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo=
|
||||
github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
|
||||
github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
|
||||
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
|
||||
github.com/spf13/cast v1.9.2 h1:SsGfm7M8QOFtEzumm7UZrZdLLquNdzFYfIbEXntcFbE=
|
||||
github.com/spf13/cast v1.9.2/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
|
||||
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
|
||||
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
|
||||
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
|
||||
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M=
|
||||
github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v1.13.0/go.mod h1:Icm2xNL3/8uyh/wFuB1jI7TiTNKp8632Nwegu+zgdYw=
|
||||
github.com/spf13/viper v1.20.1 h1:ZMi+z/lvLyPSCoNtFCpqjy0S4kPbirhpTMwl8BkW9X4=
|
||||
github.com/spf13/viper v1.20.1/go.mod h1:P9Mdzt1zoHIG8m2eZQinpiBjo6kCmZSKBClNNqjJvu4=
|
||||
github.com/srikanthccv/ClickHouse-go-mock v0.12.0 h1:KUzaWTwuqMc2uf5FylM/oAcTFdE2DdZjvISm9V0/NAA=
|
||||
github.com/srikanthccv/ClickHouse-go-mock v0.12.0/go.mod h1:1oUmLtXEXOyS0EEWVKlKEfLfv9y02agCMAvD3tVnhlo=
|
||||
github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs=
|
||||
github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
@@ -973,15 +912,12 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.5/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
@@ -1029,10 +965,6 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
||||
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||
github.com/zeebo/assert v1.3.1 h1:vukIABvugfNMZMQO1ABsyQDJDTVQbn+LWSMy1ol1h6A=
|
||||
github.com/zeebo/assert v1.3.1/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
|
||||
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
|
||||
go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
||||
go.etcd.io/etcd/client/v2 v2.305.4/go.mod h1:Ud+VUwIi9/uQHOMA+4ekToJ12lTxlv0zB/+DHwTGEbU=
|
||||
@@ -1173,8 +1105,8 @@ go.opentelemetry.io/contrib/propagators/b3 v1.36.0 h1:xrAb/G80z/l5JL6XlmUMSD1i6W
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.36.0/go.mod h1:UREJtqioFu5awNaCR8aEx7MfJROFlAWb6lPaJFbHaG0=
|
||||
go.opentelemetry.io/contrib/zpages v0.61.0 h1:tYvUj377Dn3k1wf1le/f8YWSNQ8k0byS3jK8PiIXu9Y=
|
||||
go.opentelemetry.io/contrib/zpages v0.61.0/go.mod h1:MFNPHMJOGA1P6m5501ANjOJDp4A9BUQja1Y53CDL8LQ=
|
||||
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
|
||||
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
|
||||
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
|
||||
go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 h1:06ZeJRe5BnYXceSM9Vya83XXVaNGe3H1QqsvqRANQq8=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2/go.mod h1:DvPtKE63knkDVP88qpatBj81JxN+w1bqfVbsbCbj1WY=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 h1:tPLwQlXbJ8NSOfZc4OkgU5h2A38M4c9kfHSVc4PFQGs=
|
||||
@@ -1201,8 +1133,8 @@ go.opentelemetry.io/otel/log v0.12.2 h1:yob9JVHn2ZY24byZeaXpTVoPS6l+UrrxmxmPKohX
|
||||
go.opentelemetry.io/otel/log v0.12.2/go.mod h1:ShIItIxSYxufUMt+1H5a2wbckGli3/iCfuEbVZi/98E=
|
||||
go.opentelemetry.io/otel/log/logtest v0.0.0-20250526142609-aa5bd0e64989 h1:4JF7oY9CcHrPGfBLijDcXZyCzGckVEyOjuat5ktmQRg=
|
||||
go.opentelemetry.io/otel/log/logtest v0.0.0-20250526142609-aa5bd0e64989/go.mod h1:NToOxLDCS1tXDSB2dIj44H9xGPOpKr0csIN+gnuihv4=
|
||||
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
|
||||
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
|
||||
go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE=
|
||||
go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs=
|
||||
go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs=
|
||||
go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY=
|
||||
go.opentelemetry.io/otel/sdk/log v0.12.2 h1:yNoETvTByVKi7wHvYS6HMcZrN5hFLD7I++1xIZ/k6W0=
|
||||
@@ -1211,8 +1143,8 @@ go.opentelemetry.io/otel/sdk/log/logtest v0.0.0-20250521073539-a85ae98dcedc h1:u
|
||||
go.opentelemetry.io/otel/sdk/log/logtest v0.0.0-20250521073539-a85ae98dcedc/go.mod h1:TY/N/FT7dmFrP/r5ym3g0yysP1DefqGpAZr4f82P0dE=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4=
|
||||
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
|
||||
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
|
||||
go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w=
|
||||
go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA=
|
||||
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
|
||||
go.opentelemetry.io/proto/otlp v1.6.0 h1:jQjP+AQyTf+Fe7OKj/MfkDrmK4MNVtw2NpXsf9fefDI=
|
||||
go.opentelemetry.io/proto/otlp v1.6.0/go.mod h1:cicgGehlFuNdgZkcALOCh3VE6K/u2tAjzlRhDwmVpZc=
|
||||
@@ -1220,22 +1152,14 @@ go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
|
||||
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
|
||||
go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko=
|
||||
go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o=
|
||||
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||
go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
|
||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE=
|
||||
go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
@@ -1259,8 +1183,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
|
||||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o=
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
|
||||
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac h1:l5+whBCLH3iH2ZNHYLbAe58bo7yrN4mVcnkHDYz5vvs=
|
||||
golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac/go.mod h1:hH+7mtFmImwwcMvScyxUhjuVHR3HGaDPMn9rMSUUbxo=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@@ -1375,8 +1299,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -1454,7 +1378,6 @@ golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
@@ -1466,9 +1389,8 @@ golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
|
||||
@@ -1507,7 +1429,6 @@ golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtn
|
||||
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
@@ -1548,8 +1469,8 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
|
||||
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
|
||||
golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
|
||||
golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -1628,7 +1549,6 @@ google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfG
|
||||
google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
|
||||
@@ -1686,10 +1606,10 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX
|
||||
google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
|
||||
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
|
||||
google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250519155744-55703ea1f237 h1:Kog3KlB4xevJlAcbbbzPfRG0+X9fdoGM+UBRKVz6Wr0=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250519155744-55703ea1f237/go.mod h1:ezi0AVyMKDWy5xAncvjLWH7UcLBB5n7y2fQ8MzjJcto=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250528174236-200df99c418a h1:v2PbRU4K3llS09c7zodFpNePeamkAwG3mPrAery9VeE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250528174236-200df99c418a/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
@@ -1722,8 +1642,8 @@ google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ5
|
||||
google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
|
||||
google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
|
||||
google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
|
||||
google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4=
|
||||
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
|
||||
google.golang.org/grpc v1.72.2 h1:TdbGzwb82ty4OusHWepvFWGLgIbNo1/SUynEN0ssqv8=
|
||||
google.golang.org/grpc v1.72.2/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM=
|
||||
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
@@ -1781,53 +1701,25 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
k8s.io/api v0.33.0 h1:yTgZVn1XEe6opVpP1FylmNrIFWuDqe2H0V8CT5gxfIU=
|
||||
k8s.io/api v0.33.0/go.mod h1:CTO61ECK/KU7haa3qq8sarQ0biLq2ju405IZAd9zsiM=
|
||||
k8s.io/apimachinery v0.33.0 h1:1a6kHrJxb2hs4t8EE5wuR/WxKDwGN1FKH3JvDtA0CIQ=
|
||||
k8s.io/apimachinery v0.33.0/go.mod h1:BHW0YOu7n22fFv/JkYOEfkUYNRN0fj0BlvMFWA7b+SM=
|
||||
k8s.io/client-go v0.33.0 h1:UASR0sAYVUzs2kYuKn/ZakZlcs2bEHaizrrHUZg0G98=
|
||||
k8s.io/client-go v0.33.0/go.mod h1:kGkd+l/gNGg8GYWAPr0xF1rRKvVWvzh9vmZAMXtaKOg=
|
||||
k8s.io/api v0.32.3 h1:Hw7KqxRusq+6QSplE3NYG4MBxZw1BZnq4aP4cJVINls=
|
||||
k8s.io/api v0.32.3/go.mod h1:2wEDTXADtm/HA7CCMD8D8bK4yuBUptzaRhYcYEEYA3k=
|
||||
k8s.io/apimachinery v0.32.3 h1:JmDuDarhDmA/Li7j3aPrwhpNBA94Nvk5zLeOge9HH1U=
|
||||
k8s.io/apimachinery v0.32.3/go.mod h1:GpHVgxoKlTxClKcteaeuF1Ul/lDVb74KpZcxcmLDElE=
|
||||
k8s.io/client-go v0.32.3 h1:RKPVltzopkSgHS7aS98QdscAgtgah/+zmpAogooIqVU=
|
||||
k8s.io/client-go v0.32.3/go.mod h1:3v0+3k4IcT9bXTc4V2rt+d2ZPPG700Xy6Oi0Gdl2PaY=
|
||||
k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
|
||||
k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
|
||||
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff h1:/usPimJzUKKu+m+TE36gUyGcf03XZEP0ZIKgKj35LS4=
|
||||
k8s.io/kube-openapi v0.0.0-20250318190949-c8a335a9a2ff/go.mod h1:5jIi+8yX4RIb8wk3XwBo5Pq2ccx4FP10ohkbSKCZoK8=
|
||||
k8s.io/kube-openapi v0.0.0-20241105132330-32ad38e42d3f h1:GA7//TjRY9yWGy1poLzYYJJ4JRdzg3+O6e8I+e+8T5Y=
|
||||
k8s.io/kube-openapi v0.0.0-20241105132330-32ad38e42d3f/go.mod h1:R/HEjbvWI0qdfb8viZUeVZm0X6IZnxAydC7YU42CMw4=
|
||||
k8s.io/utils v0.0.0-20250321185631-1f6e0b77f77e h1:KqK5c/ghOm8xkHYhlodbp6i6+r+ChV2vuAuVRdFbLro=
|
||||
k8s.io/utils v0.0.0-20250321185631-1f6e0b77f77e/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
|
||||
modernc.org/cc/v4 v4.26.2 h1:991HMkLjJzYBIfha6ECZdjrIYz2/1ayr+FL8GN+CNzM=
|
||||
modernc.org/cc/v4 v4.26.2/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
|
||||
modernc.org/ccgo/v4 v4.28.0 h1:rjznn6WWehKq7dG4JtLRKxb52Ecv8OUGah8+Z/SfpNU=
|
||||
modernc.org/ccgo/v4 v4.28.0/go.mod h1:JygV3+9AV6SmPhDasu4JgquwU81XAKLd3OKTUDNOiKE=
|
||||
modernc.org/fileutil v1.3.8 h1:qtzNm7ED75pd1C7WgAGcK4edm4fvhtBsEiI/0NQ54YM=
|
||||
modernc.org/fileutil v1.3.8/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
|
||||
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
|
||||
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
|
||||
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
|
||||
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
|
||||
modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ=
|
||||
modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8=
|
||||
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
|
||||
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
|
||||
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
|
||||
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
||||
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
|
||||
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
|
||||
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3 h1:/Rv+M11QRah1itp8VhT6HoVx1Ray9eB4DBr+K+/sCJ8=
|
||||
sigs.k8s.io/json v0.0.0-20241010143419-9aa6b5e7a4b3/go.mod h1:18nIHnGi6636UCz6m8i4DhaJ65T6EruyzmoQqI2BVDo=
|
||||
sigs.k8s.io/randfill v1.0.0 h1:JfjMILfT8A6RbawdsK2JXGBR5AQVfd+9TbzrlneTyrU=
|
||||
sigs.k8s.io/randfill v1.0.0/go.mod h1:XeLlZ/jmk4i1HRopwe7/aU3H5n1zNUcX6TM94b3QxOY=
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.6.0 h1:IUA9nvMmnKWcj5jl84xn+T5MnlZKThmUW1TdblaLVAc=
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.6.0/go.mod h1:dDy58f92j70zLsuZVuUX5Wp9vtxXpaZnkPGWeqDfCps=
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.4.3 h1:sCP7Vv3xx/CWIuTPVN38lUPx0uw0lcLfzaiDa8Ja01A=
|
||||
sigs.k8s.io/structured-merge-diff/v4 v4.4.3/go.mod h1:N8f93tFZh9U6vpxwRArLiikrE5/2tiu1w1AGfACIGE4=
|
||||
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
|
||||
sigs.k8s.io/yaml v1.6.0 h1:G8fkbMSAFqgEFgh4b1wmtzDnioxFCUgTZhlbj5P9QYs=
|
||||
sigs.k8s.io/yaml v1.6.0/go.mod h1:796bPqUfzR/0jLAl6XjHl3Ck7MiyVv8dbTdyT3/pMf4=
|
||||
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
|
||||
sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
package authz
|
||||
|
||||
import "github.com/SigNoz/signoz/pkg/factory"
|
||||
|
||||
type AuthZ interface {
|
||||
factory.Service
|
||||
}
|
||||
@@ -1,19 +0,0 @@
|
||||
package authz
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
)
|
||||
|
||||
type Config struct{}
|
||||
|
||||
func NewConfigFactory() factory.ConfigFactory {
|
||||
return factory.NewConfigFactory(factory.MustNewName("authz"), newConfig)
|
||||
}
|
||||
|
||||
func newConfig() factory.Config {
|
||||
return Config{}
|
||||
}
|
||||
|
||||
func (c Config) Validate() error {
|
||||
return nil
|
||||
}
|
||||
@@ -1,76 +0,0 @@
|
||||
package openfgaauthz
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
pkgopenfgalogger "github.com/openfga/openfga/pkg/logger"
|
||||
"go.uber.org/zap" //nolint:depguard
|
||||
)
|
||||
|
||||
var _ pkgopenfgalogger.Logger = (*openfgaLogger)(nil)
|
||||
|
||||
type openfgaLogger struct {
|
||||
slog *slog.Logger
|
||||
convertor instrumentation.ZapToSlogConverter
|
||||
}
|
||||
|
||||
func NewLogger(logger *slog.Logger) *openfgaLogger {
|
||||
return &openfgaLogger{slog: logger, convertor: instrumentation.NewZapToSlogConverter()}
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) With(fields ...zap.Field) pkgopenfgalogger.Logger {
|
||||
newLogger := logger.slog.With(logger.convertor.FieldsToAttributes(fields)...)
|
||||
return &openfgaLogger{slog: newLogger, convertor: logger.convertor}
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) Info(message string, fields ...zap.Field) {
|
||||
logger.slog.Info(message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) InfoWithContext(ctx context.Context, message string, fields ...zap.Field) {
|
||||
logger.slog.InfoContext(ctx, message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) Debug(message string, fields ...zap.Field) {
|
||||
logger.slog.Debug(message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) DebugWithContext(ctx context.Context, message string, fields ...zap.Field) {
|
||||
logger.slog.DebugContext(ctx, message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) Warn(message string, fields ...zap.Field) {
|
||||
logger.slog.Warn(message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) WarnWithContext(ctx context.Context, message string, fields ...zap.Field) {
|
||||
logger.slog.WarnContext(ctx, message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) Error(message string, fields ...zap.Field) {
|
||||
logger.slog.Error(message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) ErrorWithContext(ctx context.Context, message string, fields ...zap.Field) {
|
||||
logger.slog.ErrorContext(ctx, message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) Fatal(message string, fields ...zap.Field) {
|
||||
logger.slog.Error(message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) FatalWithContext(ctx context.Context, message string, fields ...zap.Field) {
|
||||
logger.slog.ErrorContext(ctx, message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) Panic(message string, fields ...zap.Field) {
|
||||
logger.slog.Error(message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
panic(message)
|
||||
}
|
||||
|
||||
func (logger *openfgaLogger) PanicWithContext(ctx context.Context, message string, fields ...zap.Field) {
|
||||
logger.slog.ErrorContext(ctx, message, logger.convertor.FieldsToAttributes(fields)...) //nolint:sloglint
|
||||
panic(message)
|
||||
}
|
||||
@@ -1,160 +0,0 @@
package openfgaauthz

import (
	"context"

	authz "github.com/SigNoz/signoz/pkg/authz"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	openfgav1 "github.com/openfga/api/proto/openfga/v1"
	openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
	openfgapkgserver "github.com/openfga/openfga/pkg/server"
	"google.golang.org/protobuf/encoding/protojson"
)

type provider struct {
	config        authz.Config
	settings      factory.ScopedProviderSettings
	openfgaSchema []openfgapkgtransformer.ModuleFile
	openfgaServer *openfgapkgserver.Server
	stopChan      chan struct{}
}

func NewProviderFactory(sqlstoreConfig sqlstore.Config, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
	return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
		return newOpenfgaProvider(ctx, ps, config, sqlstoreConfig, openfgaSchema)
	})
}

func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstoreConfig sqlstore.Config, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
	scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/authz/openfgaauthz")

	// setup connections and run the migrations
	sqlstore, err := NewSQLStore(storeConfig{sqlstoreConfig: sqlstoreConfig})
	if err != nil {
		scopedProviderSettings.Logger().DebugContext(ctx, "failed to initialize sqlstore for authz")
		return nil, err
	}

	// setup the openfga server
	opts := []openfgapkgserver.OpenFGAServiceV1Option{
		openfgapkgserver.WithDatastore(sqlstore),
		openfgapkgserver.WithLogger(NewLogger(scopedProviderSettings.Logger())),
	}
	openfgaServer, err := openfgapkgserver.NewServerWithOpts(opts...)
	if err != nil {
		scopedProviderSettings.Logger().DebugContext(ctx, "failed to create authz server")
		return nil, err
	}

	return &provider{
		config:        config,
		settings:      scopedProviderSettings,
		openfgaServer: openfgaServer,
		openfgaSchema: openfgaSchema,
		stopChan:      make(chan struct{}),
	}, nil
}

func (provider *provider) Start(ctx context.Context) error {
	storeId, err := provider.getOrCreateStore(ctx, "signoz")
	if err != nil {
		return err
	}

	_, err = provider.getOrCreateModel(ctx, storeId)
	if err != nil {
		return err
	}

	<-provider.stopChan
	return nil
}

func (provider *provider) Stop(ctx context.Context) error {
	provider.openfgaServer.Close()
	close(provider.stopChan)
	return nil
}

func (provider *provider) getOrCreateStore(ctx context.Context, name string) (string, error) {
	stores, err := provider.openfgaServer.ListStores(ctx, &openfgav1.ListStoresRequest{})
	if err != nil {
		return "", err
	}

	for _, store := range stores.GetStores() {
		if store.GetName() == name {
			return store.Id, nil
		}
	}

	store, err := provider.openfgaServer.CreateStore(ctx, &openfgav1.CreateStoreRequest{Name: name})
	if err != nil {
		return "", err
	}

	return store.Id, nil
}

func (provider *provider) getOrCreateModel(ctx context.Context, storeID string) (string, error) {
	schema, err := openfgapkgtransformer.TransformModuleFilesToModel(provider.openfgaSchema, "1.1")
	if err != nil {
		return "", err
	}

	authorisationModels, err := provider.openfgaServer.ReadAuthorizationModels(ctx, &openfgav1.ReadAuthorizationModelsRequest{StoreId: storeID})
	if err != nil {
		return "", err
	}

	for _, authModel := range authorisationModels.GetAuthorizationModels() {
		equal, err := provider.isModelEqual(schema, authModel)
		if err != nil {
			return "", err
		}
		if equal {
			return authModel.Id, nil
		}
	}

	authorizationModel, err := provider.openfgaServer.WriteAuthorizationModel(ctx, &openfgav1.WriteAuthorizationModelRequest{
		StoreId:         storeID,
		TypeDefinitions: schema.TypeDefinitions,
		SchemaVersion:   schema.SchemaVersion,
		Conditions:      schema.Conditions,
	})
	if err != nil {
		return "", err
	}

	return authorizationModel.AuthorizationModelId, nil
}

// the language model doesn't have any equality check
// https://github.com/openfga/language/blob/main/pkg/go/transformer/module-to-model_test.go#L38
func (provider *provider) isModelEqual(expected *openfgav1.AuthorizationModel, actual *openfgav1.AuthorizationModel) (bool, error) {
	// we need to initialize a new model since the model extracted from schema doesn't have id
	expectedAuthModel := openfgav1.AuthorizationModel{
		SchemaVersion:   expected.SchemaVersion,
		TypeDefinitions: expected.TypeDefinitions,
		Conditions:      expected.Conditions,
	}
	expectedAuthModelBytes, err := protojson.Marshal(&expectedAuthModel)
	if err != nil {
		return false, err
	}

	actualAuthModel := openfgav1.AuthorizationModel{
		SchemaVersion:   actual.SchemaVersion,
		TypeDefinitions: actual.TypeDefinitions,
		Conditions:      actual.Conditions,
	}
	actualAuthModelBytes, err := protojson.Marshal(&actualAuthModel)
	if err != nil {
		return false, err
	}

	return string(expectedAuthModelBytes) == string(actualAuthModelBytes), nil
}
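Once Start has ensured the "signoz" store and a matching authorization model exist, authorization decisions go through the embedded server. A hedged sketch of what a caller could look like; the helper name and tuple values are illustrative, and the request shape assumes the public openfgav1 Check API rather than anything added in this diff:

```go
// Illustrative only, not part of this diff. Assumes the provider exposes its
// store/model IDs and the embedded *openfgapkgserver.Server to a caller.
func canView(ctx context.Context, srv *openfgapkgserver.Server, storeID, modelID, userID string) (bool, error) {
	resp, err := srv.Check(ctx, &openfgav1.CheckRequest{
		StoreId:              storeID,
		AuthorizationModelId: modelID,
		TupleKey: &openfgav1.CheckRequestTupleKey{
			User:     "user:" + userID,
			Relation: "viewer",
			Object:   "organisation:default",
		},
	})
	if err != nil {
		return false, err
	}
	return resp.GetAllowed(), nil
}
```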
@@ -1,42 +0,0 @@
package openfgaauthz

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/openfga/openfga/pkg/storage"
	"github.com/openfga/openfga/pkg/storage/migrate"
	"github.com/openfga/openfga/pkg/storage/postgres"
	"github.com/openfga/openfga/pkg/storage/sqlcommon"
	"github.com/openfga/openfga/pkg/storage/sqlite"
)

type storeConfig struct {
	sqlstoreConfig sqlstore.Config
}

func NewSQLStore(cfg storeConfig) (storage.OpenFGADatastore, error) {
	switch cfg.sqlstoreConfig.Provider {
	case "sqlite":
		err := migrate.RunMigrations(migrate.MigrationConfig{Engine: cfg.sqlstoreConfig.Provider, URI: "file:" + cfg.sqlstoreConfig.Sqlite.Path + "?_foreign_keys=true"})
		if err != nil {
			return nil, err
		}

		return sqlite.New("file:"+cfg.sqlstoreConfig.Sqlite.Path+"?_foreign_keys=true", &sqlcommon.Config{
			MaxTuplesPerWriteField: 100,
			MaxTypesPerModelField:  100,
		})
	case "postgres":
		err := migrate.RunMigrations(migrate.MigrationConfig{Engine: cfg.sqlstoreConfig.Provider, URI: cfg.sqlstoreConfig.Postgres.DSN})
		if err != nil {
			return nil, err
		}

		return postgres.New(cfg.sqlstoreConfig.Postgres.DSN, &sqlcommon.Config{
			MaxTuplesPerWriteField: 100,
			MaxTypesPerModelField:  100,
		})
	default:
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid store type: %s", cfg.sqlstoreConfig.Provider)
	}
}
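Both arms of the sqlite case build the same DSN string twice; the _foreign_keys=true parameter asks the SQLite driver to enforce foreign keys for the OpenFGA tables. A tiny sketch of that format (illustrative, not part of the diff), which would also remove the duplicated literal:

```go
// sqliteURI mirrors the DSN literal used in the sqlite branch above.
// Purely illustrative.
func sqliteURI(path string) string {
	return "file:" + path + "?_foreign_keys=true"
}
```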
@@ -1,13 +0,0 @@
module base

type user

type role
  relations
    define assignee: [user]

type organisation
  relations
    define admin: [role#assignee]
    define editor: [role#assignee] or admin
    define viewer: [role#assignee] or editor
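At startup this DSL is compiled into an OpenFGA authorization model with schema version 1.1 (the transformer call in getOrCreateModel above). A standalone sketch of that step, using only the calls already shown in this diff:

```go
// Illustrative: compile the embedded base.fga module into an authorization
// model, mirroring what getOrCreateModel does with the provider's schema.
func compileBaseModel(baseDSL string) (*openfgav1.AuthorizationModel, error) {
	return openfgapkgtransformer.TransformModuleFilesToModel([]openfgapkgtransformer.ModuleFile{
		{Name: "base.fga", Contents: baseDSL},
	}, "1.1")
}
```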
@@ -1,29 +0,0 @@
package openfgaschema

import (
	"context"
	_ "embed"

	"github.com/SigNoz/signoz/pkg/authz"
	openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)

var (
	//go:embed base.fga
	baseDSL string
)

type schema struct{}

func NewSchema() authz.Schema {
	return &schema{}
}

func (schema *schema) Get(ctx context.Context) []openfgapkgtransformer.ModuleFile {
	return []openfgapkgtransformer.ModuleFile{
		{
			Name:     "base.fga",
			Contents: baseDSL,
		},
	}
}
@@ -1,11 +0,0 @@
package authz

import (
	"context"

	openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)

type Schema interface {
	Get(context.Context) []openfgapkgtransformer.ModuleFile
}
@@ -79,16 +79,6 @@ func Wrapf(cause error, t typ, code Code, format string, args ...interface{}) *b
	}
}

// Wrap returns a new error by wrapping another error with base.
func Wrap(cause error, t typ, code Code, message string) *base {
	return &base{
		t: t,
		c: code,
		m: message,
		e: cause,
	}
}

// WithAdditional wraps an existing base error with a new formatted message.
// It is used when the original error already contains type and code.
func WithAdditional(cause error, format string, args ...interface{}) *base {
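Wrap is the non-formatting counterpart of Wrapf; call sites elsewhere in this diff follow the same pattern. A usage sketch (doQuery and tableName are assumptions for illustration, not repository code):

```go
// Illustrative call site for the wrappers above: Wrap when the message is
// static, Wrapf when it needs formatting.
func loadRows(tableName string) error {
	if err := doQuery(tableName); err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to query table %s", tableName)
	}
	return nil
}
```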
@@ -98,10 +98,8 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler {
			return
		}

		ctx = ctxtypes.SetAuthType(ctx, ctxtypes.AuthTypeAPIKey)

		comment := ctxtypes.CommentFromContext(ctx)
		comment.Set("auth_type", ctxtypes.AuthTypeAPIKey.StringValue())
		comment.Set("auth_type", "api_key")
		comment.Set("user_id", claims.UserID)
		comment.Set("org_id", claims.OrgID)
@@ -51,10 +51,8 @@ func (a *Auth) Wrap(next http.Handler) http.Handler {
			return
		}

		ctx = ctxtypes.SetAuthType(ctx, ctxtypes.AuthTypeJWT)

		comment := ctxtypes.CommentFromContext(ctx)
		comment.Set("auth_type", ctxtypes.AuthTypeJWT.StringValue())
		comment.Set("auth_type", "jwt")
		comment.Set("user_id", claims.UserID)
		comment.Set("org_id", claims.OrgID)
@@ -8,7 +8,6 @@ import (
	sdkmetric "go.opentelemetry.io/otel/metric"
	sdkresource "go.opentelemetry.io/otel/sdk/resource"
	sdktrace "go.opentelemetry.io/otel/trace"
	"go.uber.org/zap" //nolint:depguard
)

// Instrumentation provides the core components for application instrumentation.
@@ -25,11 +24,6 @@ type Instrumentation interface {
	ToProviderSettings() factory.ProviderSettings
}

// conversion functions required for using zap interface with underlying slog provider
type ZapToSlogConverter interface {
	FieldsToAttributes(fields []zap.Field) []any
}

// Merges the input attributes with the resource attributes.
func mergeAttributes(input map[string]any, resource *sdkresource.Resource) map[string]any {
	output := make(map[string]any)
@@ -5,11 +5,8 @@ import (
	"os"

	"github.com/SigNoz/signoz/pkg/instrumentation/loghandler"
	"go.uber.org/zap" //nolint:depguard
)

type zapToSlogConverter struct{}

func NewLogger(config Config, wrappers ...loghandler.Wrapper) *slog.Logger {
	logger := slog.New(
		loghandler.New(
@@ -36,26 +33,3 @@ func NewLogger(config Config, wrappers ...loghandler.Wrapper) *slog.Logger {

	return logger
}

func NewZapToSlogConverter() ZapToSlogConverter {
	return &zapToSlogConverter{}
}

func (*zapToSlogConverter) FieldsToAttributes(fields []zap.Field) []any {
	// for each KV pair
	args := make([]any, 0, len(fields)*2)
	for _, f := range fields {
		args = append(args, f.Key)

		switch {
		case f.Interface != nil:
			args = append(args, f.Interface)
		case f.String != "":
			args = append(args, f.String)
		default:
			args = append(args, f.Integer)
		}
	}

	return args
}
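The converter flattens zap fields into alternating key/value arguments that slog accepts. A behavioural sketch of the code above (zap keeps string values in Field.String and integer values in Field.Integer, which is what the switch relies on):

```go
// Sketch: what FieldsToAttributes produces for two common field types.
func logStoreCreated() {
	fields := []zap.Field{zap.String("store", "signoz"), zap.Int("attempt", 2)}
	// args == ["store", "signoz", "attempt", int64(2)]
	args := NewZapToSlogConverter().FieldsToAttributes(fields)
	slog.Default().Info("created store", args...)
}
```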
@@ -5,7 +5,6 @@ import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/statsreporter"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -17,9 +16,9 @@ type Module interface {

	List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)

	Update(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, data dashboardtypes.UpdatableDashboard, diff int) (*dashboardtypes.Dashboard, error)
	Update(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, data dashboardtypes.UpdatableDashboard) (*dashboardtypes.Dashboard, error)

	LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, role types.Role, lock bool) error
	LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, lock bool) error

	Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error
@@ -13,7 +13,6 @@ import (
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/transition"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/gorilla/mux"
@@ -107,13 +106,7 @@ func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) {
		return
	}

	diff := 0
	// Allow multiple deletions for API key requests; enforce for others
	if authType, ok := ctxtypes.AuthTypeFromContext(ctx); ok && authType == ctxtypes.AuthTypeJWT {
		diff = 1
	}

	dashboard, err := handler.module.Update(ctx, orgID, dashboardID, claims.Email, req, diff)
	dashboard, err := handler.module.Update(ctx, orgID, dashboardID, claims.Email, req)
	if err != nil {
		render.Error(rw, err)
		return
@@ -156,7 +149,7 @@ func (handler *handler) LockUnlock(rw http.ResponseWriter, r *http.Request) {
		return
	}

	err = handler.module.LockUnlock(ctx, orgID, dashboardID, claims.Email, claims.Role, *req.Locked)
	err = handler.module.LockUnlock(ctx, orgID, dashboardID, claims.Email, *req.Locked)
	if err != nil {
		render.Error(rw, err)
		return
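The removed block decided the widget-deletion allowance based on how the request was authenticated, reading the auth type that the middleware earlier in this diff stamps on the context. A condensed sketch of that gating (the helper name is illustrative, and the exact semantics of the diff value follow the comment in the removed code):

```go
// Sketch of the context-based gating removed above: JWT (UI) requests get the
// stricter diff of 1, API-key automation keeps 0 so multiple deletions pass.
func widgetDeletionDiff(ctx context.Context) int {
	if authType, ok := ctxtypes.AuthTypeFromContext(ctx); ok && authType == ctxtypes.AuthTypeJWT {
		return 1
	}
	return 0
}
```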
@@ -9,7 +9,6 @@ import (
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/dashboard"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -76,13 +75,13 @@ func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboard
	return dashboards, nil
}

func (module *module) Update(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, updatableDashboard dashboardtypes.UpdatableDashboard, diff int) (*dashboardtypes.Dashboard, error) {
func (module *module) Update(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, updatableDashboard dashboardtypes.UpdatableDashboard) (*dashboardtypes.Dashboard, error) {
	dashboard, err := module.Get(ctx, orgID, id)
	if err != nil {
		return nil, err
	}

	err = dashboard.Update(ctx, updatableDashboard, updatedBy, diff)
	err = dashboard.Update(updatableDashboard, updatedBy)
	if err != nil {
		return nil, err
	}
@@ -100,13 +99,13 @@ func (module *module) Update(ctx context.Context, orgID valuer.UUID, id valuer.U
	return dashboard, nil
}

func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, role types.Role, lock bool) error {
func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, lock bool) error {
	dashboard, err := module.Get(ctx, orgID, id)
	if err != nil {
		return err
	}

	err = dashboard.LockUnlock(lock, role, updatedBy)
	err = dashboard.LockUnlock(ctx, lock, updatedBy)
	if err != nil {
		return err
	}
503
pkg/modules/thirdpartyapi/translator.go
Normal file
@@ -0,0 +1,503 @@
package thirdpartyapi

import (
	"fmt"
	"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
	"net"
	"regexp"
	"time"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

const (
	urlPathKeyLegacy       = "http.url"
	serverAddressKeyLegacy = "net.peer.name"

	urlPathKey       = "url.full"
	serverAddressKey = "server.address"
)

var defaultStepInterval = 60 * time.Second

type SemconvFieldMapping struct {
	LegacyField  string
	CurrentField string
	FieldType    telemetrytypes.FieldDataType
	Context      telemetrytypes.FieldContext
}
var dualSemconvGroupByKeys = map[string][]qbtypes.GroupByKey{
|
||||
"server": {
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: serverAddressKey,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: serverAddressKeyLegacy,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
},
|
||||
"url": {
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: urlPathKey,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: urlPathKeyLegacy,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func MergeSemconvColumns(result *qbtypes.QueryRangeResponse) *qbtypes.QueryRangeResponse {
|
||||
if result == nil || result.Data.Results == nil {
|
||||
return result
|
||||
}
|
||||
|
||||
for _, res := range result.Data.Results {
|
||||
scalarData, ok := res.(*qbtypes.ScalarData)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
serverAddressKeyIdx := -1
|
||||
serverAddressKeyLegacyIdx := -1
|
||||
|
||||
for i, col := range scalarData.Columns {
|
||||
if col.Name == serverAddressKey {
|
||||
serverAddressKeyIdx = i
|
||||
} else if col.Name == serverAddressKeyLegacy {
|
||||
serverAddressKeyLegacyIdx = i
|
||||
}
|
||||
}
|
||||
|
||||
if serverAddressKeyIdx == -1 || serverAddressKeyLegacyIdx == -1 {
|
||||
continue
|
||||
}
|
||||
|
||||
var newRows [][]any
|
||||
for _, row := range scalarData.Data {
|
||||
if len(row) <= serverAddressKeyIdx || len(row) <= serverAddressKeyLegacyIdx {
|
||||
continue
|
||||
}
|
||||
|
||||
var serverName any
|
||||
if isValidValue(row[serverAddressKeyIdx]) {
|
||||
serverName = row[serverAddressKeyIdx]
|
||||
} else if isValidValue(row[serverAddressKeyLegacyIdx]) {
|
||||
serverName = row[serverAddressKeyLegacyIdx]
|
||||
}
|
||||
|
||||
if serverName != nil {
|
||||
newRow := make([]any, len(row)-1)
|
||||
newRow[0] = serverName
|
||||
|
||||
targetIdx := 1
|
||||
for i, val := range row {
|
||||
if i != serverAddressKeyLegacyIdx && i != serverAddressKeyIdx {
|
||||
if targetIdx < len(newRow) {
|
||||
newRow[targetIdx] = val
|
||||
targetIdx++
|
||||
}
|
||||
}
|
||||
}
|
||||
newRows = append(newRows, newRow)
|
||||
}
|
||||
}
|
||||
|
||||
newColumns := make([]*qbtypes.ColumnDescriptor, len(scalarData.Columns)-1)
|
||||
targetIdx := 0
|
||||
for i, col := range scalarData.Columns {
|
||||
if i == serverAddressKeyIdx {
|
||||
newCol := &qbtypes.ColumnDescriptor{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: serverAddressKeyLegacy,
|
||||
FieldDataType: col.FieldDataType,
|
||||
FieldContext: col.FieldContext,
|
||||
Signal: col.Signal,
|
||||
},
|
||||
QueryName: col.QueryName,
|
||||
AggregationIndex: col.AggregationIndex,
|
||||
Meta: col.Meta,
|
||||
Type: col.Type,
|
||||
}
|
||||
newColumns[targetIdx] = newCol
|
||||
targetIdx++
|
||||
} else if i != serverAddressKeyLegacyIdx {
|
||||
newColumns[targetIdx] = col
|
||||
targetIdx++
|
||||
}
|
||||
}
|
||||
|
||||
scalarData.Columns = newColumns
|
||||
scalarData.Data = newRows
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func isValidValue(val any) bool {
	if val == nil {
		return false
	}
	if str, ok := val.(string); ok {
		return str != "" && str != "n/a"
	}
	return true
}
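MergeSemconvColumns collapses the server.address and net.peer.name columns into one, preferring the newer attribute when both carry a usable value; the per-row decision reduces to a coalesce over isValidValue above. A condensed sketch:

```go
// coalesceServer is an illustrative reduction of the per-row merge in
// MergeSemconvColumns; it is not part of the diff itself.
func coalesceServer(current, legacy any) any {
	if isValidValue(current) {
		return current
	}
	if isValidValue(legacy) {
		return legacy
	}
	return nil
}
```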
func FilterResponse(results []*qbtypes.QueryRangeResponse) []*qbtypes.QueryRangeResponse {
|
||||
filteredResults := make([]*qbtypes.QueryRangeResponse, 0, len(results))
|
||||
|
||||
for _, res := range results {
|
||||
if res.Data.Results == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
filteredData := make([]any, 0, len(res.Data.Results))
|
||||
for _, result := range res.Data.Results {
|
||||
if result == nil {
|
||||
filteredData = append(filteredData, result)
|
||||
continue
|
||||
}
|
||||
|
||||
switch resultData := result.(type) {
|
||||
case *qbtypes.TimeSeriesData:
|
||||
if resultData.Aggregations != nil {
|
||||
for _, agg := range resultData.Aggregations {
|
||||
filteredSeries := make([]*qbtypes.TimeSeries, 0, len(agg.Series))
|
||||
for _, series := range agg.Series {
|
||||
if shouldIncludeSeries(series) {
|
||||
filteredSeries = append(filteredSeries, series)
|
||||
}
|
||||
}
|
||||
agg.Series = filteredSeries
|
||||
}
|
||||
}
|
||||
case *qbtypes.RawData:
|
||||
filteredRows := make([]*qbtypes.RawRow, 0, len(resultData.Rows))
|
||||
for _, row := range resultData.Rows {
|
||||
if shouldIncludeRow(row) {
|
||||
filteredRows = append(filteredRows, row)
|
||||
}
|
||||
}
|
||||
resultData.Rows = filteredRows
|
||||
}
|
||||
|
||||
filteredData = append(filteredData, result)
|
||||
}
|
||||
|
||||
res.Data.Results = filteredData
|
||||
filteredResults = append(filteredResults, res)
|
||||
}
|
||||
|
||||
return filteredResults
|
||||
}
|
||||
|
||||
func shouldIncludeSeries(series *qbtypes.TimeSeries) bool {
|
||||
for _, label := range series.Labels {
|
||||
if label.Key.Name == serverAddressKeyLegacy || label.Key.Name == serverAddressKey {
|
||||
if strVal, ok := label.Value.(string); ok {
|
||||
if net.ParseIP(strVal) != nil {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func shouldIncludeRow(row *qbtypes.RawRow) bool {
|
||||
if row.Data != nil {
|
||||
for _, key := range []string{serverAddressKeyLegacy, serverAddressKey} {
|
||||
if domainVal, ok := row.Data[key]; ok {
|
||||
if domainStr, ok := domainVal.(string); ok {
|
||||
if net.ParseIP(domainStr) != nil {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func containsKindStringOverride(expression string) bool {
	kindStringPattern := regexp.MustCompile(`kind_string\s*[!=<>]+`)
	return kindStringPattern.MatchString(expression)
}

func mergeGroupBy(base, additional []qbtypes.GroupByKey) []qbtypes.GroupByKey {
	return append(base, additional...)
}
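containsKindStringOverride detects when a caller's filter tries to constrain kind_string itself; in that case buildBaseFilter and buildErrorFilter (later in this file) keep only the default expression and ignore the caller's filter instead of ANDing it in. Illustrative inputs:

```go
// Sketch: how the regexp above classifies sample expressions.
func kindStringExamples() {
	_ = containsKindStringOverride("kind_string = 'Server'")    // true: caller's filter is ignored
	_ = containsKindStringOverride("kind_string != 'Internal'") // true
	_ = containsKindStringOverride("service.name = 'checkout'") // false: ANDed with the base filter
}
```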
func BuildDomainList(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
|
||||
if err := req.Validate(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
queries := []qbtypes.QueryEnvelope{
|
||||
buildEndpointsQuery(req),
|
||||
buildLastSeenQuery(req),
|
||||
buildRpsQuery(req),
|
||||
buildErrorQuery(req),
|
||||
buildTotalSpanQuery(req),
|
||||
buildP99Query(req),
|
||||
buildErrorRateFormula(),
|
||||
}
|
||||
|
||||
return &qbtypes.QueryRangeRequest{
|
||||
SchemaVersion: "v5",
|
||||
Start: req.Start,
|
||||
End: req.End,
|
||||
RequestType: qbtypes.RequestTypeScalar,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: queries,
|
||||
},
|
||||
FormatOptions: &qbtypes.FormatOptions{
|
||||
FormatTableResultForUI: true,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func BuildDomainInfo(req *thirdpartyapitypes.ThirdPartyApiRequest) (*qbtypes.QueryRangeRequest, error) {
|
||||
if err := req.Validate(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
queries := []qbtypes.QueryEnvelope{
|
||||
buildEndpointsInfoQuery(req),
|
||||
buildP99InfoQuery(req),
|
||||
buildErrorRateInfoQuery(req),
|
||||
buildLastSeenInfoQuery(req),
|
||||
}
|
||||
|
||||
return &qbtypes.QueryRangeRequest{
|
||||
SchemaVersion: "v5",
|
||||
Start: req.Start,
|
||||
End: req.End,
|
||||
RequestType: qbtypes.RequestTypeScalar,
|
||||
CompositeQuery: qbtypes.CompositeQuery{
|
||||
Queries: queries,
|
||||
},
|
||||
FormatOptions: &qbtypes.FormatOptions{
|
||||
FormatTableResultForUI: true,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func buildEndpointsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "endpoints",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count_distinct(http.url)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildLastSeenQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "lastseen",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "max(timestamp)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildRpsQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "rps",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "rate()"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildErrorQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "error",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()"},
|
||||
},
|
||||
Filter: buildErrorFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildTotalSpanQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "total_span",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "count()"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildP99Query(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "p99",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "p99(duration_nano)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["server"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildErrorRateFormula() qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeFormula,
|
||||
Spec: qbtypes.QueryBuilderFormula{
|
||||
Name: "error_rate",
|
||||
Expression: "(error/total_span)*100",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildEndpointsInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "endpoints",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "rate(http.url)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: mergeGroupBy(dualSemconvGroupByKeys["url"], req.GroupBy),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildP99InfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "p99",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "p99(duration_nano)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: req.GroupBy,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildErrorRateInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "error_rate",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "rate()"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: req.GroupBy,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildLastSeenInfoQuery(req *thirdpartyapitypes.ThirdPartyApiRequest) qbtypes.QueryEnvelope {
|
||||
return qbtypes.QueryEnvelope{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "lastseen",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: qbtypes.Step{Duration: defaultStepInterval},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{Expression: "max(timestamp)"},
|
||||
},
|
||||
Filter: buildBaseFilter(req.Filter),
|
||||
GroupBy: req.GroupBy,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func buildBaseFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
	baseExpression := fmt.Sprintf("(%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
		urlPathKeyLegacy, urlPathKey)

	if additionalFilter != nil && additionalFilter.Expression != "" {
		if containsKindStringOverride(additionalFilter.Expression) {
			return &qbtypes.Filter{Expression: baseExpression}
		}
		baseExpression = fmt.Sprintf("(%s) AND (%s)", baseExpression, additionalFilter.Expression)
	}

	return &qbtypes.Filter{Expression: baseExpression}
}

func buildErrorFilter(additionalFilter *qbtypes.Filter) *qbtypes.Filter {
	errorExpression := fmt.Sprintf("has_error = true AND (%s EXISTS OR %s EXISTS) AND kind_string = 'Client'",
		urlPathKeyLegacy, urlPathKey)

	if additionalFilter != nil && additionalFilter.Expression != "" {
		if containsKindStringOverride(additionalFilter.Expression) {
			return &qbtypes.Filter{Expression: errorExpression}
		}
		errorExpression = fmt.Sprintf("(%s) AND (%s)", errorExpression, additionalFilter.Expression)
	}

	return &qbtypes.Filter{Expression: errorExpression}
}
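A worked example of what buildBaseFilter returns for a simple caller filter (the filter value is illustrative):

```go
// Worked example of buildBaseFilter's output.
func exampleBaseFilter() {
	f := buildBaseFilter(&qbtypes.Filter{Expression: "service.name = 'checkout'"})
	// f.Expression ==
	// "((http.url EXISTS OR url.full EXISTS) AND kind_string = 'Client') AND (service.name = 'checkout')"
	_ = f
}
```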
233
pkg/modules/thirdpartyapi/translator_test.go
Normal file
@@ -0,0 +1,233 @@
|
||||
package thirdpartyapi
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
|
||||
"testing"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestFilterResponse(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input []*qbtypes.QueryRangeResponse
|
||||
expected []*qbtypes.QueryRangeResponse
|
||||
}{
|
||||
{
|
||||
name: "should filter out IP addresses from series labels",
|
||||
input: []*qbtypes.QueryRangeResponse{
|
||||
{
|
||||
Data: qbtypes.QueryData{
|
||||
Results: []any{
|
||||
&qbtypes.TimeSeriesData{
|
||||
Aggregations: []*qbtypes.AggregationBucket{
|
||||
{
|
||||
Series: []*qbtypes.TimeSeries{
|
||||
{
|
||||
Labels: []*qbtypes.Label{
|
||||
{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
|
||||
Value: "192.168.1.1",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Labels: []*qbtypes.Label{
|
||||
{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
|
||||
Value: "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []*qbtypes.QueryRangeResponse{
|
||||
{
|
||||
Data: qbtypes.QueryData{
|
||||
Results: []any{
|
||||
&qbtypes.TimeSeriesData{
|
||||
Aggregations: []*qbtypes.AggregationBucket{
|
||||
{
|
||||
Series: []*qbtypes.TimeSeries{
|
||||
{
|
||||
Labels: []*qbtypes.Label{
|
||||
{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: "net.peer.name"},
|
||||
Value: "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "should filter out IP addresses from raw data",
|
||||
input: []*qbtypes.QueryRangeResponse{
|
||||
{
|
||||
Data: qbtypes.QueryData{
|
||||
Results: []any{
|
||||
&qbtypes.RawData{
|
||||
Rows: []*qbtypes.RawRow{
|
||||
{
|
||||
Data: map[string]any{
|
||||
"net.peer.name": "192.168.1.1",
|
||||
},
|
||||
},
|
||||
{
|
||||
Data: map[string]any{
|
||||
"net.peer.name": "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []*qbtypes.QueryRangeResponse{
|
||||
{
|
||||
Data: qbtypes.QueryData{
|
||||
Results: []any{
|
||||
&qbtypes.RawData{
|
||||
Rows: []*qbtypes.RawRow{
|
||||
{
|
||||
Data: map[string]any{
|
||||
"net.peer.name": "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := FilterResponse(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildDomainList(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input *thirdpartyapitypes.ThirdPartyApiRequest
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "basic domain list query",
|
||||
input: &thirdpartyapitypes.ThirdPartyApiRequest{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with filters and group by",
|
||||
input: &thirdpartyapitypes.ThirdPartyApiRequest{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "test = 'value'",
|
||||
},
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := BuildDomainList(tt.input)
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, result)
|
||||
assert.Equal(t, tt.input.Start, result.Start)
|
||||
assert.Equal(t, tt.input.End, result.End)
|
||||
assert.NotNil(t, result.CompositeQuery)
|
||||
assert.Len(t, result.CompositeQuery.Queries, 7) // endpoints, lastseen, rps, error, total_span, p99, error_rate
|
||||
assert.Equal(t, "v5", result.SchemaVersion)
|
||||
assert.Equal(t, qbtypes.RequestTypeScalar, result.RequestType)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildDomainInfo(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input *thirdpartyapitypes.ThirdPartyApiRequest
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "basic domain info query",
|
||||
input: &thirdpartyapitypes.ThirdPartyApiRequest{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with filters and group by",
|
||||
input: &thirdpartyapitypes.ThirdPartyApiRequest{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "test = 'value'",
|
||||
},
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := BuildDomainInfo(tt.input)
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, result)
|
||||
assert.Equal(t, tt.input.Start, result.Start)
|
||||
assert.Equal(t, tt.input.End, result.End)
|
||||
assert.NotNil(t, result.CompositeQuery)
|
||||
assert.Len(t, result.CompositeQuery.Queries, 4) // endpoints, p99, error_rate, lastseen
|
||||
assert.Equal(t, "v5", result.SchemaVersion)
|
||||
assert.Equal(t, qbtypes.RequestTypeScalar, result.RequestType)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -265,7 +265,7 @@ func (a *API) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
	}

	if len(errs) != 0 {
		render.Error(rw, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errors.Join(errs...).Error()))
		render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, errors.Join(errs...).Error()))
		return
	}
@@ -326,8 +326,9 @@ func (q *querier) processTimeSeriesFormula(
		}
	}

	canDefaultZero := req.GetQueriesSupportingZeroDefault()
	// Create formula evaluator
	// TODO(srikanthccv): add conditional default zero
	canDefaultZero := make(map[string]bool)
	evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
	if err != nil {
		q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
@@ -477,7 +478,7 @@ func (q *querier) processScalarFormula(
		}
	}

	canDefaultZero := req.GetQueriesSupportingZeroDefault()
	canDefaultZero := make(map[string]bool)
	evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
	if err != nil {
		q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
@@ -1277,13 +1277,6 @@ func getLocalTableName(tableName string) string {
}

func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
	hasCustomRetention, err := r.hasCustomRetentionColumn(ctx)
	if hasCustomRetention {
		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("SetTTLV2 only supported")}
	}
	if err != nil {
		return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing TTL")}
	}
	// uuid is used as transaction id
	uuidWithHyphen := uuid.New()
	uuid := strings.Replace(uuidWithHyphen.String(), "-", "", -1)
@@ -1568,440 +1561,6 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
|
||||
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) hasCustomRetentionColumn(ctx context.Context) (bool, error) {
|
||||
// Directly query for the _retention_days column existence
|
||||
query := fmt.Sprintf("SELECT 1 FROM system.columns WHERE database = '%s' AND table = '%s' AND name = '_retention_days' LIMIT 1", r.logsDB, r.logsLocalTableV2)
|
||||
|
||||
var exists uint8 // Changed from int to uint8 to match ClickHouse's UInt8 type
|
||||
err := r.db.QueryRow(ctx, query).Scan(&exists)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
// Column doesn't exist
|
||||
zap.L().Debug("_retention_days column not found in logs table", zap.String("table", r.logsLocalTableV2))
|
||||
return false, nil
|
||||
}
|
||||
zap.L().Error("Error checking for _retention_days column", zap.Error(err))
|
||||
return false, errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "error checking columns")
|
||||
}
|
||||
|
||||
zap.L().Debug("Found _retention_days column in logs table", zap.String("table", r.logsLocalTableV2))
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *model.CustomRetentionTTLParams) (*model.CustomRetentionTTLResponse, error) {
|
||||
|
||||
hasCustomRetention, err := r.hasCustomRetentionColumn(ctx)
|
||||
if err != nil {
|
||||
return nil, errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "custom retention not supported")
|
||||
}
|
||||
|
||||
if !hasCustomRetention {
|
||||
zap.L().Info("Custom retention not supported, falling back to standard TTL method",
|
||||
zap.String("orgID", orgID))
|
||||
|
||||
ttlParams := &model.TTLParams{
|
||||
Type: params.Type,
|
||||
DelDuration: int64(params.DefaultTTLDays * 24 * 3600),
|
||||
}
|
||||
if params.ColdStorageVolume != "" {
|
||||
ttlParams.ColdStorageVolume = params.ColdStorageVolume
|
||||
} else {
|
||||
ttlParams.ColdStorageVolume = ""
|
||||
}
|
||||
|
||||
if params.ToColdStorageDuration > 0 {
|
||||
ttlParams.ToColdStorageDuration = params.ToColdStorageDuration
|
||||
} else {
|
||||
ttlParams.ToColdStorageDuration = 0
|
||||
}
|
||||
|
||||
ttlResult, apiErr := r.SetTTL(ctx, orgID, ttlParams)
|
||||
if apiErr != nil {
|
||||
return nil, errorsV2.Wrapf(apiErr.Err, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to set standard TTL")
|
||||
}
|
||||
|
||||
return &model.CustomRetentionTTLResponse{
|
||||
Message: fmt.Sprintf("Custom retention not supported, applied standard TTL of %d days. %s", params.DefaultTTLDays, ttlResult.Message),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Keep only latest 100 transactions/requests
|
||||
r.deleteTtlTransactions(ctx, orgID, 100)
|
||||
|
||||
uuidWithHyphen := valuer.GenerateUUID()
|
||||
uuid := strings.Replace(uuidWithHyphen.String(), "-", "", -1)
|
||||
|
||||
if params.Type != constants.LogsTTL {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "custom retention TTL only supported for logs")
|
||||
}
|
||||
|
||||
// Validate TTL conditions
|
||||
if err := r.validateTTLConditions(ctx, params.TTLConditions); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tableNames := []string{
|
||||
r.logsDB + "." + r.logsLocalTableV2,
|
||||
r.logsDB + "." + r.logsResourceLocalTableV2,
|
||||
}
|
||||
|
||||
for _, tableName := range tableNames {
|
||||
statusItem, err := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
|
||||
if err != nil {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error in processing custom_retention_ttl_status check sql query")
|
||||
}
|
||||
if statusItem.Status == constants.StatusPending {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "custom retention TTL is already running")
|
||||
}
|
||||
}
|
||||
|
||||
// Build multiIf expressions for both tables
|
||||
multiIfExpr := r.buildMultiIfExpression(params.TTLConditions, params.DefaultTTLDays, false)
|
||||
resourceMultiIfExpr := r.buildMultiIfExpression(params.TTLConditions, params.DefaultTTLDays, true)
|
||||
|
||||
ttlPayload := map[string]string{
|
||||
tableNames[0]: fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days UInt16 DEFAULT %s`,
|
||||
tableNames[0], r.cluster, multiIfExpr),
|
||||
tableNames[1]: fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days UInt16 DEFAULT %s`,
|
||||
tableNames[1], r.cluster, resourceMultiIfExpr),
|
||||
}
|
||||
|
||||
ttlConditionsJSON, err := json.Marshal(params.TTLConditions)
|
||||
if err != nil {
|
||||
return nil, errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "error marshalling TTL condition")
|
||||
}
|
||||
|
||||
// Execute the TTL modifications synchronously
|
||||
for tableName, query := range ttlPayload {
|
||||
// Store the operation in the database
|
||||
customTTL := types.TTLSetting{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
TransactionID: uuid,
|
||||
TableName: tableName,
|
||||
TTL: params.DefaultTTLDays,
|
||||
Condition: string(ttlConditionsJSON),
|
||||
Status: constants.StatusPending,
|
||||
OrgID: orgID,
|
||||
}
|
||||
|
||||
// Insert TTL setting record
|
||||
_, dbErr := r.sqlDB.BunDB().NewInsert().Model(&customTTL).Exec(ctx)
|
||||
if dbErr != nil {
|
||||
zap.L().Error("error in inserting to custom_retention_ttl_settings table", zap.Error(dbErr))
|
||||
return nil, errorsV2.Wrapf(dbErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "error inserting TTL settings")
|
||||
}
|
||||
|
||||
zap.L().Debug("Executing custom retention TTL request: ", zap.String("request", query))
|
||||
|
||||
// Execute the ALTER TABLE query
|
||||
if err := r.db.Exec(ctx, query); err != nil {
|
||||
zap.L().Error("error while setting custom retention ttl", zap.Error(err))
|
||||
|
||||
// Update status to failed
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, constants.StatusFailed)
|
||||
|
||||
return nil, errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "error setting custom retention TTL for table %s", tableName)
|
||||
}
|
||||
|
||||
// Update status to success
|
||||
r.updateCustomRetentionTTLStatus(ctx, orgID, tableName, constants.StatusSuccess)
|
||||
}
|
||||
|
||||
return &model.CustomRetentionTTLResponse{
|
||||
Message: "custom retention TTL has been successfully set up",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// New method to build multiIf expressions with support for multiple AND conditions
|
||||
func (r *ClickHouseReader) buildMultiIfExpression(ttlConditions []model.CustomRetentionRule, defaultTTLDays int, isResourceTable bool) string {
|
||||
var conditions []string
|
||||
|
||||
for i, rule := range ttlConditions {
|
||||
zap.L().Debug("Processing rule", zap.Int("ruleIndex", i), zap.Int("ttlDays", rule.TTLDays), zap.Int("conditionsCount", len(rule.Filters)))
|
||||
|
||||
if len(rule.Filters) == 0 {
|
||||
zap.L().Warn("Rule has no filters, skipping", zap.Int("ruleIndex", i))
|
||||
continue
|
||||
}
|
||||
|
||||
// Build AND conditions for this rule
|
||||
var andConditions []string
|
||||
for j, condition := range rule.Filters {
|
||||
zap.L().Debug("Processing condition", zap.Int("ruleIndex", i), zap.Int("conditionIndex", j), zap.String("key", condition.Key), zap.Strings("values", condition.Values))
|
||||
|
||||
// This should not happen as validation should catch it
|
||||
if len(condition.Values) == 0 {
|
||||
zap.L().Error("Condition has no values - this should have been caught in validation", zap.Int("ruleIndex", i), zap.Int("conditionIndex", j))
|
||||
continue
|
||||
}
|
||||
|
||||
// Properly quote values for IN clause
|
||||
quotedValues := make([]string, len(condition.Values))
|
||||
for k, v := range condition.Values {
|
||||
quotedValues[k] = fmt.Sprintf("'%s'", v)
|
||||
}
|
||||
|
||||
var conditionExpr string
|
||||
if isResourceTable {
|
||||
// For resource table, use JSONExtractString
|
||||
conditionExpr = fmt.Sprintf(
|
||||
"JSONExtractString(labels, '%s') IN (%s)",
|
||||
condition.Key,
|
||||
strings.Join(quotedValues, ", "),
|
||||
)
|
||||
} else {
|
||||
// For main logs table, use resources_string
|
||||
conditionExpr = fmt.Sprintf(
|
||||
"resources_string['%s'] IN (%s)",
|
||||
condition.Key,
|
||||
strings.Join(quotedValues, ", "),
|
||||
)
|
||||
}
|
||||
andConditions = append(andConditions, conditionExpr)
|
||||
}
|
||||
|
||||
if len(andConditions) > 0 {
|
||||
// Join all conditions with AND
|
||||
fullCondition := strings.Join(andConditions, " AND ")
|
||||
conditionWithTTL := fmt.Sprintf("%s, %d", fullCondition, rule.TTLDays)
|
||||
zap.L().Debug("Adding condition to multiIf", zap.String("condition", conditionWithTTL))
|
||||
conditions = append(conditions, conditionWithTTL)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle case where no valid conditions were found
|
||||
if len(conditions) == 0 {
|
||||
zap.L().Info("No valid conditions found, returning default TTL", zap.Int("defaultTTLDays", defaultTTLDays))
|
||||
return fmt.Sprintf("%d", defaultTTLDays)
|
||||
}
|
||||
|
||||
result := fmt.Sprintf(
|
||||
"multiIf(%s, %d)",
|
||||
strings.Join(conditions, ", "),
|
||||
defaultTTLDays,
|
||||
)
|
||||
|
||||
zap.L().Debug("Final multiIf expression", zap.String("expression", result))
|
||||
return result
|
||||
}
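To make the generated expression concrete, here is a worked example of buildMultiIfExpression's output for a single illustrative rule (not additional repository code):

```go
// Input (illustrative): one rule with filter service.name IN ('checkout', 'cart'),
// rule.TTLDays = 7, and defaultTTLDays = 15.
//
// Main logs table (isResourceTable = false):
//   multiIf(resources_string['service.name'] IN ('checkout', 'cart'), 7, 15)
//
// Resource table (isResourceTable = true):
//   multiIf(JSONExtractString(labels, 'service.name') IN ('checkout', 'cart'), 7, 15)
```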
|
||||
|
||||
func (r *ClickHouseReader) GetCustomRetentionTTL(ctx context.Context, orgID string) (*model.GetCustomRetentionTTLResponse, error) {
|
||||
// Check if V2 (custom retention) is supported
|
||||
hasCustomRetention, err := r.hasCustomRetentionColumn(ctx)
|
||||
if err != nil {
|
||||
// If there's an error checking, assume V1 and proceed
|
||||
zap.L().Warn("Error checking for custom retention column, assuming V1", zap.Error(err))
|
||||
hasCustomRetention = false
|
||||
}
|
||||
|
||||
response := &model.GetCustomRetentionTTLResponse{}
|
||||
|
||||
if hasCustomRetention {
|
||||
// V2 - Custom retention is supported
|
||||
response.Version = "v2"
|
||||
|
||||
// Get the latest custom retention TTL setting
|
||||
customTTL := new(types.TTLSetting)
|
||||
err := r.sqlDB.BunDB().NewSelect().
|
||||
Model(customTTL).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("table_name = ?", r.logsDB+"."+r.logsLocalTableV2).
|
||||
OrderExpr("created_at DESC").
|
||||
Limit(1).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error in processing get custom ttl query")
|
||||
}
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
// No V2 configuration found, return defaults
|
||||
response.DefaultTTLDays = 15
|
||||
response.TTLConditions = []model.CustomRetentionRule{}
|
||||
response.Status = constants.StatusFailed
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Parse TTL conditions from Condition
|
||||
var ttlConditions []model.CustomRetentionRule
|
||||
if customTTL.Condition != "" {
|
||||
if err := json.Unmarshal([]byte(customTTL.Condition), &ttlConditions); err != nil {
|
||||
zap.L().Error("Error parsing TTL conditions", zap.Error(err))
|
||||
ttlConditions = []model.CustomRetentionRule{}
|
||||
}
|
||||
}
|
||||
|
||||
response.DefaultTTLDays = customTTL.TTL
|
||||
response.TTLConditions = ttlConditions
|
||||
response.Status = customTTL.Status
|
||||
|
||||
} else {
|
||||
// V1 - Traditional TTL
|
||||
response.Version = "v1"
|
||||
|
||||
// Get V1 TTL configuration
|
||||
ttlParams := &model.GetTTLParams{
|
||||
Type: constants.LogsTTL,
|
||||
}
|
||||
|
||||
ttlResult, apiErr := r.GetTTL(ctx, orgID, ttlParams)
|
||||
if apiErr != nil {
|
||||
return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error getting V1 TTL: %s", apiErr.Error())
|
||||
}
|
||||
|
||||
// Map V1 fields to response
|
||||
response.LogsTime = ttlResult.LogsTime
|
||||
response.LogsMoveTime = ttlResult.LogsMoveTime
|
||||
response.ExpectedLogsTime = ttlResult.ExpectedLogsTime
|
||||
response.ExpectedLogsMoveTime = ttlResult.ExpectedLogsMoveTime
|
||||
response.Status = ttlResult.Status
|
||||
response.DefaultTTLDays = ttlResult.LogsTime / 24
|
||||
|
||||
// For V1, we don't have TTL conditions
|
||||
response.TTLConditions = []model.CustomRetentionRule{}
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) checkCustomRetentionTTLStatusItem(ctx context.Context, orgID string, tableName string) (*types.TTLSetting, error) {
|
||||
ttl := new(types.TTLSetting)
|
||||
err := r.sqlDB.BunDB().NewSelect().
|
||||
Model(ttl).
|
||||
Where("table_name = ?", tableName).
|
||||
Where("org_id = ?", orgID).
|
||||
OrderExpr("created_at DESC").
|
||||
Limit(1).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return ttl, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "error in processing custom_retention_ttl_status check sql query")
|
||||
}
|
||||
|
||||
return ttl, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) updateCustomRetentionTTLStatus(ctx context.Context, orgID, tableName, status string) {
|
||||
statusItem, err := r.checkCustomRetentionTTLStatusItem(ctx, orgID, tableName)
|
||||
if err == nil && statusItem != nil {
|
||||
_, dbErr := r.sqlDB.BunDB().NewUpdate().
|
||||
Model(new(types.TTLSetting)).
|
||||
Set("updated_at = ?", time.Now()).
|
||||
Set("status = ?", status).
|
||||
Where("id = ?", statusItem.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if dbErr != nil {
|
||||
zap.L().Error("Error in processing custom_retention_ttl_status update sql query", zap.Error(dbErr))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Enhanced validation function with duplicate detection and efficient key validation
|
||||
func (r *ClickHouseReader) validateTTLConditions(ctx context.Context, ttlConditions []model.CustomRetentionRule) error {
|
||||
if len(ttlConditions) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Collect all unique keys and detect duplicates
|
||||
var allKeys []string
|
||||
keySet := make(map[string]struct{})
|
||||
conditionSignatures := make(map[string]bool)
|
||||
|
||||
for i, rule := range ttlConditions {
|
||||
if len(rule.Filters) == 0 {
|
||||
return errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "rule at index %d has no filters", i)
|
||||
}
|
||||
|
||||
// Create a signature for this rule's conditions to detect duplicates
|
||||
var conditionKeys []string
|
||||
var conditionValues []string
|
||||
|
||||
for j, condition := range rule.Filters {
|
||||
if len(condition.Values) == 0 {
|
||||
return errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "condition at rule %d, condition %d has no values", i, j)
|
||||
}
|
||||
|
||||
// Collect unique keys
|
||||
if _, exists := keySet[condition.Key]; !exists {
|
||||
allKeys = append(allKeys, condition.Key)
|
||||
keySet[condition.Key] = struct{}{}
|
||||
}
|
||||
|
||||
// Build signature for duplicate detection
|
||||
conditionKeys = append(conditionKeys, condition.Key)
|
||||
conditionValues = append(conditionValues, strings.Join(condition.Values, ","))
|
||||
}
|
||||
|
||||
// Create signature by sorting keys and values to handle order-independent comparison
|
||||
sort.Strings(conditionKeys)
|
||||
sort.Strings(conditionValues)
|
||||
signature := strings.Join(conditionKeys, "|") + ":" + strings.Join(conditionValues, "|")
|
||||
|
||||
if conditionSignatures[signature] {
|
||||
return errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "duplicate rule detected at index %d: rules with identical conditions are not allowed", i)
|
||||
}
|
||||
conditionSignatures[signature] = true
|
||||
}
|
||||
|
||||
if len(allKeys) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Create placeholders for IN query
|
||||
placeholders := make([]string, len(allKeys))
|
||||
for i := range allKeys {
|
||||
placeholders[i] = "?"
|
||||
}
|
||||
|
||||
// Efficient validation using IN query
|
||||
query := fmt.Sprintf("SELECT name FROM %s.%s WHERE name IN (%s)",
|
||||
r.logsDB, r.logsResourceKeys, strings.Join(placeholders, ", "))
|
||||
|
||||
// Convert keys to interface{} for query parameters
|
||||
params := make([]interface{}, len(allKeys))
|
||||
for i, key := range allKeys {
|
||||
params[i] = key
|
||||
}
|
||||
|
||||
rows, err := r.db.Query(ctx, query, params...)
|
||||
if err != nil {
|
||||
return errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to validate resource keys")
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// Collect valid keys
|
||||
validKeys := make(map[string]struct{})
|
||||
for rows.Next() {
|
||||
var name string
|
||||
if err := rows.Scan(&name); err != nil {
|
||||
return errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to scan resource keys")
|
||||
}
|
||||
validKeys[name] = struct{}{}
|
||||
}
|
||||
|
||||
// Find invalid keys
|
||||
var invalidKeys []string
|
||||
for _, key := range allKeys {
|
||||
if _, exists := validKeys[key]; !exists {
|
||||
invalidKeys = append(invalidKeys, key)
|
||||
}
|
||||
}
|
||||
|
||||
if len(invalidKeys) > 0 {
|
||||
return errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "invalid resource keys found: %v. Please check logs_resource_keys table for valid keys", invalidKeys)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
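
The duplicate check above treats two rules as identical when their sorted keys and their sorted, comma-joined value lists produce the same signature, so reordering the conditions inside a rule does not evade the check (note the keys and values are sorted independently, so the signature is a coarse fingerprint rather than an exact match). A standalone sketch of that signature logic, with hypothetical rule values:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// condition mirrors model.FilterCondition from this diff (key + values).
type condition struct {
	Key    string
	Values []string
}

// signature reproduces the order-independent signature used in
// validateTTLConditions: sorted keys joined by "|", then ":", then the
// sorted joined value lists.
func signature(conds []condition) string {
	keys := make([]string, 0, len(conds))
	values := make([]string, 0, len(conds))
	for _, c := range conds {
		keys = append(keys, c.Key)
		values = append(values, strings.Join(c.Values, ","))
	}
	sort.Strings(keys)
	sort.Strings(values)
	return strings.Join(keys, "|") + ":" + strings.Join(values, "|")
}

func main() {
	ruleA := []condition{
		{Key: "service.name", Values: []string{"checkout"}},
		{Key: "deployment.environment", Values: []string{"prod"}},
	}
	ruleB := []condition{
		{Key: "deployment.environment", Values: []string{"prod"}},
		{Key: "service.name", Values: []string{"checkout"}},
	}
	// Same conditions in a different order produce the same signature,
	// so ruleB would be rejected as a duplicate of ruleA.
	fmt.Println(signature(ruleA) == signature(ruleB)) // true
}
```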

// SetTTL sets the TTL for traces or metrics or logs tables.
// This is an async API which creates goroutines to set TTL.
// Status of TTL update is tracked with ttl_status table in sqlite db.
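
For reference, a minimal sketch of the payload the v2 endpoint (POST /api/v2/settings/ttl, registered below) decodes into model.CustomRetentionTTLParams. It uses local mirror structs with the JSON tags shown later in this diff; the type value, key names, and day counts are illustrative only:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of the model types shown further down in this diff.
type filterCondition struct {
	Key    string   `json:"key"`
	Values []string `json:"values"`
}

type customRetentionRule struct {
	Filters []filterCondition `json:"conditions"`
	TTLDays int               `json:"ttlDays"`
}

type customRetentionTTLParams struct {
	Type              string                `json:"type"`
	DefaultTTLDays    int                   `json:"defaultTTLDays"`
	TTLConditions     []customRetentionRule `json:"ttlConditions"`
	ColdStorageVolume string                `json:"coldStorageVolume,omitempty"`
}

func main() {
	params := customRetentionTTLParams{
		Type:           "logs", // assumed value; the accepted type strings are not shown in this diff
		DefaultTTLDays: 30,
		TTLConditions: []customRetentionRule{
			{
				Filters: []filterCondition{{Key: "service.name", Values: []string{"checkout"}}},
				TTLDays: 7,
			},
		},
	}
	body, _ := json.MarshalIndent(params, "", "  ")
	fmt.Println(string(body)) // candidate body for POST /api/v2/settings/ttl
}
```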
@@ -8,6 +8,9 @@ import (
	"encoding/json"
	"errors"
	"fmt"
	"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"

	//qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"io"
	"math"
	"net/http"
@@ -45,7 +48,6 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
	"github.com/SigNoz/signoz/pkg/query-service/app/inframetrics"
	queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"
	"github.com/SigNoz/signoz/pkg/query-service/app/logs"
	logsv3 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v3"
	logsv4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
@@ -541,8 +543,6 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	router.HandleFunc("/api/v1/dependency_graph", am.ViewAccess(aH.dependencyGraph)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/settings/ttl", am.AdminAccess(aH.setTTL)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
	router.HandleFunc("/api/v2/settings/ttl", am.AdminAccess(aH.setCustomRetentionTTL)).Methods(http.MethodPost)
	router.HandleFunc("/api/v2/settings/ttl", am.ViewAccess(aH.getCustomRetentionTTL)).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.Signoz.Handlers.Apdex.Set)).Methods(http.MethodPost)
	router.HandleFunc("/api/v1/settings/apdex", am.ViewAccess(aH.Signoz.Handlers.Apdex.Get)).Methods(http.MethodGet)
@@ -782,7 +782,7 @@ func (aH *APIHandler) getDowntimeSchedule(w http.ResponseWriter, r *http.Request
	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
		return
	}

@@ -818,7 +818,7 @@ func (aH *APIHandler) editDowntimeSchedule(w http.ResponseWriter, r *http.Reques
	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
		return
	}

@@ -846,7 +846,7 @@ func (aH *APIHandler) deleteDowntimeSchedule(w http.ResponseWriter, r *http.Requ
	idStr := mux.Vars(r)["id"]
	id, err := valuer.NewUUID(idStr)
	if err != nil {
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
		return
	}

@@ -1932,47 +1932,6 @@ func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {

}

func (aH *APIHandler) setCustomRetentionTTL(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	claims, errv2 := authtypes.ClaimsFromContext(ctx)
	if errv2 != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get org id from context"))
		return
	}

	var params model.CustomRetentionTTLParams
	if err := json.NewDecoder(r.Body).Decode(&params); err != nil {
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "Invalid data"))
		return
	}

	// Context is not used here as TTL is long duration DB operation
	result, apiErr := aH.reader.SetTTLV2(context.Background(), claims.OrgID, &params)
	if apiErr != nil {
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInternal, apiErr.Error()))
		return
	}

	aH.WriteJSON(w, r, result)
}

func (aH *APIHandler) getCustomRetentionTTL(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	claims, errv2 := authtypes.ClaimsFromContext(ctx)
	if errv2 != nil {
		render.Error(w, errorsV2.New(errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get org id from context"))
		return
	}

	result, apiErr := aH.reader.GetCustomRetentionTTL(r.Context(), claims.OrgID)
	if apiErr != nil {
		render.Error(w, errorsV2.New(errorsV2.TypeInvalidInput, errorsV2.CodeInternal, apiErr.Error()))
		return
	}

	aH.WriteJSON(w, r, result)
}

func (aH *APIHandler) getTTL(w http.ResponseWriter, r *http.Request) {
	ttlParams, err := parseGetTTL(r)
	if aH.HandleError(w, err, http.StatusBadRequest) {
@@ -3374,7 +3333,7 @@ func (aH *APIHandler) calculateConnectionStatus(
	go func() {
		defer wg.Done()

		if len(connectionTests.Metrics) < 1 {
		if connectionTests.Metrics == nil || len(connectionTests.Metrics) < 1 {
			return
		}

@@ -4066,6 +4025,7 @@ func (aH *APIHandler) CloudIntegrationsUpdateServiceConfig(
func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *middleware.AuthZ) {
	subRouter := router.PathPrefix("/api/v1/logs").Subrouter()
	subRouter.HandleFunc("", am.ViewAccess(aH.getLogs)).Methods(http.MethodGet)
	subRouter.HandleFunc("/tail", am.ViewAccess(aH.tailLogs)).Methods(http.MethodGet)
	subRouter.HandleFunc("/fields", am.ViewAccess(aH.logFields)).Methods(http.MethodGet)
	subRouter.HandleFunc("/fields", am.EditAccess(aH.logFieldUpdate)).Methods(http.MethodPost)
	subRouter.HandleFunc("/aggregate", am.ViewAccess(aH.logAggregate)).Methods(http.MethodGet)
@@ -4112,6 +4072,10 @@ func (aH *APIHandler) getLogs(w http.ResponseWriter, r *http.Request) {
	aH.WriteJSON(w, r, map[string]interface{}{"results": []interface{}{}})
}

func (aH *APIHandler) tailLogs(w http.ResponseWriter, r *http.Request) {
	return
}

func (aH *APIHandler) logAggregate(w http.ResponseWriter, r *http.Request) {
	aH.WriteJSON(w, r, model.GetLogsAggregatesResponse{})
}
@@ -5015,123 +4979,135 @@ func (aH *APIHandler) getQueueOverview(w http.ResponseWriter, r *http.Request) {
	}

	results, err := aH.reader.GetListResultV3(r.Context(), chq.Query)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		return
	}

	aH.Respond(w, results)
}

func (aH *APIHandler) getDomainList(w http.ResponseWriter, r *http.Request) {
	// Extract claims from context for organization ID
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, err)
		return
	}

	// Parse the request body to get third-party query parameters
	thirdPartyQueryRequest, apiErr := ParseRequestBody(r)
	if apiErr != nil {
		zap.L().Error(apiErr.Err.Error())
		RespondError(w, apiErr, nil)
		zap.L().Error("Failed to parse request body", zap.Error(apiErr))
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, apiErr.Error()))
		return
	}

	queryRangeParams, err := thirdPartyApi.BuildDomainList(thirdPartyQueryRequest)
	// Build the v5 query range request for domain listing
	queryRangeRequest, err := thirdpartyapi.BuildDomainList(thirdPartyQueryRequest)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		zap.L().Error("Failed to build domain list query", zap.Error(err))
		apiErrObj := errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
		zap.L().Error(err.Error())
		RespondError(w, apiErr, nil)
	// Validate the v5 query range request
	if err := queryRangeRequest.Validate(); err != nil {
		zap.L().Error("Query validation failed", zap.Error(err))
		apiErrObj := errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
	}

	var result []*v3.Result
	var errQuriesByName map[string]error

	result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
	// Execute the query using the v5 querier
	result, err := aH.Signoz.Querier.QueryRange(r.Context(), orgID, queryRangeRequest)
	if err != nil {
		apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
		RespondError(w, apiErrObj, errQuriesByName)
		zap.L().Error("Query execution failed", zap.Error(err))
		apiErrObj := errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
	}

	result, err = postprocess.PostProcessResult(result, queryRangeParams)
	if err != nil {
		apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
		RespondError(w, apiErrObj, errQuriesByName)
		return
	}
	result = thirdpartyapi.MergeSemconvColumns(result)

	// Filter IP addresses if ShowIp is false
	var finalResult = result
	if !thirdPartyQueryRequest.ShowIp {
		result = thirdPartyApi.FilterResponse(result)
		filteredResults := thirdpartyapi.FilterResponse([]*qbtypes.QueryRangeResponse{result})
		if len(filteredResults) > 0 {
			finalResult = filteredResults[0]
		}
	}

	resp := v3.QueryRangeResponse{
		Result: result,
	}
	aH.Respond(w, resp)
	// Send the response
	aH.Respond(w, finalResult)
}

// getDomainInfo handles requests for domain information using v5 query builder
func (aH *APIHandler) getDomainInfo(w http.ResponseWriter, r *http.Request) {
	// Extract claims from context for organization ID
	claims, err := authtypes.ClaimsFromContext(r.Context())
	if err != nil {
		render.Error(w, err)
		return
	}

	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, err)
		return
	}

	// Parse the request body to get third-party query parameters
	thirdPartyQueryRequest, apiErr := ParseRequestBody(r)
	if apiErr != nil {
		zap.L().Error(apiErr.Err.Error())
		RespondError(w, apiErr, nil)
		zap.L().Error("Failed to parse request body", zap.Error(apiErr))
		render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, apiErr.Error()))
		return
	}

	queryRangeParams, err := thirdPartyApi.BuildDomainInfo(thirdPartyQueryRequest)
	// Build the v5 query range request for domain info
	queryRangeRequest, err := thirdpartyapi.BuildDomainInfo(thirdPartyQueryRequest)
	if err != nil {
		RespondError(w, model.BadRequest(err), nil)
		zap.L().Error("Failed to build domain info query", zap.Error(err))
		apiErrObj := errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
	}

	if err := validateQueryRangeParamsV3(queryRangeParams); err != nil {
		zap.L().Error(err.Error())
		RespondError(w, apiErr, nil)
	// Validate the v5 query range request
	if err := queryRangeRequest.Validate(); err != nil {
		zap.L().Error("Query validation failed", zap.Error(err))
		apiErrObj := errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
	}

	var result []*v3.Result
	var errQuriesByName map[string]error

	result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
	// Execute the query using the v5 querier
	result, err := aH.Signoz.Querier.QueryRange(r.Context(), orgID, queryRangeRequest)
	if err != nil {
		apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
		RespondError(w, apiErrObj, errQuriesByName)
		zap.L().Error("Query execution failed", zap.Error(err))
		apiErrObj := errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error())
		render.Error(w, apiErrObj)
		return
	}

	result = postprocess.TransformToTableForBuilderQueries(result, queryRangeParams)
	result = thirdpartyapi.MergeSemconvColumns(result)

	// Filter IP addresses if ShowIp is false
	var finalResult *qbtypes.QueryRangeResponse = result
	if !thirdPartyQueryRequest.ShowIp {
		result = thirdPartyApi.FilterResponse(result)
		filteredResults := thirdpartyapi.FilterResponse([]*qbtypes.QueryRangeResponse{result})
		if len(filteredResults) > 0 {
			finalResult = filteredResults[0]
		}
	}

	resp := v3.QueryRangeResponse{
		Result: result,
	}
	aH.Respond(w, resp)
	// Send the response
	aH.Respond(w, finalResult)
}

// RegisterTraceFunnelsRoutes adds trace funnels routes

@@ -1,13 +0,0 @@
package thirdPartyApi

import v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"

type ThirdPartyApis struct {
	Start    int64             `json:"start"`
	End      int64             `json:"end"`
	ShowIp   bool              `json:"show_ip,omitempty"`
	Domain   int64             `json:"domain,omitempty"`
	Endpoint string            `json:"endpoint,omitempty"`
	Filters  v3.FilterSet      `json:"filters,omitempty"`
	GroupBy  []v3.AttributeKey `json:"groupBy,omitempty"`
}
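
For context, a request body matching the JSON tags of this now-removed struct would look like the sketch below; the timestamps are placeholders and the filters/groupBy fields are omitted from the mirror. A minimal decode check:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed mirror of the removed ThirdPartyApis struct (Filters/GroupBy omitted).
type thirdPartyApis struct {
	Start    int64  `json:"start"`
	End      int64  `json:"end"`
	ShowIp   bool   `json:"show_ip,omitempty"`
	Domain   int64  `json:"domain,omitempty"`
	Endpoint string `json:"endpoint,omitempty"`
}

func main() {
	body := []byte(`{"start": 1717000000000, "end": 1717003600000, "show_ip": false}`)
	var req thirdPartyApis
	if err := json.Unmarshal(body, &req); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", req)
}
```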
@@ -1,598 +0,0 @@
package thirdPartyApi

import (
	"net"

	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)

const (
	urlPathKey    = "http.url"
	serverNameKey = "net.peer.name"
)

var defaultStepInterval int64 = 60

func FilterResponse(results []*v3.Result) []*v3.Result {
	filteredResults := make([]*v3.Result, 0, len(results))

	for _, res := range results {
		if res.Table == nil {
			continue
		}
		filteredRows := make([]*v3.TableRow, 0, len(res.Table.Rows))
		for _, row := range res.Table.Rows {
			if row.Data != nil {
				if domainVal, ok := row.Data[serverNameKey]; ok {
					if domainStr, ok := domainVal.(string); ok {
						if net.ParseIP(domainStr) != nil {
							continue
						}
					}
				}
			}
			filteredRows = append(filteredRows, row)
		}
		res.Table.Rows = filteredRows

		filteredResults = append(filteredResults, res)
	}

	return filteredResults
}
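
The IP check above relies on net.ParseIP returning non-nil only for literal IP addresses, so hostname rows survive while raw IPv4/IPv6 peers are dropped when show_ip is false. A quick standalone illustration:

```go
package main

import (
	"fmt"
	"net"
)

func main() {
	for _, peer := range []string{"192.168.1.1", "2001:db8::1", "example.com"} {
		// net.ParseIP is non-nil for IPv4/IPv6 literals and nil for hostnames,
		// which is exactly the condition FilterResponse uses to skip a row.
		fmt.Printf("%-14s dropped=%v\n", peer, net.ParseIP(peer) != nil)
	}
}
```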

func getFilterSet(existingFilters []v3.FilterItem, apiFilters v3.FilterSet) []v3.FilterItem {
	if len(apiFilters.Items) != 0 {
		existingFilters = append(existingFilters, apiFilters.Items...)
	}
	return existingFilters
}

func getGroupBy(existingGroupBy []v3.AttributeKey, apiGroupBy []v3.AttributeKey) []v3.AttributeKey {
	if len(apiGroupBy) != 0 {
		existingGroupBy = append(existingGroupBy, apiGroupBy...)
	}
	return existingGroupBy
}

func BuildDomainList(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, error) {
|
||||
|
||||
unixMilliStart := thirdPartyApis.Start
|
||||
unixMilliEnd := thirdPartyApis.End
|
||||
|
||||
builderQueries := make(map[string]*v3.BuilderQuery)
|
||||
|
||||
builderQueries["endpoints"] = &v3.BuilderQuery{
|
||||
QueryName: "endpoints",
|
||||
Legend: "endpoints",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorCountDistinct,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationCountDistinct,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "endpoints",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
builderQueries["lastseen"] = &v3.BuilderQuery{
|
||||
QueryName: "lastseen",
|
||||
Legend: "lastseen",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorMax,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "timestamp",
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationMax,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "lastseen",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
builderQueries["rps"] = &v3.BuilderQuery{
|
||||
QueryName: "rps",
|
||||
Legend: "rps",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorRate,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "",
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationRate,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "rps",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
builderQueries["error"] = &v3.BuilderQuery{
|
||||
QueryName: "error",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorCount,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "span_id",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationCount,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "has_error",
|
||||
DataType: v3.AttributeKeyDataTypeBool,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "true",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "error",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
Disabled: true,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
builderQueries["total_span"] = &v3.BuilderQuery{
|
||||
QueryName: "total_span",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorCount,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "span_id",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationCount,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "http.url",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "total_span",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: "net.peer.name",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
Disabled: true,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
builderQueries["p99"] = &v3.BuilderQuery{
|
||||
QueryName: "p99",
|
||||
Legend: "p99",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorP99,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "duration_nano",
|
||||
DataType: v3.AttributeKeyDataTypeFloat64,
|
||||
IsColumn: true,
|
||||
},
|
||||
TimeAggregation: "p99",
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "p99",
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: serverNameKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
builderQueries["error_rate"] = &v3.BuilderQuery{
|
||||
QueryName: "error_rate",
|
||||
Expression: "(error/total_span)*100",
|
||||
Legend: "error_rate",
|
||||
Disabled: false,
|
||||
ShiftBy: 0,
|
||||
IsAnomaly: false,
|
||||
}
|
||||
|
||||
compositeQuery := &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
PanelType: v3.PanelTypeTable,
|
||||
FillGaps: false,
|
||||
BuilderQueries: builderQueries,
|
||||
}
|
||||
|
||||
queryRangeParams := &v3.QueryRangeParamsV3{
|
||||
Start: unixMilliStart,
|
||||
End: unixMilliEnd,
|
||||
Step: defaultStepInterval,
|
||||
CompositeQuery: compositeQuery,
|
||||
Version: "v4",
|
||||
FormatForWeb: true,
|
||||
}
|
||||
|
||||
return queryRangeParams, nil
|
||||
}
|
||||
|
||||
func BuildDomainInfo(thirdPartyApis *ThirdPartyApis) (*v3.QueryRangeParamsV3, error) {
|
||||
unixMilliStart := thirdPartyApis.Start
|
||||
unixMilliEnd := thirdPartyApis.End
|
||||
|
||||
builderQueries := make(map[string]*v3.BuilderQuery)
|
||||
|
||||
builderQueries["endpoints"] = &v3.BuilderQuery{
|
||||
QueryName: "endpoints",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorCount,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationRate,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "endpoints",
|
||||
Disabled: false,
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{
|
||||
{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
}, thirdPartyApis.GroupBy),
|
||||
Legend: "",
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
}
|
||||
|
||||
builderQueries["p99"] = &v3.BuilderQuery{
|
||||
QueryName: "p99",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorP99,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "duration_nano",
|
||||
DataType: v3.AttributeKeyDataTypeFloat64,
|
||||
IsColumn: true,
|
||||
},
|
||||
TimeAggregation: "p99",
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "p99",
|
||||
Disabled: false,
|
||||
Having: nil,
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{}, thirdPartyApis.GroupBy),
|
||||
Legend: "",
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
}
|
||||
|
||||
builderQueries["error_rate"] = &v3.BuilderQuery{
|
||||
QueryName: "error_rate",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorRate,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "",
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationRate,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "error_rate",
|
||||
Disabled: false,
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{}, thirdPartyApis.GroupBy),
|
||||
Legend: "",
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
}
|
||||
|
||||
builderQueries["lastseen"] = &v3.BuilderQuery{
|
||||
QueryName: "lastseen",
|
||||
DataSource: v3.DataSourceTraces,
|
||||
StepInterval: defaultStepInterval,
|
||||
AggregateOperator: v3.AggregateOperatorMax,
|
||||
AggregateAttribute: v3.AttributeKey{
|
||||
Key: "timestamp",
|
||||
},
|
||||
TimeAggregation: v3.TimeAggregationMax,
|
||||
SpaceAggregation: v3.SpaceAggregationSum,
|
||||
Filters: &v3.FilterSet{
|
||||
Operator: "AND",
|
||||
Items: getFilterSet([]v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: urlPathKey,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: false,
|
||||
Type: v3.AttributeKeyTypeTag,
|
||||
},
|
||||
Operator: v3.FilterOperatorExists,
|
||||
Value: "",
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "kind_string",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
IsColumn: true,
|
||||
},
|
||||
Operator: "=",
|
||||
Value: "Client",
|
||||
},
|
||||
}, thirdPartyApis.Filters),
|
||||
},
|
||||
Expression: "lastseen",
|
||||
Disabled: false,
|
||||
Having: nil,
|
||||
OrderBy: nil,
|
||||
GroupBy: getGroupBy([]v3.AttributeKey{}, thirdPartyApis.GroupBy),
|
||||
Legend: "",
|
||||
ReduceTo: v3.ReduceToOperatorAvg,
|
||||
}
|
||||
|
||||
compositeQuery := &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
PanelType: v3.PanelTypeTable,
|
||||
FillGaps: false,
|
||||
BuilderQueries: builderQueries,
|
||||
}
|
||||
|
||||
queryRangeParams := &v3.QueryRangeParamsV3{
|
||||
Start: unixMilliStart,
|
||||
End: unixMilliEnd,
|
||||
Step: defaultStepInterval,
|
||||
CompositeQuery: compositeQuery,
|
||||
Version: "v4",
|
||||
FormatForWeb: true,
|
||||
}
|
||||
|
||||
return queryRangeParams, nil
|
||||
}
|
||||
@@ -1,267 +0,0 @@
|
||||
package thirdPartyApi
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestFilterResponse(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input []*v3.Result
|
||||
expected []*v3.Result
|
||||
}{
|
||||
{
|
||||
name: "should filter out IP addresses from net.peer.name",
|
||||
input: []*v3.Result{
|
||||
{
|
||||
Table: &v3.Table{
|
||||
Rows: []*v3.TableRow{
|
||||
{
|
||||
Data: map[string]interface{}{
|
||||
"net.peer.name": "192.168.1.1",
|
||||
},
|
||||
},
|
||||
{
|
||||
Data: map[string]interface{}{
|
||||
"net.peer.name": "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []*v3.Result{
|
||||
{
|
||||
Table: &v3.Table{
|
||||
Rows: []*v3.TableRow{
|
||||
{
|
||||
Data: map[string]interface{}{
|
||||
"net.peer.name": "example.com",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "should handle nil data",
|
||||
input: []*v3.Result{
|
||||
{
|
||||
Table: &v3.Table{
|
||||
Rows: []*v3.TableRow{
|
||||
{
|
||||
Data: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []*v3.Result{
|
||||
{
|
||||
Table: &v3.Table{
|
||||
Rows: []*v3.TableRow{
|
||||
{
|
||||
Data: nil,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := FilterResponse(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetFilterSet(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
existingFilters []v3.FilterItem
|
||||
apiFilters v3.FilterSet
|
||||
expected []v3.FilterItem
|
||||
}{
|
||||
{
|
||||
name: "should append new filters",
|
||||
existingFilters: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{Key: "existing"},
|
||||
},
|
||||
},
|
||||
apiFilters: v3.FilterSet{
|
||||
Items: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{Key: "new"},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{Key: "existing"},
|
||||
},
|
||||
{
|
||||
Key: v3.AttributeKey{Key: "new"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "should handle empty api filters",
|
||||
existingFilters: []v3.FilterItem{{Key: v3.AttributeKey{Key: "existing"}}},
|
||||
apiFilters: v3.FilterSet{},
|
||||
expected: []v3.FilterItem{{Key: v3.AttributeKey{Key: "existing"}}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := getFilterSet(tt.existingFilters, tt.apiFilters)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetGroupBy(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
existingGroup []v3.AttributeKey
|
||||
apiGroup []v3.AttributeKey
|
||||
expected []v3.AttributeKey
|
||||
}{
|
||||
{
|
||||
name: "should append new group by attributes",
|
||||
existingGroup: []v3.AttributeKey{
|
||||
{Key: "existing"},
|
||||
},
|
||||
apiGroup: []v3.AttributeKey{
|
||||
{Key: "new"},
|
||||
},
|
||||
expected: []v3.AttributeKey{
|
||||
{Key: "existing"},
|
||||
{Key: "new"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "should handle empty api group",
|
||||
existingGroup: []v3.AttributeKey{{Key: "existing"}},
|
||||
apiGroup: []v3.AttributeKey{},
|
||||
expected: []v3.AttributeKey{{Key: "existing"}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := getGroupBy(tt.existingGroup, tt.apiGroup)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildDomainList(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input *ThirdPartyApis
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "basic domain list query",
|
||||
input: &ThirdPartyApis{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with filters and group by",
|
||||
input: &ThirdPartyApis{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
Filters: v3.FilterSet{
|
||||
Items: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{Key: "test"},
|
||||
},
|
||||
},
|
||||
},
|
||||
GroupBy: []v3.AttributeKey{
|
||||
{Key: "test"},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := BuildDomainList(tt.input)
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, result)
|
||||
assert.Equal(t, tt.input.Start, result.Start)
|
||||
assert.Equal(t, tt.input.End, result.End)
|
||||
assert.NotNil(t, result.CompositeQuery)
|
||||
assert.NotNil(t, result.CompositeQuery.BuilderQueries)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildDomainInfo(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input *ThirdPartyApis
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "basic domain info query",
|
||||
input: &ThirdPartyApis{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "with filters and group by",
|
||||
input: &ThirdPartyApis{
|
||||
Start: 1000,
|
||||
End: 2000,
|
||||
Filters: v3.FilterSet{
|
||||
Items: []v3.FilterItem{
|
||||
{
|
||||
Key: v3.AttributeKey{Key: "test"},
|
||||
},
|
||||
},
|
||||
},
|
||||
GroupBy: []v3.AttributeKey{
|
||||
{Key: "test"},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := BuildDomainInfo(tt.input)
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
return
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, result)
|
||||
assert.Equal(t, tt.input.Start, result.Start)
|
||||
assert.Equal(t, tt.input.End, result.End)
|
||||
assert.NotNil(t, result.CompositeQuery)
|
||||
assert.NotNil(t, result.CompositeQuery.BuilderQueries)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
package model

import (
	"errors"
	"fmt"
	"sync"

	"github.com/SigNoz/signoz/pkg/valuer"
@@ -38,7 +38,7 @@ func onConfigSuccess(orgId valuer.UUID, agentId string, hash string) {

func onConfigFailure(orgId valuer.UUID, agentId string, hash string, errorMessage string) {
	key := getSubscriberKey(orgId, hash)
	notifySubscribers(orgId, agentId, key, errors.New(errorMessage))
	notifySubscribers(orgId, agentId, key, fmt.Errorf(errorMessage))
}
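
The two notifySubscribers lines above differ only in how the error is built. When the message is an opaque string rather than a format template, errors.New is the safer construction: fmt.Errorf with a non-constant format string will misinterpret any '%' in the message (and newer go vet releases flag such calls). A small illustration:

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	msg := "disk usage at 100%" // message that happens to contain a '%'

	// fmt.Errorf treats msg as a format string, so the trailing '%' is
	// rendered as a formatting artifact, something like "disk usage at 100%!(NOVERB)".
	fmt.Println(fmt.Errorf(msg))

	// errors.New keeps the message verbatim.
	fmt.Println(errors.New(msg)) // "disk usage at 100%"
}
```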

// OnSuccess listens to config changes and notifies subscribers

@@ -6,6 +6,7 @@ import (
	"encoding/json"
	"errors"
	"fmt"
	"github.com/SigNoz/signoz/pkg/types/thirdpartyapitypes"
	"math"
	"net/http"
	"sort"
@@ -14,16 +15,15 @@ import (
	"text/template"
	"time"

	"github.com/SigNoz/govaluate"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/kafka"
	queues2 "github.com/SigNoz/signoz/pkg/query-service/app/integrations/messagingQueues/queues"
	"github.com/SigNoz/signoz/pkg/query-service/app/integrations/thirdPartyApi"

	"github.com/SigNoz/govaluate"
	"github.com/gorilla/mux"
	promModel "github.com/prometheus/common/model"
	"go.uber.org/multierr"
	"go.uber.org/zap"

	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
	"github.com/SigNoz/signoz/pkg/query-service/common"
@@ -981,10 +981,15 @@ func ParseQueueBody(r *http.Request) (*queues2.QueueListRequest, *model.ApiError
}

// ParseRequestBody for third party APIs
func ParseRequestBody(r *http.Request) (*thirdPartyApi.ThirdPartyApis, *model.ApiError) {
	thirdPartApis := new(thirdPartyApi.ThirdPartyApis)
	if err := json.NewDecoder(r.Body).Decode(thirdPartApis); err != nil {
		return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("cannot parse the request body: %v", err)}
func ParseRequestBody(r *http.Request) (*thirdpartyapitypes.ThirdPartyApiRequest, error) {
	req := new(thirdpartyapitypes.ThirdPartyApiRequest)
	if err := json.NewDecoder(r.Body).Decode(req); err != nil {
		return nil, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "cannot parse the request body: %v", err)
	}
	return thirdPartApis, nil

	if err := req.Validate(); err != nil {
		return nil, err
	}

	return req, nil
}
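
The reworked parser follows a decode-then-validate shape: unmarshal into the typed request, then let the type's own Validate method reject bad input before any query building happens. A generic standalone sketch of that shape (the request type and its fields here are hypothetical stand-ins, not the actual thirdpartyapitypes.ThirdPartyApiRequest):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"
)

// exampleRequest stands in for a validated request type; its fields are illustrative.
type exampleRequest struct {
	Start int64 `json:"start"`
	End   int64 `json:"end"`
}

func (r *exampleRequest) Validate() error {
	if r.End <= r.Start {
		return errors.New("end must be greater than start")
	}
	return nil
}

// parseRequestBody mirrors the decode-then-validate flow used above.
func parseRequestBody(r *http.Request) (*exampleRequest, error) {
	req := new(exampleRequest)
	if err := json.NewDecoder(r.Body).Decode(req); err != nil {
		return nil, fmt.Errorf("cannot parse the request body: %w", err)
	}
	if err := req.Validate(); err != nil {
		return nil, err
	}
	return req, nil
}

func main() {
	r := httptest.NewRequest(http.MethodPost, "/example", strings.NewReader(`{"start":1,"end":2}`))
	req, err := parseRequestBody(r)
	fmt.Println(req, err)
}
```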

@@ -25,7 +25,6 @@ type Reader interface {
	GetDependencyGraph(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)

	GetTTL(ctx context.Context, orgID string, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
	GetCustomRetentionTTL(ctx context.Context, orgID string) (*model.GetCustomRetentionTTLResponse, error)

	// GetDisks returns a list of disks configured in the underlying DB. It is supported by
	// clickhouse only.
@@ -48,7 +47,6 @@ type Reader interface {

	// Setter Interfaces
	SetTTL(ctx context.Context, orgID string, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)
	SetTTLV2(ctx context.Context, orgID string, params *model.CustomRetentionTTLParams) (*model.CustomRetentionTTLResponse, error)

	FetchTemporality(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]map[v3.Temporality]bool, error)
	GetMetricAggregateAttributes(ctx context.Context, orgID valuer.UUID, req *v3.AggregateAttributeRequest, skipSignozMetrics bool) (*v3.AggregateAttributeResponse, error)

@@ -404,43 +404,6 @@ type TTLParams struct {
	DelDuration int64 // Seconds after which data will be deleted.
}

type CustomRetentionTTLParams struct {
	Type                  string                `json:"type"`
	DefaultTTLDays        int                   `json:"defaultTTLDays"`
	TTLConditions         []CustomRetentionRule `json:"ttlConditions"`
	ColdStorageVolume     string                `json:"coldStorageVolume,omitempty"`
	ToColdStorageDuration int64                 `json:"coldStorageDuration,omitempty"`
}

type CustomRetentionRule struct {
	Filters []FilterCondition `json:"conditions"`
	TTLDays int               `json:"ttlDays"`
}

type FilterCondition struct {
	Key    string   `json:"key"`
	Values []string `json:"values"`
}

type GetCustomRetentionTTLResponse struct {
	Version string `json:"version"`
	Status  string `json:"status"`

	// V1 fields
	LogsTime             int `json:"logs_ttl_duration_hrs,omitempty"`
	LogsMoveTime         int `json:"logs_move_ttl_duration_hrs,omitempty"`
	ExpectedLogsTime     int `json:"expected_logs_ttl_duration_hrs,omitempty"`
	ExpectedLogsMoveTime int `json:"expected_logs_move_ttl_duration_hrs,omitempty"`

	// V2 fields
	DefaultTTLDays int                   `json:"default_ttl_days,omitempty"`
	TTLConditions  []CustomRetentionRule `json:"ttl_conditions,omitempty"`
}
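
Because every field except version and status is omitempty, the same response struct serializes differently for v1 and v2 settings: a v1 org only carries the hour-based *_ttl_duration_hrs fields, a v2 org only the day-based default/rule fields. A quick sketch with made-up values and a trimmed local mirror of the struct above:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed mirror of GetCustomRetentionTTLResponse above.
type getTTLResponse struct {
	Version        string `json:"version"`
	Status         string `json:"status"`
	LogsTime       int    `json:"logs_ttl_duration_hrs,omitempty"`
	DefaultTTLDays int    `json:"default_ttl_days,omitempty"`
}

func main() {
	v1 := getTTLResponse{Version: "v1", Status: "success", LogsTime: 720}
	v2 := getTTLResponse{Version: "v2", Status: "success", DefaultTTLDays: 30}
	for _, r := range []getTTLResponse{v1, v2} {
		b, _ := json.Marshal(r)
		fmt.Println(string(b)) // v1 carries hour-based fields, v2 day-based defaults/rules
	}
}
```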

type CustomRetentionTTLResponse struct {
	Message string `json:"message"`
}

type GetTTLParams struct {
	Type string
}

@@ -82,7 +82,7 @@ func CollisionHandledFinalExpr(
	correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
	if found {
		// we found a close match, in the error message send the suggestion
		return "", nil, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
		return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
	} else {
		// not even a close match, return an error
		return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)

@@ -164,6 +164,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
	FullTextColumn:     b.fullTextColumn,
	JsonBodyPrefix:     b.jsonBodyPrefix,
	JsonKeyToKey:       b.jsonKeyToKey,
	OnlyResourceFilter: true, // Only process resource terms
	SkipFullTextFilter: true,
	SkipFunctionCalls:  true,
	// there is no need for "key" not found error for resource filtering

@@ -18,6 +18,199 @@ import (
|
||||
|
||||
var searchTroubleshootingGuideURL = "https://signoz.io/docs/userguide/search-troubleshooting/"
|
||||
|
||||
// BooleanExpression represents a boolean expression with proper evaluation context
|
||||
type BooleanExpression struct {
|
||||
SQL string
|
||||
IsTrue bool
|
||||
IsEmpty bool
|
||||
}
|
||||
|
||||
// NewBooleanExpression creates a BooleanExpression from SQL
|
||||
func NewBooleanExpression(sql string) BooleanExpression {
|
||||
return BooleanExpression{
|
||||
SQL: sql,
|
||||
IsTrue: sql == "true",
|
||||
IsEmpty: sql == "",
|
||||
}
|
||||
}
|
||||
|
||||
// booleanEvaluatingVisitor is a specialized visitor for resource filter context
|
||||
// that properly applies boolean algebra during tree traversal
|
||||
type booleanEvaluatingVisitor struct {
|
||||
*filterExpressionVisitor
|
||||
}
|
||||
|
||||
func newBooleanEvaluatingVisitor(opts FilterExprVisitorOpts) *booleanEvaluatingVisitor {
|
||||
return &booleanEvaluatingVisitor{
|
||||
filterExpressionVisitor: newFilterExpressionVisitor(opts),
|
||||
}
|
||||
}
|
||||
|
||||
// Visit dispatches to boolean-aware visit methods
|
||||
func (v *booleanEvaluatingVisitor) Visit(tree antlr.ParseTree) any {
|
||||
if tree == nil {
|
||||
return NewBooleanExpression("")
|
||||
}
|
||||
|
||||
switch t := tree.(type) {
|
||||
case *grammar.QueryContext:
|
||||
return v.VisitQuery(t)
|
||||
case *grammar.ExpressionContext:
|
||||
return v.VisitExpression(t)
|
||||
case *grammar.OrExpressionContext:
|
||||
return v.VisitOrExpression(t)
|
||||
case *grammar.AndExpressionContext:
|
||||
return v.VisitAndExpression(t)
|
||||
case *grammar.UnaryExpressionContext:
|
||||
return v.VisitUnaryExpression(t)
|
||||
case *grammar.PrimaryContext:
|
||||
return v.VisitPrimary(t)
|
||||
default:
|
||||
// For leaf nodes, delegate to original visitor and wrap result
|
||||
result := v.filterExpressionVisitor.Visit(tree)
|
||||
if sql, ok := result.(string); ok {
|
||||
return NewBooleanExpression(sql)
|
||||
}
|
||||
return NewBooleanExpression("")
|
||||
}
|
||||
}
|
||||
|
||||
func (v *booleanEvaluatingVisitor) VisitQuery(ctx *grammar.QueryContext) any {
|
||||
return v.Visit(ctx.Expression())
|
||||
}
|
||||
|
||||
func (v *booleanEvaluatingVisitor) VisitExpression(ctx *grammar.ExpressionContext) any {
|
||||
return v.Visit(ctx.OrExpression())
|
||||
}
|
||||
|
||||
func (v *booleanEvaluatingVisitor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
|
||||
andExpressions := ctx.AllAndExpression()
|
||||
|
||||
var result BooleanExpression
|
||||
hasTrue := false
|
||||
hasEmpty := false
|
||||
|
||||
for i, expr := range andExpressions {
|
||||
exprResult := v.Visit(expr).(BooleanExpression)
|
||||
if exprResult.IsTrue {
|
||||
hasTrue = true
|
||||
}
|
||||
if exprResult.IsEmpty {
|
||||
hasEmpty = true
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
result = exprResult
|
||||
} else {
|
||||
if result.IsEmpty {
|
||||
result = exprResult
|
||||
} else if !exprResult.IsEmpty {
|
||||
sql := v.builder.Or(result.SQL, exprResult.SQL)
|
||||
result = NewBooleanExpression(sql)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// In resource filter context, if any operand is empty (meaning "include all resources"),
|
||||
// the entire OR should be empty (include all resources)
|
||||
if hasEmpty && v.onlyResourceFilter {
|
||||
result.IsEmpty = true
|
||||
result.IsTrue = true
|
||||
result.SQL = ""
|
||||
} else if hasTrue {
|
||||
// Mark as always true if any operand is true, but preserve the SQL structure
|
||||
result.IsTrue = true
|
||||
}
|
||||
|
||||
return result
|
||||
}
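
In resource-filter context the OR combinator above follows a small truth table: an empty operand means "no resource constraint", so the whole OR collapses to "include all resources", while a plain true operand only marks the result as always-true. A standalone sketch of that algebra using a local stand-in for the BooleanExpression shape (the SQL string is a hypothetical resource condition, not the package's actual output):

```go
package main

import "fmt"

// boolExpr is a local stand-in for the BooleanExpression defined above.
type boolExpr struct {
	SQL     string
	IsTrue  bool
	IsEmpty bool
}

// orResource applies the resource-filter OR rule sketched above: any empty
// operand (no resource constraint) makes the whole OR unconstrained.
func orResource(parts ...boolExpr) boolExpr {
	out := boolExpr{}
	for i, p := range parts {
		if p.IsEmpty {
			return boolExpr{IsEmpty: true, IsTrue: true} // include all resources
		}
		if i == 0 {
			out = p
			continue
		}
		out = boolExpr{
			SQL:    "(" + out.SQL + ") OR (" + p.SQL + ")",
			IsTrue: out.IsTrue || p.IsTrue,
		}
	}
	return out
}

func main() {
	resourceTerm := boolExpr{SQL: "resource_service_name = 'api'"} // hypothetical resource condition
	nonResourceTerm := boolExpr{IsEmpty: true}                     // a non-resource term contributed nothing
	fmt.Printf("%+v\n", orResource(resourceTerm, nonResourceTerm)) // collapses to "include all resources"
}
```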
|
||||
|
||||
func (v *booleanEvaluatingVisitor) VisitAndExpression(ctx *grammar.AndExpressionContext) any {
|
||||
unaryExpressions := ctx.AllUnaryExpression()
|
||||
|
||||
var result BooleanExpression
|
||||
allTrue := true
|
||||
|
||||
for i, expr := range unaryExpressions {
|
||||
exprResult := v.Visit(expr).(BooleanExpression)
|
||||
if !exprResult.IsTrue && !exprResult.IsEmpty {
|
||||
allTrue = false
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
result = exprResult
|
||||
} else {
|
||||
// Apply boolean AND logic
|
||||
if exprResult.IsTrue {
|
||||
// A AND true = A, continue with result
|
||||
continue
|
||||
}
|
||||
if result.IsTrue {
|
||||
result = exprResult
|
||||
} else if result.IsEmpty {
|
||||
result = exprResult
|
||||
} else if !exprResult.IsEmpty {
|
||||
sql := v.builder.And(result.SQL, exprResult.SQL)
|
||||
result = NewBooleanExpression(sql)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If all terms were "true", mark the result as always true
|
||||
if allTrue && len(unaryExpressions) > 0 {
|
||||
result.IsTrue = true
|
||||
if result.SQL == "" {
|
||||
result.SQL = "true"
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (v *booleanEvaluatingVisitor) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) any {
|
||||
result := v.Visit(ctx.Primary()).(BooleanExpression)
|
||||
|
||||
if ctx.NOT() != nil {
|
||||
// Apply NOT logic with resource filter context awareness
|
||||
if v.onlyResourceFilter {
|
||||
if result.IsTrue {
|
||||
return NewBooleanExpression("") // NOT(true) = include all resources
|
||||
}
|
||||
if result.IsEmpty {
|
||||
return NewBooleanExpression("") // NOT(empty) = include all resources
|
||||
}
|
||||
}
|
||||
|
||||
sql := fmt.Sprintf("NOT (%s)", result.SQL)
|
||||
return NewBooleanExpression(sql)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (v *booleanEvaluatingVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any {
|
||||
if ctx.OrExpression() != nil {
|
||||
result := v.Visit(ctx.OrExpression()).(BooleanExpression)
|
||||
// If no boolean simplification happened, preserve original parentheses structure
|
||||
if !result.IsEmpty && !result.IsTrue {
|
||||
// Use original visitor to get proper parentheses structure
|
||||
originalSQL := v.filterExpressionVisitor.Visit(ctx)
|
||||
if sql, ok := originalSQL.(string); ok && sql != "" {
|
||||
return NewBooleanExpression(sql)
|
||||
}
|
||||
result.SQL = fmt.Sprintf("(%s)", result.SQL)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// For other cases, delegate to original visitor
|
||||
sqlResult := v.filterExpressionVisitor.Visit(ctx)
|
||||
if sql, ok := sqlResult.(string); ok {
|
||||
return NewBooleanExpression(sql)
|
||||
}
|
||||
return NewBooleanExpression("")
|
||||
}
|
||||
|
||||
// filterExpressionVisitor implements the FilterQueryVisitor interface
|
||||
// to convert the parsed filter expressions into ClickHouse WHERE clause
|
||||
type filterExpressionVisitor struct {
|
||||
@@ -34,6 +227,7 @@ type filterExpressionVisitor struct {
|
||||
jsonBodyPrefix string
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||
skipResourceFilter bool
|
||||
onlyResourceFilter bool
|
||||
skipFullTextFilter bool
|
||||
skipFunctionCalls bool
|
||||
ignoreNotFoundKeys bool
|
||||
@@ -52,6 +246,7 @@ type FilterExprVisitorOpts struct {
|
||||
JsonBodyPrefix string
|
||||
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||
SkipResourceFilter bool
|
||||
OnlyResourceFilter bool // Only process resource terms, skip non-resource terms
|
||||
SkipFullTextFilter bool
|
||||
SkipFunctionCalls bool
|
||||
IgnoreNotFoundKeys bool
|
||||
@@ -70,6 +265,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
|
||||
jsonBodyPrefix: opts.JsonBodyPrefix,
|
||||
jsonKeyToKey: opts.JsonKeyToKey,
|
||||
skipResourceFilter: opts.SkipResourceFilter,
|
||||
onlyResourceFilter: opts.OnlyResourceFilter,
|
||||
skipFullTextFilter: opts.SkipFullTextFilter,
|
||||
skipFunctionCalls: opts.SkipFunctionCalls,
|
||||
ignoreNotFoundKeys: opts.IgnoreNotFoundKeys,
|
||||
@@ -160,6 +356,31 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWher
|
||||
cond = "true"
|
||||
}
|
||||
|
||||
// In resource filter context, apply robust boolean evaluation only when needed
|
||||
if opts.OnlyResourceFilter {
|
||||
// Check if the condition contains patterns that need boolean simplification
|
||||
// We need boolean evaluation when:
|
||||
// 1. Expression contains " true" (indicating simplified non-resource terms)
|
||||
// 2. Expression is exactly "true"
|
||||
// 3. Expression contains "NOT" with true values that need simplification
|
||||
needsBooleanEval := strings.Contains(cond, " true") ||
|
||||
cond == "true" ||
|
||||
(strings.Contains(cond, "NOT") && strings.Contains(cond, "true"))
|
||||
|
||||
if needsBooleanEval {
|
||||
// Re-parse and evaluate with boolean algebra
|
||||
boolVisitor := newBooleanEvaluatingVisitor(opts)
|
||||
boolResult := boolVisitor.Visit(tree)
|
||||
if boolExpr, ok := boolResult.(BooleanExpression); ok {
|
||||
if boolExpr.IsEmpty {
|
||||
cond = "true" // Empty means include all resources
|
||||
} else {
|
||||
cond = boolExpr.SQL
|
||||
}
|
||||
}
|
||||
}
|
||||
}
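
The heuristic above only re-runs the boolean-evaluating visitor when the first pass left a literal true behind (or a NOT wrapped around one); everything else keeps the cheaper single-pass result. A quick check of which strings trigger it, mirroring the same three conditions with illustrative condition strings:

```go
package main

import (
	"fmt"
	"strings"
)

// needsBooleanEval mirrors the trigger conditions in PrepareWhereClause above.
func needsBooleanEval(cond string) bool {
	return strings.Contains(cond, " true") ||
		cond == "true" ||
		(strings.Contains(cond, "NOT") && strings.Contains(cond, "true"))
}

func main() {
	for _, cond := range []string{
		"true",                              // whole filter simplified away
		"resource_a = ? OR true",            // a non-resource term became true
		"NOT (true)",                        // negated simplified term
		"resource_a = ? AND resource_b = ?", // pure resource filter, no re-evaluation
	} {
		fmt.Printf("%-38q -> %v\n", cond, needsBooleanEval(cond))
	}
}
```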
|
||||
|
||||
whereClause := sqlbuilder.NewWhereClause().AddWhereExpr(visitor.builder.Args, cond)
|
||||
|
||||
return &PreparedWhereClause{whereClause, visitor.warnings, visitor.mainWarnURL}, nil
|
||||
@@ -226,22 +447,23 @@ func (v *filterExpressionVisitor) VisitExpression(ctx *grammar.ExpressionContext
|
||||
func (v *filterExpressionVisitor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
|
||||
andExpressions := ctx.AllAndExpression()
|
||||
|
||||
andExpressionConditions := make([]string, len(andExpressions))
|
||||
for i, expr := range andExpressions {
|
||||
validConditions := []string{}
|
||||
|
||||
for _, expr := range andExpressions {
|
||||
if condExpr, ok := v.Visit(expr).(string); ok && condExpr != "" {
|
||||
andExpressionConditions[i] = condExpr
|
||||
validConditions = append(validConditions, condExpr)
|
||||
}
|
||||
}
|
||||
|
||||
if len(andExpressionConditions) == 0 {
|
||||
if len(validConditions) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
if len(andExpressionConditions) == 1 {
|
||||
return andExpressionConditions[0]
|
||||
if len(validConditions) == 1 {
|
||||
return validConditions[0]
|
||||
}
|
||||
|
||||
return v.builder.Or(andExpressionConditions...)
|
||||
return v.builder.Or(validConditions...)
|
||||
}
|
||||
|
||||
// VisitAndExpression handles AND expressions
|
||||
@@ -272,6 +494,17 @@ func (v *filterExpressionVisitor) VisitUnaryExpression(ctx *grammar.UnaryExpress
|
||||
|
||||
// Check if this is a NOT expression
|
||||
if ctx.NOT() != nil {
|
||||
// In resource filter context, handle NOT specially
|
||||
if v.onlyResourceFilter {
|
||||
// NOT(true) means NOT(all non-resource terms) which means "include all resources"
|
||||
if result == "true" {
|
||||
return "" // No filtering = include all resources
|
||||
}
|
||||
// NOT(empty) should return empty (no filtering)
|
||||
if result == "" {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("NOT (%s)", result)
|
||||
}
|
||||
|
||||
@@ -283,7 +516,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
|
||||
if ctx.OrExpression() != nil {
|
||||
// This is a parenthesized expression
|
||||
if condExpr, ok := v.Visit(ctx.OrExpression()).(string); ok && condExpr != "" {
|
||||
return fmt.Sprintf("(%s)", v.Visit(ctx.OrExpression()).(string))
|
||||
return fmt.Sprintf("(%s)", condExpr)
|
||||
}
|
||||
return ""
|
||||
} else if ctx.Comparison() != nil {
|
||||
@@ -365,6 +598,22 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
}

// this is used to only process resource terms in resource filter context
if v.onlyResourceFilter {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, key := range keys {
if key.FieldContext == telemetrytypes.FieldContextResource {
filteredKeys = append(filteredKeys, key)
}
}
keys = filteredKeys
if len(keys) == 0 {
// For non-resource terms in resource filter context, return "true"
// This ensures OR expressions work correctly (e.g., resource OR non-resource)
return "true"
}
}

// Handle EXISTS specially
if ctx.EXISTS() != nil {
op := qbtypes.FilterOperatorExists
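In the VisitComparison hunk, only keys with a resource field context survive when the visitor builds the resource pre-filter; everything else degrades to the neutral condition "true". A small illustrative filter over a simplified key type (the real code uses telemetrytypes.TelemetryFieldKey):

package main

import "fmt"

type fieldKey struct {
	Name    string
	Context string // "resource", "attribute", ...
}

// resourceOnly keeps just the resource-context keys; when none remain the
// caller emits the neutral condition "true" so OR branches that mix
// resource and non-resource terms still evaluate correctly.
func resourceOnly(keys []fieldKey) ([]fieldKey, string) {
	filtered := []fieldKey{}
	for _, k := range keys {
		if k.Context == "resource" {
			filtered = append(filtered, k)
		}
	}
	if len(filtered) == 0 {
		return nil, "true"
	}
	return filtered, ""
}

func main() {
	keys := []fieldKey{{Name: "http.method", Context: "attribute"}}
	_, neutral := resourceOnly(keys)
	fmt.Println(neutral) // true -> the term places no constraint on resources
}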
@@ -131,7 +131,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddFactorIndexesFactory(sqlstore, sqlschema),
sqlmigration.NewQueryBuilderV5MigrationFactory(sqlstore, telemetryStore),
sqlmigration.NewAddMeterQuickFiltersFactory(sqlstore, sqlschema),
sqlmigration.NewUpdateTTLSettingForCustomRetentionFactory(sqlstore, sqlschema),
)
}
@@ -1,77 +0,0 @@
package sqlmigration

import (
"context"

"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)

type updateTTLSettingForCustomRetention struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}

func NewUpdateTTLSettingForCustomRetentionFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("update_ttl_setting"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return newUpdateTTLSettingForCustomRetention(ctx, providerSettings, config, sqlstore, sqlschema)
})
}

func newUpdateTTLSettingForCustomRetention(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) {
return &updateTTLSettingForCustomRetention{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
}

func (migration *updateTTLSettingForCustomRetention) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}

func (migration *updateTTLSettingForCustomRetention) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}

defer func() {
_ = tx.Rollback()
}()

// Get the table and its constraints
table, _, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("ttl_setting"))
if err != nil {
return err
}

// Define the new column
column := &sqlschema.Column{
Name: sqlschema.ColumnName("condition"),
DataType: sqlschema.DataTypeText,
Nullable: true,
}

sqls := migration.sqlschema.Operator().AddColumn(table, nil, column, nil)
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}

if err := tx.Commit(); err != nil {
return err
}

return nil
}

func (migration *updateTTLSettingForCustomRetention) Down(ctx context.Context, db *bun.DB) error {
return nil
}
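The deleted migration follows the transaction pattern used throughout sqlmigration: begin, defer a rollback, apply the generated DDL, commit. A stripped-down sketch of the same flow with a hand-written statement instead of the sqlschema operator; the function name and SQL text here are illustrative assumptions, not part of the diff:

package sketch

import (
	"context"
	"database/sql"
)

// addConditionColumn adds a nullable TEXT column inside a transaction;
// the deferred Rollback is a no-op once Commit has succeeded.
func addConditionColumn(ctx context.Context, db *sql.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer func() { _ = tx.Rollback() }()

	if _, err := tx.ExecContext(ctx, `ALTER TABLE ttl_setting ADD COLUMN condition TEXT`); err != nil {
		return err
	}
	return tx.Commit()
}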
@@ -170,7 +170,7 @@ func (m *fieldMapper) ColumnExpressionFor(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match, in the error message send the suggestion
return "", errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
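Both ColumnExpressionFor hunks swap errors.Wrap for errors.Wrapf. Worth noting that passing a runtime string such as correction directly as the format argument of a printf-style wrapper can trip go vet's non-constant format string check and will mangle any '%' in the suggestion; a later hunk in this diff uses the safer "%s", correction form. A generic illustration with fmt.Errorf (not the SigNoz errors package):

package main

import "fmt"

func main() {
	base := fmt.Errorf("field not found")
	correction := "did you mean `usage%`?" // user-derived text may contain '%'

	// Risky: correction is interpreted as a format string, so any '%' in it
	// corrupts the message and can consume the wrapped error argument.
	bad := fmt.Errorf(correction+": %w", base)

	// Safe: pass the dynamic text as an argument to a constant format.
	good := fmt.Errorf("%s: %w", correction, base)

	fmt.Println(bad)
	fmt.Println(good)
}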
@@ -142,6 +142,111 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
expectedErr: nil,
},
{
name: "Time series with NOT predicate containing only non-resource terms",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "NOT (message CONTAINS 'foo' AND hasToken(body, 'bar'))",
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND NOT ((((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?) AND hasToken(LOWER(body), LOWER(?))))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1747945619), uint64(1747983448), "%foo%", true, "bar", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
name: "Time series with NOT OR mixed resource/non-resource terms",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "NOT (service.name = 'redis-manual' OR http.method = 'GET')",
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND NOT ((((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1747945619), uint64(1747983448), "redis-manual", true, "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
name: "Time series with complex NOT expression and nested OR conditions",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "service.name IN 'redis' AND request.type = 'External' AND http.status_code < 500 AND http.status_code >= 400 AND NOT ((http.request.header.tenant_id = '[\"tenant-1\"]' AND http.status_code = 401) OR (http.request.header.tenant_id = '[\"tenant-2\"]' AND http.status_code = 404 AND http.route = '/tenants/{tenant_id}'))",
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ?) AND labels LIKE ? AND (labels LIKE ?)) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (((resources_string['service.name'] = ?) AND mapContains(resources_string, 'service.name') = ?) AND (attributes_string['request.type'] = ? AND mapContains(attributes_string, 'request.type') = ?) AND (toFloat64(attributes_number['http.status_code']) < ? AND mapContains(attributes_number, 'http.status_code') = ?) AND (toFloat64(attributes_number['http.status_code']) >= ? AND mapContains(attributes_number, 'http.status_code') = ?) AND NOT ((((((attributes_string['http.request.header.tenant_id'] = ? AND mapContains(attributes_string, 'http.request.header.tenant_id') = ?) AND (toFloat64(attributes_number['http.status_code']) = ? AND mapContains(attributes_number, 'http.status_code') = ?))) OR (((attributes_string['http.request.header.tenant_id'] = ? AND mapContains(attributes_string, 'http.request.header.tenant_id') = ?) AND (toFloat64(attributes_number['http.status_code']) = ? AND mapContains(attributes_number, 'http.status_code') = ?) AND (attributes_string['http.route'] = ? AND mapContains(attributes_string, 'http.route') = ?))))))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{"redis", "%service.name%", "%service.name\":\"redis%", uint64(1747945619), uint64(1747983448), "redis", true, "External", true, float64(500), true, float64(400), true, "[\"tenant-1\"]", true, float64(401), true, "[\"tenant-2\"]", true, float64(404), true, "/tenants/{tenant_id}", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
name: "Time series with complex OR expression containing NOT with nested conditions",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "(service.name IN 'redis' AND request.type = 'External' AND http.status_code < 500 AND http.status_code >= 400 OR NOT ((http.request.header.tenant_id = '[\"tenant-1\"]' AND http.status_code = 401) OR (http.request.header.tenant_id = '[\"tenant-2\"]' AND http.status_code = 404 AND http.route = '/tenants/{tenant_id}')))",
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (((((resources_string['service.name'] = ?) AND mapContains(resources_string, 'service.name') = ?) AND (attributes_string['request.type'] = ? AND mapContains(attributes_string, 'request.type') = ?) AND (toFloat64(attributes_number['http.status_code']) < ? AND mapContains(attributes_number, 'http.status_code') = ?) AND (toFloat64(attributes_number['http.status_code']) >= ? AND mapContains(attributes_number, 'http.status_code') = ?)) OR NOT ((((((attributes_string['http.request.header.tenant_id'] = ? AND mapContains(attributes_string, 'http.request.header.tenant_id') = ?) AND (toFloat64(attributes_number['http.status_code']) = ? AND mapContains(attributes_number, 'http.status_code') = ?))) OR (((attributes_string['http.request.header.tenant_id'] = ? AND mapContains(attributes_string, 'http.request.header.tenant_id') = ?) AND (toFloat64(attributes_number['http.status_code']) = ? AND mapContains(attributes_number, 'http.status_code') = ?) AND (attributes_string['http.route'] = ? AND mapContains(attributes_string, 'http.route') = ?)))))))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{uint64(1747945619), uint64(1747983448), "redis", true, "External", true, float64(500), true, float64(400), true, "[\"tenant-1\"]", true, float64(401), true, "[\"tenant-2\"]", true, float64(404), true, "/tenants/{tenant_id}", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
{
name: "Time series with OR between multiple resource conditions",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "service.name = 'redis' OR service.name = 'driver'",
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?)) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts",
Args: []any{"redis", "%service.name%", "%service.name\":\"redis%", "driver", "%service.name%", "%service.name\":\"driver%", uint64(1747945619), uint64(1747983448), "redis", true, "driver", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
},
expectedErr: nil,
},
}

fm := NewFieldMapper()
@@ -862,6 +862,27 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
Materialized: true,
},
},
"request.type": {
{
Name: "request.type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.request.header.tenant_id": {
{
Name: "http.request.header.tenant_id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.route": {
{
Name: "http.route",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
}

for _, keys := range keysMap {
@@ -93,7 +93,7 @@ func (m *fieldMapper) ColumnExpressionFor(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match, in the error message send the suggestion
return "", errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
} else {
// not even a close match, return an error
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
@@ -113,12 +113,12 @@ func (t *telemetryMetaStore) tracesTblStatementToFieldKeys(ctx context.Context)
statements := []telemetrytypes.ShowCreateTableStatement{}
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
if err != nil {
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
}

materialisedKeys, err := ExtractFieldKeysFromTblStatement(statements[0].Statement)
if err != nil {
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
}

for idx := range materialisedKeys {
@@ -220,7 +220,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
}
defer rows.Close()
keys := []*telemetrytypes.TelemetryFieldKey{}
@@ -238,7 +238,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
var priority uint8
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
}
key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]

@@ -257,7 +257,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}

if rows.Err() != nil {
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
return nil, false, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
}

// hit the limit? (only counting DB results)
@@ -326,12 +326,12 @@ func (t *telemetryMetaStore) logsTblStatementToFieldKeys(ctx context.Context) ([
statements := []telemetrytypes.ShowCreateTableStatement{}
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
if err != nil {
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
}

materialisedKeys, err := ExtractFieldKeysFromTblStatement(statements[0].Statement)
if err != nil {
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}

for idx := range materialisedKeys {
@@ -479,7 +479,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, mainQuery, allArgs...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}
defer rows.Close()

@@ -509,7 +509,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
var priority uint8
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}
key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]

@@ -528,7 +528,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}

if rows.Err() != nil {
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}

// hit the limit? (only counting DB results)
@@ -654,7 +654,7 @@ func (t *telemetryMetaStore) getMetricsKeys(ctx context.Context, fieldKeySelecto

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
}
defer rows.Close()

@@ -673,7 +673,7 @@ func (t *telemetryMetaStore) getMetricsKeys(ctx context.Context, fieldKeySelecto
var priority uint8
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
}
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: name,
@@ -684,7 +684,7 @@ func (t *telemetryMetaStore) getMetricsKeys(ctx context.Context, fieldKeySelecto
}

if rows.Err() != nil {
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
return nil, false, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
}

// hit the limit?
@@ -728,7 +728,7 @@ func (t *telemetryMetaStore) getMeterSourceMetricKeys(ctx context.Context, field

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
}
defer rows.Close()

@@ -744,7 +744,7 @@ func (t *telemetryMetaStore) getMeterSourceMetricKeys(ctx context.Context, field
var name string
err = rows.Scan(&name)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
}
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: name,
@@ -753,7 +753,7 @@ func (t *telemetryMetaStore) getMeterSourceMetricKeys(ctx context.Context, field
}

if rows.Err() != nil {
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
return nil, false, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
}

// hit the limit?
@@ -1081,7 +1081,7 @@ func (t *telemetryMetaStore) getSpanFieldValues(ctx context.Context, fieldValueS

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}
defer rows.Close()

@@ -1096,7 +1096,7 @@ func (t *telemetryMetaStore) getSpanFieldValues(ctx context.Context, fieldValueS
var stringValue string
var numberValue float64
if err := rows.Scan(&stringValue, &numberValue); err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}

// Only add values if we haven't hit the limit yet
@@ -1163,7 +1163,7 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}
defer rows.Close()

@@ -1178,7 +1178,7 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
var stringValue string
var numberValue float64
if err := rows.Scan(&stringValue, &numberValue); err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}

// Only add values if we haven't hit the limit yet
@@ -1251,7 +1251,7 @@ func (t *telemetryMetaStore) getMetricFieldValues(ctx context.Context, fieldValu

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
}
defer rows.Close()

@@ -1266,7 +1266,7 @@ func (t *telemetryMetaStore) getMetricFieldValues(ctx context.Context, fieldValu

var stringValue string
if err := rows.Scan(&stringValue); err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
}
values.StringValues = append(values.StringValues, stringValue)
}
@@ -1305,7 +1305,7 @@ func (t *telemetryMetaStore) getMeterSourceMetricFieldValues(ctx context.Context
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterValues.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterValues.Error())
}
defer rows.Close()

@@ -1320,7 +1320,7 @@ func (t *telemetryMetaStore) getMeterSourceMetricFieldValues(ctx context.Context

var attribute []string
if err := rows.Scan(&attribute); err != nil {
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterValues.Error())
return nil, false, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterValues.Error())
}
if len(attribute) > 1 {
values.StringValues = append(values.StringValues, attribute[1])

@@ -305,7 +305,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor(
correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
if found {
// we found a close match, in the error message send the suggestion
return "", errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "%s", correction)
} else {
// not even a close match, return an error
return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
@@ -1,16 +0,0 @@
package ctxtypes

import "context"

type authTypeKey struct{}

// SetAuthType stores the auth type (e.g., AuthTypeJWT, AuthTypeAPIKey, AuthTypeInternal) in context.
func SetAuthType(ctx context.Context, authType AuthType) context.Context {
return context.WithValue(ctx, authTypeKey{}, authType)
}

// AuthTypeFromContext retrieves the auth type from context if set.
func AuthTypeFromContext(ctx context.Context) (AuthType, bool) {
v, ok := ctx.Value(authTypeKey{}).(AuthType)
return v, ok
}
@@ -1,13 +0,0 @@
package ctxtypes

import "github.com/SigNoz/signoz/pkg/valuer"

type AuthType struct {
valuer.String
}

var (
AuthTypeJWT = AuthType{valuer.NewString("jwt")}
AuthTypeAPIKey = AuthType{valuer.NewString("api_key")}
AuthTypeInternal = AuthType{valuer.NewString("internal")}
)
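The two deleted ctxtypes files implement the usual typed-context-key pattern. For reference, this is roughly how the removed helpers would be used; the sketch below uses local stand-ins (a plain string type instead of valuer.String) and is based only on the code shown above:

package main

import (
	"context"
	"fmt"
)

// AuthType is a stand-in for the removed ctxtypes.AuthType valuer.
type AuthType string

const AuthTypeJWT AuthType = "jwt"

type authTypeKey struct{} // unexported key type prevents collisions with other packages

func SetAuthType(ctx context.Context, at AuthType) context.Context {
	return context.WithValue(ctx, authTypeKey{}, at)
}

func AuthTypeFromContext(ctx context.Context) (AuthType, bool) {
	v, ok := ctx.Value(authTypeKey{}).(AuthType)
	return v, ok
}

func main() {
	ctx := SetAuthType(context.Background(), AuthTypeJWT)
	if at, ok := AuthTypeFromContext(ctx); ok {
		fmt.Println("authenticated via", at) // authenticated via jwt
	}
}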
@@ -7,6 +7,7 @@ import (

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
@@ -231,7 +232,7 @@ func (storableDashboardData *StorableDashboardData) GetWidgetIds() []string {
return widgetIds
}

func (dashboard *Dashboard) CanUpdate(ctx context.Context, data StorableDashboardData, diff int) error {
func (dashboard *Dashboard) CanUpdate(data StorableDashboardData) error {
if dashboard.Locked {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot update a locked dashboard, please unlock the dashboard to update")
}
@@ -251,17 +252,15 @@ func (dashboard *Dashboard) CanUpdate(ctx context.Context, data StorableDashboar
differenceMap[id] = true
}
}

// Allow multiple deletions only if diff == 0
if diff > 0 && len(difference) > diff {
if len(difference) > 1 {
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "deleting more than one panel is not supported")
}

return nil
}

func (dashboard *Dashboard) Update(ctx context.Context, updatableDashboard UpdatableDashboard, updatedBy string, diff int) error {
err := dashboard.CanUpdate(ctx, updatableDashboard, diff)
func (dashboard *Dashboard) Update(updatableDashboard UpdatableDashboard, updatedBy string) error {
err := dashboard.CanUpdate(updatableDashboard)
if err != nil {
return err
}
@@ -271,15 +270,20 @@ func (dashboard *Dashboard) Update(ctx context.Context, updatableDashboard Updat
return nil
}

func (dashboard *Dashboard) CanLockUnlock(role types.Role, updatedBy string) error {
if dashboard.CreatedBy != updatedBy && role != types.RoleAdmin {
func (dashboard *Dashboard) CanLockUnlock(ctx context.Context, updatedBy string) error {
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
return err
}

if dashboard.CreatedBy != updatedBy && claims.Role != types.RoleAdmin {
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "you are not authorized to lock/unlock this dashboard")
}
return nil
}

func (dashboard *Dashboard) LockUnlock(lock bool, role types.Role, updatedBy string) error {
err := dashboard.CanLockUnlock(role, updatedBy)
func (dashboard *Dashboard) LockUnlock(ctx context.Context, lock bool, updatedBy string) error {
err := dashboard.CanLockUnlock(ctx, updatedBy)
if err != nil {
return err
}
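The dashboard hunks revert CanLockUnlock to read the caller's role from JWT claims in the request context instead of taking it as an explicit parameter. A minimal sketch of that authorization check with stand-in types; the real code uses authtypes.ClaimsFromContext and types.RoleAdmin, which are replaced here by hypothetical local equivalents:

package main

import (
	"context"
	"errors"
	"fmt"
)

type claims struct{ Role string }

type claimsKey struct{}

func claimsFromContext(ctx context.Context) (claims, error) {
	c, ok := ctx.Value(claimsKey{}).(claims)
	if !ok {
		return claims{}, errors.New("no claims in context")
	}
	return c, nil
}

type dashboard struct{ CreatedBy string }

// canLockUnlock allows the dashboard owner or an admin to toggle the lock.
func (d dashboard) canLockUnlock(ctx context.Context, updatedBy string) error {
	c, err := claimsFromContext(ctx)
	if err != nil {
		return err
	}
	if d.CreatedBy != updatedBy && c.Role != "ADMIN" {
		return errors.New("not authorized to lock/unlock this dashboard")
	}
	return nil
}

func main() {
	ctx := context.WithValue(context.Background(), claimsKey{}, claims{Role: "EDITOR"})
	d := dashboard{CreatedBy: "alice"}
	fmt.Println(d.canLockUnlock(ctx, "bob"))   // not authorized ...
	fmt.Println(d.canLockUnlock(ctx, "alice")) // <nil>
}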
@@ -1,83 +0,0 @@
package dashboardtypes

import (
"context"
"testing"

"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/assert"
)

func makeTestWidgets(ids ...string) []interface{} {
widgets := []interface{}{}
for _, id := range ids {
widgets = append(widgets, map[string]interface{}{
"id": id,
"query": map[string]interface{}{},
})
}
return widgets
}

func TestCanUpdate_MultipleDeletions_ByDiff(t *testing.T) {
testCases := []struct {
name string
diff int
updated []string
wantErr bool
}{
{
name: "diff-0-allows-multi-delete",
diff: 0,
updated: []string{"a"}, // deleting 2 widgets (b, c)
wantErr: false,
},
{
name: "diff-1-blocks-multi-delete",
diff: 1,
updated: []string{"a"}, // deleting 2 widgets (b, c) > diff(1)
wantErr: true,
},
{
name: "diff-1-allows-single-delete",
diff: 1,
updated: []string{"a", "b"}, // deleting 1 widget (c) = diff(1)
wantErr: false,
},
{
name: "diff-2-allows-two-deletions",
diff: 2,
updated: []string{"a"}, // deleting 2 widgets (b, c) = diff(2)
wantErr: false,
},
{
name: "diff-1-blocks-three-deletions",
diff: 1,
updated: []string{}, // deleting all 3 widgets > diff(1)
wantErr: true,
},
}

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
ctx := context.Background()

orgID := valuer.GenerateUUID()
initial := StorableDashboardData{
"widgets": makeTestWidgets("a", "b", "c"),
}
d, err := NewDashboard(orgID, "tester", initial)
assert.NoError(t, err)

updated := StorableDashboardData{
"widgets": makeTestWidgets(tc.updated...),
}
err = d.CanUpdate(ctx, updated, tc.diff)
if tc.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
@@ -68,7 +68,6 @@ type TTLSetting struct {
ColdStorageTTL int `bun:"cold_storage_ttl,notnull,default:0"`
Status string `bun:"status,type:text,notnull"`
OrgID string `json:"-" bun:"org_id,notnull"`
Condition string `bun:"condition,type:text"`
}

type OrganizationStore interface {
@@ -403,37 +403,3 @@ type FormatOptions struct {
FillGaps bool `json:"fillGaps,omitempty"`
FormatTableResultForUI bool `json:"formatTableResultForUI,omitempty"`
}

func (r *QueryRangeRequest) GetQueriesSupportingZeroDefault() map[string]bool {
canDefaultZeroAgg := func(expr string) bool {
expr = strings.ToLower(expr)
// only pure additive/counting operations should default to zero,
// while statistical/analytical operations should show gaps when there's no data to analyze.
// TODO: use newExprVisitor for getting the function used in the expression
if strings.HasPrefix(expr, "count(") ||
strings.HasPrefix(expr, "count_distinct(") ||
strings.HasPrefix(expr, "sum(") ||
strings.HasPrefix(expr, "rate(") {
return true
}
return false

}

canDefaultZero := make(map[string]bool)
for _, q := range r.CompositeQuery.Queries {
if q.Type == QueryTypeBuilder {
if query, ok := q.Spec.(QueryBuilderQuery[TraceAggregation]); ok {
if len(query.Aggregations) == 1 && canDefaultZeroAgg(query.Aggregations[0].Expression) {
canDefaultZero[query.Name] = true
}
} else if query, ok := q.Spec.(QueryBuilderQuery[LogAggregation]); ok {
if len(query.Aggregations) == 1 && canDefaultZeroAgg(query.Aggregations[0].Expression) {
canDefaultZero[query.Name] = true
}
}
}
}

return canDefaultZero
}
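The removed GetQueriesSupportingZeroDefault helper treats only additive aggregations (count, count_distinct, sum, rate) as safe to fill with zero when a time bucket has no data; min/avg and other statistical aggregations keep their gaps. The prefix check at its core, extracted as a standalone sketch:

package main

import (
	"fmt"
	"strings"
)

// canDefaultZero reports whether an aggregation expression is purely
// additive/counting, in which case an empty bucket can honestly be shown as 0.
func canDefaultZero(expr string) bool {
	expr = strings.ToLower(strings.TrimSpace(expr))
	for _, prefix := range []string{"count(", "count_distinct(", "sum(", "rate("} {
		if strings.HasPrefix(expr, prefix) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(canDefaultZero("count()"))       // true
	fmt.Println(canDefaultZero("min(duration)")) // false: a gap, not zero
}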
@@ -2,7 +2,6 @@ package querybuildertypesv5

import (
"encoding/json"
"reflect"
"testing"
"time"
@@ -1530,114 +1529,3 @@ func TestValidateUniqueTraceOperator(t *testing.T) {
})
}
}

func TestQueryRangeRequest_GetQueriesSupportingZeroDefault(t *testing.T) {
tests := []struct {
name string
CompositeQuery CompositeQuery
want map[string]bool
}{
{
name: "test count on traces - support zeroDefault",
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Filter: &Filter{
Expression: "service.name = demo",
},
Aggregations: []TraceAggregation{
{
Expression: "count()",
},
},
},
},
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "B",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
},
},
},
},
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Expression: "A / B * 100",
},
},
},
},
want: map[string]bool{
"A": true,
"B": true,
},
},
{
name: "test rate on logs - support zeroDefault",
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Filter: &Filter{
Expression: "service.name = demo",
},
Aggregations: []LogAggregation{
{
Expression: "rate()",
},
},
},
},
},
},
want: map[string]bool{
"A": true,
},
},
{
name: "test min on logs - doesn't support zeroDefault",
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Filter: &Filter{
Expression: "service.name = demo",
},
Aggregations: []LogAggregation{
{
Expression: "min(duration)",
},
},
},
},
},
},
want: map[string]bool{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := &QueryRangeRequest{
CompositeQuery: tt.CompositeQuery,
}
if got := r.GetQueriesSupportingZeroDefault(); !reflect.DeepEqual(got, tt.want) {
t.Errorf("QueryRangeRequest.GetQueriesSupportingZeroDefault() = %v, want %v", got, tt.want)
}
})
}
}
29 pkg/types/thirdpartyapitypes/thirdpartyapi.go Normal file
@@ -0,0 +1,29 @@
package thirdpartyapitypes

import (
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

type ThirdPartyApiRequest struct {
Start uint64 `json:"start"`
End uint64 `json:"end"`
ShowIp bool `json:"show_ip,omitempty"`
Domain string `json:"domain,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
Filter *qbtypes.Filter `json:"filters,omitempty"`
GroupBy []qbtypes.GroupByKey `json:"groupBy,omitempty"`
}

// Validate validates the ThirdPartyApiRequest
func (req *ThirdPartyApiRequest) Validate() error {
if req.Start >= req.End {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "start time must be before end time")
}

if req.Filter != nil && req.Filter.Expression == "" {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "filter expression cannot be empty when filter is provided")
}

return nil
}
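A short usage sketch for the new request type: decode the payload, validate it, and only then hand it to the query layer. The JSON field names follow the struct tags above; the types below are local stand-ins mirroring the new thirdpartyapitypes definitions, and the surrounding handler is hypothetical:

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// Local stand-ins mirroring the new thirdpartyapitypes definitions.
type Filter struct {
	Expression string `json:"expression"`
}

type ThirdPartyApiRequest struct {
	Start  uint64  `json:"start"`
	End    uint64  `json:"end"`
	Domain string  `json:"domain,omitempty"`
	Filter *Filter `json:"filters,omitempty"`
}

func (req *ThirdPartyApiRequest) Validate() error {
	if req.Start >= req.End {
		return errors.New("start time must be before end time")
	}
	if req.Filter != nil && req.Filter.Expression == "" {
		return errors.New("filter expression cannot be empty when filter is provided")
	}
	return nil
}

func main() {
	payload := []byte(`{"start": 1747945619000, "end": 1747983448000, "domain": "api.example.com"}`)
	var req ThirdPartyApiRequest
	if err := json.Unmarshal(payload, &req); err != nil {
		panic(err)
	}
	fmt.Println(req.Validate()) // <nil>
}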