Compare commits

...

11 Commits

Author SHA1 Message Date
ahmadshaheer
8c0c922faa refactor: refactor queryPayload generation in logs pagination custom hook 2025-02-24 10:51:44 +04:30
ahmadshaheer
6aec0d5c9b refactor: handle the updated pagination logic in k8s entity events 2025-02-24 10:45:10 +04:30
ahmadshaheer
8c994ee751 chore: remove unused function and prop 2025-02-23 16:08:01 +04:30
ahmadshaheer
a0a7b82e55 chore: revert pagination handling changes for logs context view 2025-02-23 11:53:06 +04:30
ahmadshaheer
ce49c774f1 fix: optimize context log rendering and prevent duplicate logs 2025-02-21 12:27:09 +04:30
ahmadshaheer
65d0041672 fix: handle resetting offset on changing page size 2025-02-21 11:33:10 +04:30
ahmadshaheer
31d59cc3c6 refactor: logs panel component pagination changes 2025-02-20 21:12:17 +04:30
ahmadshaheer
81c8ba1978 refactor: handle pagination changes in k8s logs and host logs 2025-02-20 18:01:38 +04:30
ahmadshaheer
14a0a372c2 refactor: pagination changes in query range custom hook 2025-02-20 18:01:34 +04:30
Raj Kamal Singh
e33a0fdd47 Feat/cloud integrations connection params api key (#6997)
* feat: get started on PAT provisioning for AWS integration

* chore: include cloud integration PAT in connection params

* chore: some cleanup

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-02-01 20:12:56 +05:30
Srikanth Chekuri
c8032f771e chore: add k8s metrics receiving status (#6977) 2025-01-31 15:07:04 +00:00
19 changed files with 798 additions and 493 deletions

View File

@@ -10,8 +10,13 @@ import (
"strings"
"time"
"github.com/google/uuid"
"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
"go.signoz.io/signoz/pkg/query-service/auth"
baseconstants "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/dao"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -20,6 +25,7 @@ type CloudIntegrationConnectionParamsResponse struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseWriter, r *http.Request) {
@@ -31,44 +37,64 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
license, err := ah.LM().GetRepo().GetActiveLicense(r.Context())
currentUser, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, basemodel.InternalError(fmt.Errorf(
"couldn't look for active license: %w", err,
RespondError(w, basemodel.UnauthorizedError(fmt.Errorf(
"couldn't deduce current user: %w", err,
)), nil)
return
}
if license == nil {
RespondError(w, basemodel.ForbiddenError(fmt.Errorf(
"no active license found",
)), nil)
return
}
ingestionUrl, signozApiUrl, err := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if err != nil {
RespondError(w, basemodel.InternalError(fmt.Errorf(
"couldn't deduce ingestion url and signoz api url: %w", err,
)), nil)
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), currentUser.OrgId, cloudProvider)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:",
), nil)
return
}
result := CloudIntegrationConnectionParamsResponse{
IngestionUrl: ingestionUrl,
SigNozAPIUrl: signozApiUrl,
SigNozAPIKey: apiKey,
}
license, apiErr := ah.LM().GetRepo().GetActiveLicense(r.Context())
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't look for active license",
), nil)
return
}
if license == nil {
// Return the API Key (PAT) even if the rest of the params cannot be deduced.
// Params not returned from here will be requested from the user via form inputs.
// This enables a gracefully degraded but working experience even for non-cloud deployments.
zap.L().Info("ingestion params and signoz api url cannot be deduced since no license was found")
ah.Respond(w, result)
return
}
ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't deduce ingestion url and signoz api url",
), nil)
return
}
result.IngestionUrl = ingestionUrl
result.SigNozAPIUrl = signozApiUrl
gatewayUrl := ah.opts.GatewayUrl
if len(gatewayUrl) > 0 {
ingestionKey, err := getOrCreateCloudProviderIngestionKey(
ingestionKey, apiErr := getOrCreateCloudProviderIngestionKey(
r.Context(), gatewayUrl, license.Key, cloudProvider,
)
if err != nil {
RespondError(w, basemodel.InternalError(fmt.Errorf(
"couldn't get or create ingestion key: %w", err,
)), nil)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't get or create ingestion key",
), nil)
return
}
@@ -81,6 +107,100 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
ah.Respond(w, result)
}
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider string) (
string, *basemodel.ApiError,
) {
integrationPATName := fmt.Sprintf("%s integration", cloudProvider)
integrationUser, apiErr := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if apiErr != nil {
return "", apiErr
}
allPats, err := ah.AppDao().ListPATs(ctx)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err.Error(),
))
}
for _, p := range allPats {
if p.UserID == integrationUser.Id && p.Name == integrationPATName {
return p.Token, nil
}
}
zap.L().Info(
"no PAT found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
)
newPAT := model.PAT{
Token: generatePATToken(),
UserID: integrationUser.Id,
Name: integrationPATName,
Role: baseconstants.ViewerGroup,
ExpiresAt: 0,
CreatedAt: time.Now().Unix(),
UpdatedAt: time.Now().Unix(),
}
integrationPAT, err := ah.AppDao().CreatePAT(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err.Error(),
))
}
return integrationPAT.Token, nil
}
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*basemodel.User, *basemodel.ApiError) {
cloudIntegrationUserId := fmt.Sprintf("%s-integration", cloudProvider)
integrationUserResult, apiErr := ah.AppDao().GetUser(ctx, cloudIntegrationUserId)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't look for integration user")
}
if integrationUserResult != nil {
return &integrationUserResult.User, nil
}
zap.L().Info(
"cloud integration user not found. Attempting to create the user",
zap.String("cloudProvider", cloudProvider),
)
newUser := &basemodel.User{
Id: cloudIntegrationUserId,
Name: fmt.Sprintf("%s integration", cloudProvider),
Email: fmt.Sprintf("%s@signoz.io", cloudIntegrationUserId),
CreatedAt: time.Now().Unix(),
OrgId: orgId,
}
viewerGroup, apiErr := dao.DB().GetGroupByName(ctx, baseconstants.ViewerGroup)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't get viewer group for creating integration user")
}
newUser.GroupId = viewerGroup.Id
passwordHash, err := auth.PasswordHash(uuid.NewString())
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf(
"couldn't hash random password for cloud integration user: %w", err,
))
}
newUser.Password = passwordHash
integrationUser, apiErr := ah.AppDao().CreateUser(ctx, newUser, false)
if apiErr != nil {
return nil, basemodel.WrapApiError(apiErr, "couldn't create cloud integration user")
}
return integrationUser, nil
}
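A note on the response shape: because every field of CloudIntegrationConnectionParamsResponse carries omitempty, an unlicensed deployment returns only the provisioned PAT while a licensed one with a gateway returns all params. A minimal, self-contained sketch of the two shapes (struct mirrored from above; all values hypothetical):
package main
import (
"encoding/json"
"fmt"
)
// Mirror of CloudIntegrationConnectionParamsResponse, reproduced so this
// sketch compiles on its own.
type connectionParams struct {
IngestionUrl string `json:"ingestion_url,omitempty"`
IngestionKey string `json:"ingestion_key,omitempty"`
SigNozAPIUrl string `json:"signoz_api_url,omitempty"`
SigNozAPIKey string `json:"signoz_api_key,omitempty"`
}
func main() {
// Licensed deployment with a gateway configured: every field is set.
full, _ := json.Marshal(connectionParams{
IngestionUrl: "https://ingest.example.com", // hypothetical
IngestionKey: "ingestion-key-123",          // hypothetical
SigNozAPIUrl: "https://signoz.example.com", // hypothetical
SigNozAPIKey: "pat-token-abc",              // hypothetical
})
// No license: only the PAT survives omitempty; the UI asks the user for the rest.
degraded, _ := json.Marshal(connectionParams{SigNozAPIKey: "pat-token-abc"})
fmt.Println(string(full))
fmt.Println(string(degraded))
}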
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
string, string, *basemodel.ApiError,
) {

View File

@@ -8,19 +8,13 @@ import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { isEqual } from 'lodash-es';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso } from 'react-virtuoso';
import { ILog } from 'types/api/logs/log';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderQuery,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import { v4 } from 'uuid';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { getHostLogsQueryPayload } from './constants';
import NoLogsContainer from './NoLogsContainer';
@@ -30,51 +24,30 @@ interface Props {
startTime: number;
endTime: number;
};
handleChangeLogFilters: (filters: IBuilderQuery['filters']) => void;
filters: IBuilderQuery['filters'];
}
function HostMetricsLogs({
timeRange,
handleChangeLogFilters,
filters,
}: Props): JSX.Element {
const [logs, setLogs] = useState<ILog[]>([]);
const [hasReachedEndOfLogs, setHasReachedEndOfLogs] = useState(false);
const [restFilters, setRestFilters] = useState<TagFilterItem[]>([]);
const [resetLogsList, setResetLogsList] = useState<boolean>(false);
useEffect(() => {
const newRestFilters = filters.items.filter(
(item) => item.key?.key !== 'id' && item.key?.key !== 'host.name',
);
const areFiltersSame = isEqual(restFilters, newRestFilters);
if (!areFiltersSame) {
setResetLogsList(true);
}
setRestFilters(newRestFilters);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [filters]);
const queryPayload = useMemo(() => {
const basePayload = getHostLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
filters,
);
basePayload.query.builder.queryData[0].pageSize = 100;
basePayload.query.builder.queryData[0].orderBy = [
{ columnName: 'timestamp', order: ORDERBY_FILTERS.DESC },
];
return basePayload;
}, [timeRange.startTime, timeRange.endTime, filters]);
const [isPaginating, setIsPaginating] = useState(false);
function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
const basePayload = getHostLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
filters,
);
const {
logs,
hasReachedEndOfLogs,
isPaginating,
currentPage,
setIsPaginating,
handleNewData,
loadMoreLogs,
queryPayload,
} = useHandleLogsPagination({
timeRange,
filters,
excludeFilterKeys: ['host.name'],
basePayload,
});
const { data, isLoading, isFetching, isError } = useQuery({
queryKey: [
@@ -82,6 +55,7 @@ function HostMetricsLogs({
timeRange.startTime,
timeRange.endTime,
filters,
currentPage,
],
queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
enabled: !!queryPayload,
@@ -90,33 +64,13 @@ function HostMetricsLogs({
useEffect(() => {
if (data?.payload?.data?.newResult?.data?.result) {
const currentData = data.payload.data.newResult.data.result;
if (resetLogsList) {
const currentLogs: ILog[] =
currentData[0].list?.map((item) => ({
...item.data,
timestamp: item.timestamp,
})) || [];
setLogs(currentLogs);
setResetLogsList(false);
}
if (currentData.length > 0 && currentData[0].list) {
const currentLogs: ILog[] =
currentData[0].list.map((item) => ({
...item.data,
timestamp: item.timestamp,
})) || [];
setLogs((prev) => [...prev, ...currentLogs]);
} else {
setHasReachedEndOfLogs(true);
}
handleNewData(data.payload.data.newResult.data.result);
}
}, [data, restFilters, isPaginating, resetLogsList]);
}, [data, handleNewData]);
useEffect(() => {
setIsPaginating(false);
}, [data, setIsPaginating]);
const getItemContent = useCallback(
(_: number, logToRender: ILog): JSX.Element => (
@@ -144,39 +98,6 @@ function HostMetricsLogs({
[],
);
const loadMoreLogs = useCallback(() => {
if (!logs.length) return;
setIsPaginating(true);
const lastLog = logs[logs.length - 1];
const newItems = [
...filters.items.filter((item) => item.key?.key !== 'id'),
{
id: v4(),
key: {
key: 'id',
type: '',
dataType: DataTypes.String,
isColumn: true,
},
op: '<',
value: lastLog.id,
},
];
const newFilters = {
op: 'AND',
items: newItems,
} as IBuilderQuery['filters'];
handleChangeLogFilters(newFilters);
}, [logs, filters, handleChangeLogFilters]);
useEffect(() => {
setIsPaginating(false);
}, [data]);
const renderFooter = useCallback(
(): JSX.Element | null => (
// eslint-disable-next-line react/jsx-no-useless-fragment

View File

@@ -7,6 +7,7 @@ import { DEFAULT_ENTITY_VERSION } from 'constants/app';
import { EventContents } from 'container/InfraMonitoringK8s/commonUtils';
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
import LoadingContainer from 'container/InfraMonitoringK8s/LoadingContainer';
import { INITIAL_PAGE_SIZE } from 'container/LogsContextList/configs';
import LogsError from 'container/LogsError/LogsError';
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
@@ -21,10 +22,8 @@ import { isArray } from 'lodash-es';
import { ChevronDown, ChevronLeft, ChevronRight, Loader2 } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { v4 } from 'uuid';
import {
EntityDetailsEmptyContainer,
@@ -123,16 +122,19 @@ export default function Events({
filters,
);
basePayload.query.builder.queryData[0].pageSize = 10;
basePayload.query.builder.queryData[0].pageSize = INITIAL_PAGE_SIZE;
basePayload.query.builder.queryData[0].offset =
(page - 1) * INITIAL_PAGE_SIZE;
basePayload.query.builder.queryData[0].orderBy = [
{ columnName: 'timestamp', order: ORDERBY_FILTERS.DESC },
{ columnName: 'id', order: ORDERBY_FILTERS.DESC },
];
return basePayload;
}, [timeRange.startTime, timeRange.endTime, filters]);
}, [timeRange.startTime, timeRange.endTime, filters, page]);
const { data: eventsData, isLoading, isFetching, isError } = useQuery({
queryKey: [queryKey, timeRange.startTime, timeRange.endTime, filters],
queryKey: [queryKey, timeRange.startTime, timeRange.endTime, filters, page],
queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
enabled: !!queryPayload,
});
@@ -189,61 +191,12 @@ export default function Events({
const handlePrev = (): void => {
if (!formattedEntityEvents.length) return;
setPage(page - 1);
const firstEvent = formattedEntityEvents[0];
const newItems = [
...filters.items.filter((item) => item.key?.key !== 'id'),
{
id: v4(),
key: {
key: 'id',
type: '',
dataType: DataTypes.String,
isColumn: true,
},
op: '>',
value: firstEvent.id,
},
];
const newFilters = {
op: 'AND',
items: newItems,
} as IBuilderQuery['filters'];
handleChangeEventFilters(newFilters);
};
const handleNext = (): void => {
if (!formattedEntityEvents.length) return;
setPage(page + 1);
const lastEvent = formattedEntityEvents[formattedEntityEvents.length - 1];
const newItems = [
...filters.items.filter((item) => item.key?.key !== 'id'),
{
id: v4(),
key: {
key: 'id',
type: '',
dataType: DataTypes.String,
isColumn: true,
},
op: '<',
value: lastEvent.id,
},
];
const newFilters = {
op: 'AND',
items: newItems,
} as IBuilderQuery['filters'];
handleChangeEventFilters(newFilters);
};
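With the id-cursor filters gone, paging in Events reduces to moving the 1-based page counter; the memoized payload then recomputes the offset. A condensed sketch of the arithmetic (helper is hypothetical; INITIAL_PAGE_SIZE assumed to be 10, matching the hardcoded pageSize it replaced):
const INITIAL_PAGE_SIZE = 10;
// Offset math mirrored from the queryPayload memo above.
const offsetForPage = (page: number): number => (page - 1) * INITIAL_PAGE_SIZE;
offsetForPage(1); // 0  — first page
offsetForPage(2); // 10 — handleNext just calls setPage(page + 1)
offsetForPage(3); // 20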
const handleExpandRowIcon = ({

View File

@@ -9,19 +9,13 @@ import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
import LogsError from 'container/LogsError/LogsError';
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
import { FontSize } from 'container/OptionsMenu/types';
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { isEqual } from 'lodash-es';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo } from 'react';
import { useQuery } from 'react-query';
import { Virtuoso } from 'react-virtuoso';
import { ILog } from 'types/api/logs/log';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderQuery,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import { v4 } from 'uuid';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import {
EntityDetailsEmptyContainer,
@@ -33,7 +27,6 @@ interface Props {
startTime: number;
endTime: number;
};
handleChangeLogFilters: (filters: IBuilderQuery['filters']) => void;
filters: IBuilderQuery['filters'];
queryKey: string;
category: K8sCategory;
@@ -42,87 +35,33 @@ interface Props {
function EntityLogs({
timeRange,
handleChangeLogFilters,
filters,
queryKey,
category,
queryKeyFilters,
}: Props): JSX.Element {
const [logs, setLogs] = useState<ILog[]>([]);
const [hasReachedEndOfLogs, setHasReachedEndOfLogs] = useState(false);
const [restFilters, setRestFilters] = useState<TagFilterItem[]>([]);
const [resetLogsList, setResetLogsList] = useState<boolean>(false);
const basePayload = getEntityEventsOrLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
filters,
);
useEffect(() => {
const newRestFilters = filters.items.filter(
(item) =>
item.key?.key !== 'id' && !queryKeyFilters.includes(item.key?.key ?? ''),
);
const areFiltersSame = isEqual(restFilters, newRestFilters);
if (!areFiltersSame) {
setResetLogsList(true);
}
setRestFilters(newRestFilters);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [filters]);
const queryPayload = useMemo(() => {
const basePayload = getEntityEventsOrLogsQueryPayload(
timeRange.startTime,
timeRange.endTime,
filters,
);
basePayload.query.builder.queryData[0].pageSize = 100;
basePayload.query.builder.queryData[0].orderBy = [
{ columnName: 'timestamp', order: ORDERBY_FILTERS.DESC },
];
return basePayload;
}, [timeRange.startTime, timeRange.endTime, filters]);
const [isPaginating, setIsPaginating] = useState(false);
const { data, isLoading, isFetching, isError } = useQuery({
queryKey: [queryKey, timeRange.startTime, timeRange.endTime, filters],
queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
enabled: !!queryPayload,
keepPreviousData: isPaginating,
const {
logs,
hasReachedEndOfLogs,
isPaginating,
currentPage,
setIsPaginating,
handleNewData,
loadMoreLogs,
queryPayload,
} = useHandleLogsPagination({
timeRange,
filters,
queryKeyFilters,
basePayload,
});
useEffect(() => {
if (data?.payload?.data?.newResult?.data?.result) {
const currentData = data.payload.data.newResult.data.result;
if (resetLogsList) {
const currentLogs: ILog[] =
currentData[0].list?.map((item) => ({
...item.data,
timestamp: item.timestamp,
})) || [];
setLogs(currentLogs);
setResetLogsList(false);
}
if (currentData.length > 0 && currentData[0].list) {
const currentLogs: ILog[] =
currentData[0].list.map((item) => ({
...item.data,
timestamp: item.timestamp,
})) || [];
setLogs((prev) => [...prev, ...currentLogs]);
} else {
setHasReachedEndOfLogs(true);
}
}
}, [data, restFilters, isPaginating, resetLogsList]);
const getItemContent = useCallback(
(_: number, logToRender: ILog): JSX.Element => (
<RawLogView
@@ -149,38 +88,28 @@ function EntityLogs({
[],
);
const loadMoreLogs = useCallback(() => {
if (!logs.length) return;
const { data, isLoading, isFetching, isError } = useQuery({
queryKey: [
queryKey,
timeRange.startTime,
timeRange.endTime,
filters,
currentPage,
],
queryFn: () => GetMetricQueryRange(queryPayload, DEFAULT_ENTITY_VERSION),
enabled: !!queryPayload,
keepPreviousData: isPaginating,
});
setIsPaginating(true);
const lastLog = logs[logs.length - 1];
const newItems = [
...filters.items.filter((item) => item.key?.key !== 'id'),
{
id: v4(),
key: {
key: 'id',
type: '',
dataType: DataTypes.String,
isColumn: true,
},
op: '<',
value: lastLog.id,
},
];
const newFilters = {
op: 'AND',
items: newItems,
} as IBuilderQuery['filters'];
handleChangeLogFilters(newFilters);
}, [logs, filters, handleChangeLogFilters]);
useEffect(() => {
if (data?.payload?.data?.newResult?.data?.result) {
handleNewData(data.payload.data.newResult.data.result);
}
}, [data, handleNewData]);
useEffect(() => {
setIsPaginating(false);
}, [data]);
}, [data, setIsPaginating]);
const renderFooter = useCallback(
(): JSX.Element | null => (

View File

@@ -96,7 +96,6 @@ function EntityLogsDetailedView({
</div>
<EntityLogs
timeRange={timeRange}
handleChangeLogFilters={handleChangeLogFilters}
filters={logFilters}
queryKey={queryKey}
category={category}

View File

@@ -11,7 +11,8 @@ import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
import { FontSize } from 'container/OptionsMenu/types';
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { uniqBy } from 'lodash-es';
import { useCallback, useMemo, useState } from 'react';
import { Virtuoso } from 'react-virtuoso';
import { ILog } from 'types/api/logs/log';
import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
@@ -27,7 +28,6 @@ function ContextLogRenderer({
}: ContextLogRendererProps): JSX.Element {
const [prevLogPage, setPrevLogPage] = useState<number>(1);
const [afterLogPage, setAfterLogPage] = useState<number>(1);
const [logs, setLogs] = useState<ILog[]>([log]);
const { initialDataSource, stagedQuery } = useQueryBuilder();
@@ -73,20 +73,10 @@ function ContextLogRenderer({
fontSize: options.fontSize,
});
useEffect(() => {
setLogs((prev) => [...previousLogs, ...prev]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [previousLogs]);
useEffect(() => {
setLogs((prev) => [...prev, ...afterLogs]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [afterLogs]);
useEffect(() => {
setLogs([log]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [filters]);
const logsToRender = useMemo(
() => uniqBy([...previousLogs, log, ...afterLogs], 'id'),
[previousLogs, log, afterLogs],
);
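The uniqBy memo above is what prevents duplicate context logs: lodash keeps the first occurrence per id, so the anchor log never renders twice even when it also appears in previousLogs or afterLogs. For illustration:
import { uniqBy } from 'lodash-es';
// First occurrence per 'id' wins; later duplicates are dropped.
uniqBy([{ id: 'a' }, { id: 'b' }, { id: 'a' }], 'id');
// => [{ id: 'a' }, { id: 'b' }]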
const lengthMultipier = useMemo(() => {
switch (options.fontSize) {
@@ -137,9 +127,9 @@ function ContextLogRenderer({
<Virtuoso
className="virtuoso-list"
initialTopMostItemIndex={0}
data={logs}
data={logsToRender}
itemContent={getItemContent}
style={{ height: `calc(${logs.length} * ${lengthMultipier}px)` }}
style={{ height: `calc(${logsToRender.length} * ${lengthMultipier}px)` }}
/>
</OverlayScrollbar>
{isAfterLogsFetching && (

View File

@@ -40,7 +40,7 @@ import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import { useNotifications } from 'hooks/useNotifications';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { FlatLogData } from 'lib/logs/flatLogData';
import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
import { getPaginationQueryDataV2 } from 'lib/newQueryBuilder/getPaginationQueryData';
import {
cloneDeep,
defaultTo,
@@ -94,7 +94,9 @@ function LogsExplorerViews({
selectedView: SELECTED_VIEWS;
showFrequencyChart: boolean;
setIsLoadingQueries: React.Dispatch<React.SetStateAction<boolean>>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
listQueryKeyRef: MutableRefObject<any>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
chartQueryKeyRef: MutableRefObject<any>;
}): JSX.Element {
const { notifications } = useNotifications();
@@ -305,10 +307,7 @@ function LogsExplorerViews({
): Query | null => {
if (!query) return null;
const paginateData = getPaginationQueryData({
filters: params.filters,
listItemId: params.log ? params.log.id : null,
orderByTimestamp,
const paginateData = getPaginationQueryDataV2({
page: params.page,
pageSize: params.pageSize,
});
@@ -333,7 +332,7 @@ function LogsExplorerViews({
return data;
},
[orderByTimestamp, listQuery],
[listQuery],
);
const handleEndReached = useCallback(

View File

@@ -10,7 +10,6 @@ import Controls from 'container/Controls';
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
import { tableStyles } from 'container/TracesExplorer/ListView/styles';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { Pagination } from 'hooks/queryPagination';
import { useLogsData } from 'hooks/useLogsData';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { FlatLogData } from 'lib/logs/flatLogData';
@@ -21,46 +20,27 @@ import {
HTMLAttributes,
SetStateAction,
useCallback,
useEffect,
useMemo,
useState,
} from 'react';
import { UseQueryResult } from 'react-query';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { ILog } from 'types/api/logs/log';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { getLogPanelColumnsList, getNextOrPreviousItems } from './utils';
import { getLogPanelColumnsList } from './utils';
function LogsPanelComponent({
widget,
setRequestData,
queryResponse,
}: LogsPanelComponentProps): JSX.Element {
const [pagination, setPagination] = useState<Pagination>({
offset: 0,
limit: widget.query.builder.queryData[0].limit || 0,
});
useEffect(() => {
setRequestData((prev) => ({
...prev,
tableParams: {
pagination,
},
}));
}, [pagination, setRequestData]);
const [pageSize, setPageSize] = useState<number>(10);
const [offset, setOffset] = useState<number>(0);
const handleChangePageSize = (value: number): void => {
setPagination({
...pagination,
limit: 0,
offset: value,
});
setPageSize(value);
setOffset(0);
setRequestData((prev) => {
const newQueryData = { ...prev.query };
newQueryData.builder.queryData[0].pageSize = value;
@@ -70,7 +50,7 @@ function LogsPanelComponent({
tableParams: {
pagination: {
limit: 0,
offset: value,
offset: 0,
},
},
};
@@ -88,22 +68,12 @@ function LogsPanelComponent({
queryResponse.data?.payload?.data?.newResult?.data?.result[0]?.list?.length;
const totalCount = useMemo(() => dataLength || 0, [dataLength]);
const [firstLog, setFirstLog] = useState<ILog>();
const [lastLog, setLastLog] = useState<ILog>();
const { logs } = useLogsData({
result: queryResponse.data?.payload?.data?.newResult?.data?.result,
panelType: PANEL_TYPES.LIST,
stagedQuery: widget.query,
});
useEffect(() => {
if (logs.length) {
setFirstLog(logs[0]);
setLastLog(logs[logs.length - 1]);
}
}, [logs]);
const flattenLogData = useMemo(
() => logs.map((log) => FlatLogData(log) as RowData),
[logs],
@@ -127,84 +97,27 @@ function LogsPanelComponent({
[logs, onSetActiveLog],
);
const isOrderByTimeStamp =
widget.query.builder.queryData[0].orderBy.length > 0 &&
widget.query.builder.queryData[0].orderBy[0].columnName === 'timestamp';
const handleRequestData = (newOffset: number): void => {
setOffset(newOffset);
setRequestData((prev) => ({
...prev,
tableParams: {
pagination: {
limit: widget.query.builder.queryData[0].limit || 0,
offset: newOffset < 0 ? 0 : newOffset,
},
},
}));
};
const handlePreviousPagination = (): void => {
if (isOrderByTimeStamp) {
setRequestData((prev) => ({
...prev,
query: {
...prev.query,
builder: {
...prev.query.builder,
queryData: [
{
...prev.query.builder.queryData[0],
filters: {
...prev.query.builder.queryData[0].filters,
items: [
...getNextOrPreviousItems(
prev.query.builder.queryData[0].filters.items,
'PREV',
firstLog,
),
],
},
limit: 0,
offset: 0,
},
],
},
},
}));
}
if (!isOrderByTimeStamp) {
setPagination({
...pagination,
limit: 0,
offset: pagination.offset - pageSize,
});
}
const newOffset = offset - pageSize;
handleRequestData(newOffset);
};
const handleNextPagination = (): void => {
if (isOrderByTimeStamp) {
setRequestData((prev) => ({
...prev,
query: {
...prev.query,
builder: {
...prev.query.builder,
queryData: [
{
...prev.query.builder.queryData[0],
filters: {
...prev.query.builder.queryData[0].filters,
items: [
...getNextOrPreviousItems(
prev.query.builder.queryData[0].filters.items,
'NEXT',
lastLog,
),
],
},
limit: 0,
offset: 0,
},
],
},
},
}));
}
if (!isOrderByTimeStamp) {
setPagination({
...pagination,
limit: 0,
offset: pagination.offset + pageSize,
});
}
const newOffset = offset + pageSize;
handleRequestData(newOffset);
};
if (queryResponse.isError) {
@@ -235,12 +148,11 @@ function LogsPanelComponent({
totalCount={totalCount}
perPageOptions={PER_PAGE_OPTIONS}
isLoading={queryResponse.isFetching}
offset={pagination.offset}
offset={offset}
countPerPage={pageSize}
handleNavigatePrevious={handlePreviousPagination}
handleNavigateNext={handleNextPagination}
handleCountItemsPerPageChange={handleChangePageSize}
isLogPanel={isOrderByTimeStamp}
/>
</div>
)}

View File

@@ -1,16 +1,11 @@
import { ColumnsType } from 'antd/es/table';
import { Typography } from 'antd/lib';
import { OPERATORS } from 'constants/queryBuilder';
import { TimestampInput } from 'hooks/useTimezoneFormatter/useTimezoneFormatter';
// import Typography from 'antd/es/typography/Typography';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { ReactNode } from 'react';
import { Widgets } from 'types/api/dashboard/getAll';
import { IField } from 'types/api/logs/fields';
import { ILog } from 'types/api/logs/log';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { v4 as uuid } from 'uuid';
export const getLogPanelColumnsList = (
selectedLogFields: Widgets['selectedLogFields'],
@@ -55,49 +50,3 @@ export const getLogPanelColumnsList = (
return [...initialColumns, ...columns];
};
export const getNextOrPreviousItems = (
items: TagFilterItem[],
direction: 'NEXT' | 'PREV',
log?: ILog,
): TagFilterItem[] => {
const nextItem = {
id: uuid(),
key: {
key: 'id',
type: '',
dataType: DataTypes.String,
isColumn: true,
},
op: OPERATORS['<'],
value: log?.id || '',
};
const prevItem = {
id: uuid(),
key: {
key: 'id',
type: '',
dataType: DataTypes.String,
isColumn: true,
},
op: OPERATORS['>'],
value: log?.id || '',
};
let index = items.findIndex((item) => item.op === OPERATORS['<']);
if (index === -1) {
index = items.findIndex((item) => item.op === OPERATORS['>']);
}
if (index === -1) {
if (direction === 'NEXT') {
return [...items, nextItem];
}
return [...items, prevItem];
}
const newItems = [...items];
if (direction === 'NEXT') {
newItems[index] = nextItem;
} else {
newItems[index] = prevItem;
}
return newItems;
};

View File

@@ -0,0 +1,181 @@
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
import { ORDERBY_FILTERS } from 'container/QueryBuilder/filters/OrderByFilter/config';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { isEqual } from 'lodash-es';
import {
Dispatch,
SetStateAction,
useCallback,
useEffect,
useMemo,
useState,
} from 'react';
import { ILog } from 'types/api/logs/log';
import {
IBuilderQuery,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
interface TimeRange {
startTime: number;
endTime: number;
}
interface UsePaginatedLogsProps {
timeRange: TimeRange;
filters: IBuilderQuery['filters'];
queryKeyFilters?: string[];
excludeFilterKeys?: string[];
basePayload: GetQueryResultsProps;
}
interface UseHandleLogsPagination {
logs: ILog[];
hasReachedEndOfLogs: boolean;
isPaginating: boolean;
currentPage: number;
resetLogsList: boolean;
setIsPaginating: Dispatch<SetStateAction<boolean>>;
handleNewData: (currentData: any) => void;
loadMoreLogs: () => void;
shouldResetPage: boolean;
queryPayload: GetQueryResultsProps;
}
export const useHandleLogsPagination = ({
timeRange,
filters,
queryKeyFilters = [],
excludeFilterKeys = [],
basePayload,
}: UsePaginatedLogsProps): UseHandleLogsPagination => {
const [logs, setLogs] = useState<ILog[]>([]);
const [hasReachedEndOfLogs, setHasReachedEndOfLogs] = useState(false);
const [restFilters, setRestFilters] = useState<TagFilterItem[]>([]);
const [resetLogsList, setResetLogsList] = useState<boolean>(false);
const [page, setPage] = useState(1);
const [prevTimeRange, setPrevTimeRange] = useState<TimeRange | null>(
timeRange,
);
const [isPaginating, setIsPaginating] = useState(false);
const { shouldResetPage, newRestFilters } = useMemo(() => {
const newRestFilters = filters.items.filter((item) => {
const keyToCheck = item.key?.key ?? '';
return (
!queryKeyFilters.includes(keyToCheck) &&
!excludeFilterKeys.includes(keyToCheck)
);
});
const areFiltersSame = isEqual(restFilters, newRestFilters);
const shouldResetPage =
!areFiltersSame ||
timeRange.startTime !== prevTimeRange?.startTime ||
timeRange.endTime !== prevTimeRange?.endTime;
return { shouldResetPage, newRestFilters };
}, [
filters,
timeRange,
prevTimeRange,
queryKeyFilters,
excludeFilterKeys,
restFilters,
]);
const currentPage = useMemo(() => {
if (shouldResetPage) {
return 1;
}
return page;
}, [shouldResetPage, page]);
// Handle data updates
const handleNewData = useCallback(
(currentData: any) => {
if (!currentData[0].list) {
setHasReachedEndOfLogs(true);
return;
}
const currentLogs: ILog[] =
currentData[0].list?.map((item: any) => ({
...item.data,
timestamp: item.timestamp,
})) || [];
if (resetLogsList) {
setLogs(currentLogs);
setResetLogsList(false);
return;
}
const newLogs = currentLogs.filter(
(newLog) => !logs.some((existingLog) => isEqual(existingLog, newLog)),
);
if (newLogs.length > 0) {
setLogs((prev) => [...prev, ...newLogs]);
}
},
[logs, resetLogsList],
);
// Reset logic
useEffect(() => {
if (shouldResetPage) {
setPage(1);
setLogs([]);
setResetLogsList(true);
}
setPrevTimeRange(timeRange);
setRestFilters(newRestFilters);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [shouldResetPage, timeRange]);
const loadMoreLogs = useCallback(() => {
if (!logs.length) return;
setPage((prev) => prev + 1);
setIsPaginating(true);
}, [logs]);
const queryPayload = useMemo(
() => ({
...basePayload,
query: {
...basePayload.query,
builder: {
...basePayload.query.builder,
queryData: [
{
...basePayload.query.builder.queryData[0],
pageSize: DEFAULT_PER_PAGE_VALUE,
offset: (currentPage - 1) * DEFAULT_PER_PAGE_VALUE,
orderBy: [
{ columnName: 'timestamp', order: ORDERBY_FILTERS.DESC },
{ columnName: 'id', order: ORDERBY_FILTERS.DESC },
],
},
],
},
},
}),
[basePayload, currentPage],
);
return {
logs,
hasReachedEndOfLogs,
isPaginating,
currentPage,
resetLogsList,
queryPayload,
setIsPaginating,
handleNewData,
loadMoreLogs,
shouldResetPage,
};
};
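To make the hook's paging concrete: queryPayload rewrites the first builder query with a fixed page size, an offset derived from the current page, and a timestamp+id descending order. A standalone sketch of that transform (names and shapes simplified; DEFAULT_PER_PAGE_VALUE assumed to be 100):
const DEFAULT_PER_PAGE_VALUE = 100;
interface PagedQueryData {
	pageSize: number;
	offset: number;
	orderBy: { columnName: string; order: string }[];
}
// Simplified stand-in for the queryPayload memo above.
const buildPagedQueryData = (currentPage: number): PagedQueryData => ({
	pageSize: DEFAULT_PER_PAGE_VALUE,
	offset: (currentPage - 1) * DEFAULT_PER_PAGE_VALUE,
	orderBy: [
		{ columnName: 'timestamp', order: 'desc' },
		{ columnName: 'id', order: 'desc' },
	],
});
buildPagedQueryData(1).offset; // 0 — fresh list
buildPagedQueryData(3).offset; // 200 — loadMoreLogs bumped the page twice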

View File

@@ -22,16 +22,46 @@ export const useGetQueryRange: UseGetQueryRange = (
options,
headers,
) => {
const newRequestData: GetQueryResultsProps = useMemo(
() => ({
const newRequestData: GetQueryResultsProps = useMemo(() => {
const isListWithSingleTimestampOrder =
requestData.graphType === PANEL_TYPES.LIST &&
requestData.query.builder?.queryData[0]?.orderBy?.length === 1 &&
// exclude list with id filter (i.e. context logs)
!requestData.query.builder?.queryData[0].filters.items.some(
(filter) => filter.key?.key === 'id',
) &&
requestData.query.builder?.queryData[0].orderBy[0].columnName ===
'timestamp';
const modifiedRequestData = {
...requestData,
graphType:
requestData.graphType === PANEL_TYPES.BAR
? PANEL_TYPES.TIME_SERIES
: requestData.graphType,
}),
[requestData],
);
};
// If the query is a list with a single timestamp order, we need to add the id column to the order by clause
if (isListWithSingleTimestampOrder) {
modifiedRequestData.query.builder = {
...requestData.query.builder,
queryData: [
{
...requestData.query.builder.queryData[0],
orderBy: [
...requestData.query.builder.queryData[0].orderBy,
{
columnName: 'id',
order: requestData.query.builder.queryData[0].orderBy[0].order,
},
],
},
],
};
}
return modifiedRequestData;
}, [requestData]);
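The guard above exists because timestamp alone is not a total order: two logs can share a timestamp, and offset pagination over a non-deterministic sort can duplicate or skip rows across pages. Appending id with the same direction makes the sort stable. A minimal sketch of the transformation (shapes simplified):
type OrderBy = { columnName: string; order: 'asc' | 'desc' };
// Append an id tie-breaker when timestamp is the only ordering column.
const withIdTieBreaker = (orderBy: OrderBy[]): OrderBy[] =>
	orderBy.length === 1 && orderBy[0].columnName === 'timestamp'
		? [...orderBy, { columnName: 'id', order: orderBy[0].order }]
		: orderBy;
withIdTieBreaker([{ columnName: 'timestamp', order: 'desc' }]);
// => [{ columnName: 'timestamp', order: 'desc' }, { columnName: 'id', order: 'desc' }]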
const queryKey = useMemo(() => {
if (options?.queryKey && Array.isArray(options.queryKey)) {

View File

@@ -5,7 +5,7 @@ import {
PANEL_TYPES,
} from 'constants/queryBuilder';
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
import { getPaginationQueryData } from 'lib/newQueryBuilder/getPaginationQueryData';
import { getPaginationQueryDataV2 } from 'lib/newQueryBuilder/getPaginationQueryData';
import { useEffect, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -100,10 +100,7 @@ export const useLogsData = ({
): Query | null => {
if (!query) return null;
const paginateData = getPaginationQueryData({
filters: params.filters,
listItemId: params.log ? params.log.id : null,
orderByTimestamp,
const paginateData = getPaginationQueryDataV2({
page: params.page,
pageSize: params.pageSize,
});

View File

@@ -8,6 +8,27 @@ import {
} from 'types/api/queryBuilder/queryBuilderData';
import { v4 as uuid } from 'uuid';
type SetupPaginationQueryDataParamsV2 = {
page: number;
pageSize: number;
};
type SetupPaginationQueryDataV2 = (
params: SetupPaginationQueryDataParamsV2,
) => Partial<IBuilderQuery>;
export const getPaginationQueryDataV2: SetupPaginationQueryDataV2 = ({
page,
pageSize,
}) => {
const offset = (page - 1) * pageSize;
return {
offset,
pageSize,
};
};
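Unlike its keyset-based predecessor below, the V2 helper needs no filters or anchor log id; it is pure page arithmetic over 1-based pages. Usage:
import { getPaginationQueryDataV2 } from 'lib/newQueryBuilder/getPaginationQueryData';
getPaginationQueryDataV2({ page: 1, pageSize: 100 }); // { offset: 0, pageSize: 100 }
getPaginationQueryDataV2({ page: 3, pageSize: 100 }); // { offset: 200, pageSize: 100 }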
type SetupPaginationQueryDataParams = {
filters: IBuilderQuery['filters'];
listItemId: string | null;

View File

@@ -448,6 +448,9 @@ func (aH *APIHandler) RegisterInfraMetricsRoutes(router *mux.Router, am *AuthMid
jobsSubRouter.HandleFunc("/attribute_keys", am.ViewAccess(aH.getJobAttributeKeys)).Methods(http.MethodGet)
jobsSubRouter.HandleFunc("/attribute_values", am.ViewAccess(aH.getJobAttributeValues)).Methods(http.MethodGet)
jobsSubRouter.HandleFunc("/list", am.ViewAccess(aH.getJobList)).Methods(http.MethodPost)
infraOnboardingSubRouter := router.PathPrefix("/api/v1/infra_onboarding").Subrouter()
infraOnboardingSubRouter.HandleFunc("/k8s/status", am.ViewAccess(aH.getK8sInfraOnboardingStatus)).Methods(http.MethodGet)
}
func (aH *APIHandler) RegisterWebSocketPaths(router *mux.Router, am *AuthMiddleware) {
@@ -3936,7 +3939,7 @@ func (aH *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *Au
).Methods(http.MethodPost)
subRouter.HandleFunc(
"/{cloudProvider}/agent-check-in", am.EditAccess(aH.CloudIntegrationsAgentCheckIn),
"/{cloudProvider}/agent-check-in", am.ViewAccess(aH.CloudIntegrationsAgentCheckIn),
).Methods(http.MethodPost)
subRouter.HandleFunc(

View File

@@ -597,3 +597,52 @@ func (aH *APIHandler) getPvcAttributeValues(w http.ResponseWriter, r *http.Reque
aH.Respond(w, values)
}
func (aH *APIHandler) getK8sInfraOnboardingStatus(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
status := model.OnboardingStatus{}
didSendPodMetrics, err := aH.podsRepo.DidSendPodMetrics(ctx)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
if !didSendPodMetrics {
aH.Respond(w, status)
return
}
didSendClusterMetrics, err := aH.podsRepo.DidSendClusterMetrics(ctx)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
didSendNodeMetrics, err := aH.nodesRepo.DidSendNodeMetrics(ctx)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
didSendOptionalPodMetrics, err := aH.podsRepo.IsSendingOptionalPodMetrics(ctx)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
requiredMetadata, err := aH.podsRepo.SendingRequiredMetadata(ctx)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
return
}
status.DidSendPodMetrics = didSendPodMetrics
status.DidSendClusterMetrics = didSendClusterMetrics
status.DidSendNodeMetrics = didSendNodeMetrics
status.IsSendingOptionalPodMetrics = didSendOptionalPodMetrics
status.IsSendingRequiredMetadata = requiredMetadata
aH.Respond(w, status)
}

View File

@@ -8,6 +8,84 @@ import (
"go.signoz.io/signoz/pkg/query-service/model"
)
var (
// TODO(srikanthccv): import metadata yaml from receivers and use generated files to check the metrics
podMetricNamesToCheck = []string{
"k8s_pod_cpu_utilization",
"k8s_pod_memory_usage",
"k8s_pod_cpu_request_utilization",
"k8s_pod_memory_request_utilization",
"k8s_pod_cpu_limit_utilization",
"k8s_pod_memory_limit_utilization",
"k8s_container_restarts",
"k8s_pod_phase",
}
nodeMetricNamesToCheck = []string{
"k8s_node_cpu_utilization",
"k8s_node_allocatable_cpu",
"k8s_node_memory_usage",
"k8s_node_allocatable_memory",
"k8s_node_condition_ready",
}
clusterMetricNamesToCheck = []string{
"k8s_daemonset_desired_scheduled_nodes",
"k8s_daemonset_current_scheduled_nodes",
"k8s_deployment_desired",
"k8s_deployment_available",
"k8s_job_desired_successful_pods",
"k8s_job_active_pods",
"k8s_job_failed_pods",
"k8s_job_successful_pods",
"k8s_statefulset_desired_pods",
"k8s_statefulset_current_pods",
}
optionalPodMetricNamesToCheck = []string{
"k8s_pod_cpu_request_utilization",
"k8s_pod_memory_request_utilization",
"k8s_pod_cpu_limit_utilization",
"k8s_pod_memory_limit_utilization",
}
// did they ever send _any_ pod metrics?
didSendPodMetricsQuery = `
SELECT count() FROM %s.%s WHERE metric_name IN (%s)
`
// did they ever send any node metrics?
didSendNodeMetricsQuery = `
SELECT count() FROM %s.%s WHERE metric_name IN (%s)
`
// did they ever send any cluster metrics?
didSendClusterMetricsQuery = `
SELECT count() FROM %s.%s WHERE metric_name IN (%s)
`
// if they ever sent _any_ pod metrics, we assume they know how to send pod metrics
// now, are they sending optional pod metrics such request/limit metrics?
isSendingOptionalPodMetricsQuery = `
SELECT count() FROM %s.%s WHERE metric_name IN (%s)
`
// there should be [cluster, node, namespace, one of (deployment, statefulset, daemonset, cronjob, job)] for each pod
isSendingRequiredMetadataQuery = `
SELECT any(JSONExtractString(labels, 'k8s_cluster_name')) as k8s_cluster_name,
any(JSONExtractString(labels, 'k8s_node_name')) as k8s_node_name,
any(JSONExtractString(labels, 'k8s_namespace_name')) as k8s_namespace_name,
any(JSONExtractString(labels, 'k8s_deployment_name')) as k8s_deployment_name,
any(JSONExtractString(labels, 'k8s_statefulset_name')) as k8s_statefulset_name,
any(JSONExtractString(labels, 'k8s_daemonset_name')) as k8s_daemonset_name,
any(JSONExtractString(labels, 'k8s_cronjob_name')) as k8s_cronjob_name,
any(JSONExtractString(labels, 'k8s_job_name')) as k8s_job_name,
JSONExtractString(labels, 'k8s_pod_name') as k8s_pod_name
FROM %s.%s WHERE metric_name IN (%s)
AND (unix_milli >= (toUnixTimestamp(now() - toIntervalMinute(60)) * 1000))
AND JSONExtractString(labels, 'k8s_namespace_name') NOT IN ('kube-system', 'kube-public', 'kube-node-lease', 'metallb-system')
GROUP BY k8s_pod_name
LIMIT 1 BY k8s_cluster_name, k8s_node_name, k8s_namespace_name
`
)
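Each query template above is rendered with fmt.Sprintf before execution, with the metric list joined into a quoted IN clause. A runnable sketch of the expansion (the database and table names shown are assumptions about what SIGNOZ_METRIC_DBNAME and SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME resolve to):
package main
import (
"fmt"
"strings"
)
func main() {
// Two of the node metrics listed above; the real code joins the full slice.
names := []string{"k8s_node_cpu_utilization", "k8s_node_memory_usage"}
namesStr := "'" + strings.Join(names, "','") + "'"
query := fmt.Sprintf("SELECT count() FROM %s.%s WHERE metric_name IN (%s)",
"signoz_metrics", "time_series_v4_1day", namesStr)
fmt.Println(query)
// SELECT count() FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ('k8s_node_cpu_utilization','k8s_node_memory_usage')
}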
// getParamsForTopItems returns the step, time series table name and samples table name
// for the top items query. What are we doing here?
// We want to identify the top hosts/pods/nodes quickly, so we use pre-aggregated data

View File

@@ -2,11 +2,14 @@ package inframetrics
import (
"context"
"fmt"
"math"
"sort"
"strings"
"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@@ -62,6 +65,20 @@ func (n *NodesRepo) GetNodeAttributeKeys(ctx context.Context, req v3.FilterAttri
return attributeKeysResponse, nil
}
func (n *NodesRepo) DidSendNodeMetrics(ctx context.Context) (bool, error) {
namesStr := "'" + strings.Join(nodeMetricNamesToCheck, "','") + "'"
query := fmt.Sprintf(didSendNodeMetricsQuery,
constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME, namesStr)
count, err := n.reader.GetCountOfThings(ctx, query)
if err != nil {
return false, err
}
return count > 0, nil
}
func (n *NodesRepo) GetNodeAttributeValues(ctx context.Context, req v3.FilterAttributeValueRequest) (*v3.FilterAttributeValueResponse, error) {
req.DataSource = v3.DataSourceMetrics
req.AggregateAttribute = metricToUseForNodes

View File

@@ -2,11 +2,14 @@ package inframetrics
import (
"context"
"fmt"
"math"
"sort"
"strings"
"go.signoz.io/signoz/pkg/query-service/app/metrics/v4/helpers"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/interfaces"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
@@ -105,6 +108,137 @@ func (p *PodsRepo) GetPodAttributeValues(ctx context.Context, req v3.FilterAttri
return attributeValuesResponse, nil
}
func (p *PodsRepo) DidSendPodMetrics(ctx context.Context) (bool, error) {
namesStr := "'" + strings.Join(podMetricNamesToCheck, "','") + "'"
query := fmt.Sprintf(didSendPodMetricsQuery,
constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME, namesStr)
count, err := p.reader.GetCountOfThings(ctx, query)
if err != nil {
return false, err
}
return count > 0, nil
}
func (p *PodsRepo) DidSendClusterMetrics(ctx context.Context) (bool, error) {
namesStr := "'" + strings.Join(clusterMetricNamesToCheck, "','") + "'"
query := fmt.Sprintf(didSendClusterMetricsQuery,
constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME, namesStr)
count, err := p.reader.GetCountOfThings(ctx, query)
if err != nil {
return false, err
}
return count > 0, nil
}
func (p *PodsRepo) IsSendingOptionalPodMetrics(ctx context.Context) (bool, error) {
namesStr := "'" + strings.Join(optionalPodMetricNamesToCheck, "','") + "'"
query := fmt.Sprintf(isSendingOptionalPodMetricsQuery,
constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_v4_1DAY_TABLENAME, namesStr)
count, err := p.reader.GetCountOfThings(ctx, query)
if err != nil {
return false, err
}
return count > 0, nil
}
func (p *PodsRepo) SendingRequiredMetadata(ctx context.Context) ([]model.PodOnboardingStatus, error) {
namesStr := "'" + strings.Join(podMetricNamesToCheck, "','") + "'"
query := fmt.Sprintf(isSendingRequiredMetadataQuery,
constants.SIGNOZ_METRIC_DBNAME, constants.SIGNOZ_TIMESERIES_V4_TABLENAME, namesStr)
result, err := p.reader.GetListResultV3(ctx, query)
if err != nil {
return nil, err
}
statuses := []model.PodOnboardingStatus{}
// for each pod, check if we have all the required metadata
for _, row := range result {
status := model.PodOnboardingStatus{}
switch v := row.Data["k8s_cluster_name"].(type) {
case string:
status.HasClusterName = true
status.ClusterName = v
case *string:
status.HasClusterName = *v != ""
status.ClusterName = *v
}
switch v := row.Data["k8s_node_name"].(type) {
case string:
status.HasNodeName = true
status.NodeName = v
case *string:
status.HasNodeName = *v != ""
status.NodeName = *v
}
switch v := row.Data["k8s_namespace_name"].(type) {
case string:
status.HasNamespaceName = true
status.NamespaceName = v
case *string:
status.HasNamespaceName = *v != ""
status.NamespaceName = *v
}
switch v := row.Data["k8s_deployment_name"].(type) {
case string:
status.HasDeploymentName = true
case *string:
status.HasDeploymentName = *v != ""
}
switch v := row.Data["k8s_statefulset_name"].(type) {
case string:
status.HasStatefulsetName = true
case *string:
status.HasStatefulsetName = *v != ""
}
switch v := row.Data["k8s_daemonset_name"].(type) {
case string:
status.HasDaemonsetName = true
case *string:
status.HasDaemonsetName = *v != ""
}
switch v := row.Data["k8s_cronjob_name"].(type) {
case string:
status.HasCronjobName = true
case *string:
status.HasCronjobName = *v != ""
}
switch v := row.Data["k8s_job_name"].(type) {
case string:
status.HasJobName = true
case *string:
status.HasJobName = *v != ""
}
switch v := row.Data["k8s_pod_name"].(type) {
case string:
status.PodName = v
case *string:
status.PodName = *v
}
if !status.HasClusterName ||
!status.HasNodeName ||
!status.HasNamespaceName ||
(!status.HasDeploymentName && !status.HasStatefulsetName && !status.HasDaemonsetName && !status.HasCronjobName && !status.HasJobName) {
statuses = append(statuses, status)
}
}
return statuses, nil
}
func (p *PodsRepo) getMetadataAttributes(ctx context.Context, req model.PodListRequest) (map[string]map[string]string, error) {
podAttrs := map[string]map[string]string{}

View File

@@ -731,3 +731,26 @@ type VolumeListRecord struct {
VolumeUsage float64 `json:"volumeUsage"`
Meta map[string]string `json:"meta"`
}
type PodOnboardingStatus struct {
ClusterName string `json:"clusterName"`
NodeName string `json:"nodeName"`
NamespaceName string `json:"namespaceName"`
PodName string `json:"podName"`
HasClusterName bool `json:"hasClusterName"`
HasNodeName bool `json:"hasNodeName"`
HasNamespaceName bool `json:"hasNamespaceName"`
HasDeploymentName bool `json:"hasDeploymentName"`
HasStatefulsetName bool `json:"hasStatefulsetName"`
HasDaemonsetName bool `json:"hasDaemonsetName"`
HasCronjobName bool `json:"hasCronjobName"`
HasJobName bool `json:"hasJobName"`
}
type OnboardingStatus struct {
DidSendPodMetrics bool `json:"didSendPodMetrics"`
DidSendNodeMetrics bool `json:"didSendNodeMetrics"`
DidSendClusterMetrics bool `json:"didSendClusterMetrics"`
IsSendingOptionalPodMetrics bool `json:"isSendingOptionalPodMetrics"`
IsSendingRequiredMetadata []PodOnboardingStatus `json:"isSendingRequiredMetadata"`
}
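One naming subtlety worth flagging: IsSendingRequiredMetadata carries the pods that are missing required metadata — SendingRequiredMetadata above only appends a status when the cluster, node, or namespace name is absent, or when no workload-owner name is set — so an empty slice means onboarding looks healthy. An illustrative construction using the types above (values hypothetical):
status := OnboardingStatus{
DidSendPodMetrics:           true,
DidSendNodeMetrics:          true,
DidSendClusterMetrics:       true,
IsSendingOptionalPodMetrics: false, // no request/limit utilization metrics yet
// Only offending pods appear here; empty means every pod carries the
// required cluster/node/namespace/owner metadata.
IsSendingRequiredMetadata: []PodOnboardingStatus{},
}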