Compare commits
27 Commits
fix/query-… → feature/ta…
| SHA1 |
|---|
| b07c500b3b |
| 979d238e7b |
| 8274ebfe37 |
| 7d5e14abb6 |
| 7c17ac42b1 |
| 74ee7bb2c7 |
| 2f5640b2e6 |
| 121debcecc |
| ff13504a74 |
| d4e373443b |
| 3ccf822d67 |
| 0e270e6f51 |
| 749df2a979 |
| 9ee5d5d599 |
| 4940dfd46f |
| 79a31cc205 |
| 5102cf2b7b |
| 9ec5594648 |
| b6c2ebd6d7 |
| 9a3a8c8305 |
| 2ac45b0174 |
| 2a53918ebd |
| 9daefeb881 |
| 526cf01cb7 |
| cd4766ec2b |
| 2196b58d36 |
| 53c58b9983 |
@@ -40,7 +40,7 @@ services:
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     container_name: schema-migrator-sync
     command:
       - sync
@@ -53,7 +53,7 @@ services:
       condition: service_healthy
     restart: on-failure
   schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     container_name: schema-migrator-async
     command:
       - async
2  .github/workflows/integrationci.yaml  (vendored)
@@ -22,7 +22,7 @@ jobs:
-          - 24.1.2-alpine
+          - 24.12-alpine
         schema-migrator-version:
-          - v0.111.38
+          - v0.128.0
         postgres-version:
           - 15
       if: |
@@ -174,7 +174,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.87.0
+    image: signoz/signoz:v0.88.1
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -206,7 +206,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.42
+    image: signoz/signoz-otel-collector:v0.128.0
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -230,7 +230,7 @@ services:
     - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -100,7 +100,7 @@ services:
     #   - "9000:9000"
     #   - "8123:8123"
     #   - "9181:9181"

     configs:
       - source: clickhouse-config
         target: /etc/clickhouse-server/config.xml
@@ -110,13 +110,12 @@ services:
         target: /etc/clickhouse-server/custom-function.xml
       - source: clickhouse-cluster
         target: /etc/clickhouse-server/config.d/cluster.xml

     volumes:
       - clickhouse:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.87.0
+    image: signoz/signoz:v0.88.1
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -148,7 +147,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.111.42
+    image: signoz/signoz-otel-collector:v0.128.0
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -174,7 +173,7 @@ services:
     - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.111.42
+    image: signoz/signoz-schema-migrator:v0.128.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -195,7 +194,6 @@ volumes:
     name: signoz-sqlite
   zookeeper-1:
     name: signoz-zookeeper-1

 configs:
   clickhouse-config:
     file: ../common/clickhouse/config.xml
@@ -205,7 +203,6 @@ configs:
     file: ../common/clickhouse/custom-function.xml
   clickhouse-cluster:
     file: ../common/clickhouse/cluster.xml

   signoz-prometheus-config:
     file: ../common/signoz/prometheus.yml
   # If you have multiple dashboard files, you can list them individually:
@@ -74,13 +74,10 @@ exporters:
     dsn: tcp://clickhouse:9000/signoz_logs
     timeout: 10s
     use_new_schema: true
   # debug: {}
 service:
   telemetry:
     logs:
       encoding: json
     metrics:
       address: 0.0.0.0:8888
   extensions:
     - health_check
     - pprof
@@ -177,7 +177,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.87.0}
+    image: signoz/signoz:${VERSION:-v0.88.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -210,7 +210,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -236,7 +236,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -247,7 +247,7 @@ services:
       condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-async
     command:
       - async
@@ -110,7 +110,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.87.0}
+    image: signoz/signoz:${VERSION:-v0.88.1}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -142,7 +142,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -164,7 +164,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -176,7 +176,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
     container_name: schema-migrator-async
     command:
       - async
@@ -74,13 +74,10 @@ exporters:
     dsn: tcp://clickhouse:9000/signoz_logs
     timeout: 10s
     use_new_schema: true
   # debug: {}
 service:
   telemetry:
     logs:
       encoding: json
     metrics:
       address: 0.0.0.0:8888
   extensions:
     - health_check
     - pprof
45  frontend/.yalc/@signozhq/table/package.json  (new file)
@@ -0,0 +1,45 @@
+{
+  "name": "@signozhq/table",
+  "version": "0.0.0",
+  "license": "MIT",
+  "type": "module",
+  "exports": {
+    ".": {
+      "types": "./dist/data-table.d.ts",
+      "import": "./dist/data-table.js"
+    }
+  },
+  "main": "./dist/data-table.js",
+  "module": "./dist/data-table.js",
+  "types": "./dist/data-table.d.ts",
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsup",
+    "dev": "tsup --watch",
+    "lint": "eslint . --max-warnings 0",
+    "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist"
+  },
+  "dependencies": {
+    "@radix-ui/react-icons": "^1.3.0",
+    "@radix-ui/react-slot": "^1.1.0",
+    "@tanstack/react-table": "^8.21.3",
+    "@tanstack/react-virtual": "^3.13.9",
+    "@types/lodash-es": "^4.17.12",
+    "class-variance-authority": "^0.7.0",
+    "clsx": "^2.1.1",
+    "lodash-es": "^4.17.21",
+    "lucide-react": "^0.445.0",
+    "tailwind-merge": "^2.5.2",
+    "tailwindcss-animate": "^1.0.7"
+  },
+  "peerDependencies": {
+    "react": "^18.2.0"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "description": "Table component with granular control using tanstack",
+  "yalcSig": "a82ce696abf1d0eaafed9c9884edbe12"
+}
1  frontend/.yalc/@signozhq/table/yalc.sig  (new file)
@@ -0,0 +1 @@
+a82ce696abf1d0eaafed9c9884edbe12
@@ -41,6 +41,7 @@
     "@sentry/react": "8.41.0",
     "@sentry/webpack-plugin": "2.22.6",
     "@signozhq/design-tokens": "1.1.4",
+    "@signozhq/table": "file:.yalc/@signozhq/table",
     "@tanstack/react-table": "8.20.6",
     "@tanstack/react-virtual": "3.11.2",
     "@uiw/react-md-editor": "3.23.5",
@@ -14,8 +14,8 @@
   "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
   "remove_label_success": "Labels cleared",
   "alert_form_step1": "Step 1 - Define the metric",
-  "alert_form_step2": "Step 2 - Define Alert Conditions",
-  "alert_form_step3": "Step 3 - Alert Configuration",
+  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
+  "alert_form_step3": "Step {{step}} - Alert Configuration",
   "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
   "confirm_save_title": "Save Changes",
   "confirm_save_content_part1": "Your alert built with",
@@ -7,8 +7,8 @@
   "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
   "remove_label_success": "Labels cleared",
   "alert_form_step1": "Step 1 - Define the metric",
-  "alert_form_step2": "Step 2 - Define Alert Conditions",
-  "alert_form_step3": "Step 3 - Alert Configuration",
+  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
+  "alert_form_step3": "Step {{step}} - Alert Configuration",
   "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
   "confirm_save_title": "Save Changes",
   "confirm_save_content_part1": "Your alert built with",
@@ -7,8 +7,8 @@
   "remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
   "remove_label_success": "Labels cleared",
   "alert_form_step1": "Step 1 - Define the metric",
-  "alert_form_step2": "Step 2 - Define Alert Conditions",
-  "alert_form_step3": "Step 3 - Alert Configuration",
+  "alert_form_step2": "Step {{step}} - Define Alert Conditions",
+  "alert_form_step3": "Step {{step}} - Alert Configuration",
   "metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
   "confirm_save_title": "Save Changes",
   "confirm_save_content_part1": "Your alert built with",
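The three locale files above all make the same change: the hard-coded step number in `alert_form_step2`/`alert_form_step3` becomes an i18next `{{step}}` placeholder. A minimal sketch of how a caller supplies the value (the helper function is illustrative; the `'METRIC_BASED_ALERT'` check mirrors the `QuerySection`/`RuleOptions` hunks later in this diff):

```ts
import { TFunction } from 'i18next';

// Alert types without a metric-definition step renumber the remaining
// steps, so the step label is computed rather than baked into the string.
export function conditionStepHeading(t: TFunction, alertType: string): string {
  const step = alertType === 'METRIC_BASED_ALERT' ? '2' : '1';
  // i18next interpolates {{step}} in "Step {{step}} - Define Alert Conditions".
  return t('alert_form_step2', { step });
}
```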
@@ -191,7 +191,8 @@ function App(): JSX.Element {
   // if the user is on basic plan then remove billing
   if (isOnBasicPlan) {
     updatedRoutes = updatedRoutes.filter(
-      (route) => route?.path !== ROUTES.BILLING,
+      (route) =>
+        route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
     );
   }
@@ -204,7 +205,8 @@ function App(): JSX.Element {
   } else {
     // if not a cloud user then remove billing and add list licenses route
     updatedRoutes = updatedRoutes.filter(
-      (route) => route?.path !== ROUTES.BILLING,
+      (route) =>
+        route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
     );
     updatedRoutes = [...updatedRoutes, LIST_LICENSES];
   }
@@ -101,13 +101,18 @@
     line-height: 28px;
   }

-  .changelog-media-image {
+  .changelog-media-image,
+  .changelog-media-video {
     height: auto;
     width: 100%;
     overflow: hidden;
     border-radius: 4px;
     border: 1px solid var(--bg-slate-400, #1d212d);
   }
+
+  .changelog-media-video {
+    margin: 12px 0;
+  }
 }

 .lightMode {
@@ -32,7 +32,7 @@ function renderMedia(media: Media): JSX.Element | null {
       controls
       controlsList="nodownload noplaybackrate"
       loop
-      className="my-3 h-auto w-full rounded"
+      className="changelog-media-video"
     >
       <source src={media.url} type={media.mime} />
       <track kind="captions" src="" label="No captions available" default />
@@ -56,7 +56,7 @@ function ChangelogRenderer({ changelog }: Props): JSX.Element {
       </div>
       <span className="changelog-release-date">{formattedReleaseDate}</span>
       {changelog.features && changelog.features.length > 0 && (
-        <div className="changelog-renderer-list flex flex-col gap-7">
+        <div className="changelog-renderer-list">
          {changelog.features.map((feature) => (
            <div key={feature.id}>
              <h2>{feature.title}</h2>
@@ -71,7 +71,7 @@ function LogDetail({
   const [contextQuery, setContextQuery] = useState<Query | undefined>();
   const [filters, setFilters] = useState<TagFilter | null>(null);
   const [isEdit, setIsEdit] = useState<boolean>(false);
-  const { initialDataSource, stagedQuery } = useQueryBuilder();
+  const { stagedQuery } = useQueryBuilder();

   const listQuery = useMemo(() => {
     if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
@@ -81,7 +81,7 @@ function LogDetail({

   const { options } = useOptionsMenu({
     storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
-    dataSource: initialDataSource || DataSource.LOGS,
+    dataSource: DataSource.LOGS,
     aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
   });
@@ -1,7 +1,7 @@
 import '../Explorer.styles.scss';

-import { LoadingOutlined } from '@ant-design/icons';
-import { Spin, Table, Typography } from 'antd';
+import { DataTable } from '@signozhq/table';
+import { Typography } from 'antd';
 import axios from 'api';
 import logEvent from 'api/common/logEvent';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
@@ -27,7 +27,7 @@ import { GlobalReducer } from 'types/reducer/globalTime';

 import { DEFAULT_PARAMS, useApiMonitoringParams } from '../../queryParams';
 import {
-  columnsConfig,
+  dataTableColumnsConfig,
   formatDataForTable,
   hardcodedAttributeKeys,
 } from '../../utils';
@@ -157,50 +157,42 @@ function DomainList(): JSX.Element {
         hardcodedAttributeKeys={hardcodedAttributeKeys}
       />
     </div>
-    <Table
-      className={cx('api-monitoring-domain-list-table')}
-      dataSource={isFetching || isLoading ? [] : formattedDataForTable}
-      columns={columnsConfig}
-      loading={{
-        spinning: isFetching || isLoading,
-        indicator: <Spin indicator={<LoadingOutlined size={14} spin />} />,
-      }}
-      locale={{
-        emptyText:
-          isFetching || isLoading ? null : (
-            <div className="no-filtered-domains-message-container">
-              <div className="no-filtered-domains-message-content">
-                <img
-                  src="/Icons/emptyState.svg"
-                  alt="thinking-emoji"
-                  className="empty-state-svg"
-                />
-                <Typography.Text className="no-filtered-domains-message">
-                  This query had no results. Edit your query and try again!
-                </Typography.Text>
-              </div>
-            </div>
-          ),
-      }}
-      scroll={{ x: true }}
-      tableLayout="fixed"
-      onRow={(record, index): { onClick: () => void; className: string } => ({
-        onClick: (): void => {
-          if (index !== undefined) {
-            const dataIndex = formattedDataForTable.findIndex(
-              (item) => item.key === record.key,
-            );
-            setSelectedDomainIndex(dataIndex);
-            setParams({ selectedDomain: record.domainName });
-            logEvent('API Monitoring: Domain name row clicked', {});
-          }
-        },
-        className: 'expanded-clickable-row',
-      })}
-      rowClassName={(_, index): string =>
-        index % 2 === 0 ? 'table-row-dark' : 'table-row-light'
-      }
-    />
+    {!isFetching && !isLoading && formattedDataForTable.length === 0 && (
+      <div className="no-filtered-domains-message-container">
+        <div className="no-filtered-domains-message-content">
+          <img
+            src="/Icons/emptyState.svg"
+            alt="thinking-emoji"
+            className="empty-state-svg"
+          />
+          <Typography.Text className="no-filtered-domains-message">
+            This query had no results. Edit your query and try again!
+          </Typography.Text>
+        </div>
+      </div>
+    )}
+    <DataTable
+      columns={dataTableColumnsConfig}
+      data={isFetching || isLoading ? [] : formattedDataForTable}
+      tableId="api-monitoring-domain-list-table"
+      isLoading={isFetching || isLoading}
+      enablePagination={false}
+      showHeaders
+      enableSorting
+      enableColumnResizing={false}
+      enableColumnReordering={false}
+      enableColumnPinning={false}
+      enableGlobalFilter={false}
+      onRowClick={(record, index): void => {
+        if (index !== undefined) {
+          const dataIndex = formattedDataForTable.findIndex(
+            (item) => item.key === record.original.key,
+          );
+          setSelectedDomainIndex(dataIndex);
+          setParams({ selectedDomain: record.original.domainName });
+          logEvent('API Monitoring: Domain name row clicked', {});
+        }
+      }}
+    />
     {selectedDomainIndex !== -1 && (
       <DomainDetails
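One behavioural detail in the hunk above: antd's `onRow` callback receives the row record directly, while the tanstack-backed `DataTable` passes a `Row` wrapper, so field access moves to `record.original`. A simplified sketch of the two signatures (the `DomainRow` type is illustrative, not from the PR):

```ts
import { Row } from '@tanstack/react-table';

type DomainRow = { key: string; domainName: string };

// antd <Table onRow={...}> hands over the raw record:
const onAntdRowClick = (record: DomainRow): void => {
  console.log(record.domainName);
};

// @signozhq/table <DataTable onRowClick={...}> hands over a tanstack Row,
// so the underlying data sits on `record.original`:
const onDataTableRowClick = (record: Row<DomainRow>): void => {
  console.log(record.original.domainName);
};
```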
@@ -1,5 +1,7 @@
+/* eslint-disable sonarjs/no-identical-functions */
 /* eslint-disable sonarjs/no-duplicate-string */
 import { Color } from '@signozhq/design-tokens';
+import { ColumnDef } from '@tanstack/react-table';
 import { Progress, Tag, Tooltip } from 'antd';
 import { ColumnType } from 'antd/es/table';
 import {
@@ -249,6 +251,152 @@ export const columnsConfig: ColumnType<APIDomainsRowData>[] = [
   },
 ];

+// DataTable columns configuration for SignozHq DataTable
+export const dataTableColumnsConfig: ColumnDef<APIDomainsRowData>[] = [
+  {
+    accessorKey: 'domainName',
+    header: (): React.ReactNode => (
+      <div className="domain-list-name-col-header">Domain</div>
+    ),
+    size: 237,
+    enableSorting: false,
+    cell: ({ getValue }): React.ReactNode => (
+      <div className="domain-list-name-col-value">{getValue() as string}</div>
+    ),
+  },
+  {
+    accessorKey: 'endpointCount',
+    header: 'Endpoints in use',
+    size: 142,
+    enableSorting: true,
+    sortingFn: (rowA, rowB): number => {
+      const endpointA =
+        rowA.original.endpointCount === '-' || rowA.original.endpointCount === 'n/a'
+          ? ''
+          : rowA.original.endpointCount;
+      const endpointB =
+        rowB.original.endpointCount === '-' || rowB.original.endpointCount === 'n/a'
+          ? ''
+          : rowB.original.endpointCount;
+
+      if (!endpointA && !endpointB) return 0;
+      if (!endpointA) return 1;
+      if (!endpointB) return -1;
+
+      return Number(endpointA) - Number(endpointB);
+    },
+  },
+  {
+    accessorKey: 'lastUsed',
+    header: 'Last used',
+    size: 142,
+    enableSorting: true,
+    sortingFn: (rowA, rowB): number => {
+      const dateA =
+        rowA.original.lastUsed === '-' || rowA.original.lastUsed === 'n/a'
+          ? new Date(0).toISOString()
+          : rowA.original.lastUsed;
+      const dateB =
+        rowB.original.lastUsed === '-' || rowB.original.lastUsed === 'n/a'
+          ? new Date(0).toISOString()
+          : rowB.original.lastUsed;
+
+      return new Date(dateB).getTime() - new Date(dateA).getTime();
+    },
+    cell: ({ getValue }): React.ReactNode => {
+      const lastUsed = getValue() as string;
+      return lastUsed === 'n/a' || lastUsed === '-'
+        ? '-'
+        : getLastUsedRelativeTime(new Date(lastUsed).getTime());
+    },
+  },
+  {
+    accessorKey: 'rate',
+    header: (): React.ReactNode => (
+      <div>
+        Rate <span className="round-metric-tag">ops/s</span>
+      </div>
+    ),
+    size: 142,
+    enableSorting: true,
+    sortingFn: (rowA, rowB): number => {
+      const rateA =
+        rowA.original.rate === '-' || rowA.original.rate === 'n/a'
+          ? 0
+          : rowA.original.rate;
+      const rateB =
+        rowB.original.rate === '-' || rowB.original.rate === 'n/a'
+          ? 0
+          : rowB.original.rate;
+      return Number(rateA) - Number(rateB);
+    },
+  },
+  {
+    accessorKey: 'errorRate',
+    header: (): React.ReactNode => (
+      <div>
+        Error <span className="round-metric-tag">%</span>
+      </div>
+    ),
+    size: 142,
+    enableSorting: true,
+    sortingFn: (rowA, rowB): number => {
+      const errorRateA =
+        rowA.original.errorRate === '-' || rowA.original.errorRate === 'n/a'
+          ? 0
+          : rowA.original.errorRate;
+      const errorRateB =
+        rowB.original.errorRate === '-' || rowB.original.errorRate === 'n/a'
+          ? 0
+          : rowB.original.errorRate;
+
+      return Number(errorRateA) - Number(errorRateB);
+    },
+    cell: ({ getValue }): React.ReactNode => {
+      const errorRate = getValue() as number | string;
+      const errorRateValue =
+        errorRate === 'n/a' || errorRate === '-' ? 0 : errorRate;
+      return (
+        <Progress
+          status="active"
+          percent={Number((errorRateValue as number).toFixed(2))}
+          strokeLinecap="butt"
+          size="small"
+          strokeColor={((): string => {
+            const errorRatePercent = Number((errorRateValue as number).toFixed(2));
+            if (errorRatePercent >= 90) return Color.BG_SAKURA_500;
+            if (errorRatePercent >= 60) return Color.BG_AMBER_500;
+            return Color.BG_FOREST_500;
+          })()}
+          className="progress-bar error-rate"
+        />
+      );
+    },
+  },
+  {
+    accessorKey: 'latency',
+    header: (): React.ReactNode => (
+      <div>
+        Avg. Latency <span className="round-metric-tag">ms</span>
+      </div>
+    ),
+    size: 142,
+    enableSorting: true,
+    sortingFn: (rowA, rowB): number => {
+      const latencyA =
+        rowA.original.latency === '-' || rowA.original.latency === 'n/a'
+          ? 0
+          : rowA.original.latency;
+      const latencyB =
+        rowB.original.latency === '-' || rowB.original.latency === 'n/a'
+          ? 0
+          : rowB.original.latency;
+
+      return Number(latencyA) - Number(latencyB);
+    },
+  },
+];

 // Rename this to a proper name
 export const hardcodedAttributeKeys: BaseAutocompleteData[] = [
   {
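The per-column `sortingFn`s in `dataTableColumnsConfig` repeat the same `'-'`/`'n/a'` sentinel handling, which is why the hunk adds the `sonarjs/no-identical-functions` disable. A possible factoring, sketched here as a suggestion rather than taken from the PR (`numericWithSentinels` is a hypothetical helper name):

```ts
import { Row } from '@tanstack/react-table';

// Sort numerically, pushing '-'/'n/a' placeholders after real values.
function numericWithSentinels<T>(
  get: (row: T) => string | number,
): (rowA: Row<T>, rowB: Row<T>) => number {
  const isPlaceholder = (v: string | number): boolean => v === '-' || v === 'n/a';
  return (rowA, rowB): number => {
    const a = get(rowA.original);
    const b = get(rowB.original);
    if (isPlaceholder(a) && isPlaceholder(b)) return 0;
    if (isPlaceholder(a)) return 1;
    if (isPlaceholder(b)) return -1;
    return Number(a) - Number(b);
  };
}

// Hypothetical usage in a column definition:
// sortingFn: numericWithSentinels((row) => row.endpointCount),
```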
@@ -1,4 +1,7 @@
-.app-banner-container {
+// Earlier we were having app-banner-container class
+// we change it to app-banner-wrapper as the adblocker was blocking the app-banner-container class
+// Keep an eye on What classnames are used in the codebase
+.app-banner-wrapper {
   position: relative;
   width: 100%;
 }
@@ -63,7 +63,6 @@ import {
 } from 'types/api/licensesV3/getActive';
 import AppReducer from 'types/reducer/app';
 import { USER_ROLES } from 'types/roles';
-import { checkVersionState } from 'utils/app';
 import { eventEmitter } from 'utils/getEventEmitter';
 import {
   getFormattedDate,
@@ -98,16 +97,11 @@ function AppLayout(props: AppLayoutProps): JSX.Element {

   const [showSlowApiWarning, setShowSlowApiWarning] = useState(false);
   const [slowApiWarningShown, setSlowApiWarningShown] = useState(false);
-  const [shouldFetchChangelog, setShouldFetchChangelog] = useState<boolean>(
-    false,
-  );

-  const { currentVersion, latestVersion } = useSelector<AppState, AppReducer>(
+  const { latestVersion } = useSelector<AppState, AppReducer>(
     (state) => state.app,
   );

-  const isLatestVersion = checkVersionState(currentVersion, latestVersion);
-
   const handleBillingOnSuccess = (
     data: SuccessResponseV2<CheckoutSuccessPayloadProps>,
   ): void => {
@@ -163,7 +157,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
       queryFn: (): Promise<SuccessResponse<ChangelogSchema> | ErrorResponse> =>
         getChangelogByVersion(latestVersion),
       queryKey: ['getChangelogByVersion', latestVersion],
-      enabled: isLoggedIn && !isCloudUserVal && shouldFetchChangelog,
+      enabled: isLoggedIn && !isCloudUserVal && Boolean(latestVersion),
     },
   ]);
@@ -223,7 +217,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {

   if (
     getUserVersionResponse.isFetched &&
-    getUserLatestVersionResponse.isSuccess &&
+    getUserVersionResponse.isSuccess &&
     getUserVersionResponse.data &&
     getUserVersionResponse.data.payload
   ) {
@@ -261,18 +255,13 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
     getUserVersionResponse.isLoading,
     getUserVersionResponse.isError,
     getUserVersionResponse.data,
     getUserVersionResponse.isSuccess,
     getUserLatestVersionResponse.isFetched,
     getUserVersionResponse.isFetched,
     getUserLatestVersionResponse.isSuccess,
     notifications,
   ]);

-  useEffect(() => {
-    if (!isLatestVersion) {
-      setShouldFetchChangelog(true);
-    }
-  }, [isLatestVersion]);
-
   useEffect(() => {
     if (
       getChangelogByVersionResponse.isFetched &&
@@ -613,7 +602,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
       </Helmet>

       {isLoggedIn && (
-        <div className={cx('app-banner-container')}>
+        <div className={cx('app-banner-wrapper')}>
           {SHOW_TRIAL_EXPIRY_BANNER && (
             <div className="trial-expiry-banner">
               You are in free trial period. Your free trial will end on{' '}
@@ -1,30 +1,173 @@
-.empty-logs-search-container {
-  display: flex;
-  flex-direction: column;
-  justify-content: center;
-  align-items: center;
-  height: 240px;
-
-  .empty-logs-search-container-content {
-    display: flex;
-    flex-direction: column;
-    gap: 4px;
-
-    color: var(--text-vanilla-400);
-    font-family: Inter;
-    font-size: 14px;
-    font-style: normal;
-    font-weight: 400;
-    line-height: 18px; /* 128.571% */
-    letter-spacing: -0.07px;
-
-    align-items: flex-start;
-    .empty-state-svg {
-      height: 50px;
-      width: 50px;
-    }
-
-    .sub-text {
-      font-weight: 600;
-    }
-  }
-}
+.empty-logs-search {
+  &__container {
+    display: flex;
+    flex-direction: column;
+    justify-content: center;
+    align-items: center;
+    height: 240px;
+  }
+  &__content {
+    display: flex;
+    flex-direction: column;
+    gap: 4px;
+
+    color: var(--text-vanilla-400);
+    font-family: Inter;
+    font-size: 14px;
+    font-style: normal;
+    font-weight: 400;
+    line-height: 18px;
+    letter-spacing: -0.07px;
+
+    align-items: flex-start;
+    .empty-state-svg {
+      height: 50px;
+      width: 50px;
+    }
+  }
+  &__sub-text {
+    font-weight: 600;
+  }
+
+  &__container {
+    &--custom-message {
+      height: 445px;
+      .empty-state-svg {
+        height: 32px;
+        width: 32px;
+      }
+      .empty-logs-search {
+        &__header {
+          display: flex;
+          align-items: center;
+          gap: 4px;
+        }
+
+        &__title {
+          color: var(--bg-vanilla-100);
+          font-size: 14px;
+          font-weight: 500;
+          line-height: 20px;
+          letter-spacing: -0.07px;
+        }
+
+        &__subtitle {
+          color: var(--bg-vanilla-400);
+          font-size: 14px;
+          font-weight: 400;
+          line-height: 20px;
+          letter-spacing: -0.07px;
+        }
+
+        &__description {
+          font-size: 14px;
+          color: var(--text-vanilla-400);
+          line-height: 20px;
+        }
+
+        &__description-list {
+          margin: 0;
+          margin-top: 8px;
+          color: var(--bg-vanilla-400);
+          font-size: 14px;
+          font-weight: 400;
+          line-height: 20px;
+          letter-spacing: -0.07px;
+          display: flex;
+          flex-direction: column;
+          gap: 6px;
+          list-style: none;
+          padding: 0;
+          font-family: Inter;
+        }
+
+        &__description-list li {
+          position: relative;
+          padding-left: 20px;
+        }
+
+        &__description-list li::before {
+          content: '⎯';
+          font-family: Inter;
+          position: absolute;
+          left: 0;
+          color: var(--bg-robin-400);
+          font-weight: bold;
+          font-size: 16px;
+          line-height: 20px;
+        }
+
+        &__clear-filters-btn {
+          display: flex;
+          width: 468px;
+          font-family: Inter;
+          padding: 12px;
+          justify-content: space-between;
+          align-items: flex-start;
+          border-radius: 3px;
+          border: 1px dashed var(--bg-slate-500);
+          background: transparent;
+          color: var(--bg-vanilla-400);
+          font-size: 14px;
+          font-weight: 400;
+          line-height: 18px;
+          letter-spacing: -0.07px;
+          cursor: pointer;
+          margin-top: 12px;
+        }
+
+        &__clear-filters-btn-icon {
+          display: flex;
+          align-items: center;
+          gap: 6px;
+        }
+
+        &__row {
+          display: flex;
+          flex-direction: row;
+          align-items: flex-end;
+          max-width: 825px;
+          gap: 25px;
+          justify-content: center;
+          margin-left: 21px;
+        }
+
+        &__content {
+          display: flex;
+          flex-direction: column;
+          gap: 4px;
+          min-width: 260px;
+        }
+
+        &__resources-card {
+          background: var(--bg-ink-400);
+          border: 1px solid var(--bg-slate-500);
+          border-radius: 4px;
+          width: 332px;
+        }
+
+        &__resources-title {
+          color: var(--bg-vanilla-400);
+          font-family: Inter;
+          font-size: 11px;
+          font-weight: 600;
+          line-height: 18px;
+          letter-spacing: 0.88px;
+          text-transform: uppercase;
+          padding: 16px 16px 12px;
+          border-bottom: 1px solid var(--bg-slate-500);
+          height: 46px;
+        }
+
+        &__resources-links {
+          padding: 16px;
+          display: flex;
+          flex-direction: column;
+          gap: 16px;
+
+          .learn-more {
+            height: 18px;
+          }
+        }
+      }
+    }
+  }
+}
@@ -2,16 +2,24 @@ import './EmptyLogsSearch.styles.scss';

 import { Typography } from 'antd';
 import logEvent from 'api/common/logEvent';
+import cx from 'classnames';
+import LearnMore from 'components/LearnMore/LearnMore';
+import { EmptyLogsListConfig } from 'container/LogsExplorerList/utils';
+import { Delete } from 'lucide-react';
 import { useEffect, useRef } from 'react';
 import { DataSource, PanelTypeKeys } from 'types/common/queryBuilder';

+interface EmptyLogsSearchProps {
+  dataSource: DataSource;
+  panelType: PanelTypeKeys;
+  customMessage?: EmptyLogsListConfig;
+}
+
 export default function EmptyLogsSearch({
   dataSource,
   panelType,
-}: {
-  dataSource: DataSource;
-  panelType: PanelTypeKeys;
-}): JSX.Element {
+  customMessage,
+}: EmptyLogsSearchProps): JSX.Element {
   const logEventCalledRef = useRef(false);
   useEffect(() => {
     if (!logEventCalledRef.current) {
@@ -30,18 +38,80 @@ export default function EmptyLogsSearch({
   }, []);

   return (
-    <div className="empty-logs-search-container">
-      <div className="empty-logs-search-container-content">
-        <img
-          src="/Icons/emptyState.svg"
-          alt="thinking-emoji"
-          className="empty-state-svg"
-        />
-        <Typography.Text>
-          <span className="sub-text">This query had no results. </span>
-          Edit your query and try again!
-        </Typography.Text>
-      </div>
-    </div>
+    <div
+      className={cx('empty-logs-search__container', {
+        'empty-logs-search__container--custom-message': !!customMessage,
+      })}
+    >
+      <div className="empty-logs-search__row">
+        <div className="empty-logs-search__content">
+          <img
+            src="/Icons/emptyState.svg"
+            alt="thinking-emoji"
+            className="empty-state-svg"
+          />
+          {customMessage ? (
+            <>
+              <div className="empty-logs-search__header">
+                <Typography.Text className="empty-logs-search__title">
+                  {customMessage.title}
+                </Typography.Text>
+                {customMessage.subTitle && (
+                  <Typography.Text className="empty-logs-search__subtitle">
+                    {customMessage.subTitle}
+                  </Typography.Text>
+                )}
+              </div>
+              {Array.isArray(customMessage.description) ? (
+                <ul className="empty-logs-search__description-list">
+                  {customMessage.description.map((desc) => (
+                    <li key={desc}>{desc}</li>
+                  ))}
+                </ul>
+              ) : (
+                <Typography.Text className="empty-logs-search__description">
+                  {customMessage.description}
+                </Typography.Text>
+              )}
+              {/* Clear filters button */}
+              {customMessage.showClearFiltersButton && (
+                <button
+                  type="button"
+                  className="empty-logs-search__clear-filters-btn"
+                  onClick={customMessage.onClearFilters}
+                >
+                  {customMessage.clearFiltersButtonText}
+                  <span className="empty-logs-search__clear-filters-btn-icon">
+                    <Delete size={14} />
+                    Clear filters
+                  </span>
+                </button>
+              )}
+            </>
+          ) : (
+            <Typography.Text>
+              <span className="empty-logs-search__sub-text">
+                This query had no results.{' '}
+              </span>
+              Edit your query and try again!
+            </Typography.Text>
+          )}
+        </div>
+        {customMessage?.documentationLinks && (
+          <div className="empty-logs-search__resources-card">
+            <div className="empty-logs-search__resources-title">RESOURCES</div>
+            <div className="empty-logs-search__resources-links">
+              {customMessage.documentationLinks.map((link) => (
+                <LearnMore key={link.text} text={link.text} url={link.url} />
+              ))}
+            </div>
+          </div>
+        )}
+      </div>
+    </div>
   );
 }
+
+EmptyLogsSearch.defaultProps = {
+  customMessage: null,
+};
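The new `customMessage` branches above read a handful of fields from `EmptyLogsListConfig`, whose declaration (in `container/LogsExplorerList/utils`) is not part of this diff. The shape below is inferred purely from that usage and may not match the real type exactly:

```ts
// Inferred from how EmptyLogsSearch consumes the prop; the authoritative
// declaration lives in container/LogsExplorerList/utils.
interface EmptyLogsListConfig {
  title: string;
  subTitle?: string;
  description?: string | string[]; // an array renders as a bulleted list
  showClearFiltersButton?: boolean;
  clearFiltersButtonText?: string;
  onClearFilters?: () => void;
  documentationLinks?: { text: string; url: string }[];
}
```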
@@ -212,9 +212,12 @@ function QuerySection({
         return null;
     }
   };

+  const step2Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '2' : '1';
+
   return (
     <>
-      <StepHeading> {t('alert_form_step2')}</StepHeading>
+      <StepHeading> {t('alert_form_step2', { step: step2Label })}</StepHeading>
       <FormContainer>
         <div>{renderTabs(alertType)}</div>
         {renderQuerySection(currentTab)}
@@ -371,9 +371,11 @@ function RuleOptions({
     selectedCategory?.name,
   );

+  const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
+
   return (
     <>
-      <StepHeading>{t('alert_form_step3')}</StepHeading>
+      <StepHeading>{t('alert_form_step3', { step: step3Label })}</StepHeading>
       <FormContainer>
         {queryCategory === EQueryType.PROM && renderPromRuleOptions()}
         {queryCategory !== EQueryType.PROM &&
@@ -2,6 +2,7 @@
   overflow: auto;
   margin: 8px -8px;
   margin-right: 0;
+  margin-bottom: 64px;

   .react-grid-layout {
     border: none !important;
@@ -731,7 +731,7 @@ export const getClusterMetricsQueryPayload = (
   },
   {
     selectedTime: 'GLOBAL_TIME',
-    graphType: PANEL_TYPES.TIME_SERIES,
+    graphType: PANEL_TYPES.TABLE,
     query: {
       builder: {
         queryData: [
@@ -751,7 +751,7 @@ export const getClusterMetricsQueryPayload = (
           filters: {
             items: [
               {
-                id: 'd7779183',
+                id: 'a7da59c7',
                 key: {
                   dataType: DataTypes.String,
                   id: 'k8s_cluster_name--string--tag--false',
@@ -786,12 +786,12 @@ export const getClusterMetricsQueryPayload = (
           },
         ],
         having: [],
-        legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'available',
         limit: null,
         orderBy: [],
         queryName: 'A',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'sum',
         stepInterval: 60,
         timeAggregation: 'latest',
       },
@@ -804,14 +804,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDeploymentDesiredKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'avg',
           dataSource: DataSource.METRICS,
           disabled: false,
           expression: 'B',
           filters: {
             items: [
               {
-                id: 'd7779183',
+                id: '55110885',
                 key: {
                   dataType: DataTypes.String,
                   id: 'k8s_cluster_name--string--tag--false',
@@ -846,14 +846,14 @@ export const getClusterMetricsQueryPayload = (
           },
         ],
         having: [],
-        legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'desired',
         limit: null,
         orderBy: [],
         queryName: 'B',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'sum',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'avg',
       },
     ],
     queryFormulas: [],
@@ -890,13 +890,13 @@ export const getClusterMetricsQueryPayload = (
       queryType: EQueryType.QUERY_BUILDER,
     },
     variables: {},
-    formatForWeb: false,
+    formatForWeb: true,
     start,
     end,
   },
   {
     selectedTime: 'GLOBAL_TIME',
-    graphType: PANEL_TYPES.TIME_SERIES,
+    graphType: PANEL_TYPES.TABLE,
     query: {
       builder: {
         queryData: [
@@ -909,14 +909,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sStatefulsetCurrentPodsKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'max',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'A',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: '3c57b4d1',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -951,14 +951,14 @@ export const getClusterMetricsQueryPayload = (
           },
         ],
         having: [],
-        legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'current',
         limit: null,
         orderBy: [],
         queryName: 'A',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'sum',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'max',
       },
       {
         aggregateAttribute: {
@@ -969,14 +969,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sStatefulsetDesiredPodsKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'max',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'B',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: '0f49fe64',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -1011,14 +1011,14 @@ export const getClusterMetricsQueryPayload = (
           },
         ],
         having: [],
-        legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'desired',
         limit: null,
         orderBy: [],
         queryName: 'B',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'sum',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'max',
       },
       {
         aggregateAttribute: {
@@ -1029,14 +1029,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sStatefulsetReadyPodsKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'max',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'C',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: '0bebf625',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -1071,14 +1071,14 @@ export const getClusterMetricsQueryPayload = (
           },
         ],
         having: [],
-        legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'ready',
         limit: null,
         orderBy: [],
         queryName: 'C',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'sum',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'max',
       },
       {
         aggregateAttribute: {
@@ -1089,14 +1089,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sStatefulsetUpdatedPodsKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'max',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'D',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: '1ddacbbe',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -1131,14 +1131,14 @@ export const getClusterMetricsQueryPayload = (
           },
         ],
         having: [],
-        legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'updated',
         limit: null,
         orderBy: [],
         queryName: 'D',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'sum',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'max',
       },
     ],
     queryFormulas: [],
@@ -1199,13 +1199,13 @@ export const getClusterMetricsQueryPayload = (
       queryType: EQueryType.QUERY_BUILDER,
     },
     variables: {},
-    formatForWeb: false,
+    formatForWeb: true,
     start,
     end,
   },
   {
     selectedTime: 'GLOBAL_TIME',
-    graphType: PANEL_TYPES.TIME_SERIES,
+    graphType: PANEL_TYPES.TABLE,
     query: {
       builder: {
         queryData: [
@@ -1218,14 +1218,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDaemonsetCurrentScheduledNodesKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'avg',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'A',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: 'e0bea554',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -1250,24 +1250,16 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDaemonsetNameKey,
             type: 'tag',
           },
-          {
-            dataType: DataTypes.String,
-            id: 'k8s_namespace_name--string--tag--false',
-            isColumn: false,
-            isJSON: false,
-            key: k8sNamespaceNameKey,
-            type: 'tag',
-          },
         ],
         having: [],
-        legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'current_nodes',
         limit: null,
         orderBy: [],
         queryName: 'A',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'avg',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'avg',
       },
       {
         aggregateAttribute: {
@@ -1278,14 +1270,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDaemonsetDesiredScheduledNodesKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'avg',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'B',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: '741052f7',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -1310,24 +1302,16 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDaemonsetNameKey,
             type: 'tag',
           },
-          {
-            dataType: DataTypes.String,
-            id: 'k8s_namespace_name--string--tag--false',
-            isColumn: false,
-            isJSON: false,
-            key: k8sNamespaceNameKey,
-            type: 'tag',
-          },
         ],
         having: [],
-        legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'desired_nodes',
         limit: null,
         orderBy: [],
         queryName: 'B',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'avg',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'avg',
       },
       {
         aggregateAttribute: {
@@ -1338,14 +1322,14 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDaemonsetReadyNodesKey,
             type: 'Gauge',
           },
-          aggregateOperator: 'latest',
+          aggregateOperator: 'avg',
          dataSource: DataSource.METRICS,
          disabled: false,
          expression: 'C',
          filters: {
            items: [
              {
-                id: 'd7779183',
+                id: 'f23759f2',
                key: {
                  dataType: DataTypes.String,
                  id: 'k8s_cluster_name--string--tag--false',
@@ -1370,24 +1354,16 @@ export const getClusterMetricsQueryPayload = (
             key: k8sDaemonsetNameKey,
             type: 'tag',
           },
-          {
-            dataType: DataTypes.String,
-            id: 'k8s_namespace_name--string--tag--false',
-            isColumn: false,
-            isJSON: false,
-            key: k8sNamespaceNameKey,
-            type: 'tag',
-          },
         ],
         having: [],
-        legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
+        legend: 'ready_nodes',
         limit: null,
         orderBy: [],
         queryName: 'C',
-        reduceTo: 'avg',
-        spaceAggregation: 'max',
+        reduceTo: 'last',
+        spaceAggregation: 'avg',
         stepInterval: 60,
-        timeAggregation: 'latest',
+        timeAggregation: 'avg',
       },
     ],
     queryFormulas: [],
@@ -1436,316 +1412,7 @@ export const getClusterMetricsQueryPayload = (
       queryType: EQueryType.QUERY_BUILDER,
     },
     variables: {},
-    formatForWeb: false,
-    start,
-    end,
-  },
-  {
-    selectedTime: 'GLOBAL_TIME',
-    graphType: PANEL_TYPES.TIME_SERIES,
-    query: {
-      builder: {
-        queryData: [
-          {
-            aggregateAttribute: {
-              dataType: DataTypes.Float64,
-              id: 'k8s_job_active_pods--float64--Gauge--true',
-              isColumn: true,
-              isJSON: false,
-              key: k8sJobActivePodsKey,
-              type: 'Gauge',
-            },
-            aggregateOperator: 'latest',
-            dataSource: DataSource.METRICS,
-            disabled: false,
-            expression: 'A',
-            filters: {
-              items: [
-                {
-                  id: 'd7779183',
-                  key: {
-                    dataType: DataTypes.String,
-                    id: 'k8s_cluster_name--string--tag--false',
-                    isColumn: false,
-                    isJSON: false,
-                    key: k8sClusterNameKey,
-                    type: 'tag',
-                  },
-                  op: '=',
-                  value: cluster.meta.k8s_cluster_name,
-                },
-              ],
-              op: 'AND',
-            },
-            functions: [],
-            groupBy: [
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_job_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sJobNameKey,
-                type: 'tag',
-              },
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_namespace_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sNamespaceNameKey,
-                type: 'tag',
-              },
-            ],
-            having: [],
-            legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-            limit: null,
-            orderBy: [],
-            queryName: 'A',
-            reduceTo: 'avg',
-            spaceAggregation: 'max',
-            stepInterval: 60,
-            timeAggregation: 'latest',
-          },
-          {
-            aggregateAttribute: {
-              dataType: DataTypes.Float64,
-              id: 'k8s_job_successful_pods--float64--Gauge--true',
-              isColumn: true,
-              isJSON: false,
-              key: k8sJobSuccessfulPodsKey,
-              type: 'Gauge',
-            },
-            aggregateOperator: 'latest',
-            dataSource: DataSource.METRICS,
-            disabled: false,
-            expression: 'B',
-            filters: {
-              items: [
-                {
-                  id: 'd7779183',
-                  key: {
-                    dataType: DataTypes.String,
-                    id: 'k8s_cluster_name--string--tag--false',
-                    isColumn: false,
-                    isJSON: false,
-                    key: k8sClusterNameKey,
-                    type: 'tag',
-                  },
-                  op: '=',
-                  value: cluster.meta.k8s_cluster_name,
-                },
-              ],
-              op: 'AND',
-            },
-            functions: [],
-            groupBy: [
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_job_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sJobNameKey,
-                type: 'tag',
-              },
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_namespace_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sNamespaceNameKey,
-                type: 'tag',
-              },
-            ],
-            having: [],
-            legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-            limit: null,
-            orderBy: [],
-            queryName: 'B',
-            reduceTo: 'avg',
-            spaceAggregation: 'max',
-            stepInterval: 60,
-            timeAggregation: 'latest',
-          },
-          {
-            aggregateAttribute: {
-              dataType: DataTypes.Float64,
-              id: 'k8s_job_failed_pods--float64--Gauge--true',
-              isColumn: true,
-              isJSON: false,
-              key: k8sJobFailedPodsKey,
-              type: 'Gauge',
-            },
-            aggregateOperator: 'latest',
-            dataSource: DataSource.METRICS,
-            disabled: false,
-            expression: 'C',
-            filters: {
-              items: [
-                {
-                  id: 'd7779183',
-                  key: {
-                    dataType: DataTypes.String,
-                    id: 'k8s_cluster_name--string--tag--false',
-                    isColumn: false,
-                    isJSON: false,
-                    key: k8sClusterNameKey,
-                    type: 'tag',
-                  },
-                  op: '=',
-                  value: cluster.meta.k8s_cluster_name,
-                },
-              ],
-              op: 'AND',
-            },
-            functions: [],
-            groupBy: [
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_job_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sJobNameKey,
-                type: 'tag',
-              },
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_namespace_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sNamespaceNameKey,
-                type: 'tag',
-              },
-            ],
-            having: [],
-            legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-            limit: null,
-            orderBy: [],
-            queryName: 'C',
-            reduceTo: 'avg',
-            spaceAggregation: 'max',
-            stepInterval: 60,
-            timeAggregation: 'latest',
-          },
-          {
-            aggregateAttribute: {
-              dataType: DataTypes.Float64,
-              id: 'k8s_job_desired_successful_pods--float64--Gauge--true',
-              isColumn: true,
-              isJSON: false,
-              key: k8sJobDesiredSuccessfulPodsKey,
-              type: 'Gauge',
-            },
-            aggregateOperator: 'latest',
-            dataSource: DataSource.METRICS,
-            disabled: false,
-            expression: 'D',
-            filters: {
-              items: [
-                {
-                  id: 'd7779183',
-                  key: {
-                    dataType: DataTypes.String,
-                    id: 'k8s_cluster_name--string--tag--false',
-                    isColumn: false,
-                    isJSON: false,
-                    key: k8sClusterNameKey,
-                    type: 'tag',
-                  },
-                  op: '=',
-                  value: cluster.meta.k8s_cluster_name,
-                },
-              ],
-              op: 'AND',
-            },
-            functions: [],
-            groupBy: [
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_job_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sJobNameKey,
-                type: 'tag',
-              },
-              {
-                dataType: DataTypes.String,
-                id: 'k8s_namespace_name--string--tag--false',
-                isColumn: false,
-                isJSON: false,
-                key: k8sNamespaceNameKey,
-                type: 'tag',
-              },
-            ],
-            having: [],
-            legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
-            limit: null,
-            orderBy: [],
-            queryName: 'D',
-            reduceTo: 'avg',
-            spaceAggregation: 'max',
-            stepInterval: 60,
-            timeAggregation: 'latest',
-          },
-        ],
-        queryFormulas: [],
-      },
-      clickhouse_sql: [
-        {
-          disabled: false,
-          legend: '',
-          name: 'A',
-          query: '',
-        },
-        {
-          disabled: false,
-          legend: '',
-          name: 'B',
-          query: '',
-        },
-        {
-          disabled: false,
-          legend: '',
-          name: 'C',
-          query: '',
-        },
-        {
-          disabled: false,
-          legend: '',
-          name: 'D',
-          query: '',
-        },
-      ],
-      id: v4(),
-      promql: [
-        {
-          disabled: false,
-          legend: '',
-          name: 'A',
-          query: '',
-        },
-        {
-          disabled: false,
-          legend: '',
-          name: 'B',
-          query: '',
-        },
-        {
-          disabled: false,
-          legend: '',
-          name: 'C',
-          query: '',
-        },
-        {
-          disabled: false,
-          legend: '',
-          name: 'D',
-          query: '',
-        },
-      ],
-      queryType: EQueryType.QUERY_BUILDER,
-    },
-    variables: {},
-    formatForWeb: false,
+    formatForWeb: true,
     start,
     end,
   },
@@ -1777,7 +1444,7 @@ export const getClusterMetricsQueryPayload = (
                 id: 'k8s_cluster_name--string--tag--false',
                 isColumn: false,
                 isJSON: false,
-                key: 'k8s_cluster_name',
+                key: k8sClusterNameKey,
                 type: 'tag',
               },
               op: '=',
@@ -1837,7 +1504,7 @@ export const getClusterMetricsQueryPayload = (
                 id: 'k8s_cluster_name--string--tag--false',
                 isColumn: false,
                 isJSON: false,
-                key: 'k8s_cluster_name',
+                key: k8sClusterNameKey,
                 type: 'tag',
               },
               op: '=',
@@ -1897,7 +1564,7 @@ export const getClusterMetricsQueryPayload = (
                 id: 'k8s_cluster_name--string--tag--false',
                 isColumn: false,
                 isJSON: false,
-                key: 'k8s_cluster_name',
+                key: k8sClusterNameKey,
                 type: 'tag',
               },
               op: '=',
@@ -1957,7 +1624,7 @@ export const getClusterMetricsQueryPayload = (
                 id: 'k8s_cluster_name--string--tag--false',
                 isColumn: false,
                 isJSON: false,
-                key: 'k8s_cluster_name',
+                key: k8sClusterNameKey,
                 type: 'tag',
               },
               op: '=',
@@ -2005,6 +1672,24 @@ export const getClusterMetricsQueryPayload = (
           name: 'A',
           query: '',
         },
+        {
+          disabled: false,
+          legend: '',
+          name: 'B',
+          query: '',
+        },
+        {
+          disabled: false,
+          legend: '',
+          name: 'C',
+          query: '',
+        },
+        {
+          disabled: false,
+          legend: '',
+          name: 'D',
+          query: '',
+        },
       ],
       id: v4(),
       promql: [
@@ -2014,6 +1699,24 @@ export const getClusterMetricsQueryPayload = (
           name: 'A',
           query: '',
         },
+        {
+          disabled: false,
+          legend: '',
+          name: 'B',
+          query: '',
+        },
+        {
+          disabled: false,
+          legend: '',
+          name: 'C',
+          query: '',
+        },
+        {
+          disabled: false,
+          legend: '',
+          name: 'D',
+          query: '',
+        },
       ],
       queryType: EQueryType.QUERY_BUILDER,
     },
@@ -32,7 +32,7 @@ function ContextLogRenderer({
   const [afterLogPage, setAfterLogPage] = useState<number>(1);
   const [logs, setLogs] = useState<ILog[]>([log]);

-  const { initialDataSource, stagedQuery } = useQueryBuilder();
+  const { stagedQuery } = useQueryBuilder();

   const listQuery = useMemo(() => {
     if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
@@ -42,7 +42,7 @@ function ContextLogRenderer({

   const { options } = useOptionsMenu({
     storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
-    dataSource: initialDataSource || DataSource.METRICS,
+    dataSource: DataSource.LOGS,
     aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
   });
@@ -32,7 +32,7 @@ import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import { ActionItemProps } from './ActionItem';
|
||||
import FieldRenderer from './FieldRenderer';
|
||||
import { TableViewActions } from './TableView/TableViewActions';
|
||||
import TableViewActions from './TableView/TableViewActions';
|
||||
import {
|
||||
filterKeyForField,
|
||||
findKeyPath,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import './TableViewActions.styles.scss';
|
||||
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
@@ -11,10 +12,9 @@ import { OPERATORS } from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';
|
||||
import dompurify from 'dompurify';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { ArrowDownToDot, ArrowUpFromDot, Ellipsis } from 'lucide-react';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
import React, { useMemo, useState } from 'react';
|
||||
import React, { useCallback, useMemo, useState } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
|
||||
@@ -24,12 +24,11 @@ import {
|
||||
escapeHtml,
|
||||
filterKeyForField,
|
||||
getFieldAttributes,
|
||||
jsonToDataNodes,
|
||||
parseFieldValue,
|
||||
recursiveParseJSON,
|
||||
removeEscapeCharacters,
|
||||
unescapeString,
|
||||
} from '../utils';
|
||||
import useAsyncJSONProcessing from './useAsyncJSONProcessing';
|
||||
|
||||
interface ITableViewActionsProps {
|
||||
fieldData: Record<string, string>;
|
||||
@@ -52,7 +51,64 @@ interface ITableViewActionsProps {
|
||||
|
||||
const convert = new Convert();
|
||||
|
||||
export function TableViewActions(
|
||||
// Memoized Tree Component
|
||||
const MemoizedTree = React.memo<{ treeData: any[] }>(({ treeData }) => (
|
||||
<Tree defaultExpandAll showLine treeData={treeData} />
|
||||
));
|
||||
|
||||
MemoizedTree.displayName = 'MemoizedTree';
|
||||
|
||||
// Body Content Component
|
||||
const BodyContent: React.FC<{
|
||||
fieldData: Record<string, string>;
|
||||
record: DataType;
|
||||
bodyHtml: { __html: string };
|
||||
}> = React.memo(({ fieldData, record, bodyHtml }) => {
|
||||
const { isLoading, treeData, error } = useAsyncJSONProcessing(
|
||||
fieldData.value,
|
||||
record.field === 'body',
|
||||
);
|
||||
|
||||
// Show JSON tree if available, otherwise show HTML content
|
||||
if (record.field === 'body' && treeData) {
|
||||
return <MemoizedTree treeData={treeData} />;
|
||||
}
|
||||
|
||||
if (record.field === 'body' && isLoading) {
|
||||
return (
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
|
||||
<Spin size="small" />
|
||||
<span style={{ color: Color.BG_SIENNA_400 }}>Processing JSON...</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (record.field === 'body' && error) {
|
||||
return (
|
||||
<span
|
||||
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
|
||||
>
|
||||
Error parsing Body JSON
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
if (record.field === 'body') {
|
||||
return (
|
||||
<span
|
||||
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
|
||||
>
|
||||
<span dangerouslySetInnerHTML={bodyHtml} />
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
BodyContent.displayName = 'BodyContent';
|
||||
|
||||
export default function TableViewActions(
|
||||
props: ITableViewActionsProps,
|
||||
): React.ReactElement {
|
||||
const {
|
||||
@@ -78,44 +134,42 @@ export function TableViewActions(
|
||||
|
||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
||||
|
||||
if (record.field === 'body') {
|
||||
const parsedBody = recursiveParseJSON(fieldData.value);
|
||||
if (!isEmpty(parsedBody)) {
|
||||
return (
|
||||
<Tree defaultExpandAll showLine treeData={jsonToDataNodes(parsedBody)} />
|
||||
);
|
||||
}
|
||||
}
|
||||
const bodyHtml =
|
||||
record.field === 'body'
|
||||
? {
|
||||
__html: convert.toHtml(
|
||||
dompurify.sanitize(unescapeString(escapeHtml(record.value)), {
|
||||
FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
|
||||
}),
|
||||
),
|
||||
}
|
||||
: { __html: '' };
|
||||
// Memoize bodyHtml computation
|
||||
const bodyHtml = useMemo(() => {
|
||||
if (record.field !== 'body') return { __html: '' };
|
||||
|
||||
return {
|
||||
__html: convert.toHtml(
|
||||
dompurify.sanitize(unescapeString(escapeHtml(record.value)), {
|
||||
FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
|
||||
}),
|
||||
),
|
||||
};
|
||||
}, [record.field, record.value]);
|
||||
|
||||
const fieldFilterKey = filterKeyForField(fieldData.field);
|
||||
let textToCopy = fieldData.value;
|
||||
|
||||
// remove starting and ending quotes from the value
|
||||
try {
|
||||
textToCopy = textToCopy.replace(/^"|"$/g, '');
|
||||
} catch (error) {
|
||||
console.error(
|
||||
'Failed to remove starting and ending quotes from the value',
|
||||
error,
|
||||
);
|
||||
}
|
||||
// Memoize textToCopy computation
|
||||
const textToCopy = useMemo(() => {
|
||||
let text = fieldData.value;
|
||||
try {
|
||||
text = text.replace(/^"|"$/g, '');
|
||||
} catch (error) {
|
||||
console.error(
|
||||
'Failed to remove starting and ending quotes from the value',
|
||||
error,
|
||||
);
|
||||
}
|
||||
return text;
|
||||
}, [fieldData.value]);
|
||||
|
||||
let cleanTimestamp: string;
|
||||
if (record.field === 'timestamp') {
|
||||
cleanTimestamp = fieldData.value.replace(/^["']|["']$/g, '');
|
||||
}
|
||||
// Memoize cleanTimestamp computation
|
||||
const cleanTimestamp = useMemo(() => {
|
||||
if (record.field !== 'timestamp') return '';
|
||||
return fieldData.value.replace(/^["']|["']$/g, '');
|
||||
}, [record.field, fieldData.value]);
|
||||
|
||||
const renderFieldContent = (): JSX.Element => {
|
||||
const renderFieldContent = useCallback((): JSX.Element => {
|
||||
const commonStyles: React.CSSProperties = {
|
||||
color: Color.BG_SIENNA_400,
|
||||
whiteSpace: 'pre-wrap',
|
||||
@@ -124,7 +178,9 @@ export function TableViewActions(
|
||||
|
||||
switch (record.field) {
|
||||
case 'body':
|
||||
return <span style={commonStyles} dangerouslySetInnerHTML={bodyHtml} />;
|
||||
return (
|
||||
<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
|
||||
);
|
||||
|
||||
case 'timestamp':
|
||||
return (
|
||||
@@ -141,7 +197,93 @@ export function TableViewActions(
|
||||
<span style={commonStyles}>{removeEscapeCharacters(fieldData.value)}</span>
|
||||
);
|
||||
}
|
||||
};
|
||||
}, [
|
||||
record,
|
||||
fieldData,
|
||||
bodyHtml,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
cleanTimestamp,
|
||||
]);
|
||||
|
||||
// Early return for body field with async processing
|
||||
if (record.field === 'body') {
|
||||
return (
|
||||
<div className={cx('value-field', isOpen ? 'open-popover' : '')}>
|
||||
<CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
|
||||
<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
|
||||
</CopyClipboardHOC>
|
||||
{!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
|
||||
<span className="action-btn">
|
||||
<Tooltip title="Filter for value">
|
||||
<Button
|
||||
className="filter-btn periscope-btn"
|
||||
icon={
|
||||
isfilterInLoading ? (
|
||||
<Spin size="small" />
|
||||
) : (
|
||||
<ArrowDownToDot size={14} style={{ transform: 'rotate(90deg)' }} />
|
||||
)
|
||||
}
|
||||
onClick={onClickHandler(
|
||||
OPERATORS['='],
|
||||
fieldFilterKey,
|
||||
parseFieldValue(fieldData.value),
|
||||
dataType,
|
||||
)}
|
||||
/>
|
||||
</Tooltip>
|
||||
<Tooltip title="Filter out value">
|
||||
<Button
|
||||
className="filter-btn periscope-btn"
|
||||
icon={
|
||||
isfilterOutLoading ? (
|
||||
<Spin size="small" />
|
||||
) : (
|
||||
<ArrowUpFromDot size={14} style={{ transform: 'rotate(90deg)' }} />
|
||||
)
|
||||
}
|
||||
onClick={onClickHandler(
|
||||
OPERATORS['!='],
|
||||
fieldFilterKey,
|
||||
parseFieldValue(fieldData.value),
|
||||
dataType,
|
||||
)}
|
||||
/>
|
||||
</Tooltip>
|
||||
{!isOldLogsExplorerOrLiveLogsPage && (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
onOpenChange={setIsOpen}
|
||||
arrow={false}
|
||||
content={
|
||||
<div>
|
||||
<Button
|
||||
className="group-by-clause"
|
||||
type="text"
|
||||
icon={<GroupByIcon />}
|
||||
onClick={(): Promise<void> | void =>
|
||||
onGroupByAttribute?.(fieldFilterKey)
|
||||
}
|
||||
>
|
||||
Group By Attribute
|
||||
</Button>
|
||||
</div>
|
||||
}
|
||||
rootClassName="table-view-actions-content"
|
||||
trigger="hover"
|
||||
placement="bottomLeft"
|
||||
>
|
||||
<Button
|
||||
icon={<Ellipsis size={14} />}
|
||||
className="filter-btn periscope-btn"
|
||||
/>
|
||||
</Popover>
|
||||
)}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cx('value-field', isOpen ? 'open-popover' : '')}>
|
||||
|
||||
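Note: the TableViewActions changes above follow one pattern: values derived on every render (bodyHtml, textToCopy, cleanTimestamp) move into useMemo, and renderFieldContent into useCallback, so the new memoized BodyContent child does not re-render needlessly. A stripped-down sketch of the same pattern, with a trivial escape function standing in for the dompurify pipeline (names here are illustrative, not from the repo):

import React, { useCallback, useMemo } from 'react';

const escapeHtml = (raw: string): string =>
  raw.replace(/</g, '&lt;').replace(/>/g, '&gt;');

function ValueCell({ field, value }: { field: string; value: string }): JSX.Element {
  // Recomputed only when its inputs change, not on every parent render.
  const html = useMemo(
    () => (field === 'body' ? { __html: escapeHtml(value) } : { __html: '' }),
    [field, value],
  );

  // Stable identity, so memoized children receiving it do not re-render.
  const renderContent = useCallback(
    (): JSX.Element => <span dangerouslySetInnerHTML={html} />,
    [html],
  );

  return renderContent();
}

export default ValueCell;
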
@@ -1,7 +1,7 @@
import { render, screen } from '@testing-library/react';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';

import { TableViewActions } from '../TableViewActions';
import TableViewActions from '../TableViewActions';

// Mock the components and hooks
jest.mock('components/Logs/CopyClipboardHOC', () => ({

@@ -0,0 +1,102 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { isEmpty } from 'lodash-es';
import { useEffect, useRef, useState } from 'react';

import { jsonToDataNodes, recursiveParseJSON } from '../utils';

// Hook for async JSON processing
const useAsyncJSONProcessing = (
  value: string,
  shouldProcess: boolean,
): {
  isLoading: boolean;
  treeData: any[] | null;
  error: string | null;
} => {
  const [jsonState, setJsonState] = useState<{
    isLoading: boolean;
    treeData: any[] | null;
    error: string | null;
  }>({
    isLoading: false,
    treeData: null,
    error: null,
  });

  const processingRef = useRef<boolean>(false);

  // eslint-disable-next-line sonarjs/cognitive-complexity
  useEffect((): (() => void) => {
    if (!shouldProcess || processingRef.current) {
      return (): void => {};
    }

    processingRef.current = true;
    setJsonState({ isLoading: true, treeData: null, error: null });

    // Option 1: Using setTimeout for non-blocking processing
    const processAsync = (): void => {
      setTimeout(() => {
        try {
          const parsedBody = recursiveParseJSON(value);
          if (!isEmpty(parsedBody)) {
            const treeData = jsonToDataNodes(parsedBody);
            setJsonState({ isLoading: false, treeData, error: null });
          } else {
            setJsonState({ isLoading: false, treeData: null, error: null });
          }
        } catch (error) {
          setJsonState({
            isLoading: false,
            treeData: null,
            error: error instanceof Error ? error.message : 'Parsing failed',
          });
        } finally {
          processingRef.current = false;
        }
      }, 0);
    };

    // Option 2: Using requestIdleCallback for better performance
    const processWithIdleCallback = (): void => {
      if ('requestIdleCallback' in window) {
        requestIdleCallback(
          // eslint-disable-next-line sonarjs/no-identical-functions
          (): void => {
            try {
              const parsedBody = recursiveParseJSON(value);
              if (!isEmpty(parsedBody)) {
                const treeData = jsonToDataNodes(parsedBody);
                setJsonState({ isLoading: false, treeData, error: null });
              } else {
                setJsonState({ isLoading: false, treeData: null, error: null });
              }
            } catch (error) {
              setJsonState({
                isLoading: false,
                treeData: null,
                error: error instanceof Error ? error.message : 'Parsing failed',
              });
            } finally {
              processingRef.current = false;
            }
          },
          { timeout: 1000 },
        );
      } else {
        processAsync();
      }
    };

    processWithIdleCallback();

    // Cleanup function
    return (): void => {
      processingRef.current = false;
    };
  }, [value, shouldProcess]);

  return jsonState;
};

export default useAsyncJSONProcessing;
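Note: a possible way to consume the useAsyncJSONProcessing hook above from a component; JsonTree is a hypothetical stand-in for the antd <Tree /> renderer used earlier in this diff:

import React from 'react';

import useAsyncJSONProcessing from './useAsyncJSONProcessing';

// Hypothetical renderer; the real component uses antd's <Tree /> instead.
function JsonTree({ nodes }: { nodes: any[] }): JSX.Element {
  return <pre>{JSON.stringify(nodes, null, 2)}</pre>;
}

function LogBody({ raw }: { raw: string }): JSX.Element {
  // Only log bodies go through the async parse; other fields skip it.
  const { isLoading, treeData, error } = useAsyncJSONProcessing(raw, true);

  if (isLoading) return <span>Processing JSON...</span>;
  if (error) return <span>Error parsing Body JSON</span>;
  if (treeData) return <JsonTree nodes={treeData} />;

  // Not JSON: fall back to the raw string.
  return <span>{raw}</span>;
}

export default LogBody;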
@@ -0,0 +1,221 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import LogsExplorerViews from 'container/LogsExplorerViews';
import { mockQueryBuilderContextValue } from 'container/LogsExplorerViews/tests/mock';
import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange';
import { logsQueryRangeEmptyResponse } from 'mocks-server/__mockdata__/logs_query_range';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { render, screen } from 'tests/test-utils';

const queryRangeURL = 'http://localhost/api/v3/query_range';

const logsQueryServerRequest = ({
  response = logsQueryRangeEmptyResponse,
}: {
  response?: any;
}): void =>
  server.use(
    rest.post(queryRangeURL, (req, res, ctx) =>
      res(ctx.status(200), ctx.json(response)),
    ),
  );

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useLocation: (): { pathname: string } => ({
    pathname: `${ROUTES.LOGS_EXPLORER}`,
  }),
}));

jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
  usePreferenceSync: (): any => ({
    preferences: {
      columns: [],
      formatting: {
        maxLines: 2,
        format: 'table',
        fontSize: 'small',
        version: 1,
      },
    },
    loading: false,
    error: null,
    updateColumns: jest.fn(),
    updateFormatting: jest.fn(),
  }),
}));

jest.mock(
  'container/TimeSeriesView/TimeSeriesView',
  () =>
    // eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
    function () {
      return <div>Time Series Chart</div>;
    },
);

jest.mock(
  'container/LogsExplorerChart',
  () =>
    // eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
    function () {
      return <div>Histogram Chart</div>;
    },
);

jest.mock('hooks/useSafeNavigate', () => ({
  useSafeNavigate: (): any => ({
    safeNavigate: jest.fn(),
  }),
}));

jest.mock('uplot', () => {
  const paths = {
    spline: jest.fn(),
    bars: jest.fn(),
  };
  const uplotMock = jest.fn(() => ({
    paths,
  }));
  return {
    paths,
    default: uplotMock,
  };
});

jest.mock('hooks/queryBuilder/useGetExplorerQueryRange', () => ({
  __esModule: true,
  useGetExplorerQueryRange: jest.fn(),
}));

describe('LogsExplorerList - empty states', () => {
  beforeEach(() => {
    (useGetExplorerQueryRange as jest.Mock).mockReturnValue({
      data: { payload: logsQueryRangeEmptyResponse },
    });
    logsQueryServerRequest({});
  });

  it('should display custom empty state when navigating from trace to logs with no results', async () => {
    const mockTraceToLogsContextValue = {
      ...mockQueryBuilderContextValue,
      panelType: PANEL_TYPES.LIST,
      stagedQuery: {
        ...mockQueryBuilderContextValue.stagedQuery,
        builder: {
          ...mockQueryBuilderContextValue.stagedQuery.builder,
          queryData: [
            {
              ...mockQueryBuilderContextValue.stagedQuery.builder.queryData[0],
              filters: {
                items: [
                  {
                    id: 'trace-filter',
                    key: {
                      key: 'trace_id',
                      type: '',
                      dataType: 'string',
                      isColumn: true,
                    },
                    op: '=',
                    value: 'test-trace-id',
                  },
                ],
                op: 'AND',
              },
            },
          ],
        },
      },
    };

    render(
      <QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
        <PreferenceContextProvider>
          <LogsExplorerViews
            selectedView={SELECTED_VIEWS.SEARCH}
            showFrequencyChart
            setIsLoadingQueries={(): void => {}}
            listQueryKeyRef={{ current: {} }}
            chartQueryKeyRef={{ current: {} }}
          />
        </PreferenceContextProvider>
      </QueryBuilderContext.Provider>,
    );

    // Check for custom empty state message
    expect(screen.getByText('No logs found for this trace.')).toBeInTheDocument();
    expect(screen.getByText('This could be because :')).toBeInTheDocument();
    expect(
      screen.getByText('Logs are not linked to Traces.'),
    ).toBeInTheDocument();
    expect(
      screen.getByText('Logs are not being sent to SigNoz.'),
    ).toBeInTheDocument();
    expect(
      screen.getByText('No logs are associated with this particular trace/span.'),
    ).toBeInTheDocument();

    // Check for documentation links
    expect(screen.getByText('Sending logs to SigNoz')).toBeInTheDocument();
    expect(screen.getByText('Correlate traces and logs')).toBeInTheDocument();
  });

  it('should display empty state when filters are applied and no results are found', async () => {
    const mockTraceToLogsContextValue = {
      ...mockQueryBuilderContextValue,
      panelType: PANEL_TYPES.LIST,
      stagedQuery: {
        ...mockQueryBuilderContextValue.stagedQuery,
        builder: {
          ...mockQueryBuilderContextValue.stagedQuery.builder,
          queryData: [
            {
              ...mockQueryBuilderContextValue.stagedQuery.builder.queryData[0],
              filters: {
                items: [
                  {
                    id: 'service-filter',
                    key: {
                      key: 'service.name',
                      type: '',
                      dataType: 'string',
                      isColumn: true,
                    },
                    op: '=',
                    value: 'test-service-name',
                  },
                ],
                op: 'AND',
              },
            },
          ],
        },
      },
    };

    render(
      <QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
        <PreferenceContextProvider>
          <LogsExplorerViews
            selectedView={SELECTED_VIEWS.SEARCH}
            showFrequencyChart
            setIsLoadingQueries={(): void => {}}
            listQueryKeyRef={{ current: {} }}
            chartQueryKeyRef={{ current: {} }}
          />
        </PreferenceContextProvider>
      </QueryBuilderContext.Provider>,
    );

    // Check for custom empty state message
    expect(screen.getByText(/This query had no results./i)).toBeInTheDocument();
    expect(
      screen.getByText(/Edit your query and try again!/i),
    ).toBeInTheDocument();
  });
});
@@ -29,7 +29,11 @@ import NoLogs from '../NoLogs/NoLogs';
import InfinityTableView from './InfinityTableView';
import { LogsExplorerListProps } from './LogsExplorerList.interfaces';
import { InfinityWrapperStyled } from './styles';
import { convertKeysToColumnFields } from './utils';
import {
  convertKeysToColumnFields,
  getEmptyLogsListConfig,
  isTraceToLogsQuery,
} from './utils';

function Footer(): JSX.Element {
  return <Spinner height={20} tip="Getting Logs" />;
@@ -44,7 +48,6 @@ function LogsExplorerList({
  isFilterApplied,
}: LogsExplorerListProps): JSX.Element {
  const ref = useRef<VirtuosoHandle>(null);
  const { initialDataSource } = useQueryBuilder();

  const { activeLogId } = useCopyLogLink();

@@ -58,11 +61,17 @@ function LogsExplorerList({

  const { options } = useOptionsMenu({
    storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
    dataSource: initialDataSource || DataSource.METRICS,
    dataSource: DataSource.LOGS,
    aggregateOperator:
      currentStagedQueryData?.aggregateOperator || StringOperators.NOOP,
  });

  const {
    currentQuery,
    lastUsedQuery,
    redirectWithQueryBuilderData,
  } = useQueryBuilder();

  const activeLogIndex = useMemo(
    () => logs.findIndex(({ id }) => id === activeLogId),
    [logs, activeLogId],
@@ -186,6 +195,44 @@ function LogsExplorerList({
    selectedFields,
  ]);

  const isTraceToLogsNavigation = useMemo(() => {
    if (!currentStagedQueryData) return false;
    return isTraceToLogsQuery(currentStagedQueryData);
  }, [currentStagedQueryData]);

  const handleClearFilters = useCallback((): void => {
    const queryIndex = lastUsedQuery ?? 0;
    const updatedQuery = currentQuery?.builder.queryData?.[queryIndex];

    if (!updatedQuery) return;

    if (updatedQuery?.filters?.items) {
      updatedQuery.filters.items = [];
    }

    const preparedQuery = {
      ...currentQuery,
      builder: {
        ...currentQuery.builder,
        queryData: currentQuery.builder.queryData.map((item, idx: number) => ({
          ...item,
          filters: {
            ...item.filters,
            items: idx === queryIndex ? [] : [...(item.filters?.items || [])],
          },
        })),
      },
    };

    redirectWithQueryBuilderData(preparedQuery);
  }, [currentQuery, lastUsedQuery, redirectWithQueryBuilderData]);

  const getEmptyStateMessage = useMemo(() => {
    if (!isTraceToLogsNavigation) return;

    return getEmptyLogsListConfig(handleClearFilters);
  }, [isTraceToLogsNavigation, handleClearFilters]);

  return (
    <div className="logs-list-view-container">
      {(isLoading || (isFetching && logs.length === 0)) && <LogsLoading />}
@@ -201,7 +248,11 @@ function LogsExplorerList({
        logs.length === 0 &&
        !isError &&
        isFilterApplied && (
          <EmptyLogsSearch dataSource={DataSource.LOGS} panelType="LIST" />
          <EmptyLogsSearch
            dataSource={DataSource.LOGS}
            panelType="LIST"
            customMessage={getEmptyStateMessage}
          />
        )}

      {isError && !isLoading && !isFetching && <LogsError />}

@@ -1,5 +1,9 @@
import { IField } from 'types/api/logs/fields';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
  IBuilderQuery,
  TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';

export const convertKeysToColumnFields = (
  keys: BaseAutocompleteData[],
@@ -9,3 +13,56 @@ export const convertKeysToColumnFields = (
    name: item.key,
    type: item.type as string,
  }));
/**
 * Determines if a query represents a trace-to-logs navigation
 * by checking for the presence of a trace_id filter.
 */
export const isTraceToLogsQuery = (queryData: IBuilderQuery): boolean => {
  // Check if this is a trace-to-logs query by looking for trace_id filter
  if (!queryData?.filters?.items) return false;

  const traceIdFilter = queryData.filters.items.find(
    (item: TagFilterItem) => item.key?.key === 'trace_id',
  );

  return !!traceIdFilter;
};

export type EmptyLogsListConfig = {
  title: string;
  subTitle: string;
  description: string | string[];
  documentationLinks?: Array<{
    text: string;
    url: string;
  }>;
  showClearFiltersButton?: boolean;
  onClearFilters?: () => void;
  clearFiltersButtonText?: string;
};

export const getEmptyLogsListConfig = (
  handleClearFilters: () => void,
): EmptyLogsListConfig => ({
  title: 'No logs found for this trace.',
  subTitle: 'This could be because :',
  description: [
    'Logs are not linked to Traces.',
    'Logs are not being sent to SigNoz.',
    'No logs are associated with this particular trace/span.',
  ],
  documentationLinks: [
    {
      text: 'Sending logs to SigNoz',
      url: 'https://signoz.io/docs/logs-management/send-logs-to-signoz/',
    },
    {
      text: 'Correlate traces and logs',
      url:
        'https://signoz.io/docs/traces-management/guides/correlate-traces-and-logs/',
    },
  ],
  clearFiltersButtonText: 'Clear filters from Trace to view other logs',
  showClearFiltersButton: true,
  onClearFilters: handleClearFilters,
});
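Note: how the two utilities above are meant to compose in LogsExplorerList, condensed from the hunks earlier in this diff (a sketch of the wiring already shown, not additional behavior):

import {
  IBuilderQuery,
} from 'types/api/queryBuilder/queryBuilderData';

import {
  EmptyLogsListConfig,
  getEmptyLogsListConfig,
  isTraceToLogsQuery,
} from './utils';

// Given the staged query of the active tab, decide whether the
// trace-specific empty state applies and build its config.
function buildEmptyState(
  queryData: IBuilderQuery | undefined,
  clearFilters: () => void,
): EmptyLogsListConfig | undefined {
  // A trace_id filter marks the query as a trace-to-logs navigation.
  if (!queryData || !isTraceToLogsQuery(queryData)) return undefined;

  // The config carries the copy, the doc links, and the clear-filters action.
  return getEmptyLogsListConfig(clearFilters);
}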
@@ -153,13 +153,13 @@ function LogsExplorerViews({

  const isMultipleQueries = useMemo(
    () =>
      currentQuery.builder.queryData.length > 1 ||
      currentQuery.builder.queryFormulas.length > 0,
      currentQuery?.builder?.queryData?.length > 1 ||
      currentQuery?.builder?.queryFormulas?.length > 0,
    [currentQuery],
  );

  const isGroupByExist = useMemo(() => {
    const groupByCount: number = currentQuery.builder.queryData.reduce<number>(
    const groupByCount: number = currentQuery?.builder?.queryData?.reduce<number>(
      (acc, query) => acc + query.groupBy.length,
      0,
    );
@@ -551,19 +551,19 @@ function LogsExplorerViews({
    if (!stagedQuery) return [];

    if (panelType === PANEL_TYPES.LIST) {
      if (listChartData && listChartData.payload.data.result.length > 0) {
      if (listChartData && listChartData.payload.data?.result.length > 0) {
        return listChartData.payload.data.result;
      }
      return [];
    }

    if (!data || data.payload.data.result.length === 0) return [];
    if (!data || data.payload.data?.result.length === 0) return [];

    const isGroupByExist = stagedQuery.builder.queryData.some(
      (queryData) => queryData.groupBy.length > 0,
    );

    const firstPayloadQuery = data.payload.data.result.find(
    const firstPayloadQuery = data.payload.data?.result.find(
      (item) => item.queryName === listQuery?.queryName,
    );


@@ -98,6 +98,7 @@ interface QueryBuilderSearchV2Props {
  hideSpanScopeSelector?: boolean;
  // Determines whether to call onChange when a tag is closed
  triggerOnChangeOnClose?: boolean;
  skipQueryBuilderRedirect?: boolean;
}

export interface Option {
@@ -137,6 +138,7 @@ function QueryBuilderSearchV2(
    operatorConfigKey,
    hideSpanScopeSelector,
    triggerOnChangeOnClose,
    skipQueryBuilderRedirect,
  } = props;

  const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
@@ -1038,7 +1040,11 @@ function QueryBuilderSearchV2(
      })}
      </Select>
      {!hideSpanScopeSelector && (
        <SpanScopeSelector query={query} onChange={onChange} />
        <SpanScopeSelector
          query={query}
          onChange={onChange}
          skipQueryBuilderRedirect={skipQueryBuilderRedirect}
        />
      )}
    </div>
  );
@@ -1056,6 +1062,7 @@ QueryBuilderSearchV2.defaultProps = {
  operatorConfigKey: undefined,
  hideSpanScopeSelector: true,
  triggerOnChangeOnClose: false,
  skipQueryBuilderRedirect: false,
};

export default QueryBuilderSearchV2;

@@ -23,6 +23,7 @@ interface SpanFilterConfig {
interface SpanScopeSelectorProps {
  onChange?: (value: TagFilter) => void;
  query?: IBuilderQuery;
  skipQueryBuilderRedirect?: boolean;
}

const SPAN_FILTER_CONFIG: Record<SpanScope, SpanFilterConfig | null> = {
@@ -58,6 +59,7 @@ const SELECT_OPTIONS = [
function SpanScopeSelector({
  onChange,
  query,
  skipQueryBuilderRedirect,
}: SpanScopeSelectorProps): JSX.Element {
  const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
  const [selectedScope, setSelectedScope] = useState<SpanScope>(
@@ -79,6 +81,7 @@ function SpanScopeSelector({
    if (hasFilter('isEntryPoint')) return SpanScope.ENTRYPOINT_SPANS;
    return SpanScope.ALL_SPANS;
  };

  useEffect(() => {
    let queryData = (currentQuery?.builder?.queryData || [])?.find(
      (item) => item.queryName === query?.queryName,
@@ -127,13 +130,10 @@ function SpanScopeSelector({
      },
    }));

    if (onChange && query) {
    if (skipQueryBuilderRedirect && onChange && query) {
      onChange({
        ...query.filters,
        items: getUpdatedFilters(
          [...query.filters.items, ...newQuery.builder.queryData[0].filters.items],
          true,
        ),
        items: getUpdatedFilters([...query.filters.items], true),
      });

      setSelectedScope(newScope);
@@ -156,6 +156,7 @@ function SpanScopeSelector({
SpanScopeSelector.defaultProps = {
  onChange: undefined,
  query: undefined,
  skipQueryBuilderRedirect: false,
};

export default SpanScopeSelector;
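Note: the branch the new skipQueryBuilderRedirect prop introduces, reduced to a sketch (names here are illustrative; the component above inlines this logic inside its change handler):

// With skipQueryBuilderRedirect set (as in the trace Filters panel later in
// this diff), the selector only reports updated filters through onChange and
// leaves the global query-builder URL state alone; otherwise it redirects.
function applyScopeChange(options: {
  skipQueryBuilderRedirect?: boolean;
  onChange?: (filters: unknown) => void;
  localFilters: unknown;
  redirect: () => void;
}): void {
  const { skipQueryBuilderRedirect, onChange, localFilters, redirect } = options;
  if (skipQueryBuilderRedirect && onChange) {
    onChange(localFilters); // the caller owns the state locally
    return;
  }
  redirect(); // default path: push the change into the query builder
}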
@@ -3,9 +3,11 @@ import {
  render,
  RenderResult,
  screen,
  within,
} from '@testing-library/react';
import { initialQueriesMap } from 'constants/queryBuilder';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { QueryClient, QueryClientProvider } from 'react-query';
import {
  IBuilderQuery,
  Query,
@@ -13,6 +15,7 @@ import {
  TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';

import QueryBuilderSearchV2 from '../QueryBuilderSearchV2';
import SpanScopeSelector from '../SpanScopeSelector';

const mockRedirectWithQueryBuilderData = jest.fn();
@@ -48,6 +51,14 @@ const defaultQuery = {
  },
};

const queryClient = new QueryClient({
  defaultOptions: {
    queries: {
      refetchOnWindowFocus: false,
    },
  },
});

const defaultQueryBuilderQuery: IBuilderQuery = {
  ...initialQueriesMap.traces.builder.queryData[0],
  queryName: 'A',
@@ -76,6 +87,7 @@ const renderWithContext = (
  initialQuery = defaultQuery,
  onChangeProp?: (value: TagFilter) => void,
  queryProp?: IBuilderQuery,
  skipQueryBuilderRedirect = false,
): RenderResult =>
  render(
    <QueryBuilderContext.Provider
@@ -87,12 +99,19 @@ const renderWithContext = (
        } as any
      }
    >
      <SpanScopeSelector onChange={onChangeProp} query={queryProp} />
      <SpanScopeSelector
        onChange={onChangeProp}
        query={queryProp}
        skipQueryBuilderRedirect={skipQueryBuilderRedirect}
      />
    </QueryBuilderContext.Provider>,
  );

const selectOption = async (optionText: string): Promise<void> => {
  const selector = screen.getByRole('combobox');
  const selector = within(screen.getByTestId('span-scope-selector')).getByRole(
    'combobox',
  );

  fireEvent.mouseDown(selector);

  // Wait for dropdown to appear
@@ -264,6 +283,7 @@ describe('SpanScopeSelector', () => {
        defaultQuery,
        mockOnChange,
        localQuery,
        true,
      );
      expect(await screen.findByText('All Spans')).toBeInTheDocument();

@@ -283,6 +303,7 @@ describe('SpanScopeSelector', () => {
        defaultQuery,
        mockOnChange,
        localQuery,
        true,
      );
      expect(await screen.findByText('Root Spans')).toBeInTheDocument();

@@ -303,6 +324,7 @@ describe('SpanScopeSelector', () => {
        defaultQuery,
        mockOnChange,
        localQuery,
        true,
      );
      expect(await screen.findByText('Root Spans')).toBeInTheDocument();

@@ -324,6 +346,7 @@ describe('SpanScopeSelector', () => {
        defaultQuery,
        mockOnChange,
        localQuery,
        true,
      );
      expect(await screen.findByText('Root Spans')).toBeInTheDocument();

@@ -350,6 +373,7 @@ describe('SpanScopeSelector', () => {
        defaultQuery,
        mockOnChange,
        localQuery,
        true,
      );
      expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument();

@@ -361,5 +385,60 @@ describe('SpanScopeSelector', () => {
        container.querySelector('span[title="All Spans"]'),
      ).toBeInTheDocument();
    });

    it('should not duplicate non-scope filters when changing span scope', async () => {
      const query = {
        ...defaultQuery,
        builder: {
          ...defaultQuery.builder,
          queryData: [
            {
              ...defaultQuery.builder.queryData[0],
              filters: {
                items: [createNonScopeFilter('service', 'checkout')],
                op: 'AND',
              },
            },
          ],
        },
      };
      render(
        <QueryClientProvider client={queryClient}>
          <QueryBuilderContext.Provider
            value={
              {
                currentQuery: query,
                redirectWithQueryBuilderData: mockRedirectWithQueryBuilderData,
              } as any
            }
          >
            <QueryBuilderSearchV2
              query={query.builder.queryData[0] as any}
              onChange={mockOnChange}
              hideSpanScopeSelector={false}
            />
          </QueryBuilderContext.Provider>
        </QueryClientProvider>,
      );

      expect(await screen.findByText('All Spans')).toBeInTheDocument();

      await selectOption('Entrypoint Spans');

      expect(mockRedirectWithQueryBuilderData).toHaveBeenCalled();

      const redirectQueryArg = mockRedirectWithQueryBuilderData.mock
        .calls[0][0] as Query;
      const { items } = redirectQueryArg.builder.queryData[0].filters;
      // Count non-scope filters
      const nonScopeFilters = items.filter(
        (filter) => filter.key?.type !== 'spanSearchScope',
      );
      expect(nonScopeFilters).toHaveLength(1);

      expect(nonScopeFilters).toContainEqual(
        createNonScopeFilter('service', 'checkout'),
      );
    });
  });
});

@@ -1,8 +1,9 @@
import { WarningFilled } from '@ant-design/icons';
import { DataTable } from '@signozhq/table';
import { Flex, Typography } from 'antd';
import { ResizeTable } from 'components/ResizeTable';
import { MAX_RPS_LIMIT } from 'constants/global';
import ResourceAttributesFilter from 'container/ResourceAttributesFilter';
import { getTableColumns } from 'container/ServiceTable/Columns/ServiceColumn';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useAppContext } from 'providers/App/App';
import { useEffect, useMemo, useState } from 'react';
@@ -10,7 +11,6 @@ import { useTranslation } from 'react-i18next';
import { useLocation } from 'react-router-dom';
import { getTotalRPS } from 'utils/services';

import { getColumns } from '../Columns/ServiceColumn';
import ServiceTableProps from '../types';

function ServiceTraceTable({
@@ -23,7 +23,7 @@ function ServiceTraceTable({

  const { isFetchingActiveLicense, trialInfo } = useAppContext();
  const { isCloudUser: isCloudUserVal } = useGetTenantLicense();
  const tableColumns = useMemo(() => getColumns(search, false), [search]);
  const tableColumns = useMemo(() => getTableColumns(search), [search]);

  useEffect(() => {
    if (
@@ -65,13 +65,19 @@ function ServiceTraceTable({

      <ResourceAttributesFilter />

      <ResizeTable
        pagination={paginationConfig}
      <DataTable
        columns={tableColumns}
        loading={loading}
        dataSource={services}
        rowKey="serviceName"
        className="service-traces-table"
        data={services}
        tableId="service-traces-table"
        isLoading={loading}
        enablePagination
        pageSize={paginationConfig.defaultPageSize}
        showHeaders
        enableSorting
        enableColumnResizing={false}
        enableColumnReordering={false}
        enableColumnPinning={false}
        enableGlobalFilter={false}
      />
    </div>
  );

@@ -1,46 +0,0 @@
import type { ColumnsType } from 'antd/es/table';
import { ServicesList } from 'types/api/metrics/getService';

import {
  ColumnKey,
  ColumnTitle,
  ColumnWidth,
  SORTING_ORDER,
} from './ColumnContants';
import { getColumnSearchProps } from './GetColumnSearchProps';

export const getColumns = (search: string): ColumnsType<ServicesList> => [
  {
    title: ColumnTitle[ColumnKey.Application],
    dataIndex: ColumnKey.Application,
    width: ColumnWidth.Application,
    key: ColumnKey.Application,
    ...getColumnSearchProps('serviceName', search),
  },
  {
    title: ColumnTitle[ColumnKey.P99],
    dataIndex: ColumnKey.P99,
    key: ColumnKey.P99,
    width: ColumnWidth.P99,
    defaultSortOrder: SORTING_ORDER,
    sorter: (a: ServicesList, b: ServicesList): number => a.p99 - b.p99,
    render: (value: number): string => (value / 1000000).toFixed(2),
  },
  {
    title: ColumnTitle[ColumnKey.ErrorRate],
    dataIndex: ColumnKey.ErrorRate,
    key: ColumnKey.ErrorRate,
    width: 150,
    sorter: (a: ServicesList, b: ServicesList): number =>
      a.errorRate - b.errorRate,
    render: (value: number): string => value.toFixed(2),
  },
  {
    title: ColumnTitle[ColumnKey.Operations],
    dataIndex: ColumnKey.Operations,
    key: ColumnKey.Operations,
    width: ColumnWidth.Operations,
    sorter: (a: ServicesList, b: ServicesList): number => a.callRate - b.callRate,
    render: (value: number): string => value.toFixed(2),
  },
];
@@ -0,0 +1,92 @@
import { ColumnDef, createColumnHelper } from '@tanstack/react-table';
import type { ColumnsType } from 'antd/es/table';
import ROUTES from 'constants/routes';
import { routeConfig } from 'container/SideNav/config';
import { getQueryString } from 'container/SideNav/helper';
import { Link } from 'react-router-dom';
import { ServicesList } from 'types/api/metrics/getService';

import { Name } from '../styles';
import {
  ColumnKey,
  ColumnTitle,
  ColumnWidth,
  SORTING_ORDER,
} from './ColumnContants';
import { getColumnSearchProps } from './GetColumnSearchProps';

export const getColumns = (search: string): ColumnsType<ServicesList> => [
  {
    title: ColumnTitle[ColumnKey.Application],
    dataIndex: ColumnKey.Application,
    width: ColumnWidth.Application,
    key: ColumnKey.Application,
    ...getColumnSearchProps('serviceName', search),
  },
  {
    title: ColumnTitle[ColumnKey.P99],
    dataIndex: ColumnKey.P99,
    key: ColumnKey.P99,
    width: ColumnWidth.P99,
    defaultSortOrder: SORTING_ORDER,
    sorter: (a: ServicesList, b: ServicesList): number => a.p99 - b.p99,
    render: (value: number): string => (value / 1000000).toFixed(2),
  },
  {
    title: ColumnTitle[ColumnKey.ErrorRate],
    dataIndex: ColumnKey.ErrorRate,
    key: ColumnKey.ErrorRate,
    width: 150,
    sorter: (a: ServicesList, b: ServicesList): number =>
      a.errorRate - b.errorRate,
    render: (value: number): string => value.toFixed(2),
  },
  {
    title: ColumnTitle[ColumnKey.Operations],
    dataIndex: ColumnKey.Operations,
    key: ColumnKey.Operations,
    width: ColumnWidth.Operations,
    sorter: (a: ServicesList, b: ServicesList): number => a.callRate - b.callRate,
    render: (value: number): string => value.toFixed(2),
  },
];

// Utility to convert AntD columns to ColumnDef (minimal for your columns)
const columnHelper = createColumnHelper<ServicesList>();
export const getTableColumns = (
  search: string,
): ColumnDef<ServicesList, any>[] => [
  columnHelper.accessor(ColumnKey.Application, {
    header: ColumnTitle[ColumnKey.Application],
    cell: (info) => {
      const metrics = info.getValue();
      const urlParams = new URLSearchParams(search);
      const availableParams = routeConfig[ROUTES.SERVICE_METRICS];
      const queryString = getQueryString(availableParams, urlParams);
      return (
        <Link to={`${ROUTES.APPLICATION}/${metrics}?${queryString.join('')}`}>
          <Name>{metrics}</Name>
        </Link>
      );
    },
    enableColumnFilter: true, // Enable filtering only for this column
  }),
  columnHelper.accessor(ColumnKey.P99, {
    header: ColumnTitle[ColumnKey.P99],
    cell: (info) => (info.getValue() / 1000000).toFixed(2),
    sortingFn: 'basic',
    enableColumnFilter: false,
  }),
  columnHelper.accessor(ColumnKey.ErrorRate, {
    header: ColumnTitle[ColumnKey.ErrorRate],
    cell: (info) => info.getValue().toFixed(2),
    sortingFn: 'basic',
    enableColumnFilter: false,
  }),
  columnHelper.accessor(ColumnKey.Operations, {
    header: ColumnTitle[ColumnKey.Operations],
    cell: (info) => info.getValue().toFixed(2),
    sortingFn: 'basic',
    enableColumnFilter: false,
  }),
];
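Note: for readers unfamiliar with @tanstack/react-table, the column helper used above pairs a typed accessor with a cell renderer; a minimal standalone example with a simplified row type (the row shape here is illustrative, not the repo's ServicesList):

import { ColumnDef, createColumnHelper } from '@tanstack/react-table';

interface ServiceRow {
  serviceName: string;
  p99: number; // nanoseconds, rendered as milliseconds below
}

const helper = createColumnHelper<ServiceRow>();

export const exampleColumns: ColumnDef<ServiceRow, any>[] = [
  helper.accessor('serviceName', {
    header: 'Application',
  }),
  helper.accessor('p99', {
    header: 'P99 latency (in ms)',
    // Same conversion as the real column: ns -> ms with two decimals.
    cell: (info) => (info.getValue() / 1000000).toFixed(2),
    sortingFn: 'basic',
  }),
];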
@@ -112,9 +112,6 @@
  font-weight: 500;
  line-height: 14px; /* 140% */
  letter-spacing: 0.4px;

  cursor: pointer;

  max-width: 60px;

  overflow: hidden;
@@ -122,6 +119,10 @@
  white-space: nowrap;
}

.version-clickable {
  cursor: pointer;
}

.version-update-notification-dot-icon {
  width: 4px;
  height: 4px;

@@ -195,11 +195,25 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
    };
  }, [checkScroll]);

  const {
    isCloudUser,
    isEnterpriseSelfHostedUser,
    isCommunityUser,
    isCommunityEnterpriseUser,
  } = useGetTenantLicense();

  const [licenseTag, setLicenseTag] = useState('');
  const isAdmin = user.role === USER_ROLES.ADMIN;
  const isEditor = user.role === USER_ROLES.EDITOR;

  useEffect(() => {
    const navShortcuts = (userPreferences?.find(
      (preference) => preference.name === USER_PREFERENCES.NAV_SHORTCUTS,
    )?.value as unknown) as string[];

    const shouldShowIntegrations =
      (isCloudUser || isEnterpriseSelfHostedUser) && (isAdmin || isEditor);

    if (navShortcuts && isArray(navShortcuts) && navShortcuts.length > 0) {
      // nav shortcuts is array of strings
      const pinnedItems = navShortcuts
@@ -211,11 +225,14 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
      // Set pinned items in the order they were stored
      setPinnedMenuItems(pinnedItems);

      // Set secondary items with proper isPinned state
      setSecondaryMenuItems(
        defaultMoreMenuItems.map((item) => ({
          ...item,
          isPinned: pinnedItems.some((pinned) => pinned.itemKey === item.itemKey),
          isEnabled:
            item.key === ROUTES.INTEGRATIONS
              ? shouldShowIntegrations
              : item.isEnabled,
        })),
      );
    } else {
@@ -225,17 +242,26 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
      );
      setPinnedMenuItems(defaultPinnedItems);

      // Set secondary items with proper isPinned state
      setSecondaryMenuItems(
        defaultMoreMenuItems.map((item) => ({
          ...item,
          isPinned: defaultPinnedItems.some(
            (pinned) => pinned.itemKey === item.itemKey,
          ),
          isEnabled:
            item.key === ROUTES.INTEGRATIONS
              ? shouldShowIntegrations
              : item.isEnabled,
        })),
      );
    }
  }, [userPreferences]);
  }, [
    userPreferences,
    isCloudUser,
    isEnterpriseSelfHostedUser,
    isAdmin,
    isEditor,
  ]);

  const isOnboardingV3Enabled = featureFlags?.find(
    (flag) => flag.name === FeatureKeys.ONBOARDING_V3,
@@ -249,10 +275,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
    (flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT,
  )?.active;

  const [licenseTag, setLicenseTag] = useState('');
  const isAdmin = user.role === USER_ROLES.ADMIN;
  const isEditor = user.role === USER_ROLES.EDITOR;

  const userSettingsMenuItem = {
    key: ROUTES.SETTINGS,
    label: 'Settings',
@@ -375,13 +397,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {

  const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();

  const {
    isCloudUser,
    isEnterpriseSelfHostedUser,
    isCommunityUser,
    isCommunityEnterpriseUser,
  } = useGetTenantLicense();

  const isWorkspaceBlocked = trialInfo?.workSpaceBlock || false;

  const openInNewTab = (path: string): void => {
@@ -718,35 +733,13 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
    }
  };

  useEffect(() => {
    if ((isCloudUser || isEnterpriseSelfHostedUser) && (isAdmin || isEditor)) {
      // enable integrations for cloud users
      setSecondaryMenuItems((prevItems) =>
        prevItems.map((item) => ({
          ...item,
          isEnabled: item.key === ROUTES.INTEGRATIONS ? true : item.isEnabled,
        })),
      );

      // enable integrations for pinned menu items
      // eslint-disable-next-line sonarjs/no-identical-functions
      setPinnedMenuItems((prevItems) =>
        prevItems.map((item) => ({
          ...item,
          isEnabled: item.key === ROUTES.INTEGRATIONS ? true : item.isEnabled,
        })),
      );
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isCloudUser, isEnterpriseSelfHostedUser]);

  const onClickVersionHandler = useCallback((): void => {
    if (isCloudUser) {
    if (isCloudUser || !changelog) {
      return;
    }

    setShowChangelogModal(true);
  }, [isCloudUser]);
  }, [isCloudUser, changelog]);

  useEffect(() => {
    if (!isLatestVersion && !isCloudUser) {
@@ -814,7 +807,10 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
      }
    >
      <div className="version-container">
        <span className="version" onClick={onClickVersionHandler}>
        <span
          className={cx('version', changelog && 'version-clickable')}
          onClick={onClickVersionHandler}
        >
          {currentVersion}
        </span>


@@ -142,6 +142,7 @@ function Filters({
        }}
        onChange={handleFilterChange}
        hideSpanScopeSelector={false}
        skipQueryBuilderRedirect
      />
      {filteredSpanIds.length > 0 && (
        <div className="pre-next-toggle">

@@ -17,6 +17,7 @@ import { AppState } from 'store/reducers';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import DOCLINKS from 'utils/docLinks';
import { transformBuilderQueryFields } from 'utils/queryTransformers';

import TraceExplorerControls from '../Controls';
import { TracesLoading } from '../TraceLoading/TraceLoading';
@@ -39,9 +40,22 @@ function TracesView({ isFilterApplied }: TracesViewProps): JSX.Element {
    QueryParams.pagination,
  );

  const transformedQuery = useMemo(
    () =>
      transformBuilderQueryFields(stagedQuery || initialQueriesMap.traces, {
        orderBy: [
          {
            columnName: 'timestamp',
            order: 'desc',
          },
        ],
      }),
    [stagedQuery],
  );

  const { data, isLoading, isFetching, isError } = useGetQueryRange(
    {
      query: stagedQuery || initialQueriesMap.traces,
      query: transformedQuery,
      graphType: panelType || PANEL_TYPES.TRACE,
      selectedTime: 'GLOBAL_TIME',
      globalSelectedInterval: globalSelectedTime,
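Note: transformBuilderQueryFields is imported from 'utils/queryTransformers' and its body is not part of this diff. Judging from the call site above, a plausible sketch is a non-mutating merge of the given field overrides into every builder query; this is an assumption, not the repository's actual implementation:

import { Query } from 'types/api/queryBuilder/queryBuilderData';

// Hypothetical shape: apply the same overrides (here: orderBy) to each
// queryData entry, returning a new Query rather than mutating the input.
export function transformBuilderQueryFields(
  query: Query,
  fields: Partial<Query['builder']['queryData'][number]>,
): Query {
  return {
    ...query,
    builder: {
      ...query.builder,
      queryData: query.builder.queryData.map((item) => ({
        ...item,
        ...fields,
      })),
    },
  };
}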
306  frontend/src/hooks/__tests__/useUrlQueryData.test.tsx  Normal file
@@ -0,0 +1,306 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable @typescript-eslint/explicit-function-return-type */

import { act, renderHook } from '@testing-library/react';
import { createMemoryHistory } from 'history';
import { Router } from 'react-router-dom';

import useUrlQueryData from '../useUrlQueryData';

// Mock the useSafeNavigate hook
const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
  useSafeNavigate: () => ({
    safeNavigate: mockSafeNavigate,
  }),
}));

describe('useUrlQueryData', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  const renderHookWithRouter = (
    queryKey: string,
    defaultData?: any,
    initialEntries: string[] = ['/test'],
  ) => {
    const history = createMemoryHistory({ initialEntries });

    // Mock window.location.search to match the current route
    Object.defineProperty(window, 'location', {
      value: {
        search: history.location.search,
        pathname: history.location.pathname,
        origin: 'http://localhost',
      },
      writable: true,
    });

    return renderHook(() => useUrlQueryData(queryKey, defaultData), {
      wrapper: ({ children }) => <Router history={history}>{children}</Router>,
    });
  };

  describe('query parsing', () => {
    test('should parse valid JSON query parameter', () => {
      const testData = { name: 'test', value: 123 };
      const { result } = renderHookWithRouter('testKey', {}, [
        `/test?testKey=${encodeURIComponent(JSON.stringify(testData))}`,
      ]);

      expect(result.current.query).toBe(JSON.stringify(testData));
      expect(result.current.queryData).toEqual(testData);
    });

    test('should return default data when query parameter is not present', () => {
      const defaultData = { default: 'value' };
      const { result } = renderHookWithRouter('testKey', defaultData);

      expect(result.current.query).toBeNull();
      expect(result.current.queryData).toEqual(defaultData);
    });

    test('should return default data when query parameter is empty', () => {
      const defaultData = { default: 'value' };
      const { result } = renderHookWithRouter('testKey', defaultData, [
        '/test?testKey=',
      ]);

      expect(result.current.query).toBe('');
      expect(result.current.queryData).toEqual(defaultData);
    });

    test('should handle invalid JSON and return default data', () => {
      const defaultData = { default: 'value' };
      const consoleSpy = jest.spyOn(console, 'warn').mockImplementation();

      const { result } = renderHookWithRouter('testKey', defaultData, [
        '/test?testKey=invalid-json',
      ]);

      expect(result.current.query).toBe('invalid-json');
      expect(result.current.queryData).toEqual(defaultData);
      expect(consoleSpy).toHaveBeenCalledWith(
        'Failed to parse query as JSON:',
        'invalid-json',
        expect.any(Error),
      );

      consoleSpy.mockRestore();
    });

    test('should handle malformed JSON and return default data', () => {
      const defaultData = { default: 'value' };
      const consoleSpy = jest.spyOn(console, 'warn').mockImplementation();

      const { result } = renderHookWithRouter(
        'testKey',
        defaultData,
        ['/test?testKey={"name":"test",}'], // Missing closing brace
      );

      expect(result.current.query).toBe('{"name":"test",}');
      expect(result.current.queryData).toEqual(defaultData);
      expect(consoleSpy).toHaveBeenCalled();

      consoleSpy.mockRestore();
    });

    test('should handle complex nested objects', () => {
      const complexData = {
        users: [
          { id: 1, name: 'John', settings: { theme: 'dark', notifications: true } },
          {
            id: 2,
            name: 'Jane',
            settings: { theme: 'light', notifications: false },
          },
        ],
        metadata: {
          total: 2,
          page: 1,
        },
      };

      const { result } = renderHookWithRouter('complexKey', {}, [
        `/test?complexKey=${encodeURIComponent(JSON.stringify(complexData))}`,
      ]);

      expect(result.current.query).toBe(JSON.stringify(complexData));
      expect(result.current.queryData).toEqual(complexData);
    });

    test('should handle primitive values', () => {
      const stringData = 'simple string';
      const { result } = renderHookWithRouter('stringKey', '', [
        `/test?stringKey=${encodeURIComponent(JSON.stringify(stringData))}`,
      ]);

      expect(result.current.query).toBe(JSON.stringify(stringData));
      expect(result.current.queryData).toBe(stringData);
    });
  });

  describe('redirectWithQuery', () => {
    test('should navigate with new query data', () => {
      const { result } = renderHookWithRouter('testKey', {});

      const newData = { name: 'new', value: 456 };
      act(() => {
        result.current.redirectWithQuery(newData);
      });

      expect(mockSafeNavigate).toHaveBeenCalledWith(
        expect.stringContaining('testKey='),
      );

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
      expect(urlParams.get('testKey')).toBe(JSON.stringify(newData));
    });

    test('should preserve existing query parameters when adding new one', () => {
      const { result } = renderHookWithRouter('newKey', {}, [
        '/test?existingKey=existingValue',
      ]);

      const newData = { name: 'new' };
      act(() => {
        result.current.redirectWithQuery(newData);
      });

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);

      expect(urlParams.get('existingKey')).toBe('existingValue');
      expect(urlParams.get('newKey')).toBe(JSON.stringify(newData));
    });

    test('should update existing query parameter', () => {
      const initialData = { name: 'old' };
      const { result } = renderHookWithRouter('testKey', {}, [
        `/test?testKey=${encodeURIComponent(JSON.stringify(initialData))}`,
      ]);

      const newData = { name: 'new', value: 789 };
      act(() => {
        result.current.redirectWithQuery(newData);
      });

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
      expect(urlParams.get('testKey')).toBe(JSON.stringify(newData));
    });

    test('should handle complex data in redirectWithQuery', () => {
      const { result } = renderHookWithRouter('complexKey', {});

      const complexData = {
        filters: {
          status: ['active', 'pending'],
          dateRange: { start: '2023-01-01', end: '2023-12-31' },
        },
        sort: { field: 'created_at', direction: 'desc' },
      };

      act(() => {
        result.current.redirectWithQuery(complexData);
      });

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
      expect(urlParams.get('complexKey')).toBe(JSON.stringify(complexData));
    });

    test('should handle primitive values in redirectWithQuery', () => {
      const { result } = renderHookWithRouter('primitiveKey', '');

      act(() => {
        result.current.redirectWithQuery('simple string');
      });

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
      expect(urlParams.get('primitiveKey')).toBe(JSON.stringify('simple string'));
    });

    test('should handle null and undefined values', () => {
      const { result } = renderHookWithRouter('nullKey', {});

      act(() => {
        result.current.redirectWithQuery(null);
      });

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
      expect(urlParams.get('nullKey')).toBe('null');

      act(() => {
        result.current.redirectWithQuery(undefined);
      });

      const secondCalledUrl = mockSafeNavigate.mock.calls[1][0];
      const secondUrlParams = new URLSearchParams(secondCalledUrl.split('?')[1]);
      expect(secondUrlParams.get('nullKey')).toBe('undefined');
    });
  });

  describe('hook interface', () => {
    test('should return correct interface structure', () => {
      const { result } = renderHookWithRouter('testKey', {});

      expect(result.current).toHaveProperty('query');
      expect(result.current).toHaveProperty('queryData');
      expect(result.current).toHaveProperty('redirectWithQuery');
      expect(typeof result.current.redirectWithQuery).toBe('function');
    });

    test('should handle different query keys', () => {
      const { result: result1 } = renderHookWithRouter('key1', {});
      const { result: result2 } = renderHookWithRouter('key2', {});

      expect(result1.current.query).toBeNull();
      expect(result2.current.query).toBeNull();

      const testData = { test: 'data' };
      act(() => {
        result1.current.redirectWithQuery(testData);
      });

      const calledUrl = mockSafeNavigate.mock.calls[0][0];
      const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
      expect(urlParams.get('key1')).toBe(JSON.stringify(testData));
      expect(urlParams.get('key2')).toBeNull();
    });
  });

  describe('URL encoding/decoding', () => {
    test('should handle URL encoded query parameters', () => {
      const testData = { name: 'test with spaces', value: 'special&chars' };
      const encodedData = encodeURIComponent(JSON.stringify(testData));

      const { result } = renderHookWithRouter('testKey', {}, [
        `/test?testKey=${encodedData}`,
      ]);

      expect(result.current.queryData).toEqual(testData);
});
|
||||
|
||||
test('should properly encode data in redirectWithQuery', () => {
|
||||
const { result } = renderHookWithRouter('testKey', {});
|
||||
|
||||
const testData = { name: 'test with spaces', value: 'special&chars' };
|
||||
act(() => {
|
||||
result.current.redirectWithQuery(testData);
|
||||
});
|
||||
|
||||
const calledUrl = mockSafeNavigate.mock.calls[0][0];
|
||||
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
|
||||
const decodedValue = JSON.parse(
|
||||
decodeURIComponent(urlParams.get('testKey') || ''),
|
||||
);
|
||||
expect(decodedValue).toEqual(testData);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -14,10 +14,17 @@ const useUrlQueryData = <T>(

  const query = useMemo(() => urlQuery.get(queryKey), [urlQuery, queryKey]);

  const queryData: T = useMemo(() => (query ? JSON.parse(query) : defaultData), [
    query,
    defaultData,
  ]);
  const queryData: T = useMemo(() => {
    if (query) {
      try {
        return JSON.parse(query);
      } catch (e) {
        console.warn('Failed to parse query as JSON:', query, e);
        return defaultData;
      }
    }
    return defaultData;
  }, [query, defaultData]);

  const redirectWithQuery = useCallback(
    (newQueryData: T): void => {
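For reference, the hook's new parsing behavior amounts to a parse-with-fallback helper. A minimal standalone sketch of the same pattern (`safeParseJson` is a hypothetical name, not part of this change):

// Sketch only: mirrors the hook's try/catch parse with default fallback.
function safeParseJson<T>(query: string | null, defaultData: T): T {
  if (!query) return defaultData;
  try {
    return JSON.parse(query) as T;
  } catch (e) {
    console.warn('Failed to parse query as JSON:', query, e);
    return defaultData;
  }
}

// Malformed JSON no longer throws; it falls back to the default:
safeParseJson('{"name":"test",}', { default: 'value' }); // => { default: 'value' }
safeParseJson('{"name":"test"}', { default: 'value' }); // => { name: 'test' }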
@@ -11,22 +11,23 @@ export const mapQueryDataToApi = <Data extends MapData, Key extends keyof Data>(
): MapQueryDataToApiResult<Record<string, Data>> => {
  const newLegendMap: Record<string, string> = {};

  const preparedResult = data.reduce<Record<string, Data>>((acc, query) => {
    const newResult: Record<string, Data> = {
      ...acc,
      [query[nameField] as string]: {
        ...query,
        ...tableParams?.pagination,
        ...(tableParams?.selectColumns
          ? { selectColumns: tableParams?.selectColumns }
          : null),
      },
    };
  const preparedResult =
    data?.reduce<Record<string, Data>>((acc, query) => {
      const newResult: Record<string, Data> = {
        ...acc,
        [query[nameField] as string]: {
          ...query,
          ...tableParams?.pagination,
          ...(tableParams?.selectColumns
            ? { selectColumns: tableParams?.selectColumns }
            : null),
        },
      };

    newLegendMap[query[nameField] as string] = query.legend;
      newLegendMap[query[nameField] as string] = query.legend;

    return newResult;
  }, {} as Record<string, Data>);
      return newResult;
    }, {} as Record<string, Data>) || {};

  return {
    data: preparedResult,
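The optional chaining plus `|| {}` is the essential change here: an undefined `data` previously threw at `.reduce`. A small standalone illustration of the guard (names are hypothetical, not from this PR):

// Sketch only: `buildMap` stands in for mapQueryDataToApi's reduce.
type Item = { name: string };

function buildMap(data?: Item[]): Record<string, Item> {
  return (
    data?.reduce<Record<string, Item>>(
      (acc, item) => ({ ...acc, [item.name]: item }),
      {},
    ) || {}
  );
}

buildMap(undefined); // => {} instead of a TypeError
buildMap([{ name: 'A' }]); // => { A: { name: 'A' } }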
@@ -46,6 +46,14 @@ export const logsQueryRangeSuccessResponse = {
    ],
  },
};
export const logsQueryRangeEmptyResponse = {
  resultType: '',
  result: [
    {
      queryName: 'A',
    },
  ],
};

export const logsPaginationQueryRangeSuccessResponse = ({
  offset = 0,
@@ -0,0 +1,3 @@
.service-route-tab {
  margin-bottom: 64px;
}
@@ -1,3 +1,5 @@
import './MetricsApplication.styles.scss';

import RouteTab from 'components/RouteTab';
import ROUTES from 'constants/routes';
import DBCall from 'container/MetricsApplication/Tabs/DBCall';
@@ -62,7 +64,12 @@ function MetricsApplication(): JSX.Element {
    <div className="metrics-application-container">
      <ResourceAttributesFilter />
      <ApDexApplication />
      <RouteTab routes={routes} history={history} activeKey={activeKey} />
      <RouteTab
        routes={routes}
        history={history}
        activeKey={activeKey}
        className="service-route-tab"
      />
    </div>
  );
}
@@ -594,6 +594,53 @@ describe('TracesExplorer - ', () => {
      'http://localhost/trace/5765b60ba7cc4ddafe8bdaa9c1b4b246',
    );
  });
  it('trace explorer - trace view should only send order by timestamp in the query', async () => {
    let capturedPayload: QueryRangePayload;
    const orderBy = [
      { columnName: 'id', order: 'desc' },
      { columnName: 'serviceName', order: 'desc' },
    ];
    const defaultOrderBy = [{ columnName: 'timestamp', order: 'desc' }];
    server.use(
      rest.post(`${BASE_URL}/api/v4/query_range`, async (req, res, ctx) => {
        const payload = await req.json();
        capturedPayload = payload;
        return res(ctx.status(200), ctx.json(queryRangeForTraceView));
      }),
    );
    render(
      <QueryBuilderContext.Provider
        value={{
          ...qbProviderValue,
          panelType: PANEL_TYPES.TRACE,
          stagedQuery: {
            ...qbProviderValue.stagedQuery,
            builder: {
              ...qbProviderValue.stagedQuery.builder,
              queryData: [
                {
                  ...qbProviderValue.stagedQuery.builder.queryData[0],
                  orderBy,
                },
              ],
            },
          },
        }}
      >
        <TracesExplorer />
      </QueryBuilderContext.Provider>,
    );

    await waitFor(() => {
      expect(capturedPayload).toBeDefined();
      expect(capturedPayload?.compositeQuery?.builderQueries?.A.orderBy).toEqual(
        defaultOrderBy,
      );
      expect(
        capturedPayload?.compositeQuery?.builderQueries?.A.orderBy,
      ).not.toEqual(orderBy);
    });
  });

  it('test for explorer options', async () => {
    const { getByText, getByTestId } = render(
frontend/src/utils/queryTransformers.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
import { cloneDeep } from 'lodash-es';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';

/**
 * Transforms a query by modifying specific fields in the builder queries
 * @param query - The original query object
 * @param fieldOverrides - Partial object containing fields to override in each builder query
 * @returns A new query object with the modified fields
 */
export const transformBuilderQueryFields = (
  query: Query,
  fieldOverrides: Partial<IBuilderQuery>,
): Query => {
  // Create a deep copy of the query
  const transformedQuery: Query = cloneDeep(query);

  // Update the specified fields for each query in the builder
  if (transformedQuery.builder?.queryData) {
    transformedQuery.builder.queryData = transformedQuery.builder.queryData.map(
      (queryItem) => ({
        ...queryItem,
        ...fieldOverrides,
      }),
    );
  }

  return transformedQuery;
};
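A usage sketch for the new helper (assumes the repo's `Query` shape and a `stagedQuery` value from the query builder context; not part of this change):

// Sketch only: force trace-view ordering to timestamp desc while leaving
// the rest of the staged query untouched, as the TracesExplorer test expects.
import { transformBuilderQueryFields } from 'utils/queryTransformers';

const traceViewQuery = transformBuilderQueryFields(stagedQuery, {
  orderBy: [{ columnName: 'timestamp', order: 'desc' }],
});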
frontend/yalc.lock (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "version": "v1",
  "packages": {
    "@signozhq/table": {
      "signature": "a82ce696abf1d0eaafed9c9884edbe12",
      "file": true
    }
  }
}
frontend/yarn.lock
@@ -3228,6 +3228,11 @@
  dependencies:
    "@babel/runtime" "^7.13.10"

"@radix-ui/react-compose-refs@1.1.2":
  version "1.1.2"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz#a2c4c47af6337048ee78ff6dc0d090b390d2bb30"
  integrity sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==

"@radix-ui/react-context@1.0.1":
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-context/-/react-context-1.0.1.tgz#fe46e67c96b240de59187dcb7a1a50ce3e2ec00c"
@@ -3254,6 +3259,11 @@
    "@radix-ui/react-use-callback-ref" "1.0.1"
    "@radix-ui/react-use-escape-keydown" "1.0.3"

"@radix-ui/react-icons@^1.3.0":
  version "1.3.2"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-icons/-/react-icons-1.3.2.tgz#09be63d178262181aeca5fb7f7bc944b10a7f441"
  integrity sha512-fyQIhGDhzfc9pK2kH6Pl9c4BDJGfMkPqkyIgYDthyNYoNg3wVhoJMMh19WS4Up/1KMPFVpNsT2q3WmXn2N1m6g==

"@radix-ui/react-id@1.0.1":
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-id/-/react-id-1.0.1.tgz#73cdc181f650e4df24f0b6a5b7aa426b912c88c0"
@@ -3328,6 +3338,13 @@
    "@babel/runtime" "^7.13.10"
    "@radix-ui/react-compose-refs" "1.0.1"

"@radix-ui/react-slot@^1.1.0":
  version "1.2.3"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.2.3.tgz#502d6e354fc847d4169c3bc5f189de777f68cfe1"
  integrity sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==
  dependencies:
    "@radix-ui/react-compose-refs" "1.1.2"

"@radix-ui/react-tabs@1.0.4":
  version "1.0.4"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-tabs/-/react-tabs-1.0.4.tgz#993608eec55a5d1deddd446fa9978d2bc1053da2"
@@ -3673,6 +3690,21 @@
  resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-1.1.4.tgz#5d5de5bd9d19b6a3631383db015cc4b70c3f7661"
  integrity sha512-ICZz5szxTq8NcKAsk6LP+nSybPyEcyy8eu2zfxlPQCnJ1YjJP1PglaKLlF0N6+D60gAd3yC5he06BqR8/HxjNg==

"@signozhq/table@file:.yalc/@signozhq/table":
  version "0.0.0"
  dependencies:
    "@radix-ui/react-icons" "^1.3.0"
    "@radix-ui/react-slot" "^1.1.0"
    "@tanstack/react-table" "^8.21.3"
    "@tanstack/react-virtual" "^3.13.9"
    "@types/lodash-es" "^4.17.12"
    class-variance-authority "^0.7.0"
    clsx "^2.1.1"
    lodash-es "^4.17.21"
    lucide-react "^0.445.0"
    tailwind-merge "^2.5.2"
    tailwindcss-animate "^1.0.7"

"@sinclair/typebox@^0.25.16":
  version "0.25.24"
  resolved "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.25.24.tgz"
@@ -3709,6 +3741,13 @@
  dependencies:
    "@tanstack/table-core" "8.20.5"

"@tanstack/react-table@^8.21.3":
  version "8.21.3"
  resolved "https://registry.yarnpkg.com/@tanstack/react-table/-/react-table-8.21.3.tgz#2c38c747a5731c1a07174fda764b9c2b1fb5e91b"
  integrity sha512-5nNMTSETP4ykGegmVkhjcS8tTLW6Vl4axfEGQN3v0zdHYbK4UfoqfPChclTrJ4EoK9QynqAu9oUf8VEmrpZ5Ww==
  dependencies:
    "@tanstack/table-core" "8.21.3"

"@tanstack/react-virtual@3.11.2":
  version "3.11.2"
  resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.11.2.tgz#d6b9bd999c181f0a2edce270c87a2febead04322"
@@ -3716,16 +3755,33 @@
  dependencies:
    "@tanstack/virtual-core" "3.11.2"

"@tanstack/react-virtual@^3.13.9":
  version "3.13.12"
  resolved "https://registry.yarnpkg.com/@tanstack/react-virtual/-/react-virtual-3.13.12.tgz#d372dc2783739cc04ec1a728ca8203937687a819"
  integrity sha512-Gd13QdxPSukP8ZrkbgS2RwoZseTTbQPLnQEn7HY/rqtM+8Zt95f7xKC7N0EsKs7aoz0WzZ+fditZux+F8EzYxA==
  dependencies:
    "@tanstack/virtual-core" "3.13.12"

"@tanstack/table-core@8.20.5":
  version "8.20.5"
  resolved "https://registry.yarnpkg.com/@tanstack/table-core/-/table-core-8.20.5.tgz#3974f0b090bed11243d4107283824167a395cf1d"
  integrity sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==

"@tanstack/table-core@8.21.3":
  version "8.21.3"
  resolved "https://registry.yarnpkg.com/@tanstack/table-core/-/table-core-8.21.3.tgz#2977727d8fc8dfa079112d9f4d4c019110f1732c"
  integrity sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==

"@tanstack/virtual-core@3.11.2":
  version "3.11.2"
  resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.11.2.tgz#00409e743ac4eea9afe5b7708594d5fcebb00212"
  integrity sha512-vTtpNt7mKCiZ1pwU9hfKPhpdVO2sVzFQsxoVBGtOSHxlrRRzYr8iQ2TlwbAcRYCcEiZ9ECAM8kBzH0v2+VzfKw==

"@tanstack/virtual-core@3.13.12":
  version "3.13.12"
  resolved "https://registry.yarnpkg.com/@tanstack/virtual-core/-/virtual-core-3.13.12.tgz#1dff176df9cc8f93c78c5e46bcea11079b397578"
  integrity sha512-1YBOJfRHV4sXUmWsFSf5rQor4Ss82G8dQWLRbnk3GA4jeP8hQt1hxXh0tmflpC0dz3VgEv/1+qwPyLeWkQuPFA==

"@testing-library/dom@^8.5.0":
  version "8.20.0"
  resolved "https://registry.npmjs.org/@testing-library/dom/-/dom-8.20.0.tgz"
@@ -4188,6 +4244,13 @@
  resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz"
  integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==

"@types/lodash-es@^4.17.12":
  version "4.17.12"
  resolved "https://registry.yarnpkg.com/@types/lodash-es/-/lodash-es-4.17.12.tgz#65f6d1e5f80539aa7cfbfc962de5def0cf4f341b"
  integrity sha512-0NgftHUcV4v34VhXm8QBSftKVXtbkBG3ViCjs6+eJ5a6y6Mi/jiFGPc1sC7QK+9BFhWrURE3EOggmWaSxL9OzQ==
  dependencies:
    "@types/lodash" "*"

"@types/lodash-es@^4.17.4":
  version "4.17.7"
  resolved "https://registry.npmjs.org/@types/lodash-es/-/lodash-es-4.17.7.tgz"
@@ -6508,6 +6571,13 @@ cjs-module-lexer@^1.0.0:
  resolved "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz"
  integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==

class-variance-authority@^0.7.0:
  version "0.7.1"
  resolved "https://registry.yarnpkg.com/class-variance-authority/-/class-variance-authority-0.7.1.tgz#4008a798a0e4553a781a57ac5177c9fb5d043787"
  integrity sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==
  dependencies:
    clsx "^2.1.1"

classnames@2.3.2, classnames@2.x, classnames@^2.2.1, classnames@^2.2.3, classnames@^2.2.5, classnames@^2.2.6, classnames@^2.3.1, classnames@^2.3.2:
  version "2.3.2"
  resolved "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz"
@@ -6602,6 +6672,11 @@ clsx@^1.1.1:
  resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz"
  integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==

clsx@^2.1.1:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999"
  integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==

co@^4.6.0:
  version "4.6.0"
  resolved "https://registry.npmjs.org/co/-/co-4.6.0.tgz"
@@ -11897,6 +11972,11 @@ lucide-react@0.498.0:
  resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.498.0.tgz#3109eac93dfd0c1561db7a5cddfe4b9b20c14315"
  integrity sha512-k8IKbvMNV5Dj7CHRrKyIc1wAtmGdEF0r6SCaiGAt5cZ8KnjcEao8mfdydKkWspy65l40MdlcfdK0kT3QrxpnIg==

lucide-react@^0.445.0:
  version "0.445.0"
  resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.445.0.tgz#35c42341e98fbf0475b2a6cf74fd25ef7cbfcd62"
  integrity sha512-YrLf3aAHvmd4dZ8ot+mMdNFrFpJD7YRwQ2pUcBhgqbmxtrMP4xDzIorcj+8y+6kpuXBF4JB0NOCTUWIYetJjgA==

lz-string@^1.4.4:
  version "1.5.0"
  resolved "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz"
@@ -16653,6 +16733,16 @@ table@^6.0.9:
    string-width "^4.2.3"
    strip-ansi "^6.0.1"

tailwind-merge@^2.5.2:
  version "2.6.0"
  resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.6.0.tgz#ac5fb7e227910c038d458f396b7400d93a3142d5"
  integrity sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA==

tailwindcss-animate@^1.0.7:
  version "1.0.7"
  resolved "https://registry.yarnpkg.com/tailwindcss-animate/-/tailwindcss-animate-1.0.7.tgz#318b692c4c42676cc9e67b19b78775742388bef4"
  integrity sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==

tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0:
  version "2.2.1"
  resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz"
go.mod (2 lines changed)
@@ -19,7 +19,7 @@ require (
  github.com/go-openapi/strfmt v0.23.0
  github.com/go-redis/redis/v8 v8.11.5
  github.com/go-redis/redismock/v8 v8.11.5
  github.com/go-viper/mapstructure/v2 v2.2.1
  github.com/go-viper/mapstructure/v2 v2.3.0
  github.com/gojek/heimdall/v7 v7.0.3
  github.com/golang-jwt/jwt/v5 v5.2.2
  github.com/google/uuid v1.6.0
go.sum (4 lines changed)
@@ -321,8 +321,8 @@ github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I=
github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
pkg/cache/memorycache/provider.go (73 lines changed)
@@ -11,11 +11,11 @@ import (
  "github.com/SigNoz/signoz/pkg/factory"
  "github.com/SigNoz/signoz/pkg/types/cachetypes"
  "github.com/SigNoz/signoz/pkg/valuer"
  go_cache "github.com/patrickmn/go-cache"
  gocache "github.com/patrickmn/go-cache"
)

type provider struct {
  cc *go_cache.Cache
  cc *gocache.Cache
  config cache.Config
  settings factory.ScopedProviderSettings
}
@@ -26,50 +26,75 @@ func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] {

func New(ctx context.Context, settings factory.ProviderSettings, config cache.Config) (cache.Cache, error) {
  scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/cache/memorycache")
  return &provider{cc: go_cache.New(config.Memory.TTL, config.Memory.CleanupInterval), settings: scopedProviderSettings, config: config}, nil

  return &provider{
    cc:       gocache.New(config.Memory.TTL, config.Memory.CleanupInterval),
    settings: scopedProviderSettings,
    config:   config,
  }, nil
}

func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error {
  // check if the data being passed is a pointer and is not nil
  err := cachetypes.ValidatePointer(data, "inmemory")
  err := cachetypes.CheckCacheablePointer(data)
  if err != nil {
    return err
  }

  if ttl == 0 {
    provider.settings.Logger().WarnContext(ctx, "zero value for TTL found. defaulting to the base TTL", "cache_key", cacheKey, "default_ttl", provider.config.Memory.TTL)
  if cloneable, ok := data.(cachetypes.Cloneable); ok {
    toCache := cloneable.Clone()
    provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, ttl)
    return nil
  }
  provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), data, ttl)

  toCache, err := data.MarshalBinary()
  if err != nil {
    return err
  }

  provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, ttl)
  return nil
}

func (provider *provider) Get(_ context.Context, orgID valuer.UUID, cacheKey string, dest cachetypes.Cacheable, allowExpired bool) error {
  // check if the destination being passed is a pointer and is not nil
  err := cachetypes.ValidatePointer(dest, "inmemory")
  err := cachetypes.CheckCacheablePointer(dest)
  if err != nil {
    return err
  }

  // check if the destination value is settable
  dstv := reflect.ValueOf(dest)
  if !dstv.Elem().CanSet() {
    return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "destination value is not settable, %s", dstv.Elem())
  }

  data, found := provider.cc.Get(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
  cachedData, found := provider.cc.Get(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
  if !found {
    return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "key miss")
  }

  // check the type compatibility between the src and dest
  srcv := reflect.ValueOf(data)
  if !srcv.Type().AssignableTo(dstv.Type()) {
    return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "src type is not assignable to dst type")
  if cloneable, ok := cachedData.(cachetypes.Cloneable); ok {
    // check if the destination value is settable
    dstv := reflect.ValueOf(dest)
    if !dstv.Elem().CanSet() {
      return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsettable: (value: \"%s\")", dstv.Elem())
    }

    fromCache := cloneable.Clone()

    // check the type compatibility between the src and dest
    srcv := reflect.ValueOf(fromCache)
    if !srcv.Type().AssignableTo(dstv.Type()) {
      return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unassignable: (src: \"%s\", dst: \"%s\")", srcv.Type().String(), dstv.Type().String())
    }

    // set the value from src to dest
    dstv.Elem().Set(srcv.Elem())
    return nil
  }

  // set the value from src to dest
  dstv.Elem().Set(srcv.Elem())
  return nil
  if fromCache, ok := cachedData.([]byte); ok {
    if err = dest.UnmarshalBinary(fromCache); err != nil {
      return err
    }

    return nil
  }

  return errors.NewInternalf(errors.CodeInternal, "unrecognized: (value: \"%s\")", reflect.TypeOf(cachedData).String())
}

func (provider *provider) Delete(_ context.Context, orgID valuer.UUID, cacheKey string) {
pkg/cache/memorycache/provider_test.go (348 lines changed)
@@ -3,247 +3,217 @@ package memorycache
import (
  "context"
  "encoding/json"
  "fmt"
  "strings"
  "sync"
  "testing"
  "time"

  "github.com/SigNoz/signoz/pkg/cache"
  "github.com/SigNoz/signoz/pkg/factory/factorytest"
  "github.com/SigNoz/signoz/pkg/types/cachetypes"
  "github.com/SigNoz/signoz/pkg/valuer"
  "github.com/stretchr/testify/assert"
  "github.com/stretchr/testify/require"
)

// TestNew tests the New function
func TestNew(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  assert.NotNil(t, c)
  assert.NotNil(t, c.(*provider).cc)
}

type CacheableEntity struct {
type CloneableA struct {
  Key string
  Value int
  Expiry time.Duration
}

func (ce CacheableEntity) MarshalBinary() ([]byte, error) {
  return json.Marshal(ce)
func (cloneable *CloneableA) Clone() cachetypes.Cacheable {
  return &CloneableA{
    Key: cloneable.Key,
    Value: cloneable.Value,
    Expiry: cloneable.Expiry,
  }
}

func (ce CacheableEntity) UnmarshalBinary(data []byte) error {
  return nil
func (cloneable *CloneableA) MarshalBinary() ([]byte, error) {
  return json.Marshal(cloneable)
}

type DCacheableEntity struct {
func (cloneable *CloneableA) UnmarshalBinary(data []byte) error {
  return json.Unmarshal(data, cloneable)
}

type CacheableB struct {
  Key string
  Value int
  Expiry time.Duration
}

func (dce DCacheableEntity) MarshalBinary() ([]byte, error) {
  return json.Marshal(dce)
func (cacheable *CacheableB) MarshalBinary() ([]byte, error) {
  return json.Marshal(cacheable)
}

func (dce DCacheableEntity) UnmarshalBinary(data []byte) error {
  return nil
func (cacheable *CacheableB) UnmarshalBinary(data []byte) error {
  return json.Unmarshal(data, cacheable)
}

// TestStore tests the Store function
// this should fail because of nil pointer error
func TestStoreWithNilPointer(t *testing.T) {
  opts := cache.Memory{
func TestCloneableSetWithNilPointer(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  }})
  require.NoError(t, err)
  var storeCacheableEntity *CacheableEntity
  assert.Error(t, c.Set(context.Background(), valuer.GenerateUUID(), "key", storeCacheableEntity, 10*time.Second))

  var cloneable *CloneableA
  assert.Error(t, cache.Set(context.Background(), valuer.GenerateUUID(), "key", cloneable, 10*time.Second))
}

// this should fail because of no pointer error
func TestStoreWithStruct(t *testing.T) {
  opts := cache.Memory{
func TestCacheableSetWithNilPointer(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  }})
  require.NoError(t, err)
  var storeCacheableEntity CacheableEntity
  assert.Error(t, c.Set(context.Background(), valuer.GenerateUUID(), "key", storeCacheableEntity, 10*time.Second))

  var cacheable *CacheableB
  assert.Error(t, cache.Set(context.Background(), valuer.GenerateUUID(), "key", cacheable, 10*time.Second))
}

func TestStoreWithNonNilPointer(t *testing.T) {
  opts := cache.Memory{
func TestCloneableSetGet(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  }})
  require.NoError(t, err)
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  assert.NoError(t, c.Set(context.Background(), valuer.GenerateUUID(), "key", storeCacheableEntity, 10*time.Second))
}

// TestRetrieve tests the Retrieve function
func TestRetrieveWithNilPointer(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }

  orgID := valuer.GenerateUUID()
  assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
  cloneable := &CloneableA{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }

  var retrieveCacheableEntity *CacheableEntity
  assert.NoError(t, cache.Set(context.Background(), orgID, "key", cloneable, 10*time.Second))

  err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  provider := cache.(*provider)
  insideCache, found := provider.cc.Get(strings.Join([]string{orgID.StringValue(), "key"}, "::"))
  assert.True(t, found)
  assert.IsType(t, &CloneableA{}, insideCache)

  cached := new(CloneableA)
  assert.NoError(t, cache.Get(context.Background(), orgID, "key", cached, false))

  assert.Equal(t, cloneable, cached)
  // confirm that the cached cloneable is a different pointer
  assert.NotSame(t, cloneable, cached)
}

func TestCacheableSetGet(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }})
  require.NoError(t, err)

  orgID := valuer.GenerateUUID()
  cacheable := &CacheableB{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }

  assert.NoError(t, cache.Set(context.Background(), orgID, "key", cacheable, 10*time.Second))

  provider := cache.(*provider)
  insideCache, found := provider.cc.Get(strings.Join([]string{orgID.StringValue(), "key"}, "::"))
  assert.True(t, found)
  assert.IsType(t, []byte{}, insideCache)
  assert.Equal(t, "{\"Key\":\"some-random-key\",\"Value\":1,\"Expiry\":1000}", string(insideCache.([]byte)))

  cached := new(CacheableB)
  assert.NoError(t, cache.Get(context.Background(), orgID, "key", cached, false))

  assert.Equal(t, cacheable, cached)
  assert.NotSame(t, cacheable, cached)
}

func TestGetWithNilPointer(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }})
  require.NoError(t, err)

  var cloneable *CloneableA
  assert.Error(t, cache.Get(context.Background(), valuer.GenerateUUID(), "key", cloneable, false))
}

func TestSetGetWithDifferentTypes(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }})
  require.NoError(t, err)

  orgID := valuer.GenerateUUID()

  cloneable := &CloneableA{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  assert.NoError(t, cache.Set(context.Background(), orgID, "key", cloneable, 10*time.Second))

  cachedCacheable := new(CacheableB)
  err = cache.Get(context.Background(), orgID, "key", cachedCacheable, false)
  assert.Error(t, err)
}

func TestRetrieveWitNonPointer(t *testing.T) {
  opts := cache.Memory{
func TestCloneableConcurrentSetGet(t *testing.T) {
  cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  }})
  require.NoError(t, err)
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }

  orgID := valuer.GenerateUUID()
  assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
  numGoroutines := 100
  done := make(chan bool, numGoroutines*2)
  cloneables := make([]*CloneableA, numGoroutines)
  mu := sync.Mutex{}

  var retrieveCacheableEntity CacheableEntity
  for i := 0; i < numGoroutines; i++ {
    go func(id int) {
      cloneable := &CloneableA{
        Key: fmt.Sprintf("key-%d", id),
        Value: id,
        Expiry: 50 * time.Second,
      }
      err := cache.Set(context.Background(), orgID, fmt.Sprintf("key-%d", id), cloneable, 10*time.Second)
      assert.NoError(t, err)
      mu.Lock()
      cloneables[id] = cloneable
      mu.Unlock()
      done <- true
    }(i)
  }

  err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  assert.Error(t, err)
}

func TestRetrieveWithDifferentTypes(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  orgID := valuer.GenerateUUID()
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))

  retrieveCacheableEntity := new(DCacheableEntity)
  err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  assert.Error(t, err)
}

func TestRetrieveWithSameTypes(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  orgID := valuer.GenerateUUID()
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))

  retrieveCacheableEntity := new(CacheableEntity)
  err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  assert.NoError(t, err)
  assert.Equal(t, storeCacheableEntity, retrieveCacheableEntity)
}

// TestRemove tests the Remove function
func TestRemove(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  retrieveCacheableEntity := new(CacheableEntity)
  orgID := valuer.GenerateUUID()
  assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
  c.Delete(context.Background(), orgID, "key")

  err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  assert.Error(t, err)
}

// TestBulkRemove tests the BulkRemove function
func TestBulkRemove(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  orgID := valuer.GenerateUUID()
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  retrieveCacheableEntity := new(CacheableEntity)
  assert.NoError(t, c.Set(context.Background(), orgID, "key1", storeCacheableEntity, 10*time.Second))
  assert.NoError(t, c.Set(context.Background(), orgID, "key2", storeCacheableEntity, 10*time.Second))
  c.DeleteMany(context.Background(), orgID, []string{"key1", "key2"})

  err = c.Get(context.Background(), orgID, "key1", retrieveCacheableEntity, false)
  assert.Error(t, err)

  err = c.Get(context.Background(), orgID, "key2", retrieveCacheableEntity, false)
  assert.Error(t, err)
}

// TestCache tests the cache
func TestCache(t *testing.T) {
  opts := cache.Memory{
    TTL: 10 * time.Second,
    CleanupInterval: 10 * time.Second,
  }
  c, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: opts})
  require.NoError(t, err)
  orgID := valuer.GenerateUUID()
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  retrieveCacheableEntity := new(CacheableEntity)
  assert.NoError(t, c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second))
  err = c.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  assert.NoError(t, err)
  assert.Equal(t, storeCacheableEntity, retrieveCacheableEntity)
  c.Delete(context.Background(), orgID, "key")
  for i := 0; i < numGoroutines; i++ {
    go func(id int) {
      cachedCloneable := new(CloneableA)
      err := cache.Get(context.Background(), orgID, fmt.Sprintf("key-%d", id), cachedCloneable, false)
      // Some keys might not exist due to concurrent access, which is expected
      _ = err
      done <- true
    }(i)
  }

  for i := 0; i < numGoroutines*2; i++ {
    <-done
  }

  for i := 0; i < numGoroutines; i++ {
    cachedCloneable := new(CloneableA)
    assert.NoError(t, cache.Get(context.Background(), orgID, fmt.Sprintf("key-%d", i), cachedCloneable, false))
    assert.Equal(t, fmt.Sprintf("key-%d", i), cachedCloneable.Key)
    assert.Equal(t, i, cachedCloneable.Value)
    // confirm that the cached cacheable is a different pointer
    assert.NotSame(t, cachedCloneable, cloneables[i])
  }
}
pkg/cache/rediscache/provider.go (7 lines changed)
@@ -2,14 +2,13 @@ package rediscache

import (
  "context"
  "errors"
  "strings"
  "time"

  "fmt"

  "github.com/SigNoz/signoz/pkg/cache"
  errorsV2 "github.com/SigNoz/signoz/pkg/errors"
  "github.com/SigNoz/signoz/pkg/errors"
  "github.com/SigNoz/signoz/pkg/factory"
  "github.com/SigNoz/signoz/pkg/types/cachetypes"
  "github.com/SigNoz/signoz/pkg/valuer"
@@ -48,10 +47,12 @@ func (c *provider) Get(ctx context.Context, orgID valuer.UUID, cacheKey string,
  err := c.client.Get(ctx, strings.Join([]string{orgID.StringValue(), cacheKey}, "::")).Scan(dest)
  if err != nil {
    if errors.Is(err, redis.Nil) {
      return errorsV2.Newf(errorsV2.TypeNotFound, errorsV2.CodeNotFound, "key miss")
      return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "key miss")
    }

    return err
  }

  return nil
}
pkg/cache/rediscache/provider_test.go (109 lines changed)
@@ -8,114 +8,49 @@ import (
  "time"

  "github.com/SigNoz/signoz/pkg/factory"
  "github.com/SigNoz/signoz/pkg/factory/factorytest"
  "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
  "github.com/SigNoz/signoz/pkg/types/cachetypes"
  "github.com/SigNoz/signoz/pkg/valuer"
  "github.com/go-redis/redismock/v8"
  "github.com/stretchr/testify/assert"
)

type CacheableEntity struct {
type CacheableA struct {
  Key string
  Value int
  Expiry time.Duration
}

func (ce *CacheableEntity) MarshalBinary() ([]byte, error) {
  return json.Marshal(ce)
func (cacheable *CacheableA) Clone() cachetypes.Cacheable {
  return &CacheableA{
    Key: cacheable.Key,
    Value: cacheable.Value,
    Expiry: cacheable.Expiry,
  }
}

func (ce *CacheableEntity) UnmarshalBinary(data []byte) error {
  return json.Unmarshal(data, ce)
func (cacheable *CacheableA) MarshalBinary() ([]byte, error) {
  return json.Marshal(cacheable)
}

func (cacheable *CacheableA) UnmarshalBinary(data []byte) error {
  return json.Unmarshal(data, cacheable)
}

func TestSet(t *testing.T) {
  db, mock := redismock.NewClientMock()
  cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")}
  storeCacheableEntity := &CacheableEntity{
  providerSettings := instrumentationtest.New().ToProviderSettings()
  cache := &provider{client: db, settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/cache/rediscache")}

  cacheable := &CacheableA{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }

  orgID := valuer.GenerateUUID()
  mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
  _ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)
  mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), cacheable, 10*time.Second).SetVal("ok")

  if err := mock.ExpectationsWereMet(); err != nil {
    t.Errorf("there were unfulfilled expectations: %s", err)
  }
}

func TestGet(t *testing.T) {
  db, mock := redismock.NewClientMock()
  cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")}
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  retrieveCacheableEntity := new(CacheableEntity)

  orgID := valuer.GenerateUUID()
  mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
  _ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)

  data, err := storeCacheableEntity.MarshalBinary()
  assert.NoError(t, err)

  mock.ExpectGet(strings.Join([]string{orgID.StringValue(), "key"}, "::")).SetVal(string(data))
  err = cache.Get(context.Background(), orgID, "key", retrieveCacheableEntity, false)
  if err != nil {
    t.Errorf("unexpected error: %s", err)
  }

  assert.Equal(t, storeCacheableEntity, retrieveCacheableEntity)
  if err := mock.ExpectationsWereMet(); err != nil {
    t.Errorf("there were unfulfilled expectations: %s", err)
  }
}

func TestDelete(t *testing.T) {
  db, mock := redismock.NewClientMock()
  cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")}
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  orgID := valuer.GenerateUUID()

  mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
  _ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)

  mock.ExpectDel(strings.Join([]string{orgID.StringValue(), "key"}, "::")).RedisNil()
  cache.Delete(context.Background(), orgID, "key")

  if err := mock.ExpectationsWereMet(); err != nil {
    t.Errorf("there were unfulfilled expectations: %s", err)
  }
}

func TestDeleteMany(t *testing.T) {
  db, mock := redismock.NewClientMock()
  cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")}
  storeCacheableEntity := &CacheableEntity{
    Key: "some-random-key",
    Value: 1,
    Expiry: time.Microsecond,
  }
  orgID := valuer.GenerateUUID()

  mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
  _ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second)

  mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key2"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil()
  _ = cache.Set(context.Background(), orgID, "key2", storeCacheableEntity, 10*time.Second)

  mock.ExpectDel(strings.Join([]string{orgID.StringValue(), "key"}, "::"), strings.Join([]string{orgID.StringValue(), "key2"}, "::")).RedisNil()
  cache.DeleteMany(context.Background(), orgID, []string{"key", "key2"})

  if err := mock.ExpectationsWereMet(); err != nil {
    t.Errorf("there were unfulfilled expectations: %s", err)
  }
  assert.NoError(t, cache.Set(context.Background(), orgID, "key", cacheable, 10*time.Second))
  assert.NoError(t, mock.ExpectationsWereMet())
}
@@ -9,6 +9,7 @@ import (
  "time"

  "github.com/ClickHouse/clickhouse-go/v2"
  "github.com/SigNoz/signoz/pkg/errors"
  "github.com/SigNoz/signoz/pkg/telemetrystore"
  qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
  "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -205,6 +206,10 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,

  rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, query, args...)
  if err != nil {
    if errors.Is(err, context.DeadlineExceeded) {
      return nil, errors.Newf(errors.TypeTimeout, errors.CodeTimeout, "Query timed out").
        WithAdditional("Try refining your search by adding relevant resource attribute filters")
    }
    return nil, err
  }
  defer rows.Close()
@@ -89,6 +89,9 @@ func newProvider(
    resourceFilterFieldMapper,
    resourceFilterConditionBuilder,
    telemetryMetadataStore,
    telemetrylogs.DefaultFullTextColumn,
    telemetrylogs.BodyJSONStringSearchPrefix,
    telemetrylogs.GetBodyJSONKey,
  )
  logAggExprRewriter := querybuilder.NewAggExprRewriter(
    telemetrylogs.DefaultFullTextColumn,
@@ -4521,24 +4521,46 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result
  }

  queryInfoResult := NewQueryInfoResult(queryRangeParams, version)
  if !(len(result) > 0 && (len(result[0].Series) > 0 || len(result[0].List) > 0)) {
    aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", queryInfoResult.ToMap())
    return
  }

  aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", queryInfoResult.ToMap())

  if !(queryInfoResult.LogsUsed || queryInfoResult.MetricsUsed || queryInfoResult.TracesUsed) {
    return
  }

  referrer := r.Header.Get("Referer")
  if referrer == "" {
  properties := queryInfoResult.ToMap()

  // Check if result is empty or has no data
  if len(result) == 0 {
    aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
    return
  }

  // Check if first result has no series data
  if len(result[0].Series) == 0 {
    // Check if first result has no list data
    if len(result[0].List) == 0 {
      // Check if first result has no table data
      if result[0].Table == nil {
        aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
        return
      }

      if len(result[0].Table.Rows) == 0 {
        aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
        return
      }
    }
  }

  referrer := r.Header.Get("Referer")

  if referrer == "" {
    aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
    return
  }

  properties["referrer"] = referrer

  if matched, _ := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer); matched {
    properties := queryInfoResult.ToMap()

    if dashboardIDRegex, err := regexp.Compile(`/dashboard/([a-f0-9\-]+)/`); err == nil {
      if matches := dashboardIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
@@ -4552,13 +4574,12 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result
      }
    }

    properties["referrer"] = referrer
    properties["module_name"] = "dashboard"
    aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
    return
  }

  if matched, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer); matched {
    properties := queryInfoResult.ToMap()

    if alertIDRegex, err := regexp.Compile(`ruleId=(\d+)`); err == nil {
      if matches := alertIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
@@ -4566,11 +4587,13 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result
      }
    }

    properties["referrer"] = referrer
    properties["module_name"] = "rule"
    aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
    return
  }

  aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}

func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {
@@ -1,6 +1,11 @@
package model

import "encoding/json"
import (
  "encoding/json"
  "maps"

  "github.com/SigNoz/signoz/pkg/types/cachetypes"
)

type GetWaterfallSpansForTraceWithMetadataCache struct {
  StartTime uint64 `json:"startTime"`
@@ -14,6 +19,28 @@ type GetWaterfallSpansForTraceWithMetadataCache struct {
  HasMissingSpans bool `json:"hasMissingSpans"`
}

func (c *GetWaterfallSpansForTraceWithMetadataCache) Clone() cachetypes.Cacheable {
  copyOfServiceNameToTotalDurationMap := make(map[string]uint64)
  maps.Copy(copyOfServiceNameToTotalDurationMap, c.ServiceNameToTotalDurationMap)

  copyOfSpanIdToSpanNodeMap := make(map[string]*Span)
  maps.Copy(copyOfSpanIdToSpanNodeMap, c.SpanIdToSpanNodeMap)

  copyOfTraceRoots := make([]*Span, len(c.TraceRoots))
  copy(copyOfTraceRoots, c.TraceRoots)
  return &GetWaterfallSpansForTraceWithMetadataCache{
    StartTime: c.StartTime,
    EndTime: c.EndTime,
    DurationNano: c.DurationNano,
    TotalSpans: c.TotalSpans,
    TotalErrorSpans: c.TotalErrorSpans,
    ServiceNameToTotalDurationMap: copyOfServiceNameToTotalDurationMap,
    SpanIdToSpanNodeMap: copyOfSpanIdToSpanNodeMap,
    TraceRoots: copyOfTraceRoots,
    HasMissingSpans: c.HasMissingSpans,
  }
}

func (c *GetWaterfallSpansForTraceWithMetadataCache) MarshalBinary() (data []byte, err error) {
  return json.Marshal(c)
}
@@ -29,6 +56,16 @@ type GetFlamegraphSpansForTraceCache struct {
  TraceRoots []*FlamegraphSpan `json:"traceRoots"`
}

func (c *GetFlamegraphSpansForTraceCache) Clone() cachetypes.Cacheable {
  return &GetFlamegraphSpansForTraceCache{
    StartTime: c.StartTime,
    EndTime: c.EndTime,
    DurationNano: c.DurationNano,
    SelectedSpans: c.SelectedSpans,
    TraceRoots: c.TraceRoots,
  }
}

func (c *GetFlamegraphSpansForTraceCache) MarshalBinary() (data []byte, err error) {
  return json.Marshal(c)
}
@@ -2,8 +2,9 @@ package model

import (
  "encoding/json"
  v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
  "time"

  v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
)

type UpdateMetricsMetadata struct {
@@ -94,3 +94,11 @@ func CollisionHandledFinalExpr(

return multiIfStmt, allArgs, nil
}

func GroupByKeys(keys []qbtypes.GroupByKey) []string {
k := []string{}
for _, key := range keys {
k = append(k, "`"+key.Name+"`")
}
return k
}

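The new GroupByKeys helper backtick-quotes each group-by column so callers can hand the result straight to sqlbuilder's GroupBy. A small usage sketch, assuming the embedded qbtypes.GroupByKey shape used elsewhere in this diff:

keys := []qbtypes.GroupByKey{
	{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
	{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "host.name"}},
}
fmt.Println(querybuilder.GroupByKeys(keys))
// [`service.name` `host.name`]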
@@ -38,6 +38,10 @@ type resourceFilterStatementBuilder[T any] struct {
conditionBuilder qbtypes.ConditionBuilder
metadataStore telemetrytypes.MetadataStore
signal telemetrytypes.Signal

fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}

// Ensure interface compliance at compile time
@@ -64,12 +68,18 @@ func NewLogResourceFilterStatementBuilder(
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}

@@ -140,7 +150,11 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
FieldMapper: b.fieldMapper,
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
SkipFullTextFilter: true,
SkipFunctionCalls: true,
Variables: variables,
})

@@ -29,7 +29,10 @@ type filterExpressionVisitor struct {
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool
skipFullTextFilter bool
skipFunctionCalls bool
variables map[string]qbtypes.VariableItem

keysWithWarnings map[string]bool
}

type FilterExprVisitorOpts struct {
@@ -42,6 +45,7 @@ type FilterExprVisitorOpts struct {
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool
SkipFullTextFilter bool
SkipFunctionCalls bool
Variables map[string]qbtypes.VariableItem
}

@@ -57,7 +61,9 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter,
skipFullTextFilter: opts.SkipFullTextFilter,
skipFunctionCalls: opts.SkipFunctionCalls,
variables: opts.Variables,
keysWithWarnings: make(map[string]bool),
}
}

@@ -547,6 +553,10 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an

// VisitFunctionCall handles function calls like has(), hasAny(), etc.
func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any {
if v.skipFunctionCalls {
return "true"
}

// Get function name based on which token is present
var functionName string
if ctx.HAS() != nil {
@@ -690,7 +700,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
}
}

if len(fieldKeysForName) > 1 {
if len(fieldKeysForName) > 1 && !v.keysWithWarnings[keyName] {
// this is a warning state, we must have an unambiguous key
v.warnings = append(v.warnings, fmt.Sprintf(
"key `%s` is ambiguous, found %d different combinations of field context and data type: %v",
@@ -698,6 +708,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
len(fieldKeysForName),
fieldKeysForName,
))
v.keysWithWarnings[keyName] = true
}

return fieldKeysForName

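The keysWithWarnings set makes the ambiguity warning fire once per key instead of once per occurrence of that key in the filter expression. The same warn-once pattern in isolation (illustrative sketch only, not from the diff):

var warnings []string
seen := make(map[string]bool)
warnOnce := func(key string) {
	if seen[key] {
		return // already warned for this key
	}
	seen[key] = true
	warnings = append(warnings, fmt.Sprintf("key `%s` is ambiguous", key))
}
warnOnce("http.status")
warnOnce("http.status") // silent on the second occurrence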
@@ -220,6 +220,7 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT max(timestamp) FROM signoz_traces.distributed_signoz_index_v3").Scan(&tracesLastSeenAt); err == nil {
if tracesLastSeenAt.Unix() != 0 {
stats["telemetry.traces.last_observed.time"] = tracesLastSeenAt.UTC()
stats["telemetry.traces.last_observed.time_unix"] = tracesLastSeenAt.Unix()
}
}

@@ -227,6 +228,7 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT fromUnixTimestamp64Nano(max(timestamp)) FROM signoz_logs.distributed_logs_v2").Scan(&logsLastSeenAt); err == nil {
if logsLastSeenAt.Unix() != 0 {
stats["telemetry.logs.last_observed.time"] = logsLastSeenAt.UTC()
stats["telemetry.logs.last_observed.time_unix"] = logsLastSeenAt.Unix()
}
}

@@ -234,6 +236,7 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT toDateTime(max(unix_milli) / 1000) FROM signoz_metrics.distributed_samples_v4").Scan(&metricsLastSeenAt); err == nil {
if metricsLastSeenAt.Unix() != 0 {
stats["telemetry.metrics.last_observed.time"] = metricsLastSeenAt.UTC()
stats["telemetry.metrics.last_observed.time_unix"] = metricsLastSeenAt.Unix()
}
}

@@ -9,6 +9,7 @@ import (
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"golang.org/x/exp/maps"

"github.com/huandu/go-sqlbuilder"
)
@@ -148,7 +149,7 @@ func (c *conditionBuilder) conditionFor(
}

// if the field is intrinsic, it always exists
if slices.Contains(IntrinsicFields, key.Name) {
if slices.Contains(maps.Keys(IntrinsicFields), key.Name) {
return "true", nil
}

@@ -210,7 +211,7 @@ func (c *conditionBuilder) ConditionFor(
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return condition, nil
}

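Worth noting: golang.org/x/exp/maps.Keys returns a slice (unlike the iterator-based stdlib maps package), so slices.Contains(maps.Keys(IntrinsicFields), key.Name) compiles, but it materializes the key slice on every call. An equivalent constant-time form, offered as an editorial observation rather than part of the change:

if _, ok := IntrinsicFields[key.Name]; ok {
	return "true", nil
}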
@@ -337,7 +337,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorEqual,
value: "GET",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."method"'), 'String') = ?`,
expectedSQL: `JSON_VALUE(body, '$."http"."method"') = ?`,
expectedError: nil,
},
{
@@ -417,7 +417,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorContains,
value: "200",
expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) LIKE LOWER(?)`,
expectedSQL: `LOWER(JSON_VALUE(body, '$."http"."status_code"')) LIKE LOWER(?)`,
expectedError: nil,
},
{
@@ -427,7 +427,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorNotContains,
value: "200",
expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) NOT LIKE LOWER(?)`,
expectedSQL: `LOWER(JSON_VALUE(body, '$."http"."status_code"')) NOT LIKE LOWER(?)`,
expectedError: nil,
},
{

@@ -10,7 +10,57 @@ var (
FieldDataType: telemetrytypes.FieldDataTypeString,
}
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = []string{
"body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version",
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"body": {
Name: "body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_id": {
Name: "trace_id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"span_id": {
Name: "span_id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_flags": {
Name: "trace_flags",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"severity_text": {
Name: "severity_text",
Description: "Log level. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitytext)",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"severity_number": {
Name: "severity_number",
Description: "Numerical value of the severity. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber)",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"scope_name": {
Name: "scope_name",
Description: "Logger name. Learn more about instrumentation scope [here](https://opentelemetry.io/docs/concepts/instrumentation-scope/)",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextScope,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"scope_version": {
Name: "scope_version",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextScope,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
)

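Turning IntrinsicFields into a map keyed by name means one lookup now yields the full field metadata, not just membership. A hypothetical caller (sketch only):

if field, ok := IntrinsicFields["severity_text"]; ok {
	// field.Signal, field.FieldContext and field.FieldDataType are
	// available without a separate metadata query
	fmt.Println(field.Name, field.FieldDataType)
}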
@@ -73,7 +73,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: "body.message = hello",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."message"'), 'String') = ? AND JSON_EXISTS(body, '$."message"'))`,
expectedQuery: `WHERE (JSON_VALUE(body, '$."message"') = ? AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"hello"},
expectedErrorContains: "",
},
@@ -113,7 +113,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: "body.message REGEXP 'a*'",
shouldPass: true,
expectedQuery: `WHERE (match(JSONExtract(JSON_VALUE(body, '$."message"'), 'String'), ?) AND JSON_EXISTS(body, '$."message"'))`,
expectedQuery: `WHERE (match(JSON_VALUE(body, '$."message"'), ?) AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"a*"},
expectedErrorContains: "",
},
@@ -121,7 +121,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: `body.message CONTAINS "hello 'world'"`,
shouldPass: true,
expectedQuery: `WHERE (LOWER(JSONExtract(JSON_VALUE(body, '$."message"'), 'String')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
expectedQuery: `WHERE (LOWER(JSON_VALUE(body, '$."message"')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"%hello 'world'%"},
expectedErrorContains: "",
},
@@ -136,7 +136,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: `body.name IN ('hello', 'world')`,
shouldPass: true,
expectedQuery: `WHERE ((JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ? OR JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ?) AND JSON_EXISTS(body, '$."name"'))`,
expectedQuery: `WHERE ((JSON_VALUE(body, '$."name"') = ? OR JSON_VALUE(body, '$."name"') = ?) AND JSON_EXISTS(body, '$."name"'))`,
expectedArgs: []any{"hello", "world"},
expectedErrorContains: "",
},

@@ -61,7 +61,7 @@ func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
}

// check if it is array
if strings.HasSuffix(key.Name, "[*]") {
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}

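Both the JSONPath-style `[*]` suffix and the shorthand `[]` now mark a key as an array. The check in isolation (sketch, with an illustrative helper name):

func isArrayKey(name string) bool {
	return strings.HasSuffix(name, "[*]") || strings.HasSuffix(name, "[]")
}

// isArrayKey("phones[*]") == true
// isArrayKey("phones[]")  == true
// isArrayKey("phone")     == false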
@@ -74,6 +74,8 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
for _, part := range parts {
if strings.HasSuffix(part, "[*]") {
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[*]")))
} else if strings.HasSuffix(part, "[]") {
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[]")))
} else {
newParts = append(newParts, fmt.Sprintf(`"%s"`, part))
}
@@ -94,8 +96,12 @@ func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, op
return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}

// for all other types, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
if dataType != telemetrytypes.FieldDataTypeString {
// for all types except strings, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}
// for string types, we should compare with the JSON_VALUE
return fmt.Sprintf("JSON_VALUE(body, '$.%s')", getBodyJSONPath(key)), value
}

func GetBodyJSONKeyForExists(_ context.Context, key *telemetrytypes.TelemetryFieldKey, _ qbtypes.FilterOperator, _ any) string {

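Since ClickHouse's JSON_VALUE already returns a String, wrapping it in JSONExtract(..., 'String') was redundant; string comparisons now use JSON_VALUE directly, while every other data type still goes through JSONExtract. A condensed sketch of the resulting branching (helper shapes assumed from this diff):

path := getBodyJSONPath(key) // e.g. "http"."method"
if dataType != telemetrytypes.FieldDataTypeString {
	// non-string types still need an explicit cast via JSONExtract
	return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
}
// strings compare against the raw JSON_VALUE result
return fmt.Sprintf("JSON_VALUE(body, '$.%s')", path), value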
@@ -270,10 +270,11 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(

// Constrain the main query to the rows that appear in the CTE.
tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))

// Group by all dimensions
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
// Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
@@ -290,7 +291,8 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)

} else {
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
@@ -380,7 +382,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}

// Group by dimensions
sb.GroupBy("ALL")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
@@ -492,7 +494,7 @@ func (b *logQueryStatementBuilder) maybeAttachResourceFilter(
return "", nil, err
}

sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)")
sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")

return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}

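On a multi-shard ClickHouse cluster, a plain IN over a subquery that reads a distributed table is re-evaluated on every shard against that shard's local data; GLOBAL IN runs the subquery once on the initiator and broadcasts the result set to all shards, which is what the __resource_filter and __limit_cte lookups need to match fingerprints and series that live on other shards. A standalone sketch of the pattern with huandu/go-sqlbuilder:

sb := sqlbuilder.NewSelectBuilder()
sb.Select("count()").From("signoz_logs.distributed_logs_v2")
sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")
sql, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
// SELECT count() FROM signoz_logs.distributed_logs_v2
// WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)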
@@ -30,6 +30,9 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
fm,
cb,
mockMetadataStore,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
}

@@ -65,7 +68,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,
@@ -104,7 +107,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,

@@ -16,6 +16,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
)

var (
@@ -208,6 +209,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}

keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}

if rows.Err() != nil {
@@ -215,8 +217,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}

staticKeys := []string{"isRoot", "isEntrypoint"}
staticKeys = append(staticKeys, telemetrytraces.IntrinsicFields...)
staticKeys = append(staticKeys, telemetrytraces.CalculatedFields...)
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.IntrinsicFields)...)
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.CalculatedFields)...)

// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
@@ -228,6 +230,19 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}
}
if found {
if field, exists := telemetrytraces.IntrinsicFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
}
continue
}

if field, exists := telemetrytraces.CalculatedFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
}
continue
}
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextSpan,
@@ -361,6 +376,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}

keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}

if rows.Err() != nil {
@@ -368,7 +384,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}

staticKeys := []string{}
staticKeys = append(staticKeys, telemetrylogs.IntrinsicFields...)
staticKeys = append(staticKeys, maps.Keys(telemetrylogs.IntrinsicFields)...)

// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
@@ -380,6 +396,13 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
}
if found {
if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
}
continue
}

keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextLog,

@@ -258,7 +258,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GTE("unix_milli", start),
sb.LT("unix_milli", end),
)
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
@@ -320,7 +321,8 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
sb.AddWhereClause(filterWhere)
}

sb.GroupBy("ALL")
sb.GroupBy("fingerprint")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("(%s) AS filtered_time_series", q), args, nil
@@ -375,7 +377,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDelta(
sb.GTE("unix_milli", start),
sb.LT("unix_milli", end),
)
sb.GroupBy("ALL")
sb.GroupBy("fingerprint", "ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("fingerprint", "ts")

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
@@ -412,7 +415,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
baseSb.GTE("unix_milli", start),
baseSb.LT("unix_milli", end),
)
baseSb.GroupBy("ALL")
baseSb.GroupBy("fingerprint", "ts")
baseSb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
baseSb.OrderBy("fingerprint", "ts")

innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
@@ -438,7 +442,7 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW increase_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
default:
@@ -465,7 +469,8 @@ func (b *metricQueryStatementBuilder) buildSpatialAggregationCTE(
if query.Aggregations[0].ValueFilter != nil {
sb.Where(sb.EQ("per_series_value", query.Aggregations[0].ValueFilter.Value))
}
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args

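The metric builders now enumerate their grouping columns (fingerprint, ts, and the user's group-by keys) instead of relying on ClickHouse's GROUP BY ALL, which groups by every non-aggregate SELECT expression; the grouping set no longer silently tracks whatever lands in the select list. A sketch of the composed calls and the clause they produce:

sb.GroupBy("fingerprint", "ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
// -> GROUP BY fingerprint, ts, `service.name`, `le`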
@@ -49,7 +49,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -82,7 +82,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000)},
},
expectedErr: nil,
@@ -114,7 +114,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_latency", uint64(1747947419000), uint64(1747983448000)},
},
expectedErr: nil,
@@ -147,7 +147,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983448000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -176,7 +176,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,

@@ -11,6 +11,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
)

type conditionBuilder struct {
@@ -129,10 +130,10 @@ func (c *conditionBuilder) conditionFor(
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
// if the field is intrinsic, it always exists
if slices.Contains(IntrinsicFields, tblFieldName) ||
slices.Contains(CalculatedFields, tblFieldName) ||
slices.Contains(IntrinsicFieldsDeprecated, tblFieldName) ||
slices.Contains(CalculatedFieldsDeprecated, tblFieldName) {
if slices.Contains(maps.Keys(IntrinsicFields), tblFieldName) ||
slices.Contains(maps.Keys(CalculatedFields), tblFieldName) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), tblFieldName) ||
slices.Contains(maps.Keys(CalculatedFieldsDeprecated), tblFieldName) {
return "true", nil
}

@@ -205,10 +206,10 @@ func (c *conditionBuilder) ConditionFor(
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) ||
slices.Contains(IntrinsicFieldsDeprecated, field) ||
slices.Contains(CalculatedFields, field) ||
slices.Contains(CalculatedFieldsDeprecated, field) {
if slices.Contains(maps.Keys(IntrinsicFields), field) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
slices.Contains(maps.Keys(CalculatedFields), field) ||
slices.Contains(maps.Keys(CalculatedFieldsDeprecated), field) {
return condition, nil
}

@@ -3,89 +3,320 @@ package telemetrytraces
|
||||
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
|
||||
var (
|
||||
IntrinsicFields = []string{
|
||||
"trace_id",
|
||||
"span_id",
|
||||
"trace_state",
|
||||
"parent_span_id",
|
||||
"flags",
|
||||
"name",
|
||||
"kind",
|
||||
"kind_string",
|
||||
"duration_nano",
|
||||
"status_code",
|
||||
"status_message",
|
||||
"status_code_string",
|
||||
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
|
||||
"trace_id": {
|
||||
Name: "trace_id",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"span_id": {
|
||||
Name: "span_id",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"trace_state": {
|
||||
Name: "trace_state",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"parent_span_id": {
|
||||
Name: "parent_span_id",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"flags": {
|
||||
Name: "flags",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"name": {
|
||||
Name: "name",
|
||||
Description: "Name of the span",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"kind": {
|
||||
Name: "kind",
|
||||
Description: "Span kind enum (number). Use `kind_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"kind_string": {
|
||||
Name: "kind_string",
|
||||
Description: "Span kind enum (string). Known values are ['Client', 'Server', 'Internal', 'Producer', 'Consumer']. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"duration_nano": {
|
||||
Name: "duration_nano",
|
||||
Description: "Span duration",
|
||||
Unit: "ns",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"status_code": {
|
||||
Name: "status_code",
|
||||
Description: "Span status code enum (number). Use `status_code_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"status_message": {
|
||||
Name: "status_message",
|
||||
Description: "Span status message. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"status_code_string": {
|
||||
Name: "status_code_string",
|
||||
Description: "Span status code enum (string). Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
}
|
||||
IntrinsicFieldsDeprecated = []string{
|
||||
"traceID",
|
||||
"spanID",
|
||||
"parentSpanID",
|
||||
"spanKind",
|
||||
"durationNano",
|
||||
"statusCode",
|
||||
"statusMessage",
|
||||
"statusCodeString",
|
||||
IntrinsicFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{
|
||||
"traceID": {
|
||||
Name: "traceID",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"spanID": {
|
||||
Name: "spanID",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"parentSpanID": {
|
||||
Name: "parentSpanID",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"spanKind": {
|
||||
Name: "spanKind",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"durationNano": {
|
||||
Name: "durationNano",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"statusCode": {
|
||||
Name: "statusCode",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"statusMessage": {
|
||||
Name: "statusMessage",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"statusCodeString": {
|
||||
Name: "statusCodeString",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
}
|
||||
|
||||
CalculatedFields = []string{
|
||||
"response_status_code",
|
||||
"external_http_url",
|
||||
"http_url",
|
||||
"external_http_method",
|
||||
"http_method",
|
||||
"http_host",
|
||||
"db_name",
|
||||
"db_operation",
|
||||
"has_error",
|
||||
"is_remote",
|
||||
CalculatedFields = map[string]telemetrytypes.TelemetryFieldKey{
|
||||
"response_status_code": {
|
||||
Name: "response_status_code",
|
||||
Description: "Derived response status code from the HTTP/RPC status code attributes. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#response_status_code)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
"external_http_url": {
|
||||
Name: "external_http_url",
|
||||
Description: "The hostname of the external HTTP URL. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_url)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"http_url": {
|
||||
Name: "http_url",
|
||||
Description: "HTTP URL of the request. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"external_http_method": {
|
||||
Name: "external_http_method",
|
||||
Description: "HTTP request method of client spans. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_method)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"http_method": {
|
||||
Name: "http_method",
|
||||
Description: "The HTTP request method. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_method)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"http_host": {
|
||||
Name: "http_host",
|
||||
Description: "The HTTP host or server address. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_host)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"db_name": {
|
||||
Name: "db_name",
|
||||
Description: "The database name or namespace. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_name)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"db_operation": {
|
||||
Name: "db_operation",
|
||||
Description: "The database operation being performed. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_operation)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
"has_error": {
|
||||
Name: "has_error",
|
||||
Description: "Whether the span has an error. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#has_error)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||
},
|
||||
"is_remote": {
|
||||
Name: "is_remote",
|
||||
Description: "Whether the span is remote. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#is_remote)",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeBool,
|
||||
},
|
||||
}
|
||||
|
||||
CalculatedFieldsDeprecated = []string{
|
||||
"responseStatusCode",
|
||||
"externalHttpUrl",
|
||||
"httpUrl",
|
||||
"externalHttpMethod",
|
||||
"httpMethod",
|
||||
"httpHost",
|
||||
"dbName",
|
||||
"dbOperation",
|
||||
"hasError",
|
||||
"isRemote",
|
||||
CalculatedFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{
	"responseStatusCode": {
		Name:          "responseStatusCode",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeNumber,
	},
	"externalHttpUrl": {
		Name:          "externalHttpUrl",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"httpUrl": {
		Name:          "httpUrl",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"externalHttpMethod": {
		Name:          "externalHttpMethod",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"httpMethod": {
		Name:          "httpMethod",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"httpHost": {
		Name:          "httpHost",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"dbName": {
		Name:          "dbName",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"dbOperation": {
		Name:          "dbOperation",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
	"hasError": {
		Name:          "hasError",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeBool,
	},
	"isRemote": {
		Name:          "isRemote",
		Signal:        telemetrytypes.SignalTraces,
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeBool,
	},
}
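Because the deprecated calculated fields now carry full key metadata instead of bare names, legacy identifiers can be resolved in a single lookup. A minimal sketch of such a resolver; resolveFieldKey and the DerivedFields map name are assumptions for illustration, not part of this diff:

// resolveFieldKey is a hypothetical helper: it resolves a legacy camelCase
// name (e.g. "hasError") through the deprecated map and falls back to the
// current derived fields; "DerivedFields" is an assumed name for the map
// whose snake_case entries appear above.
func resolveFieldKey(name string) (telemetrytypes.TelemetryFieldKey, bool) {
	if key, ok := CalculatedFieldsDeprecated[name]; ok {
		return key, true // caller may attach a deprecation notice
	}
	key, ok := DerivedFields[name]
	return key, ok
}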
SpanSearchScopeRoot = "isroot"
|
||||
SpanSearchScopeEntryPoint = "isentrypoint"
|
||||
|
||||
DefaultFields = []telemetrytypes.TelemetryFieldKey{
|
||||
{
|
||||
Name: "timestamp",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Name: "timestamp",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
{
|
||||
Name: "span_id",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Name: "span_id",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
{
|
||||
Name: "trace_id",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Name: "trace_id",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
{
|
||||
Name: "name",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Name: "name",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
{
|
||||
Name: "service.name",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
Materialized: true,
|
||||
},
|
||||
{
|
||||
Name: "duration_nano",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Name: "duration_nano",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||
},
|
||||
{
|
||||
Name: "response_status_code",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
Name: "response_status_code",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
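DefaultFields is the column set a span list query returns when the caller selects nothing explicitly. A hedged sketch of how a builder might project these keys; columnExprFor is a hypothetical helper, not part of this diff:

// buildDefaultSelect is an illustration only: it projects every default
// field into a SELECT list for the span index table.
func buildDefaultSelect() string {
	cols := make([]string, 0, len(DefaultFields))
	for _, key := range DefaultFields {
		// columnExprFor is assumed: it would map a TelemetryFieldKey to its
		// ClickHouse expression, e.g. a resources_string lookup for
		// resource-context keys and a plain column for span-context keys.
		cols = append(cols, columnExprFor(key))
	}
	return "SELECT " + strings.Join(cols, ", ") // requires the strings import
}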
@@ -305,10 +305,11 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(

	// Constrain the main query to the rows that appear in the CTE.
	tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
	sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
	sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))

	// Group by all dimensions
	sb.GroupBy("ALL")
	sb.GroupBy("ts")
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
	if query.Having != nil && query.Having.Expression != "" {
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
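The IN to GLOBAL IN change matters on clustered ClickHouse: a plain IN subquery is re-evaluated on each shard against its local data, so top-N group keys computed on the initiator may never match on remote shards, while GLOBAL IN materializes the subquery once and broadcasts the result set to every shard. An illustration with made-up table and column names:

// Plain IN: each shard re-runs the subquery locally, so rows whose group
// keys were selected on another shard can be silently dropped.
const perShard = `SELECT ts, svc, count() FROM dist_spans
WHERE (svc) IN (SELECT svc FROM __limit_cte) GROUP BY ts, svc`

// GLOBAL IN: the subquery runs once on the initiator and is shipped to all
// shards as a temporary table, so every shard filters on the same keys.
const broadcast = `SELECT ts, svc, count() FROM dist_spans
WHERE (svc) GLOBAL IN (SELECT svc FROM __limit_cte) GROUP BY ts, svc`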
@@ -323,7 +324,8 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
		finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)

	} else {
		sb.GroupBy("ALL")
		sb.GroupBy("ts")
		sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
		if query.Having != nil && query.Having.Expression != "" {
			rewriter := querybuilder.NewHavingExpressionRewriter()
			rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
@@ -412,7 +414,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
	}

	// Group by dimensions
	sb.GroupBy("ALL")
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

	// Add having clause if needed
	if query.Having != nil && query.Having.Expression != "" && !skipHaving {
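GROUP BY ALL groups by every non-aggregated expression in the SELECT list, so the grouping silently tracks whatever the projection happens to contain; the replacement pins it to the time bucket plus the declared group-by keys. A short sketch of the equivalent builder calls, assuming sb is the same huandu/go-sqlbuilder SelectBuilder used above and dist_spans is a made-up table name:

sb := sqlbuilder.NewSelectBuilder()
sb.Select("toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts", "`service.name`", "count() AS __result_0")
sb.From("dist_spans")
// Explicit keys instead of GROUP BY ALL: adding a new expression to the
// SELECT list no longer changes the grouping.
sb.GroupBy("ts")
sb.GroupBy("`service.name`") // what GroupByKeys(query.GroupBy)... expands to here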
@@ -521,7 +523,7 @@ func (b *traceQueryStatementBuilder) maybeAttachResourceFilter(
		return "", nil, err
	}

	sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)")
	sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")

	return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}
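The resource filter is a two-phase narrowing: fingerprints matching the resource labels are resolved first in a CTE, and spans are then restricted to those fingerprints, now with GLOBAL IN so the fingerprint set reaches every shard. A hedged sketch of the resulting statement shape; the table names follow the test below, the label predicate is a placeholder:

// Shape of the generated statement (illustration only).
const resourceFiltered = `
WITH __resource_filter AS (
    SELECT fingerprint
    FROM signoz_traces.distributed_traces_v3_resource
    WHERE simpleJSONExtractString(labels, 'service.name') = 'redis-manual'
)
SELECT count()
FROM signoz_traces.distributed_signoz_index_v3
WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)`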
@@ -59,7 +59,7 @@ func TestStatementBuilder(t *testing.T) {
				},
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
				Args:  []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
			},
			expectedErr: nil,
@@ -12,22 +12,26 @@ type Cacheable interface {
	encoding.BinaryUnmarshaler
}

func WrapCacheableErrors(rt reflect.Type, caller string) error {
	if rt == nil {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%s: (nil)", caller)
	}

	if rt.Kind() != reflect.Pointer {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%s: (non-pointer \"%s\")", caller, rt.String())
	}

	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "%s: (nil \"%s\")", caller, rt.String())
}
type Cloneable interface {
	// Creates a deep copy of the Cacheable. This method is useful for memory caches to avoid the need for serialization/deserialization. It also prevents
	// race conditions in the memory cache.
	Clone() Cacheable
}

func ValidatePointer(dest any, caller string) error {
func CheckCacheablePointer(dest any) error {
	rv := reflect.ValueOf(dest)
	if rv.Kind() != reflect.Pointer || rv.IsNil() {
		return WrapCacheableErrors(reflect.TypeOf(dest), caller)
		rt := reflect.TypeOf(dest)
		if rt == nil {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cacheable: (nil)")
		}

		if rt.Kind() != reflect.Pointer {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cacheable: (non-pointer \"%s\")", rt.String())
		}

		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cacheable: (nil \"%s\")", rt.String())
	}

	return nil
}
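Cloneable lets the in-memory cache hand out deep copies instead of round-tripping values through BinaryMarshaler/BinaryUnmarshaler. A minimal sketch of a type satisfying both interfaces; cachedRule and its fields are made up for illustration:

// cachedRule is a hypothetical cacheable value, not part of the diff.
type cachedRule struct {
	ID   string
	Tags []string
}

// Clone returns a deep copy so readers of the memory cache never share the
// Tags slice with the stored original.
func (c *cachedRule) Clone() Cacheable {
	cp := *c
	cp.Tags = append([]string(nil), c.Tags...)
	return &cp
}

// Cacheable via encoding/json (requires the encoding/json import).
func (c *cachedRule) MarshalBinary() ([]byte, error) { return json.Marshal(c) }
func (c *cachedRule) UnmarshalBinary(b []byte) error { return json.Unmarshal(b, c) }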
@@ -104,6 +104,8 @@ func detectPlatform() string {
		return "heroku"
	case os.Getenv("RENDER") != "" || os.Getenv("RENDER_SERVICE_ID") != "":
		return "render"
	case os.Getenv("COOLIFY_RESOURCE_UUID") != "":
		return "coolify"
	}

	// Try to detect cloud provider through metadata endpoints
@@ -151,5 +153,16 @@ func detectPlatform() string {
		}
	}

	// Hetzner metadata
	if req, err := http.NewRequest(http.MethodGet, "http://169.254.169.254/hetzner/v1/metadata", nil); err == nil {
		if resp, err := client.Do(req); err == nil {
			resp.Body.Close()
			if resp.StatusCode == 200 {
				return "hetzner"
			}
		}
	}

	return "unknown"
}
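The Hetzner probe reuses the function's existing metadata client; on hosts with no link-local metadata endpoint it only fails fast if that client carries a short timeout. A hedged sketch of a suitable client; the 2-second value is an assumption, not taken from the diff:

// A short timeout keeps metadata probes cheap when 169.254.169.254 is
// unreachable (the exact duration here is an assumption).
client := &http.Client{Timeout: 2 * time.Second}

if req, err := http.NewRequest(http.MethodGet, "http://169.254.169.254/hetzner/v1/metadata", nil); err == nil {
	if resp, err := client.Do(req); err == nil {
		resp.Body.Close() // body is unused; only the status code matters
		if resp.StatusCode == http.StatusOK {
			// running on Hetzner
		}
	}
}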