Compare commits

...

23 Commits

Author SHA1 Message Date
primus-bot[bot]
7c17ac42b1 chore(release): bump to v0.88.1 (#8436) 2025-07-04 16:54:57 +05:30
Aditya Singh
74ee7bb2c7 fix: fix setting nav items from multiple places (#8435)
Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-07-04 15:44:53 +05:30
Sahil Khan
2f5640b2e6 fix: used a new classname for banner container earlier one was in ad block list (#8433)
* fix: used a new classname for banner container earlier one was in ad block list

* chore: minor comment added

* chore: minor comment added
2025-07-04 08:20:32 +00:00
Aditya Singh
121debcecc Fix drag handle obstruction on dashboard and service details page (#8428)
* fix: fix overlapping pylon support btn on dashboard and services details page

* fix: minor refactor

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-07-04 12:12:30 +05:30
Sahil Khan
ff13504a74 fix: performance optimizations in log details view json field rendering (#8324)
* fix: log details filters use data types from log data response as primary data type

* chore: added test cases

* test: add comprehensive unit tests for chooseAutocompleteFromCustomValue function

* fix: added datatypes to util and test cases

* fix: added new tests

* fix: performance optimizations in log details view json field rendering

* fix: fixed import failing tests

* fix: added default html rendering field in body field in log details

* fix: fixed eslint errors

* chore: moved hook to a new file and renamed a state
2025-07-04 04:58:26 +00:00
dependabot[bot]
d4e373443b chore(deps): bump github.com/go-viper/mapstructure/v2 (#8379)
Bumps [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure) from 2.2.1 to 2.3.0.
- [Release notes](https://github.com/go-viper/mapstructure/releases)
- [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md)
- [Commits](https://github.com/go-viper/mapstructure/compare/v2.2.1...v2.3.0)

---
updated-dependencies:
- dependency-name: github.com/go-viper/mapstructure/v2
  dependency-version: 2.3.0
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-07-04 02:40:14 +00:00
Vibhu Pandey
3ccf822d67 fix(statsreporter): add unix timestamps for last observed time (#8426) 2025-07-03 14:25:12 +00:00
Abhi kumar
0e270e6f51 fix: update media styling for changelog renderer and adjust video class (#8425) 2025-07-03 18:35:11 +05:30
Vishal Sharma
749df2a979 fix: nil pointer exception when result[0].Table is nil (#8424)
* fix: nil pointer exception when result[0].Table is nil

* fix: decrease complexity

---------

Co-authored-by: grandwizard28 <vibhupandey28@gmail.com>
2025-07-03 12:13:51 +00:00
Sahil Khan
9ee5d5d599 fix: no logs in list view issue - added logs datasource for formatting and column options in the useoptionsmenu consumption (#8421)
* fix: added logs datasource for formatting and column options in the useoptionsmenu consumption

* fix: changed data source to logs in context log renderer from metrics
2025-07-03 14:32:19 +05:30
Amlan Kumar Nandy
4940dfd46f Revert "chore: fix regex issue (#8393)" (#8418)
This reverts commit 53c58b9983.
2025-07-03 06:04:51 +00:00
Aditya Singh
79a31cc205 Sentry issues (#8264)
* fix: fix breaking json parsing

* fix: sentry fix

* fix: fix sentry edge case

* test: update test for useUrlQueryData

* test: minor fix

---------

Co-authored-by: Aditya Singh <adityasingh@Adityas-MacBook-Pro.local>
2025-07-03 05:33:11 +00:00
Srikanth Chekuri
5102cf2b7b fix: remove deprecated telemetry::metrics::address from config (#8412) 2025-07-02 11:58:40 +00:00
Vishal Sharma
9ec5594648 fix: telemetry query events (#8388)
* fix: telemetry query events

* chore: reduced cyclomatic complexity

* chore: nit
2025-07-02 08:22:54 +00:00
Shaheer Kochai
b6c2ebd6d7 feat: trace to logs custom empty state UI (#8381)
* feat: display custom empty message if no logs on navigating from trace to logs

* chore: write tests for logs explorer normal and custom empty state

* feat: build the custom empty logs UI based on the updated designs

* feat: clear the filters and run stage query on clicking clear filters in logs custom empty state

* fix: update the failing test to match the logs custom empty state

* chore: remove the unnecessary onClick for documentation links

* refactor: overall improvements

* refactor: move the empty logs list config to util

* chore: update the documentation links + remove the explicit height from resources card

* refactor: reuse the EmptyLogsListConfig type in EmptyLogsSearch

* test: update LogsExplorerList tests to reflect changes in documentation links

---------

Co-authored-by: Vishal Sharma <makeavish786@gmail.com>
2025-07-02 07:30:17 +00:00
primus-bot[bot]
9a3a8c8305 chore(release): bump SigNoz to v0.88.0, OTel Collector to v0.128.0 (#8410)
* chore(release): bump to v0.88.0

* chore: bump to v0.111.28

* chore: bump to v0.111.28

---------

Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
Co-authored-by: grandwizard28 <vibhupandey28@gmail.com>
2025-07-02 12:19:54 +05:30
Nageshbansal
2ac45b0174 feat: adds support for Hetzner and coolify deployment platform in statsreporter (#8409) 2025-07-02 06:30:32 +00:00
Srikanth Chekuri
2a53918ebd chore: make queries compatible with 24.1 and fix string json query (#8391) 2025-07-02 05:09:16 +00:00
Shaheer Kochai
9daefeb881 fix: override the stagedQuery orderBy and send order by timestamp in traces view of traces explorer (#8390)
* fix: override the stagedQuery orderBy and send order by timestamp in traces view of traces explorer

* chore: write test for sending order by timestamp in the traces view of traces explorer

* refactor: refactor the query transformer to accept partial query object and override fields
2025-07-01 14:14:07 +00:00
Shaheer Kochai
526cf01cb7 fix: fix the issue of traces filters getting duplicated on switching between the span scopes (#8389)
* fix: fix the issue of changing span scope duplicating filters

* chore: write test for duplicate filters issue on changing the span scope
2025-07-01 07:53:33 +00:00
Amlan Kumar Nandy
cd4766ec2b fix: correct step numbering for non-metric based alerts (#8367) 2025-07-01 05:29:24 +00:00
Amlan Kumar Nandy
2196b58d36 fix: correct query data for cluster details metric view in infra monitoring (#8380) 2025-07-01 05:12:11 +00:00
Amlan Kumar Nandy
53c58b9983 chore: fix regex issue (#8393) 2025-07-01 11:32:07 +07:00
70 changed files with 2108 additions and 706 deletions

View File

@@ -40,7 +40,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
container_name: schema-migrator-sync
command:
- sync
@@ -53,7 +53,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
container_name: schema-migrator-async
command:
- async

View File

@@ -22,7 +22,7 @@ jobs:
- 24.1.2-alpine
- 24.12-alpine
schema-migrator-version:
- v0.111.38
- v0.128.0
postgres-version:
- 15
if: |

View File

@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.87.0
image: signoz/signoz:v0.88.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -206,7 +206,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.42
image: signoz/signoz-otel-collector:v0.128.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -230,7 +230,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
deploy:
restart_policy:
condition: on-failure

View File

@@ -100,7 +100,7 @@ services:
# - "9000:9000"
# - "8123:8123"
# - "9181:9181"
configs:
- source: clickhouse-config
target: /etc/clickhouse-server/config.xml
@@ -110,13 +110,12 @@ services:
target: /etc/clickhouse-server/custom-function.xml
- source: clickhouse-cluster
target: /etc/clickhouse-server/config.d/cluster.xml
volumes:
- clickhouse:/var/lib/clickhouse/
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.87.0
image: signoz/signoz:v0.88.1
command:
- --config=/root/config/prometheus.yml
ports:
@@ -148,7 +147,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.111.42
image: signoz/signoz-otel-collector:v0.128.0
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -174,7 +173,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.111.42
image: signoz/signoz-schema-migrator:v0.128.0
deploy:
restart_policy:
condition: on-failure
@@ -195,7 +194,6 @@ volumes:
name: signoz-sqlite
zookeeper-1:
name: signoz-zookeeper-1
configs:
clickhouse-config:
file: ../common/clickhouse/config.xml
@@ -205,7 +203,6 @@ configs:
file: ../common/clickhouse/custom-function.xml
clickhouse-cluster:
file: ../common/clickhouse/cluster.xml
signoz-prometheus-config:
file: ../common/signoz/prometheus.yml
# If you have multiple dashboard files, you can list them individually:

View File

@@ -74,13 +74,10 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof

View File

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.87.0}
image: signoz/signoz:${VERSION:-v0.88.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -210,7 +210,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -236,7 +236,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-sync
command:
- sync
@@ -247,7 +247,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-async
command:
- async

View File

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.87.0}
image: signoz/signoz:${VERSION:-v0.88.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -142,7 +142,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -164,7 +164,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-sync
command:
- sync
@@ -176,7 +176,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.111.42}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0}
container_name: schema-migrator-async
command:
- async

View File

@@ -74,13 +74,10 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof

View File

@@ -14,8 +14,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -7,8 +7,8 @@
"remove_label_confirm": "This action will remove all the labels. Do you want to proceed?",
"remove_label_success": "Labels cleared",
"alert_form_step1": "Step 1 - Define the metric",
"alert_form_step2": "Step 2 - Define Alert Conditions",
"alert_form_step3": "Step 3 - Alert Configuration",
"alert_form_step2": "Step {{step}} - Define Alert Conditions",
"alert_form_step3": "Step {{step}} - Alert Configuration",
"metric_query_max_limit": "Can not create query. You can create maximum of 5 queries",
"confirm_save_title": "Save Changes",
"confirm_save_content_part1": "Your alert built with",

View File

@@ -191,7 +191,8 @@ function App(): JSX.Element {
// if the user is on basic plan then remove billing
if (isOnBasicPlan) {
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
);
}
@@ -204,7 +205,8 @@ function App(): JSX.Element {
} else {
// if not a cloud user then remove billing and add list licenses route
updatedRoutes = updatedRoutes.filter(
(route) => route?.path !== ROUTES.BILLING,
(route) =>
route?.path !== ROUTES.BILLING && route?.path !== ROUTES.INTEGRATIONS,
);
updatedRoutes = [...updatedRoutes, LIST_LICENSES];
}

View File

@@ -101,13 +101,18 @@
line-height: 28px;
}
.changelog-media-image {
.changelog-media-image,
.changelog-media-video {
height: auto;
width: 100%;
overflow: hidden;
border-radius: 4px;
border: 1px solid var(--bg-slate-400, #1d212d);
}
.changelog-media-video {
margin: 12px 0;
}
}
.lightMode {

View File

@@ -32,7 +32,7 @@ function renderMedia(media: Media): JSX.Element | null {
controls
controlsList="nodownload noplaybackrate"
loop
className="my-3 h-auto w-full rounded"
className="changelog-media-video"
>
<source src={media.url} type={media.mime} />
<track kind="captions" src="" label="No captions available" default />
@@ -56,7 +56,7 @@ function ChangelogRenderer({ changelog }: Props): JSX.Element {
</div>
<span className="changelog-release-date">{formattedReleaseDate}</span>
{changelog.features && changelog.features.length > 0 && (
<div className="changelog-renderer-list flex flex-col gap-7">
<div className="changelog-renderer-list">
{changelog.features.map((feature) => (
<div key={feature.id}>
<h2>{feature.title}</h2>

View File

@@ -71,7 +71,7 @@ function LogDetail({
const [contextQuery, setContextQuery] = useState<Query | undefined>();
const [filters, setFilters] = useState<TagFilter | null>(null);
const [isEdit, setIsEdit] = useState<boolean>(false);
const { initialDataSource, stagedQuery } = useQueryBuilder();
const { stagedQuery } = useQueryBuilder();
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
@@ -81,7 +81,7 @@ function LogDetail({
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.LOGS,
dataSource: DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});

View File

@@ -1,4 +1,7 @@
.app-banner-container {
// Earlier we were having app-banner-container class
// we change it to app-banner-wrapper as the adblocker was blocking the app-banner-container class
// Keep an eye on What classnames are used in the codebase
.app-banner-wrapper {
position: relative;
width: 100%;
}

View File

@@ -613,7 +613,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element {
</Helmet>
{isLoggedIn && (
<div className={cx('app-banner-container')}>
<div className={cx('app-banner-wrapper')}>
{SHOW_TRIAL_EXPIRY_BANNER && (
<div className="trial-expiry-banner">
You are in free trial period. Your free trial will end on{' '}

View File

@@ -1,30 +1,173 @@
.empty-logs-search-container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 240px;
.empty-logs-search-container-content {
.empty-logs-search {
&__container {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 240px;
}
&__content {
display: flex;
flex-direction: column;
gap: 4px;
color: var(--text-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
line-height: 18px;
letter-spacing: -0.07px;
align-items: flex-start;
.empty-state-svg {
height: 50px;
width: 50px;
}
}
&__sub-text {
font-weight: 600;
}
.sub-text {
font-weight: 600;
&__container {
&--custom-message {
height: 445px;
.empty-state-svg {
height: 32px;
width: 32px;
}
.empty-logs-search {
&__header {
display: flex;
align-items: center;
gap: 4px;
}
&__title {
color: var(--bg-vanilla-100);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
}
&__subtitle {
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
}
&__description {
font-size: 14px;
color: var(--text-vanilla-400);
line-height: 20px;
}
&__description-list {
margin: 0;
margin-top: 8px;
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 20px;
letter-spacing: -0.07px;
display: flex;
flex-direction: column;
gap: 6px;
list-style: none;
padding: 0;
font-family: Inter;
}
&__description-list li {
position: relative;
padding-left: 20px;
}
&__description-list li::before {
content: '';
font-family: Inter;
position: absolute;
left: 0;
color: var(--bg-robin-400);
font-weight: bold;
font-size: 16px;
line-height: 20px;
}
&__clear-filters-btn {
display: flex;
width: 468px;
font-family: Inter;
padding: 12px;
justify-content: space-between;
align-items: flex-start;
border-radius: 3px;
border: 1px dashed var(--bg-slate-500);
background: transparent;
color: var(--bg-vanilla-400);
font-size: 14px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.07px;
cursor: pointer;
margin-top: 12px;
}
&__clear-filters-btn-icon {
display: flex;
align-items: center;
gap: 6px;
}
&__row {
display: flex;
flex-direction: row;
align-items: flex-end;
max-width: 825px;
gap: 25px;
justify-content: center;
margin-left: 21px;
}
&__content {
display: flex;
flex-direction: column;
gap: 4px;
min-width: 260px;
}
&__resources-card {
background: var(--bg-ink-400);
border: 1px solid var(--bg-slate-500);
border-radius: 4px;
width: 332px;
}
&__resources-title {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 11px;
font-weight: 600;
line-height: 18px;
letter-spacing: 0.88px;
text-transform: uppercase;
padding: 16px 16px 12px;
border-bottom: 1px solid var(--bg-slate-500);
height: 46px;
}
&__resources-links {
padding: 16px;
display: flex;
flex-direction: column;
gap: 16px;
.learn-more {
height: 18px;
}
}
}
}
}
}

View File

@@ -2,16 +2,24 @@ import './EmptyLogsSearch.styles.scss';
import { Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import LearnMore from 'components/LearnMore/LearnMore';
import { EmptyLogsListConfig } from 'container/LogsExplorerList/utils';
import { Delete } from 'lucide-react';
import { useEffect, useRef } from 'react';
import { DataSource, PanelTypeKeys } from 'types/common/queryBuilder';
interface EmptyLogsSearchProps {
dataSource: DataSource;
panelType: PanelTypeKeys;
customMessage?: EmptyLogsListConfig;
}
export default function EmptyLogsSearch({
dataSource,
panelType,
}: {
dataSource: DataSource;
panelType: PanelTypeKeys;
}): JSX.Element {
customMessage,
}: EmptyLogsSearchProps): JSX.Element {
const logEventCalledRef = useRef(false);
useEffect(() => {
if (!logEventCalledRef.current) {
@@ -30,18 +38,80 @@ export default function EmptyLogsSearch({
}, []);
return (
<div className="empty-logs-search-container">
<div className="empty-logs-search-container-content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text>
<span className="sub-text">This query had no results. </span>
Edit your query and try again!
</Typography.Text>
<div
className={cx('empty-logs-search__container', {
'empty-logs-search__container--custom-message': !!customMessage,
})}
>
<div className="empty-logs-search__row">
<div className="empty-logs-search__content">
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
{customMessage ? (
<>
<div className="empty-logs-search__header">
<Typography.Text className="empty-logs-search__title">
{customMessage.title}
</Typography.Text>
{customMessage.subTitle && (
<Typography.Text className="empty-logs-search__subtitle">
{customMessage.subTitle}
</Typography.Text>
)}
</div>
{Array.isArray(customMessage.description) ? (
<ul className="empty-logs-search__description-list">
{customMessage.description.map((desc) => (
<li key={desc}>{desc}</li>
))}
</ul>
) : (
<Typography.Text className="empty-logs-search__description">
{customMessage.description}
</Typography.Text>
)}
{/* Clear filters button */}
{customMessage.showClearFiltersButton && (
<button
type="button"
className="empty-logs-search__clear-filters-btn"
onClick={customMessage.onClearFilters}
>
{customMessage.clearFiltersButtonText}
<span className="empty-logs-search__clear-filters-btn-icon">
<Delete size={14} />
Clear filters
</span>
</button>
)}
</>
) : (
<Typography.Text>
<span className="empty-logs-search__sub-text">
This query had no results.{' '}
</span>
Edit your query and try again!
</Typography.Text>
)}
</div>
{customMessage?.documentationLinks && (
<div className="empty-logs-search__resources-card">
<div className="empty-logs-search__resources-title">RESOURCES</div>
<div className="empty-logs-search__resources-links">
{customMessage.documentationLinks.map((link) => (
<LearnMore key={link.text} text={link.text} url={link.url} />
))}
</div>
</div>
)}
</div>
</div>
);
}
EmptyLogsSearch.defaultProps = {
customMessage: null,
};

View File

@@ -212,9 +212,12 @@ function QuerySection({
return null;
}
};
const step2Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '2' : '1';
return (
<>
<StepHeading> {t('alert_form_step2')}</StepHeading>
<StepHeading> {t('alert_form_step2', { step: step2Label })}</StepHeading>
<FormContainer>
<div>{renderTabs(alertType)}</div>
{renderQuerySection(currentTab)}

View File

@@ -371,9 +371,11 @@ function RuleOptions({
selectedCategory?.name,
);
const step3Label = alertDef.alertType === 'METRIC_BASED_ALERT' ? '3' : '2';
return (
<>
<StepHeading>{t('alert_form_step3')}</StepHeading>
<StepHeading>{t('alert_form_step3', { step: step3Label })}</StepHeading>
<FormContainer>
{queryCategory === EQueryType.PROM && renderPromRuleOptions()}
{queryCategory !== EQueryType.PROM &&

View File

@@ -2,6 +2,7 @@
overflow: auto;
margin: 8px -8px;
margin-right: 0;
margin-bottom: 64px;
.react-grid-layout {
border: none !important;

View File

@@ -731,7 +731,7 @@ export const getClusterMetricsQueryPayload = (
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
@@ -751,7 +751,7 @@ export const getClusterMetricsQueryPayload = (
filters: {
items: [
{
id: 'd7779183',
id: 'a7da59c7',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -786,12 +786,12 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'available',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
},
@@ -804,14 +804,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDeploymentDesiredKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
id: '55110885',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -846,14 +846,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sDeploymentNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'desired',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
],
queryFormulas: [],
@@ -890,13 +890,13 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
formatForWeb: true,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
@@ -909,14 +909,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetCurrentPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
id: '3c57b4d1',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -951,14 +951,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'current',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
{
aggregateAttribute: {
@@ -969,14 +969,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetDesiredPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
id: '0f49fe64',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1011,14 +1011,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'desired',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
{
aggregateAttribute: {
@@ -1029,14 +1029,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetReadyPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
id: '0bebf625',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1071,14 +1071,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'ready',
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
{
aggregateAttribute: {
@@ -1089,14 +1089,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sStatefulsetUpdatedPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'max',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'D',
filters: {
items: [
{
id: 'd7779183',
id: '1ddacbbe',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1131,14 +1131,14 @@ export const getClusterMetricsQueryPayload = (
},
],
having: [],
legend: `{{${k8sStatefulsetNameKey}}} ({{${k8sNamespaceNameKey}})`,
legend: 'updated',
limit: null,
orderBy: [],
queryName: 'D',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'sum',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'max',
},
],
queryFormulas: [],
@@ -1199,13 +1199,13 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
formatForWeb: true,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
graphType: PANEL_TYPES.TABLE,
query: {
builder: {
queryData: [
@@ -1218,14 +1218,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetCurrentScheduledNodesKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
id: 'e0bea554',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1250,24 +1250,16 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
legend: 'current_nodes',
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'avg',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
{
aggregateAttribute: {
@@ -1278,14 +1270,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetDesiredScheduledNodesKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
id: '741052f7',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1310,24 +1302,16 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
legend: 'desired_nodes',
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'avg',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
{
aggregateAttribute: {
@@ -1338,14 +1322,14 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetReadyNodesKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
aggregateOperator: 'avg',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
id: 'f23759f2',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
@@ -1370,24 +1354,16 @@ export const getClusterMetricsQueryPayload = (
key: k8sDaemonsetNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sDaemonsetNameKey}} ({{${k8sNamespaceNameKey}})`,
legend: 'ready_nodes',
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
reduceTo: 'last',
spaceAggregation: 'avg',
stepInterval: 60,
timeAggregation: 'latest',
timeAggregation: 'avg',
},
],
queryFormulas: [],
@@ -1436,316 +1412,7 @@ export const getClusterMetricsQueryPayload = (
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
start,
end,
},
{
selectedTime: 'GLOBAL_TIME',
graphType: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_active_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobActivePodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'A',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'A',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_successful_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobSuccessfulPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'B',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'B',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_failed_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobFailedPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'C',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'C',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
{
aggregateAttribute: {
dataType: DataTypes.Float64,
id: 'k8s_job_desired_successful_pods--float64--Gauge--true',
isColumn: true,
isJSON: false,
key: k8sJobDesiredSuccessfulPodsKey,
type: 'Gauge',
},
aggregateOperator: 'latest',
dataSource: DataSource.METRICS,
disabled: false,
expression: 'D',
filters: {
items: [
{
id: 'd7779183',
key: {
dataType: DataTypes.String,
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
value: cluster.meta.k8s_cluster_name,
},
],
op: 'AND',
},
functions: [],
groupBy: [
{
dataType: DataTypes.String,
id: 'k8s_job_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sJobNameKey,
type: 'tag',
},
{
dataType: DataTypes.String,
id: 'k8s_namespace_name--string--tag--false',
isColumn: false,
isJSON: false,
key: k8sNamespaceNameKey,
type: 'tag',
},
],
having: [],
legend: `{{${k8sJobNameKey}}} ({{${k8sNamespaceNameKey}})`,
limit: null,
orderBy: [],
queryName: 'D',
reduceTo: 'avg',
spaceAggregation: 'max',
stepInterval: 60,
timeAggregation: 'latest',
},
],
queryFormulas: [],
},
clickhouse_sql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
id: v4(),
promql: [
{
disabled: false,
legend: '',
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},
variables: {},
formatForWeb: false,
formatForWeb: true,
start,
end,
},
@@ -1777,7 +1444,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -1837,7 +1504,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -1897,7 +1564,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -1957,7 +1624,7 @@ export const getClusterMetricsQueryPayload = (
id: 'k8s_cluster_name--string--tag--false',
isColumn: false,
isJSON: false,
key: 'k8s_cluster_name',
key: k8sClusterNameKey,
type: 'tag',
},
op: '=',
@@ -2005,6 +1672,24 @@ export const getClusterMetricsQueryPayload = (
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
id: v4(),
promql: [
@@ -2014,6 +1699,24 @@ export const getClusterMetricsQueryPayload = (
name: 'A',
query: '',
},
{
disabled: false,
legend: '',
name: 'B',
query: '',
},
{
disabled: false,
legend: '',
name: 'C',
query: '',
},
{
disabled: false,
legend: '',
name: 'D',
query: '',
},
],
queryType: EQueryType.QUERY_BUILDER,
},

View File

@@ -32,7 +32,7 @@ function ContextLogRenderer({
const [afterLogPage, setAfterLogPage] = useState<number>(1);
const [logs, setLogs] = useState<ILog[]>([log]);
const { initialDataSource, stagedQuery } = useQueryBuilder();
const { stagedQuery } = useQueryBuilder();
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
@@ -42,7 +42,7 @@ function ContextLogRenderer({
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.METRICS,
dataSource: DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});

View File

@@ -32,7 +32,7 @@ import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { ActionItemProps } from './ActionItem';
import FieldRenderer from './FieldRenderer';
import { TableViewActions } from './TableView/TableViewActions';
import TableViewActions from './TableView/TableViewActions';
import {
filterKeyForField,
findKeyPath,

View File

@@ -1,3 +1,4 @@
/* eslint-disable sonarjs/no-duplicate-string */
import './TableViewActions.styles.scss';
import { Color } from '@signozhq/design-tokens';
@@ -11,10 +12,9 @@ import { OPERATORS } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';
import dompurify from 'dompurify';
import { isEmpty } from 'lodash-es';
import { ArrowDownToDot, ArrowUpFromDot, Ellipsis } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import React, { useMemo, useState } from 'react';
import React, { useCallback, useMemo, useState } from 'react';
import { useLocation } from 'react-router-dom';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
@@ -24,12 +24,11 @@ import {
escapeHtml,
filterKeyForField,
getFieldAttributes,
jsonToDataNodes,
parseFieldValue,
recursiveParseJSON,
removeEscapeCharacters,
unescapeString,
} from '../utils';
import useAsyncJSONProcessing from './useAsyncJSONProcessing';
interface ITableViewActionsProps {
fieldData: Record<string, string>;
@@ -52,7 +51,64 @@ interface ITableViewActionsProps {
const convert = new Convert();
export function TableViewActions(
// Memoized Tree Component
const MemoizedTree = React.memo<{ treeData: any[] }>(({ treeData }) => (
<Tree defaultExpandAll showLine treeData={treeData} />
));
MemoizedTree.displayName = 'MemoizedTree';
// Body Content Component
const BodyContent: React.FC<{
fieldData: Record<string, string>;
record: DataType;
bodyHtml: { __html: string };
}> = React.memo(({ fieldData, record, bodyHtml }) => {
const { isLoading, treeData, error } = useAsyncJSONProcessing(
fieldData.value,
record.field === 'body',
);
// Show JSON tree if available, otherwise show HTML content
if (record.field === 'body' && treeData) {
return <MemoizedTree treeData={treeData} />;
}
if (record.field === 'body' && isLoading) {
return (
<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
<Spin size="small" />
<span style={{ color: Color.BG_SIENNA_400 }}>Processing JSON...</span>
</div>
);
}
if (record.field === 'body' && error) {
return (
<span
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
>
Error parsing Body JSON
</span>
);
}
if (record.field === 'body') {
return (
<span
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
>
<span dangerouslySetInnerHTML={bodyHtml} />
</span>
);
}
return null;
});
BodyContent.displayName = 'BodyContent';
export default function TableViewActions(
props: ITableViewActionsProps,
): React.ReactElement {
const {
@@ -78,44 +134,42 @@ export function TableViewActions(
const { formatTimezoneAdjustedTimestamp } = useTimezone();
if (record.field === 'body') {
const parsedBody = recursiveParseJSON(fieldData.value);
if (!isEmpty(parsedBody)) {
return (
<Tree defaultExpandAll showLine treeData={jsonToDataNodes(parsedBody)} />
);
}
}
const bodyHtml =
record.field === 'body'
? {
__html: convert.toHtml(
dompurify.sanitize(unescapeString(escapeHtml(record.value)), {
FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
}),
),
}
: { __html: '' };
// Memoize bodyHtml computation
const bodyHtml = useMemo(() => {
if (record.field !== 'body') return { __html: '' };
return {
__html: convert.toHtml(
dompurify.sanitize(unescapeString(escapeHtml(record.value)), {
FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
}),
),
};
}, [record.field, record.value]);
const fieldFilterKey = filterKeyForField(fieldData.field);
let textToCopy = fieldData.value;
// remove starting and ending quotes from the value
try {
textToCopy = textToCopy.replace(/^"|"$/g, '');
} catch (error) {
console.error(
'Failed to remove starting and ending quotes from the value',
error,
);
}
// Memoize textToCopy computation
const textToCopy = useMemo(() => {
let text = fieldData.value;
try {
text = text.replace(/^"|"$/g, '');
} catch (error) {
console.error(
'Failed to remove starting and ending quotes from the value',
error,
);
}
return text;
}, [fieldData.value]);
let cleanTimestamp: string;
if (record.field === 'timestamp') {
cleanTimestamp = fieldData.value.replace(/^["']|["']$/g, '');
}
// Memoize cleanTimestamp computation
const cleanTimestamp = useMemo(() => {
if (record.field !== 'timestamp') return '';
return fieldData.value.replace(/^["']|["']$/g, '');
}, [record.field, fieldData.value]);
const renderFieldContent = (): JSX.Element => {
const renderFieldContent = useCallback((): JSX.Element => {
const commonStyles: React.CSSProperties = {
color: Color.BG_SIENNA_400,
whiteSpace: 'pre-wrap',
@@ -124,7 +178,9 @@ export function TableViewActions(
switch (record.field) {
case 'body':
return <span style={commonStyles} dangerouslySetInnerHTML={bodyHtml} />;
return (
<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
);
case 'timestamp':
return (
@@ -141,7 +197,93 @@ export function TableViewActions(
<span style={commonStyles}>{removeEscapeCharacters(fieldData.value)}</span>
);
}
};
}, [
record,
fieldData,
bodyHtml,
formatTimezoneAdjustedTimestamp,
cleanTimestamp,
]);
// Early return for body field with async processing
if (record.field === 'body') {
return (
<div className={cx('value-field', isOpen ? 'open-popover' : '')}>
<CopyClipboardHOC entityKey={fieldFilterKey} textToCopy={textToCopy}>
<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
</CopyClipboardHOC>
{!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
<span className="action-btn">
<Tooltip title="Filter for value">
<Button
className="filter-btn periscope-btn"
icon={
isfilterInLoading ? (
<Spin size="small" />
) : (
<ArrowDownToDot size={14} style={{ transform: 'rotate(90deg)' }} />
)
}
onClick={onClickHandler(
OPERATORS['='],
fieldFilterKey,
parseFieldValue(fieldData.value),
dataType,
)}
/>
</Tooltip>
<Tooltip title="Filter out value">
<Button
className="filter-btn periscope-btn"
icon={
isfilterOutLoading ? (
<Spin size="small" />
) : (
<ArrowUpFromDot size={14} style={{ transform: 'rotate(90deg)' }} />
)
}
onClick={onClickHandler(
OPERATORS['!='],
fieldFilterKey,
parseFieldValue(fieldData.value),
dataType,
)}
/>
</Tooltip>
{!isOldLogsExplorerOrLiveLogsPage && (
<Popover
open={isOpen}
onOpenChange={setIsOpen}
arrow={false}
content={
<div>
<Button
className="group-by-clause"
type="text"
icon={<GroupByIcon />}
onClick={(): Promise<void> | void =>
onGroupByAttribute?.(fieldFilterKey)
}
>
Group By Attribute
</Button>
</div>
}
rootClassName="table-view-actions-content"
trigger="hover"
placement="bottomLeft"
>
<Button
icon={<Ellipsis size={14} />}
className="filter-btn periscope-btn"
/>
</Popover>
)}
</span>
)}
</div>
);
}
return (
<div className={cx('value-field', isOpen ? 'open-popover' : '')}>

View File

@@ -1,7 +1,7 @@
import { render, screen } from '@testing-library/react';
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';
import { TableViewActions } from '../TableViewActions';
import TableViewActions from '../TableViewActions';
// Mock the components and hooks
jest.mock('components/Logs/CopyClipboardHOC', () => ({

View File

@@ -0,0 +1,102 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { isEmpty } from 'lodash-es';
import { useEffect, useRef, useState } from 'react';
import { jsonToDataNodes, recursiveParseJSON } from '../utils';
// Hook for async JSON processing
const useAsyncJSONProcessing = (
value: string,
shouldProcess: boolean,
): {
isLoading: boolean;
treeData: any[] | null;
error: string | null;
} => {
const [jsonState, setJsonState] = useState<{
isLoading: boolean;
treeData: any[] | null;
error: string | null;
}>({
isLoading: false,
treeData: null,
error: null,
});
const processingRef = useRef<boolean>(false);
// eslint-disable-next-line sonarjs/cognitive-complexity
useEffect((): (() => void) => {
if (!shouldProcess || processingRef.current) {
return (): void => {};
}
processingRef.current = true;
setJsonState({ isLoading: true, treeData: null, error: null });
// Option 1: Using setTimeout for non-blocking processing
const processAsync = (): void => {
setTimeout(() => {
try {
const parsedBody = recursiveParseJSON(value);
if (!isEmpty(parsedBody)) {
const treeData = jsonToDataNodes(parsedBody);
setJsonState({ isLoading: false, treeData, error: null });
} else {
setJsonState({ isLoading: false, treeData: null, error: null });
}
} catch (error) {
setJsonState({
isLoading: false,
treeData: null,
error: error instanceof Error ? error.message : 'Parsing failed',
});
} finally {
processingRef.current = false;
}
}, 0);
};
// Option 2: Using requestIdleCallback for better performance
const processWithIdleCallback = (): void => {
if ('requestIdleCallback' in window) {
requestIdleCallback(
// eslint-disable-next-line sonarjs/no-identical-functions
(): void => {
try {
const parsedBody = recursiveParseJSON(value);
if (!isEmpty(parsedBody)) {
const treeData = jsonToDataNodes(parsedBody);
setJsonState({ isLoading: false, treeData, error: null });
} else {
setJsonState({ isLoading: false, treeData: null, error: null });
}
} catch (error) {
setJsonState({
isLoading: false,
treeData: null,
error: error instanceof Error ? error.message : 'Parsing failed',
});
} finally {
processingRef.current = false;
}
},
{ timeout: 1000 },
);
} else {
processAsync();
}
};
processWithIdleCallback();
// Cleanup function
return (): void => {
processingRef.current = false;
};
}, [value, shouldProcess]);
return jsonState;
};
export default useAsyncJSONProcessing;

View File

@@ -0,0 +1,221 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import LogsExplorerViews from 'container/LogsExplorerViews';
import { mockQueryBuilderContextValue } from 'container/LogsExplorerViews/tests/mock';
import { useGetExplorerQueryRange } from 'hooks/queryBuilder/useGetExplorerQueryRange';
import { logsQueryRangeEmptyResponse } from 'mocks-server/__mockdata__/logs_query_range';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { render, screen } from 'tests/test-utils';
const queryRangeURL = 'http://localhost/api/v3/query_range';
const logsQueryServerRequest = ({
response = logsQueryRangeEmptyResponse,
}: {
response?: any;
}): void =>
server.use(
rest.post(queryRangeURL, (req, res, ctx) =>
res(ctx.status(200), ctx.json(response)),
),
);
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useLocation: (): { pathname: string } => ({
pathname: `${ROUTES.LOGS_EXPLORER}`,
}),
}));
jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
usePreferenceSync: (): any => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
},
},
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));
jest.mock(
'container/TimeSeriesView/TimeSeriesView',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function () {
return <div>Time Series Chart</div>;
},
);
jest.mock(
'container/LogsExplorerChart',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function () {
return <div>Histogram Chart</div>;
},
);
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): any => ({
safeNavigate: jest.fn(),
}),
}));
jest.mock('uplot', () => {
const paths = {
spline: jest.fn(),
bars: jest.fn(),
};
const uplotMock = jest.fn(() => ({
paths,
}));
return {
paths,
default: uplotMock,
};
});
jest.mock('hooks/queryBuilder/useGetExplorerQueryRange', () => ({
__esModule: true,
useGetExplorerQueryRange: jest.fn(),
}));
describe('LogsExplorerList - empty states', () => {
beforeEach(() => {
(useGetExplorerQueryRange as jest.Mock).mockReturnValue({
data: { payload: logsQueryRangeEmptyResponse },
});
logsQueryServerRequest({});
});
it('should display custom empty state when navigating from trace to logs with no results', async () => {
const mockTraceToLogsContextValue = {
...mockQueryBuilderContextValue,
panelType: PANEL_TYPES.LIST,
stagedQuery: {
...mockQueryBuilderContextValue.stagedQuery,
builder: {
...mockQueryBuilderContextValue.stagedQuery.builder,
queryData: [
{
...mockQueryBuilderContextValue.stagedQuery.builder.queryData[0],
filters: {
items: [
{
id: 'trace-filter',
key: {
key: 'trace_id',
type: '',
dataType: 'string',
isColumn: true,
},
op: '=',
value: 'test-trace-id',
},
],
op: 'AND',
},
},
],
},
},
};
render(
<QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
<PreferenceContextProvider>
<LogsExplorerViews
selectedView={SELECTED_VIEWS.SEARCH}
showFrequencyChart
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
/>
</PreferenceContextProvider>
</QueryBuilderContext.Provider>,
);
// Check for custom empty state message
expect(screen.getByText('No logs found for this trace.')).toBeInTheDocument();
expect(screen.getByText('This could be because :')).toBeInTheDocument();
expect(
screen.getByText('Logs are not linked to Traces.'),
).toBeInTheDocument();
expect(
screen.getByText('Logs are not being sent to SigNoz.'),
).toBeInTheDocument();
expect(
screen.getByText('No logs are associated with this particular trace/span.'),
).toBeInTheDocument();
// Check for documentation links
expect(screen.getByText('Sending logs to SigNoz')).toBeInTheDocument();
expect(screen.getByText('Correlate traces and logs')).toBeInTheDocument();
});
it('should display empty state when filters are applied and no results are found', async () => {
const mockTraceToLogsContextValue = {
...mockQueryBuilderContextValue,
panelType: PANEL_TYPES.LIST,
stagedQuery: {
...mockQueryBuilderContextValue.stagedQuery,
builder: {
...mockQueryBuilderContextValue.stagedQuery.builder,
queryData: [
{
...mockQueryBuilderContextValue.stagedQuery.builder.queryData[0],
filters: {
items: [
{
id: 'service-filter',
key: {
key: 'service.name',
type: '',
dataType: 'string',
isColumn: true,
},
op: '=',
value: 'test-service-name',
},
],
op: 'AND',
},
},
],
},
},
};
render(
<QueryBuilderContext.Provider value={mockTraceToLogsContextValue as any}>
<PreferenceContextProvider>
<LogsExplorerViews
selectedView={SELECTED_VIEWS.SEARCH}
showFrequencyChart
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
/>
</PreferenceContextProvider>
</QueryBuilderContext.Provider>,
);
// Check for custom empty state message
expect(screen.getByText(/This query had no results./i)).toBeInTheDocument();
expect(
screen.getByText(/Edit your query and try again!/i),
).toBeInTheDocument();
});
});

View File

@@ -29,7 +29,11 @@ import NoLogs from '../NoLogs/NoLogs';
import InfinityTableView from './InfinityTableView';
import { LogsExplorerListProps } from './LogsExplorerList.interfaces';
import { InfinityWrapperStyled } from './styles';
import { convertKeysToColumnFields } from './utils';
import {
convertKeysToColumnFields,
getEmptyLogsListConfig,
isTraceToLogsQuery,
} from './utils';
function Footer(): JSX.Element {
return <Spinner height={20} tip="Getting Logs" />;
@@ -44,7 +48,6 @@ function LogsExplorerList({
isFilterApplied,
}: LogsExplorerListProps): JSX.Element {
const ref = useRef<VirtuosoHandle>(null);
const { initialDataSource } = useQueryBuilder();
const { activeLogId } = useCopyLogLink();
@@ -58,11 +61,17 @@ function LogsExplorerList({
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.METRICS,
dataSource: DataSource.LOGS,
aggregateOperator:
currentStagedQueryData?.aggregateOperator || StringOperators.NOOP,
});
const {
currentQuery,
lastUsedQuery,
redirectWithQueryBuilderData,
} = useQueryBuilder();
const activeLogIndex = useMemo(
() => logs.findIndex(({ id }) => id === activeLogId),
[logs, activeLogId],
@@ -186,6 +195,44 @@ function LogsExplorerList({
selectedFields,
]);
const isTraceToLogsNavigation = useMemo(() => {
if (!currentStagedQueryData) return false;
return isTraceToLogsQuery(currentStagedQueryData);
}, [currentStagedQueryData]);
const handleClearFilters = useCallback((): void => {
const queryIndex = lastUsedQuery ?? 0;
const updatedQuery = currentQuery?.builder.queryData?.[queryIndex];
if (!updatedQuery) return;
if (updatedQuery?.filters?.items) {
updatedQuery.filters.items = [];
}
const preparedQuery = {
...currentQuery,
builder: {
...currentQuery.builder,
queryData: currentQuery.builder.queryData.map((item, idx: number) => ({
...item,
filters: {
...item.filters,
items: idx === queryIndex ? [] : [...(item.filters?.items || [])],
},
})),
},
};
redirectWithQueryBuilderData(preparedQuery);
}, [currentQuery, lastUsedQuery, redirectWithQueryBuilderData]);
const getEmptyStateMessage = useMemo(() => {
if (!isTraceToLogsNavigation) return;
return getEmptyLogsListConfig(handleClearFilters);
}, [isTraceToLogsNavigation, handleClearFilters]);
return (
<div className="logs-list-view-container">
{(isLoading || (isFetching && logs.length === 0)) && <LogsLoading />}
@@ -201,7 +248,11 @@ function LogsExplorerList({
logs.length === 0 &&
!isError &&
isFilterApplied && (
<EmptyLogsSearch dataSource={DataSource.LOGS} panelType="LIST" />
<EmptyLogsSearch
dataSource={DataSource.LOGS}
panelType="LIST"
customMessage={getEmptyStateMessage}
/>
)}
{isError && !isLoading && !isFetching && <LogsError />}

View File

@@ -1,5 +1,9 @@
import { IField } from 'types/api/logs/fields';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import {
IBuilderQuery,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
export const convertKeysToColumnFields = (
keys: BaseAutocompleteData[],
@@ -9,3 +13,56 @@ export const convertKeysToColumnFields = (
name: item.key,
type: item.type as string,
}));
/**
* Determines if a query represents a trace-to-logs navigation
* by checking for the presence of a trace_id filter.
*/
export const isTraceToLogsQuery = (queryData: IBuilderQuery): boolean => {
// Check if this is a trace-to-logs query by looking for trace_id filter
if (!queryData?.filters?.items) return false;
const traceIdFilter = queryData.filters.items.find(
(item: TagFilterItem) => item.key?.key === 'trace_id',
);
return !!traceIdFilter;
};
export type EmptyLogsListConfig = {
title: string;
subTitle: string;
description: string | string[];
documentationLinks?: Array<{
text: string;
url: string;
}>;
showClearFiltersButton?: boolean;
onClearFilters?: () => void;
clearFiltersButtonText?: string;
};
export const getEmptyLogsListConfig = (
handleClearFilters: () => void,
): EmptyLogsListConfig => ({
title: 'No logs found for this trace.',
subTitle: 'This could be because :',
description: [
'Logs are not linked to Traces.',
'Logs are not being sent to SigNoz.',
'No logs are associated with this particular trace/span.',
],
documentationLinks: [
{
text: 'Sending logs to SigNoz',
url: 'https://signoz.io/docs/logs-management/send-logs-to-signoz/',
},
{
text: 'Correlate traces and logs',
url:
'https://signoz.io/docs/traces-management/guides/correlate-traces-and-logs/',
},
],
clearFiltersButtonText: 'Clear filters from Trace to view other logs',
showClearFiltersButton: true,
onClearFilters: handleClearFilters,
});

View File

@@ -153,13 +153,13 @@ function LogsExplorerViews({
const isMultipleQueries = useMemo(
() =>
currentQuery.builder.queryData.length > 1 ||
currentQuery.builder.queryFormulas.length > 0,
currentQuery?.builder?.queryData?.length > 1 ||
currentQuery?.builder?.queryFormulas?.length > 0,
[currentQuery],
);
const isGroupByExist = useMemo(() => {
const groupByCount: number = currentQuery.builder.queryData.reduce<number>(
const groupByCount: number = currentQuery?.builder?.queryData?.reduce<number>(
(acc, query) => acc + query.groupBy.length,
0,
);
@@ -551,19 +551,19 @@ function LogsExplorerViews({
if (!stagedQuery) return [];
if (panelType === PANEL_TYPES.LIST) {
if (listChartData && listChartData.payload.data.result.length > 0) {
if (listChartData && listChartData.payload.data?.result.length > 0) {
return listChartData.payload.data.result;
}
return [];
}
if (!data || data.payload.data.result.length === 0) return [];
if (!data || data.payload.data?.result.length === 0) return [];
const isGroupByExist = stagedQuery.builder.queryData.some(
(queryData) => queryData.groupBy.length > 0,
);
const firstPayloadQuery = data.payload.data.result.find(
const firstPayloadQuery = data.payload.data?.result.find(
(item) => item.queryName === listQuery?.queryName,
);

View File

@@ -98,6 +98,7 @@ interface QueryBuilderSearchV2Props {
hideSpanScopeSelector?: boolean;
// Determines whether to call onChange when a tag is closed
triggerOnChangeOnClose?: boolean;
skipQueryBuilderRedirect?: boolean;
}
export interface Option {
@@ -137,6 +138,7 @@ function QueryBuilderSearchV2(
operatorConfigKey,
hideSpanScopeSelector,
triggerOnChangeOnClose,
skipQueryBuilderRedirect,
} = props;
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
@@ -1038,7 +1040,11 @@ function QueryBuilderSearchV2(
})}
</Select>
{!hideSpanScopeSelector && (
<SpanScopeSelector query={query} onChange={onChange} />
<SpanScopeSelector
query={query}
onChange={onChange}
skipQueryBuilderRedirect={skipQueryBuilderRedirect}
/>
)}
</div>
);
@@ -1056,6 +1062,7 @@ QueryBuilderSearchV2.defaultProps = {
operatorConfigKey: undefined,
hideSpanScopeSelector: true,
triggerOnChangeOnClose: false,
skipQueryBuilderRedirect: false,
};
export default QueryBuilderSearchV2;

View File

@@ -23,6 +23,7 @@ interface SpanFilterConfig {
interface SpanScopeSelectorProps {
onChange?: (value: TagFilter) => void;
query?: IBuilderQuery;
skipQueryBuilderRedirect?: boolean;
}
const SPAN_FILTER_CONFIG: Record<SpanScope, SpanFilterConfig | null> = {
@@ -58,6 +59,7 @@ const SELECT_OPTIONS = [
function SpanScopeSelector({
onChange,
query,
skipQueryBuilderRedirect,
}: SpanScopeSelectorProps): JSX.Element {
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const [selectedScope, setSelectedScope] = useState<SpanScope>(
@@ -79,6 +81,7 @@ function SpanScopeSelector({
if (hasFilter('isEntryPoint')) return SpanScope.ENTRYPOINT_SPANS;
return SpanScope.ALL_SPANS;
};
useEffect(() => {
let queryData = (currentQuery?.builder?.queryData || [])?.find(
(item) => item.queryName === query?.queryName,
@@ -127,13 +130,10 @@ function SpanScopeSelector({
},
}));
if (onChange && query) {
if (skipQueryBuilderRedirect && onChange && query) {
onChange({
...query.filters,
items: getUpdatedFilters(
[...query.filters.items, ...newQuery.builder.queryData[0].filters.items],
true,
),
items: getUpdatedFilters([...query.filters.items], true),
});
setSelectedScope(newScope);
@@ -156,6 +156,7 @@ function SpanScopeSelector({
SpanScopeSelector.defaultProps = {
onChange: undefined,
query: undefined,
skipQueryBuilderRedirect: false,
};
export default SpanScopeSelector;

View File

@@ -3,9 +3,11 @@ import {
render,
RenderResult,
screen,
within,
} from '@testing-library/react';
import { initialQueriesMap } from 'constants/queryBuilder';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { QueryClient, QueryClientProvider } from 'react-query';
import {
IBuilderQuery,
Query,
@@ -13,6 +15,7 @@ import {
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import QueryBuilderSearchV2 from '../QueryBuilderSearchV2';
import SpanScopeSelector from '../SpanScopeSelector';
const mockRedirectWithQueryBuilderData = jest.fn();
@@ -48,6 +51,14 @@ const defaultQuery = {
},
};
const queryClient = new QueryClient({
defaultOptions: {
queries: {
refetchOnWindowFocus: false,
},
},
});
const defaultQueryBuilderQuery: IBuilderQuery = {
...initialQueriesMap.traces.builder.queryData[0],
queryName: 'A',
@@ -76,6 +87,7 @@ const renderWithContext = (
initialQuery = defaultQuery,
onChangeProp?: (value: TagFilter) => void,
queryProp?: IBuilderQuery,
skipQueryBuilderRedirect = false,
): RenderResult =>
render(
<QueryBuilderContext.Provider
@@ -87,12 +99,19 @@ const renderWithContext = (
} as any
}
>
<SpanScopeSelector onChange={onChangeProp} query={queryProp} />
<SpanScopeSelector
onChange={onChangeProp}
query={queryProp}
skipQueryBuilderRedirect={skipQueryBuilderRedirect}
/>
</QueryBuilderContext.Provider>,
);
const selectOption = async (optionText: string): Promise<void> => {
const selector = screen.getByRole('combobox');
const selector = within(screen.getByTestId('span-scope-selector')).getByRole(
'combobox',
);
fireEvent.mouseDown(selector);
// Wait for dropdown to appear
@@ -264,6 +283,7 @@ describe('SpanScopeSelector', () => {
defaultQuery,
mockOnChange,
localQuery,
true,
);
expect(await screen.findByText('All Spans')).toBeInTheDocument();
@@ -283,6 +303,7 @@ describe('SpanScopeSelector', () => {
defaultQuery,
mockOnChange,
localQuery,
true,
);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();
@@ -303,6 +324,7 @@ describe('SpanScopeSelector', () => {
defaultQuery,
mockOnChange,
localQuery,
true,
);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();
@@ -324,6 +346,7 @@ describe('SpanScopeSelector', () => {
defaultQuery,
mockOnChange,
localQuery,
true,
);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();
@@ -350,6 +373,7 @@ describe('SpanScopeSelector', () => {
defaultQuery,
mockOnChange,
localQuery,
true,
);
expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument();
@@ -361,5 +385,60 @@ describe('SpanScopeSelector', () => {
container.querySelector('span[title="All Spans"]'),
).toBeInTheDocument();
});
it('should not duplicate non-scope filters when changing span scope', async () => {
const query = {
...defaultQuery,
builder: {
...defaultQuery.builder,
queryData: [
{
...defaultQuery.builder.queryData[0],
filters: {
items: [createNonScopeFilter('service', 'checkout')],
op: 'AND',
},
},
],
},
};
render(
<QueryClientProvider client={queryClient}>
<QueryBuilderContext.Provider
value={
{
currentQuery: query,
redirectWithQueryBuilderData: mockRedirectWithQueryBuilderData,
} as any
}
>
<QueryBuilderSearchV2
query={query.builder.queryData[0] as any}
onChange={mockOnChange}
hideSpanScopeSelector={false}
/>
</QueryBuilderContext.Provider>
</QueryClientProvider>,
);
expect(await screen.findByText('All Spans')).toBeInTheDocument();
await selectOption('Entrypoint Spans');
expect(mockRedirectWithQueryBuilderData).toHaveBeenCalled();
const redirectQueryArg = mockRedirectWithQueryBuilderData.mock
.calls[0][0] as Query;
const { items } = redirectQueryArg.builder.queryData[0].filters;
// Count non-scope filters
const nonScopeFilters = items.filter(
(filter) => filter.key?.type !== 'spanSearchScope',
);
expect(nonScopeFilters).toHaveLength(1);
expect(nonScopeFilters).toContainEqual(
createNonScopeFilter('service', 'checkout'),
);
});
});
});

View File

@@ -195,11 +195,25 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
};
}, [checkScroll]);
const {
isCloudUser,
isEnterpriseSelfHostedUser,
isCommunityUser,
isCommunityEnterpriseUser,
} = useGetTenantLicense();
const [licenseTag, setLicenseTag] = useState('');
const isAdmin = user.role === USER_ROLES.ADMIN;
const isEditor = user.role === USER_ROLES.EDITOR;
useEffect(() => {
const navShortcuts = (userPreferences?.find(
(preference) => preference.name === USER_PREFERENCES.NAV_SHORTCUTS,
)?.value as unknown) as string[];
const shouldShowIntegrations =
(isCloudUser || isEnterpriseSelfHostedUser) && (isAdmin || isEditor);
if (navShortcuts && isArray(navShortcuts) && navShortcuts.length > 0) {
// nav shortcuts is array of strings
const pinnedItems = navShortcuts
@@ -211,11 +225,14 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
// Set pinned items in the order they were stored
setPinnedMenuItems(pinnedItems);
// Set secondary items with proper isPinned state
setSecondaryMenuItems(
defaultMoreMenuItems.map((item) => ({
...item,
isPinned: pinnedItems.some((pinned) => pinned.itemKey === item.itemKey),
isEnabled:
item.key === ROUTES.INTEGRATIONS
? shouldShowIntegrations
: item.isEnabled,
})),
);
} else {
@@ -225,17 +242,26 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
);
setPinnedMenuItems(defaultPinnedItems);
// Set secondary items with proper isPinned state
setSecondaryMenuItems(
defaultMoreMenuItems.map((item) => ({
...item,
isPinned: defaultPinnedItems.some(
(pinned) => pinned.itemKey === item.itemKey,
),
isEnabled:
item.key === ROUTES.INTEGRATIONS
? shouldShowIntegrations
: item.isEnabled,
})),
);
}
}, [userPreferences]);
}, [
userPreferences,
isCloudUser,
isEnterpriseSelfHostedUser,
isAdmin,
isEditor,
]);
const isOnboardingV3Enabled = featureFlags?.find(
(flag) => flag.name === FeatureKeys.ONBOARDING_V3,
@@ -249,10 +275,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
(flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT,
)?.active;
const [licenseTag, setLicenseTag] = useState('');
const isAdmin = user.role === USER_ROLES.ADMIN;
const isEditor = user.role === USER_ROLES.EDITOR;
const userSettingsMenuItem = {
key: ROUTES.SETTINGS,
label: 'Settings',
@@ -375,13 +397,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
const {
isCloudUser,
isEnterpriseSelfHostedUser,
isCommunityUser,
isCommunityEnterpriseUser,
} = useGetTenantLicense();
const isWorkspaceBlocked = trialInfo?.workSpaceBlock || false;
const openInNewTab = (path: string): void => {
@@ -718,28 +733,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
}
};
useEffect(() => {
if ((isCloudUser || isEnterpriseSelfHostedUser) && (isAdmin || isEditor)) {
// enable integrations for cloud users
setSecondaryMenuItems((prevItems) =>
prevItems.map((item) => ({
...item,
isEnabled: item.key === ROUTES.INTEGRATIONS ? true : item.isEnabled,
})),
);
// enable integrations for pinned menu items
// eslint-disable-next-line sonarjs/no-identical-functions
setPinnedMenuItems((prevItems) =>
prevItems.map((item) => ({
...item,
isEnabled: item.key === ROUTES.INTEGRATIONS ? true : item.isEnabled,
})),
);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [isCloudUser, isEnterpriseSelfHostedUser]);
const onClickVersionHandler = useCallback((): void => {
if (isCloudUser) {
return;

View File

@@ -142,6 +142,7 @@ function Filters({
}}
onChange={handleFilterChange}
hideSpanScopeSelector={false}
skipQueryBuilderRedirect
/>
{filteredSpanIds.length > 0 && (
<div className="pre-next-toggle">

View File

@@ -17,6 +17,7 @@ import { AppState } from 'store/reducers';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import DOCLINKS from 'utils/docLinks';
import { transformBuilderQueryFields } from 'utils/queryTransformers';
import TraceExplorerControls from '../Controls';
import { TracesLoading } from '../TraceLoading/TraceLoading';
@@ -39,9 +40,22 @@ function TracesView({ isFilterApplied }: TracesViewProps): JSX.Element {
QueryParams.pagination,
);
const transformedQuery = useMemo(
() =>
transformBuilderQueryFields(stagedQuery || initialQueriesMap.traces, {
orderBy: [
{
columnName: 'timestamp',
order: 'desc',
},
],
}),
[stagedQuery],
);
const { data, isLoading, isFetching, isError } = useGetQueryRange(
{
query: stagedQuery || initialQueriesMap.traces,
query: transformedQuery,
graphType: panelType || PANEL_TYPES.TRACE,
selectedTime: 'GLOBAL_TIME',
globalSelectedInterval: globalSelectedTime,

View File

@@ -0,0 +1,306 @@
/* eslint-disable sonarjs/no-duplicate-string */
/* eslint-disable @typescript-eslint/explicit-function-return-type */
import { act, renderHook } from '@testing-library/react';
import { createMemoryHistory } from 'history';
import { Router } from 'react-router-dom';
import useUrlQueryData from '../useUrlQueryData';
// Mock the useSafeNavigate hook
const mockSafeNavigate = jest.fn();
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: () => ({
safeNavigate: mockSafeNavigate,
}),
}));
describe('useUrlQueryData', () => {
beforeEach(() => {
jest.clearAllMocks();
});
const renderHookWithRouter = (
queryKey: string,
defaultData?: any,
initialEntries: string[] = ['/test'],
) => {
const history = createMemoryHistory({ initialEntries });
// Mock window.location.search to match the current route
Object.defineProperty(window, 'location', {
value: {
search: history.location.search,
pathname: history.location.pathname,
origin: 'http://localhost',
},
writable: true,
});
return renderHook(() => useUrlQueryData(queryKey, defaultData), {
wrapper: ({ children }) => <Router history={history}>{children}</Router>,
});
};
describe('query parsing', () => {
test('should parse valid JSON query parameter', () => {
const testData = { name: 'test', value: 123 };
const { result } = renderHookWithRouter('testKey', {}, [
`/test?testKey=${encodeURIComponent(JSON.stringify(testData))}`,
]);
expect(result.current.query).toBe(JSON.stringify(testData));
expect(result.current.queryData).toEqual(testData);
});
test('should return default data when query parameter is not present', () => {
const defaultData = { default: 'value' };
const { result } = renderHookWithRouter('testKey', defaultData);
expect(result.current.query).toBeNull();
expect(result.current.queryData).toEqual(defaultData);
});
test('should return default data when query parameter is empty', () => {
const defaultData = { default: 'value' };
const { result } = renderHookWithRouter('testKey', defaultData, [
'/test?testKey=',
]);
expect(result.current.query).toBe('');
expect(result.current.queryData).toEqual(defaultData);
});
test('should handle invalid JSON and return default data', () => {
const defaultData = { default: 'value' };
const consoleSpy = jest.spyOn(console, 'warn').mockImplementation();
const { result } = renderHookWithRouter('testKey', defaultData, [
'/test?testKey=invalid-json',
]);
expect(result.current.query).toBe('invalid-json');
expect(result.current.queryData).toEqual(defaultData);
expect(consoleSpy).toHaveBeenCalledWith(
'Failed to parse query as JSON:',
'invalid-json',
expect.any(Error),
);
consoleSpy.mockRestore();
});
test('should handle malformed JSON and return default data', () => {
const defaultData = { default: 'value' };
const consoleSpy = jest.spyOn(console, 'warn').mockImplementation();
const { result } = renderHookWithRouter(
'testKey',
defaultData,
['/test?testKey={"name":"test",}'], // Missing closing brace
);
expect(result.current.query).toBe('{"name":"test",}');
expect(result.current.queryData).toEqual(defaultData);
expect(consoleSpy).toHaveBeenCalled();
consoleSpy.mockRestore();
});
test('should handle complex nested objects', () => {
const complexData = {
users: [
{ id: 1, name: 'John', settings: { theme: 'dark', notifications: true } },
{
id: 2,
name: 'Jane',
settings: { theme: 'light', notifications: false },
},
],
metadata: {
total: 2,
page: 1,
},
};
const { result } = renderHookWithRouter('complexKey', {}, [
`/test?complexKey=${encodeURIComponent(JSON.stringify(complexData))}`,
]);
expect(result.current.query).toBe(JSON.stringify(complexData));
expect(result.current.queryData).toEqual(complexData);
});
test('should handle primitive values', () => {
const stringData = 'simple string';
const { result } = renderHookWithRouter('stringKey', '', [
`/test?stringKey=${encodeURIComponent(JSON.stringify(stringData))}`,
]);
expect(result.current.query).toBe(JSON.stringify(stringData));
expect(result.current.queryData).toBe(stringData);
});
});
describe('redirectWithQuery', () => {
test('should navigate with new query data', () => {
const { result } = renderHookWithRouter('testKey', {});
const newData = { name: 'new', value: 456 };
act(() => {
result.current.redirectWithQuery(newData);
});
expect(mockSafeNavigate).toHaveBeenCalledWith(
expect.stringContaining('testKey='),
);
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('testKey')).toBe(JSON.stringify(newData));
});
test('should preserve existing query parameters when adding new one', () => {
const { result } = renderHookWithRouter('newKey', {}, [
'/test?existingKey=existingValue',
]);
const newData = { name: 'new' };
act(() => {
result.current.redirectWithQuery(newData);
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('existingKey')).toBe('existingValue');
expect(urlParams.get('newKey')).toBe(JSON.stringify(newData));
});
test('should update existing query parameter', () => {
const initialData = { name: 'old' };
const { result } = renderHookWithRouter('testKey', {}, [
`/test?testKey=${encodeURIComponent(JSON.stringify(initialData))}`,
]);
const newData = { name: 'new', value: 789 };
act(() => {
result.current.redirectWithQuery(newData);
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('testKey')).toBe(JSON.stringify(newData));
});
test('should handle complex data in redirectWithQuery', () => {
const { result } = renderHookWithRouter('complexKey', {});
const complexData = {
filters: {
status: ['active', 'pending'],
dateRange: { start: '2023-01-01', end: '2023-12-31' },
},
sort: { field: 'created_at', direction: 'desc' },
};
act(() => {
result.current.redirectWithQuery(complexData);
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('complexKey')).toBe(JSON.stringify(complexData));
});
test('should handle primitive values in redirectWithQuery', () => {
const { result } = renderHookWithRouter('primitiveKey', '');
act(() => {
result.current.redirectWithQuery('simple string');
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('primitiveKey')).toBe(JSON.stringify('simple string'));
});
test('should handle null and undefined values', () => {
const { result } = renderHookWithRouter('nullKey', {});
act(() => {
result.current.redirectWithQuery(null);
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('nullKey')).toBe('null');
act(() => {
result.current.redirectWithQuery(undefined);
});
const secondCalledUrl = mockSafeNavigate.mock.calls[1][0];
const secondUrlParams = new URLSearchParams(secondCalledUrl.split('?')[1]);
expect(secondUrlParams.get('nullKey')).toBe('undefined');
});
});
describe('hook interface', () => {
test('should return correct interface structure', () => {
const { result } = renderHookWithRouter('testKey', {});
expect(result.current).toHaveProperty('query');
expect(result.current).toHaveProperty('queryData');
expect(result.current).toHaveProperty('redirectWithQuery');
expect(typeof result.current.redirectWithQuery).toBe('function');
});
test('should handle different query keys', () => {
const { result: result1 } = renderHookWithRouter('key1', {});
const { result: result2 } = renderHookWithRouter('key2', {});
expect(result1.current.query).toBeNull();
expect(result2.current.query).toBeNull();
const testData = { test: 'data' };
act(() => {
result1.current.redirectWithQuery(testData);
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
expect(urlParams.get('key1')).toBe(JSON.stringify(testData));
expect(urlParams.get('key2')).toBeNull();
});
});
describe('URL encoding/decoding', () => {
test('should handle URL encoded query parameters', () => {
const testData = { name: 'test with spaces', value: 'special&chars' };
const encodedData = encodeURIComponent(JSON.stringify(testData));
const { result } = renderHookWithRouter('testKey', {}, [
`/test?testKey=${encodedData}`,
]);
expect(result.current.queryData).toEqual(testData);
});
test('should properly encode data in redirectWithQuery', () => {
const { result } = renderHookWithRouter('testKey', {});
const testData = { name: 'test with spaces', value: 'special&chars' };
act(() => {
result.current.redirectWithQuery(testData);
});
const calledUrl = mockSafeNavigate.mock.calls[0][0];
const urlParams = new URLSearchParams(calledUrl.split('?')[1]);
const decodedValue = JSON.parse(
decodeURIComponent(urlParams.get('testKey') || ''),
);
expect(decodedValue).toEqual(testData);
});
});
});

View File

@@ -14,10 +14,17 @@ const useUrlQueryData = <T>(
const query = useMemo(() => urlQuery.get(queryKey), [urlQuery, queryKey]);
const queryData: T = useMemo(() => (query ? JSON.parse(query) : defaultData), [
query,
defaultData,
]);
const queryData: T = useMemo(() => {
if (query) {
try {
return JSON.parse(query);
} catch (e) {
console.warn('Failed to parse query as JSON:', query, e);
return defaultData;
}
}
return defaultData;
}, [query, defaultData]);
const redirectWithQuery = useCallback(
(newQueryData: T): void => {

View File

@@ -11,22 +11,23 @@ export const mapQueryDataToApi = <Data extends MapData, Key extends keyof Data>(
): MapQueryDataToApiResult<Record<string, Data>> => {
const newLegendMap: Record<string, string> = {};
const preparedResult = data.reduce<Record<string, Data>>((acc, query) => {
const newResult: Record<string, Data> = {
...acc,
[query[nameField] as string]: {
...query,
...tableParams?.pagination,
...(tableParams?.selectColumns
? { selectColumns: tableParams?.selectColumns }
: null),
},
};
const preparedResult =
data?.reduce<Record<string, Data>>((acc, query) => {
const newResult: Record<string, Data> = {
...acc,
[query[nameField] as string]: {
...query,
...tableParams?.pagination,
...(tableParams?.selectColumns
? { selectColumns: tableParams?.selectColumns }
: null),
},
};
newLegendMap[query[nameField] as string] = query.legend;
newLegendMap[query[nameField] as string] = query.legend;
return newResult;
}, {} as Record<string, Data>);
return newResult;
}, {} as Record<string, Data>) || {};
return {
data: preparedResult,

View File

@@ -46,6 +46,14 @@ export const logsQueryRangeSuccessResponse = {
],
},
};
// Fixture for a query_range response that matched no logs: the query entry
// ("A") is present but carries no list/series payload. NOTE(review): shape
// inferred from the sibling logsQueryRangeSuccessResponse — confirm against
// the actual API contract for empty results.
export const logsQueryRangeEmptyResponse = {
resultType: '',
result: [
{
queryName: 'A',
},
],
};
export const logsPaginationQueryRangeSuccessResponse = ({
offset = 0,

View File

@@ -0,0 +1,3 @@
.service-route-tab {
// Bottom spacing so page content isn't obscured at the end of the tab;
// presumably clears the floating support/drag-handle widget — TODO confirm.
margin-bottom: 64px;
}

View File

@@ -1,3 +1,5 @@
import './MetricsApplication.styles.scss';
import RouteTab from 'components/RouteTab';
import ROUTES from 'constants/routes';
import DBCall from 'container/MetricsApplication/Tabs/DBCall';
@@ -62,7 +64,12 @@ function MetricsApplication(): JSX.Element {
<div className="metrics-application-container">
<ResourceAttributesFilter />
<ApDexApplication />
<RouteTab routes={routes} history={history} activeKey={activeKey} />
<RouteTab
routes={routes}
history={history}
activeKey={activeKey}
className="service-route-tab"
/>
</div>
);
}

View File

@@ -594,6 +594,53 @@ describe('TracesExplorer - ', () => {
'http://localhost/trace/5765b60ba7cc4ddafe8bdaa9c1b4b246',
);
});
it('trace explorer - trace view should only send order by timestamp in the query', async () => {
let capturedPayload: QueryRangePayload;
const orderBy = [
{ columnName: 'id', order: 'desc' },
{ columnName: 'serviceName', order: 'desc' },
];
const defaultOrderBy = [{ columnName: 'timestamp', order: 'desc' }];
server.use(
rest.post(`${BASE_URL}/api/v4/query_range`, async (req, res, ctx) => {
const payload = await req.json();
capturedPayload = payload;
return res(ctx.status(200), ctx.json(queryRangeForTraceView));
}),
);
render(
<QueryBuilderContext.Provider
value={{
...qbProviderValue,
panelType: PANEL_TYPES.TRACE,
stagedQuery: {
...qbProviderValue.stagedQuery,
builder: {
...qbProviderValue.stagedQuery.builder,
queryData: [
{
...qbProviderValue.stagedQuery.builder.queryData[0],
orderBy,
},
],
},
},
}}
>
<TracesExplorer />
</QueryBuilderContext.Provider>,
);
await waitFor(() => {
expect(capturedPayload).toBeDefined();
expect(capturedPayload?.compositeQuery?.builderQueries?.A.orderBy).toEqual(
defaultOrderBy,
);
expect(
capturedPayload?.compositeQuery?.builderQueries?.A.orderBy,
).not.toEqual(orderBy);
});
});
it('test for explorer options', async () => {
const { getByText, getByTestId } = render(

View File

@@ -0,0 +1,28 @@
import { cloneDeep } from 'lodash-es';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
/**
 * Returns a copy of the given query in which every builder query entry has
 * the supplied field overrides merged on top of its existing fields.
 *
 * @param query - source query object; never mutated
 * @param fieldOverrides - partial builder-query fields to force on each entry
 * @returns a deep-copied query with the overrides applied
 */
export const transformBuilderQueryFields = (
	query: Query,
	fieldOverrides: Partial<IBuilderQuery>,
): Query => {
	// Deep-copy so the caller's query (and any nested objects) stay untouched.
	const result: Query = cloneDeep(query);

	// Merge the overrides into each builder query, when any exist.
	if (result.builder?.queryData) {
		result.builder.queryData = result.builder.queryData.map((queryItem) => ({
			...queryItem,
			...fieldOverrides,
		}));
	}

	return result;
};

2
go.mod
View File

@@ -19,7 +19,7 @@ require (
github.com/go-openapi/strfmt v0.23.0
github.com/go-redis/redis/v8 v8.11.5
github.com/go-redis/redismock/v8 v8.11.5
github.com/go-viper/mapstructure/v2 v2.2.1
github.com/go-viper/mapstructure/v2 v2.3.0
github.com/gojek/heimdall/v7 v7.0.3
github.com/golang-jwt/jwt/v5 v5.2.2
github.com/google/uuid v1.6.0

4
go.sum
View File

@@ -321,8 +321,8 @@ github.com/go-resty/resty/v2 v2.16.5/go.mod h1:hkJtXbA2iKHzJheXYvQ8snQES5ZLGKMwQ
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-zookeeper/zk v1.0.4 h1:DPzxraQx7OrPyXq2phlGlNSIyWEsAox0RJmjTseMV6I=
github.com/go-zookeeper/zk v1.0.4/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=

View File

@@ -9,6 +9,7 @@ import (
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -205,6 +206,10 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
if errors.Is(err, context.DeadlineExceeded) {
return nil, errors.Newf(errors.TypeTimeout, errors.CodeTimeout, "Query timed out").
WithAdditional("Try refining your search by adding relevant resource attributes filtering")
}
return nil, err
}
defer rows.Close()

View File

@@ -89,6 +89,9 @@ func newProvider(
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.BodyJSONStringSearchPrefix,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
telemetrylogs.DefaultFullTextColumn,

View File

@@ -4521,24 +4521,46 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result
}
queryInfoResult := NewQueryInfoResult(queryRangeParams, version)
if !(len(result) > 0 && (len(result[0].Series) > 0 || len(result[0].List) > 0)) {
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", queryInfoResult.ToMap())
return
}
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", queryInfoResult.ToMap())
if !(queryInfoResult.LogsUsed || queryInfoResult.MetricsUsed || queryInfoResult.TracesUsed) {
return
}
referrer := r.Header.Get("Referer")
if referrer == "" {
properties := queryInfoResult.ToMap()
// Check if result is empty or has no data
if len(result) == 0 {
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
return
}
// Check if first result has no series data
if len(result[0].Series) == 0 {
// Check if first result has no list data
if len(result[0].List) == 0 {
// Check if first result has no table data
if result[0].Table == nil {
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
return
}
if len(result[0].Table.Rows) == 0 {
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Empty", properties)
return
}
}
}
referrer := r.Header.Get("Referer")
if referrer == "" {
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
return
}
properties["referrer"] = referrer
if matched, _ := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer); matched {
properties := queryInfoResult.ToMap()
if dashboardIDRegex, err := regexp.Compile(`/dashboard/([a-f0-9\-]+)/`); err == nil {
if matches := dashboardIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
@@ -4552,13 +4574,12 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result
}
}
properties["referrer"] = referrer
properties["module_name"] = "dashboard"
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
return
}
if matched, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer); matched {
properties := queryInfoResult.ToMap()
if alertIDRegex, err := regexp.Compile(`ruleId=(\d+)`); err == nil {
if matches := alertIDRegex.FindStringSubmatch(referrer); len(matches) > 1 {
@@ -4566,11 +4587,13 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result
}
}
properties["referrer"] = referrer
properties["module_name"] = "rule"
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
return
}
aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties)
}
func (aH *APIHandler) QueryRangeV3(w http.ResponseWriter, r *http.Request) {

View File

@@ -94,3 +94,11 @@ func CollisionHandledFinalExpr(
return multiIfStmt, allArgs, nil
}
// GroupByKeys returns the names of the given group-by keys, each wrapped in
// backticks for safe use as ClickHouse identifiers, preserving input order.
func GroupByKeys(keys []qbtypes.GroupByKey) []string {
	// Pre-size to the exact final length to avoid repeated append growth.
	quoted := make([]string, 0, len(keys))
	for _, key := range keys {
		quoted = append(quoted, "`"+key.Name+"`")
	}
	return quoted
}

View File

@@ -38,6 +38,10 @@ type resourceFilterStatementBuilder[T any] struct {
conditionBuilder qbtypes.ConditionBuilder
metadataStore telemetrytypes.MetadataStore
signal telemetrytypes.Signal
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}
// Ensure interface compliance at compile time
@@ -64,12 +68,18 @@ func NewLogResourceFilterStatementBuilder(
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
fullTextColumn: fullTextColumn,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
@@ -140,7 +150,11 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
FieldMapper: b.fieldMapper,
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
SkipFullTextFilter: true,
SkipFunctionCalls: true,
Variables: variables,
})

View File

@@ -29,7 +29,10 @@ type filterExpressionVisitor struct {
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool
skipFullTextFilter bool
skipFunctionCalls bool
variables map[string]qbtypes.VariableItem
keysWithWarnings map[string]bool
}
type FilterExprVisitorOpts struct {
@@ -42,6 +45,7 @@ type FilterExprVisitorOpts struct {
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool
SkipFullTextFilter bool
SkipFunctionCalls bool
Variables map[string]qbtypes.VariableItem
}
@@ -57,7 +61,9 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter,
skipFullTextFilter: opts.SkipFullTextFilter,
skipFunctionCalls: opts.SkipFunctionCalls,
variables: opts.Variables,
keysWithWarnings: make(map[string]bool),
}
}
@@ -547,6 +553,10 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
// VisitFunctionCall handles function calls like has(), hasAny(), etc.
func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any {
if v.skipFunctionCalls {
return "true"
}
// Get function name based on which token is present
var functionName string
if ctx.HAS() != nil {
@@ -690,7 +700,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
}
}
if len(fieldKeysForName) > 1 {
if len(fieldKeysForName) > 1 && !v.keysWithWarnings[keyName] {
// this is warning state, we must have a unambiguous key
v.warnings = append(v.warnings, fmt.Sprintf(
"key `%s` is ambiguous, found %d different combinations of field context and data type: %v",
@@ -698,6 +708,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
len(fieldKeysForName),
fieldKeysForName,
))
v.keysWithWarnings[keyName] = true
}
return fieldKeysForName

View File

@@ -220,6 +220,7 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT max(timestamp) FROM signoz_traces.distributed_signoz_index_v3").Scan(&tracesLastSeenAt); err == nil {
if tracesLastSeenAt.Unix() != 0 {
stats["telemetry.traces.last_observed.time"] = tracesLastSeenAt.UTC()
stats["telemetry.traces.last_observed.time_unix"] = tracesLastSeenAt.Unix()
}
}
@@ -227,6 +228,7 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT fromUnixTimestamp64Nano(max(timestamp)) FROM signoz_logs.distributed_logs_v2").Scan(&logsLastSeenAt); err == nil {
if logsLastSeenAt.Unix() != 0 {
stats["telemetry.logs.last_observed.time"] = logsLastSeenAt.UTC()
stats["telemetry.logs.last_observed.time_unix"] = logsLastSeenAt.Unix()
}
}
@@ -234,6 +236,7 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT toDateTime(max(unix_milli) / 1000) FROM signoz_metrics.distributed_samples_v4").Scan(&metricsLastSeenAt); err == nil {
if metricsLastSeenAt.Unix() != 0 {
stats["telemetry.metrics.last_observed.time"] = metricsLastSeenAt.UTC()
stats["telemetry.metrics.last_observed.time_unix"] = metricsLastSeenAt.Unix()
}
}

View File

@@ -9,6 +9,7 @@ import (
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"golang.org/x/exp/maps"
"github.com/huandu/go-sqlbuilder"
)
@@ -148,7 +149,7 @@ func (c *conditionBuilder) conditionFor(
}
// if the field is intrinsic, it always exists
if slices.Contains(IntrinsicFields, key.Name) {
if slices.Contains(maps.Keys(IntrinsicFields), key.Name) {
return "true", nil
}
@@ -210,7 +211,7 @@ func (c *conditionBuilder) ConditionFor(
// skip adding exists filter for intrinsic fields
// with an exception for body json search
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return condition, nil
}

View File

@@ -337,7 +337,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorEqual,
value: "GET",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."method"'), 'String') = ?`,
expectedSQL: `JSON_VALUE(body, '$."http"."method"') = ?`,
expectedError: nil,
},
{
@@ -417,7 +417,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorContains,
value: "200",
expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) LIKE LOWER(?)`,
expectedSQL: `LOWER(JSON_VALUE(body, '$."http"."status_code"')) LIKE LOWER(?)`,
expectedError: nil,
},
{
@@ -427,7 +427,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorNotContains,
value: "200",
expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) NOT LIKE LOWER(?)`,
expectedSQL: `LOWER(JSON_VALUE(body, '$."http"."status_code"')) NOT LIKE LOWER(?)`,
expectedError: nil,
},
{

View File

@@ -10,7 +10,57 @@ var (
FieldDataType: telemetrytypes.FieldDataTypeString,
}
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = []string{
"body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version",
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"body": {
Name: "body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_id": {
Name: "trace_id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"span_id": {
Name: "span_id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_flags": {
Name: "trace_flags",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"severity_text": {
Name: "severity_text",
Description: "Log level. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitytext)",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"severity_number": {
Name: "severity_number",
Description: "Numerical value of the severity. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber)",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"scope_name": {
Name: "scope_name",
Description: "Logger name. Learn more about instrumentation scope [here](https://opentelemetry.io/docs/concepts/instrumentation-scope/)",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextScope,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"scope_version": {
Name: "scope_version",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextScope,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
)

View File

@@ -73,7 +73,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: "body.message = hello",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."message"'), 'String') = ? AND JSON_EXISTS(body, '$."message"'))`,
expectedQuery: `WHERE (JSON_VALUE(body, '$."message"') = ? AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"hello"},
expectedErrorContains: "",
},
@@ -113,7 +113,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: "body.message REGEXP 'a*'",
shouldPass: true,
expectedQuery: `WHERE (match(JSONExtract(JSON_VALUE(body, '$."message"'), 'String'), ?) AND JSON_EXISTS(body, '$."message"'))`,
expectedQuery: `WHERE (match(JSON_VALUE(body, '$."message"'), ?) AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"a*"},
expectedErrorContains: "",
},
@@ -121,7 +121,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: `body.message CONTAINS "hello 'world'"`,
shouldPass: true,
expectedQuery: `WHERE (LOWER(JSONExtract(JSON_VALUE(body, '$."message"'), 'String')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
expectedQuery: `WHERE (LOWER(JSON_VALUE(body, '$."message"')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"%hello 'world'%"},
expectedErrorContains: "",
},
@@ -136,7 +136,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
category: "json",
query: `body.name IN ('hello', 'world')`,
shouldPass: true,
expectedQuery: `WHERE ((JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ? OR JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ?) AND JSON_EXISTS(body, '$."name"'))`,
expectedQuery: `WHERE ((JSON_VALUE(body, '$."name"') = ? OR JSON_VALUE(body, '$."name"') = ?) AND JSON_EXISTS(body, '$."name"'))`,
expectedArgs: []any{"hello", "world"},
expectedErrorContains: "",
},

View File

@@ -61,7 +61,7 @@ func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
}
// check if it is array
if strings.HasSuffix(key.Name, "[*]") {
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}
@@ -74,6 +74,8 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
for _, part := range parts {
if strings.HasSuffix(part, "[*]") {
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[*]")))
} else if strings.HasSuffix(part, "[]") {
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[]")))
} else {
newParts = append(newParts, fmt.Sprintf(`"%s"`, part))
}
@@ -94,8 +96,12 @@ func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, op
return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}
// for all other types, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
if dataType != telemetrytypes.FieldDataTypeString {
// for all types except strings, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}
// for string types, we should compare with the JSON_VALUE
return fmt.Sprintf("JSON_VALUE(body, '$.%s')", getBodyJSONPath(key)), value
}
func GetBodyJSONKeyForExists(_ context.Context, key *telemetrytypes.TelemetryFieldKey, _ qbtypes.FilterOperator, _ any) string {

View File

@@ -270,10 +270,11 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
// Constrain the main query to the rows that appear in the CTE.
tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
// Group by all dimensions
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
// Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
@@ -290,7 +291,8 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
} else {
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
@@ -380,7 +382,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}
// Group by dimensions
sb.GroupBy("ALL")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
@@ -492,7 +494,7 @@ func (b *logQueryStatementBuilder) maybeAttachResourceFilter(
return "", nil, err
}
sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)")
sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")
return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}

View File

@@ -30,6 +30,9 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation
fm,
cb,
mockMetadataStore,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)
}
@@ -65,7 +68,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,
@@ -104,7 +107,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,

View File

@@ -16,6 +16,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
)
var (
@@ -208,6 +209,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}
keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}
if rows.Err() != nil {
@@ -215,8 +217,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}
staticKeys := []string{"isRoot", "isEntrypoint"}
staticKeys = append(staticKeys, telemetrytraces.IntrinsicFields...)
staticKeys = append(staticKeys, telemetrytraces.CalculatedFields...)
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.IntrinsicFields)...)
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.CalculatedFields)...)
// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
@@ -228,6 +230,19 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
}
}
if found {
if field, exists := telemetrytraces.IntrinsicFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
}
continue
}
if field, exists := telemetrytraces.CalculatedFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
}
continue
}
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextSpan,
@@ -361,6 +376,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
keys = append(keys, key)
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
}
if rows.Err() != nil {
@@ -368,7 +384,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
staticKeys := []string{}
staticKeys = append(staticKeys, telemetrylogs.IntrinsicFields...)
staticKeys = append(staticKeys, maps.Keys(telemetrylogs.IntrinsicFields)...)
// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
@@ -380,6 +396,13 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
}
}
if found {
if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
keys = append(keys, &field)
}
continue
}
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextLog,

View File

@@ -258,7 +258,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GTE("unix_milli", start),
sb.LT("unix_milli", end),
)
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
@@ -320,7 +321,8 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
sb.AddWhereClause(filterWhere)
}
sb.GroupBy("ALL")
sb.GroupBy("fingerprint")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("(%s) AS filtered_time_series", q), args, nil
@@ -375,7 +377,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDelta(
sb.GTE("unix_milli", start),
sb.LT("unix_milli", end),
)
sb.GroupBy("ALL")
sb.GroupBy("fingerprint", "ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("fingerprint", "ts")
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
@@ -412,7 +415,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
baseSb.GTE("unix_milli", start),
baseSb.LT("unix_milli", end),
)
baseSb.GroupBy("ALL")
baseSb.GroupBy("fingerprint", "ts")
baseSb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
baseSb.OrderBy("fingerprint", "ts")
innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
@@ -438,7 +442,7 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW increase_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
default:
@@ -465,7 +469,8 @@ func (b *metricQueryStatementBuilder) buildSpatialAggregationCTE(
if query.Aggregations[0].ValueFilter != nil {
sb.Where(sb.EQ("per_series_value", query.Aggregations[0].ValueFilter.Value))
}
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args

View File

@@ -49,7 +49,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -82,7 +82,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000)},
},
expectedErr: nil,
@@ -114,7 +114,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_latency", uint64(1747947419000), uint64(1747983448000)},
},
expectedErr: nil,
@@ -147,7 +147,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983448000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -176,7 +176,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,

View File

@@ -11,6 +11,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
)
type conditionBuilder struct {
@@ -129,10 +130,10 @@ func (c *conditionBuilder) conditionFor(
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
// if the field is intrinsic, it always exists
if slices.Contains(IntrinsicFields, tblFieldName) ||
slices.Contains(CalculatedFields, tblFieldName) ||
slices.Contains(IntrinsicFieldsDeprecated, tblFieldName) ||
slices.Contains(CalculatedFieldsDeprecated, tblFieldName) {
if slices.Contains(maps.Keys(IntrinsicFields), tblFieldName) ||
slices.Contains(maps.Keys(CalculatedFields), tblFieldName) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), tblFieldName) ||
slices.Contains(maps.Keys(CalculatedFieldsDeprecated), tblFieldName) {
return "true", nil
}
@@ -205,10 +206,10 @@ func (c *conditionBuilder) ConditionFor(
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) ||
slices.Contains(IntrinsicFieldsDeprecated, field) ||
slices.Contains(CalculatedFields, field) ||
slices.Contains(CalculatedFieldsDeprecated, field) {
if slices.Contains(maps.Keys(IntrinsicFields), field) ||
slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) ||
slices.Contains(maps.Keys(CalculatedFields), field) ||
slices.Contains(maps.Keys(CalculatedFieldsDeprecated), field) {
return condition, nil
}

View File

@@ -3,89 +3,320 @@ package telemetrytraces
import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
var (
IntrinsicFields = []string{
"trace_id",
"span_id",
"trace_state",
"parent_span_id",
"flags",
"name",
"kind",
"kind_string",
"duration_nano",
"status_code",
"status_message",
"status_code_string",
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"trace_id": {
Name: "trace_id",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"span_id": {
Name: "span_id",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_state": {
Name: "trace_state",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"parent_span_id": {
Name: "parent_span_id",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"flags": {
Name: "flags",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"name": {
Name: "name",
Description: "Name of the span",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"kind": {
Name: "kind",
Description: "Span kind enum (number). Use `kind_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"kind_string": {
Name: "kind_string",
Description: "Span kind enum (string). Known values are ['Client', 'Server', 'Internal', 'Producer', 'Consumer']. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"duration_nano": {
Name: "duration_nano",
Description: "Span duration",
Unit: "ns",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"status_code": {
Name: "status_code",
Description: "Span status code enum (number). Use `status_code_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"status_message": {
Name: "status_message",
Description: "Span status message. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"status_code_string": {
Name: "status_code_string",
Description: "Span status code enum (string). Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
IntrinsicFieldsDeprecated = []string{
"traceID",
"spanID",
"parentSpanID",
"spanKind",
"durationNano",
"statusCode",
"statusMessage",
"statusCodeString",
IntrinsicFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{
"traceID": {
Name: "traceID",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"spanID": {
Name: "spanID",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"parentSpanID": {
Name: "parentSpanID",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"spanKind": {
Name: "spanKind",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"durationNano": {
Name: "durationNano",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"statusCode": {
Name: "statusCode",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"statusMessage": {
Name: "statusMessage",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"statusCodeString": {
Name: "statusCodeString",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
CalculatedFields = []string{
"response_status_code",
"external_http_url",
"http_url",
"external_http_method",
"http_method",
"http_host",
"db_name",
"db_operation",
"has_error",
"is_remote",
CalculatedFields = map[string]telemetrytypes.TelemetryFieldKey{
"response_status_code": {
Name: "response_status_code",
Description: "Derived response status code from the HTTP/RPC status code attributes. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#response_status_code)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"external_http_url": {
Name: "external_http_url",
Description: "The hostname of the external HTTP URL. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_url)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"http_url": {
Name: "http_url",
Description: "HTTP URL of the request. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"external_http_method": {
Name: "external_http_method",
Description: "HTTP request method of client spans. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_method)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"http_method": {
Name: "http_method",
Description: "The HTTP request method. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_method)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"http_host": {
Name: "http_host",
Description: "The HTTP host or server address. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_host)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"db_name": {
Name: "db_name",
Description: "The database name or namespace. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_name)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"db_operation": {
Name: "db_operation",
Description: "The database operation being performed. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_operation)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"has_error": {
Name: "has_error",
Description: "Whether the span has an error. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#has_error)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
"is_remote": {
Name: "is_remote",
Description: "Whether the span is remote. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#is_remote)",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
}
CalculatedFieldsDeprecated = []string{
"responseStatusCode",
"externalHttpUrl",
"httpUrl",
"externalHttpMethod",
"httpMethod",
"httpHost",
"dbName",
"dbOperation",
"hasError",
"isRemote",
CalculatedFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{
"responseStatusCode": {
Name: "responseStatusCode",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"externalHttpUrl": {
Name: "externalHttpUrl",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"httpUrl": {
Name: "httpUrl",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"externalHttpMethod": {
Name: "externalHttpMethod",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"httpMethod": {
Name: "httpMethod",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"httpHost": {
Name: "httpHost",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"dbName": {
Name: "dbName",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"dbOperation": {
Name: "dbOperation",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"hasError": {
Name: "hasError",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
"isRemote": {
Name: "isRemote",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
}
SpanSearchScopeRoot = "isroot"
SpanSearchScopeEntryPoint = "isentrypoint"
DefaultFields = []telemetrytypes.TelemetryFieldKey{
{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextSpan,
Name: "timestamp",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
{
Name: "span_id",
FieldContext: telemetrytypes.FieldContextSpan,
Name: "span_id",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextSpan,
Name: "trace_id",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "name",
FieldContext: telemetrytypes.FieldContextSpan,
Name: "name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "service.name",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "duration_nano",
FieldContext: telemetrytypes.FieldContextSpan,
Name: "duration_nano",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
{
Name: "response_status_code",
FieldContext: telemetrytypes.FieldContextSpan,
Name: "response_status_code",
Signal: telemetrytypes.SignalTraces,
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
)

View File

@@ -305,10 +305,11 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Constrain the main query to the rows that appear in the CTE.
tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
// Group by all dimensions
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
@@ -323,7 +324,8 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
} else {
sb.GroupBy("ALL")
sb.GroupBy("ts")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
@@ -412,7 +414,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
}
// Group by dimensions
sb.GroupBy("ALL")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" && !skipHaving {
@@ -521,7 +523,7 @@ func (b *traceQueryStatementBuilder) maybeAttachResourceFilter(
return "", nil, err
}
sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)")
sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")
return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}

View File

@@ -59,7 +59,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,

View File

@@ -104,6 +104,8 @@ func detectPlatform() string {
return "heroku"
case os.Getenv("RENDER") != "" || os.Getenv("RENDER_SERVICE_ID") != "":
return "render"
case os.Getenv("COOLIFY_RESOURCE_UUID") != "":
return "coolify"
}
// Try to detect cloud provider through metadata endpoints
@@ -151,5 +153,16 @@ func detectPlatform() string {
}
}
// Hetzner metadata
if req, err := http.NewRequest(http.MethodGet, "http://169.254.169.254/hetzner/v1/metadata", nil); err == nil {
if resp, err := client.Do(req); err == nil {
resp.Body.Close()
if resp.StatusCode == 200 {
return "hetzner"
}
}
}
return "unknown"
}