Compare commits


21 Commits

Author | SHA1 | Message | Date
Srikanth Chekuri | 18e856f62c | Merge branch 'main' into perf/panel-rerender | 2025-12-16 09:44:30 +05:30
Abhi kumar | c0bd545e26 | Merge branch 'main' into perf/panel-rerender | 2025-12-02 16:47:40 +05:30
Abhi kumar | 346be7b7ba | Merge branch 'main' into perf/panel-rerender | 2025-11-19 11:19:01 +05:30
Abhi kumar | 10176e3128 | Merge branch 'main' into perf/panel-rerender | 2025-11-19 09:57:07 +05:30
Abhi kumar | a6832d6ed0 | Merge branch 'main' into perf/panel-rerender | 2025-11-18 16:17:58 +05:30
Abhi kumar | 5504e90620 | Merge branch 'main' into perf/panel-rerender | 2025-11-17 10:43:08 +05:30
Srikanth Chekuri | 19cffa0165 | Merge branch 'main' into perf/panel-rerender | 2025-11-13 23:21:05 +05:30
Abhi kumar | 4dc4a0b95e | Merge branch 'main' into perf/panel-rerender | 2025-11-13 11:44:04 +05:30
Srikanth Chekuri | 2462f551be | Merge branch 'main' into perf/panel-rerender | 2025-11-11 13:15:30 +05:30
Abhi Kumar | 78ab80d294 | fix: removed unnessasary usememo | 2025-11-06 17:59:34 +05:30
Abhi Kumar | d285b90f09 | Merge branch 'main' of https://github.com/SigNoz/signoz into perf/panel-rerender | 2025-11-06 17:53:45 +05:30
Abhi kumar | 08e756cf5d | Merge branch 'main' into perf/panel-rerender | 2025-11-06 17:08:48 +05:30
Abhi Kumar | 42c1ddda39 | chore: removing deepcompare for all the props | 2025-11-06 15:45:27 +05:30
Abhi Kumar | efb85fc205 | Merge branch 'main' of https://github.com/SigNoz/signoz into perf/panel-rerender | 2025-11-06 14:57:34 +05:30
Abhi Kumar | 0e972257db | perf: memoize click handlers and use refs for dashboard mutations | 2025-11-06 11:36:01 +05:30
Abhi Kumar | f8144e3a1d | perf: added deep comparision for props | 2025-11-06 11:31:11 +05:30
Abhi Kumar | 02a8a11976 | chore: removed useMemo from clickhandler | 2025-11-06 11:29:10 +05:30
Abhi Kumar | 4a351e5280 | perf: added changes to make onclickhandler stable | 2025-11-06 11:28:27 +05:30
Abhi Kumar | 311257a518 | perf: memoized panelwrapper and not rerendering it when onclickhandler is changing | 2025-11-06 02:31:25 +05:30
Abhi Kumar | d5c665a72a | perf: only calling getUplotchartdata when the props are changing | 2025-11-06 02:27:31 +05:30
Abhi Kumar | 0d6c8785d9 | perf: memoized certain expensive functions | 2025-11-06 02:26:51 +05:30
93 changed files with 773 additions and 3529 deletions

.github/CODEOWNERS vendored
View File

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.
/frontend/ @SigNoz/frontend-maintainers
/frontend/ @YounixM @aks07
# Onboarding
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish

View File

@@ -9,29 +9,6 @@ on:
- labeled
jobs:
fmtlint:
if: |
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: python
uses: actions/setup-python@v5
with:
python-version: 3.13
- name: poetry
run: |
python -m pip install poetry==2.1.2
python -m poetry config virtualenvs.in-project true
cd tests/integration && poetry install --no-root
- name: fmt
run: |
make py-fmt
- name: lint
run: |
make py-lint
test:
strategy:
fail-fast: false
@@ -44,7 +21,6 @@ jobs:
- dashboard
- querier
- ttl
- preference
sqlstore-provider:
- postgres
- sqlite

View File

@@ -1,3 +1,11 @@
FROM node:18-bullseye AS build
WORKDIR /opt/
COPY ./frontend/ ./
ENV NODE_OPTIONS=--max-old-space-size=8192
RUN CI=1 yarn install
RUN CI=1 yarn build
FROM golang:1.24-bullseye
ARG OS="linux"
@@ -32,6 +40,8 @@ COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
COPY --from=build /opt/build ./web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["/root/signoz", "server"]

View File

@@ -1,47 +0,0 @@
FROM node:18-bullseye AS build
WORKDIR /opt/
COPY ./frontend/ ./
ENV NODE_OPTIONS=--max-old-space-size=8192
RUN CI=1 yarn install
RUN CI=1 yarn build
FROM golang:1.24-bullseye
ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL
# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root
RUN set -eux; \
apt-get update; \
apt-get install -y --no-install-recommends \
g++ \
gcc \
libc6-dev \
make \
pkg-config \
; \
rm -rf /var/lib/apt/lists/*
COPY go.mod go.sum ./
RUN go mod download
COPY ./cmd/ ./cmd/
COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates
COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
COPY --from=build /opt/build ./web/
RUN chmod 755 /root /root/signoz
ENTRYPOINT ["/root/signoz", "server"]

View File

@@ -3,13 +3,6 @@
# Do not modify this file
#
##################### Global #####################
global:
# the url under which the signoz apiserver is externally reachable.
external_url: <unset>
# the url where the SigNoz backend receives telemetry data (traces, metrics, logs) from instrumented applications.
ingestion_url: <unset>
##################### Version #####################
version:
banner:

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.105.1
image: signoz/signoz:v0.104.0
command:
- --config=/root/config/prometheus.yml
ports:

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.105.1
image: signoz/signoz:v0.104.0
command:
- --config=/root/config/prometheus.yml
ports:

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.105.1}
image: signoz/signoz:${VERSION:-v0.104.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.105.1}
image: signoz/signoz:${VERSION:-v0.104.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

View File

@@ -473,49 +473,6 @@ paths:
summary: Get reset password token
tags:
- users
/api/v1/global/config:
get:
deprecated: false
description: This endpoints returns global config
operationId: GetGlobalConfig
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TypesGettableGlobalConfig'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- EDITOR
- tokenizer:
- EDITOR
summary: Get global config
tags:
- global
/api/v1/invite:
get:
deprecated: false
@@ -2188,13 +2145,6 @@ components:
userId:
type: string
type: object
TypesGettableGlobalConfig:
properties:
external_url:
type: string
ingestion_url:
type: string
type: object
TypesInvite:
properties:
createdAt:

View File

@@ -1,179 +0,0 @@
# Handler
Handlers in SigNoz are responsible for exposing module functionality over HTTP. They are thin adapters that:
- Decode incoming HTTP requests
- Call the appropriate module layer
- Return structured responses (or errors) in a consistent format
- Describe themselves for OpenAPI generation
They are **not** the place for complex business logic; that belongs in modules (for example, `pkg/modules/user`, `pkg/modules/session`, etc).
## How are handlers structured?
At a high level, a typical flow looks like this:
1. A `Handler` interface is defined in the module (for example, `user.Handler`, `session.Handler`, `organization.Handler`).
2. The `apiserver` provider wires those handlers into HTTP routes using Gorilla `mux.Router`.
Each route wraps a module handler method with the following:
- Authorization middleware (from `pkg/http/middleware`)
- A generic HTTP `handler.Handler` (from `pkg/http/handler`)
- An `OpenAPIDef` that describes the operation for OpenAPI generation
For example, in `pkg/apiserver/signozapiserver`:
```go
if err := router.Handle("/api/v1/invite", handler.New(
provider.authZ.AdminAccess(provider.userHandler.CreateInvite),
handler.OpenAPIDef{
ID: "CreateInvite",
Tags: []string{"users"},
Summary: "Create invite",
Description: "This endpoint creates an invite for a user",
Request: new(types.PostableInvite),
RequestContentType: "application/json",
Response: new(types.Invite),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
},
)).Methods(http.MethodPost).GetError(); err != nil {
return err
}
```
In this pattern:
- `provider.userHandler.CreateInvite` is a handler method.
- `provider.authZ.AdminAccess(...)` wraps that method with authorization checks and context setup.
- `handler.New` converts it into an HTTP handler and wires it to OpenAPI via the `OpenAPIDef`.
## How to write a new handler method?
When adding a new endpoint:
1. Add a method to the appropriate module `Handler` interface.
2. Implement that method in the module.
3. Register the method in `signozapiserver` with the correct route, HTTP method, auth, and `OpenAPIDef`.
### 1. Extend an existing `Handler` interface or create a new one
Find the module in `pkg/modules/<name>` and extend its `Handler` interface with a new method that receives an `http.ResponseWriter` and `*http.Request`. For example:
```go
type Handler interface {
// existing methods...
CreateThing(rw http.ResponseWriter, req *http.Request)
}
```
Keep the method focused on HTTP concerns and delegate business logic to the module.
### 2. Implement the handler method
In the module implementation, implement the new method. A typical implementation:
- Extracts authentication and organization context from `req.Context()`
- Decodes the request body into a `types.*` struct using the `binding` package
- Calls module functions
- Uses the `render` package to write responses or errors
```go
func (h *handler) CreateThing(rw http.ResponseWriter, req *http.Request) {
// Extract authentication and organization context from req.Context()
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
// Decode the request body into a `types.*` struct using the `binding` package
var in types.PostableThing
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}
// Call module functions
out, err := h.module.CreateThing(req.Context(), claims.OrgID, &in)
if err != nil {
render.Error(rw, err)
return
}
// Use the `render` package to write responses or errors
render.Success(rw, http.StatusCreated, out)
}
```
### 3. Register the handler in `signozapiserver`
In `pkg/apiserver/signozapiserver`, add a route in the appropriate `add*Routes` function (`addUserRoutes`, `addSessionRoutes`, `addOrgRoutes`, etc.). The pattern is:
```go
if err := router.Handle("/api/v1/things", handler.New(
provider.authZ.AdminAccess(provider.thingHandler.CreateThing),
handler.OpenAPIDef{
ID: "CreateThing",
Tags: []string{"things"},
Summary: "Create thing",
Description: "This endpoint creates a thing",
Request: new(types.PostableThing),
RequestContentType: "application/json",
Response: new(types.GettableThing),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
},
)).Methods(http.MethodPost).GetError(); err != nil {
return err
}
```
### 4. Update the OpenAPI spec
Run the following command to update the OpenAPI spec:
```bash
go run cmd/enterprise/*.go generate openapi
```
This will update the OpenAPI spec in `docs/api/openapi.yml` to reflect the new endpoint.
## How does OpenAPI integration work?
The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAPIDef`. This drives the generated OpenAPI document.
- **ID**: A unique identifier for the operation (used as the `operationId`).
- **Tags**: Logical grouping for the operation (for example, `"users"`, `"sessions"`, `"orgs"`).
- **Summary / Description**: Human-friendly documentation.
- **Request / RequestContentType**:
- `Request` is a Go type that describes the request body or form.
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **Response / ResponseContentType**:
- `Response` is the Go type for the successful response payload.
- `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
- **SuccessStatusCode**: The HTTP status for successful responses (for example, `http.StatusOK`, `http.StatusCreated`, `http.StatusNoContent`).
- **ErrorStatusCodes**: Additional error status codes beyond the standard ones automatically added by `handler.New`.
- **SecuritySchemes**: Auth mechanisms and scopes required by the operation.
The generic handler:
- Automatically appends `401`, `403`, and `500` to `ErrorStatusCodes` when appropriate.
- Registers request and response schemas with the OpenAPI reflector so they appear in `docs/api/openapi.yml`.
See existing examples in:
- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)
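As a further illustration of the no-body response case described above, a delete-style route could be registered as sketched below. This is a hypothetical example modeled on the `CreateInvite` registration earlier in this document: the route, the `thingHandler.DeleteThing` method, and the choice to leave `Request`/`RequestContentType` unset are assumptions for illustration, not code from the repository.
```go
// Hypothetical sketch: a route whose success response carries no body,
// so ResponseContentType is "" and SuccessStatusCode is http.StatusNoContent.
// The route, handler method, and omitted Request fields are assumptions.
if err := router.Handle("/api/v1/things/{id}", handler.New(
	provider.authZ.AdminAccess(provider.thingHandler.DeleteThing),
	handler.OpenAPIDef{
		ID:                  "DeleteThing",
		Tags:                []string{"things"},
		Summary:             "Delete thing",
		Description:         "This endpoint deletes a thing",
		ResponseContentType: "", // no response body
		SuccessStatusCode:   http.StatusNoContent,
		ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusNotFound},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleAdmin),
	},
)).Methods(http.MethodDelete).GetError(); err != nil {
	return err
}
```
As noted above, the generic handler would still append `401`, `403`, and `500` to the error responses automatically.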
## What should I remember?
- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.

View File

@@ -3,14 +3,13 @@ package app
import (
"context"
"fmt"
"log/slog"
"net"
"net/http"
_ "net/http/pprof" // http profiler
"slices"
"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
@@ -107,8 +106,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
signoz.Prometheus,
signoz.Modules.OrgGetter,
signoz.Querier,
signoz.Instrumentation.ToProviderSettings(),
signoz.QueryParser,
signoz.Instrumentation.Logger(),
)
if err != nil {
@@ -355,8 +353,8 @@ func (s *Server) Stop(ctx context.Context) error {
return nil
}
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, logger *slog.Logger) (*baserules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
// create manager opts
managerOpts := &baserules.ManagerOptions{
@@ -366,7 +364,7 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
Logger: zap.L(),
Reader: ch,
Querier: querier,
SLogger: providerSettings.Logger,
SLogger: logger,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,

View File

@@ -132,34 +132,117 @@ function CeleryTaskBar({
[selectedFilters, celerySuccessStateData],
);
const onGraphClick = (
widgetData: Widgets,
xValue: number,
_yValue: number,
_mouseX: number,
_mouseY: number,
data?: {
[key: string]: string;
const onGraphClick = useCallback(
(
widgetData: Widgets,
xValue: number,
_yValue: number,
_mouseX: number,
_mouseY: number,
data?: {
[key: string]: string;
},
): void => {
const { start, end } = getStartAndEndTimesInMilliseconds(xValue);
// Extract entity and value from data
const [firstDataPoint] = Object.entries(data || {});
const [entity, value] = (firstDataPoint || ([] as unknown)) as [
string,
string,
];
if (!isEmpty(entity) || !isEmpty(value)) {
onClick?.({
entity,
value,
timeRange: [start, end],
widgetData,
});
}
},
): void => {
const { start, end } = getStartAndEndTimesInMilliseconds(xValue);
[onClick],
);
// Extract entity and value from data
const [firstDataPoint] = Object.entries(data || {});
const [entity, value] = (firstDataPoint || ([] as unknown)) as [
string,
string,
];
const onAllStateClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data?: any,
): void => {
onGraphClick(
celerySlowestTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
);
},
[onGraphClick],
);
if (!isEmpty(entity) || !isEmpty(value)) {
onClick?.({
entity,
value,
timeRange: [start, end],
widgetData,
});
}
};
const onFailedStateClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data?: any,
): void => {
onGraphClick(
celeryFailedTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
);
},
[onGraphClick],
);
const onRetryStateClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data?: any,
): void => {
onGraphClick(
celeryRetryTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
);
},
[onGraphClick],
);
const onSuccessStateClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data?: any,
): void => {
onGraphClick(
celerySuccessTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
);
},
[onGraphClick],
);
const { getCustomSeries } = useGetGraphCustomSeries({
isDarkMode,
@@ -185,16 +268,7 @@ function CeleryTaskBar({
headerMenuList={[...ViewMenuAction]}
onDragSelect={onDragSelect}
isQueryEnabled={queryEnabled}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
onGraphClick(
celerySlowestTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
)
}
onClickHandler={onAllStateClick}
customSeries={getCustomSeries}
dataAvailable={checkIfDataExists}
/>
@@ -205,16 +279,7 @@ function CeleryTaskBar({
headerMenuList={[...ViewMenuAction]}
onDragSelect={onDragSelect}
isQueryEnabled={queryEnabled}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
onGraphClick(
celeryFailedTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
)
}
onClickHandler={onFailedStateClick}
customSeries={getCustomSeries}
/>
)}
@@ -224,16 +289,7 @@ function CeleryTaskBar({
headerMenuList={[...ViewMenuAction]}
onDragSelect={onDragSelect}
isQueryEnabled={queryEnabled}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
onGraphClick(
celeryRetryTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
)
}
onClickHandler={onRetryStateClick}
customSeries={getCustomSeries}
/>
)}
@@ -243,16 +299,7 @@ function CeleryTaskBar({
headerMenuList={[...ViewMenuAction]}
onDragSelect={onDragSelect}
isQueryEnabled={queryEnabled}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void =>
onGraphClick(
celerySuccessTasksTableWidgetData,
xValue,
yValue,
mouseX,
mouseY,
data,
)
}
onClickHandler={onSuccessStateClick}
customSeries={getCustomSeries}
/>
)}

View File

@@ -1,15 +1,11 @@
.log-field-container {
display: flex;
overflow: hidden;
width: 100%;
align-items: baseline;
}
.log-field-key,
.log-field-key-colon {
.log-field-key {
padding-right: 5px;
color: var(--text-vanilla-400, #c0c1c3);
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
&.small {
font-size: 11px;
@@ -26,20 +22,6 @@
line-height: 24px;
}
}
.log-field-key {
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
white-space: nowrap;
display: inline-block;
max-width: 20vw;
text-overflow: ellipsis;
overflow: hidden;
margin: 0;
}
.log-field-key-colon {
min-width: 0.8rem;
flex-shrink: 0;
}
.log-value {
color: var(--text-vanilla-400, #c0c1c3);
font-size: 14px;
@@ -176,8 +158,7 @@
}
.lightMode {
.log-field-key,
.log-field-key-colon {
.log-field-key {
color: var(--text-slate-400);
}
.log-value {
@@ -189,10 +170,3 @@
}
}
}
.dark {
.log-field-key,
.log-field-key-colon {
color: rgba(255, 255, 255, 0.45);
}
}

View File

@@ -25,7 +25,13 @@ import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButton
import LogStateIndicator from '../LogStateIndicator/LogStateIndicator';
import { getLogIndicatorType } from '../LogStateIndicator/utils';
// styles
import { Container, LogContainer, LogText } from './styles';
import {
Container,
LogContainer,
LogText,
Text,
TextContainer,
} from './styles';
import { isValidLogField } from './util';
interface LogFieldProps {
@@ -52,18 +58,16 @@ function LogGeneralField({
);
return (
<div className="log-field-container">
<p className={cx('log-field-key', fontSize)} title={fieldKey}>
{fieldKey}
</p>
<span className={cx('log-field-key-colon', fontSize)}>&nbsp;:&nbsp;</span>
<TextContainer>
<Text ellipsis type="secondary" className={cx('log-field-key', fontSize)}>
{`${fieldKey} : `}
</Text>
<LogText
dangerouslySetInnerHTML={html}
className={cx('log-value', fontSize)}
title={fieldValue}
linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
/>
</div>
</TextContainer>
);
}

View File

@@ -1,5 +1,5 @@
/* eslint-disable no-nested-ternary */
import { Card } from 'antd';
import { Card, Typography } from 'antd';
import { FontSize } from 'container/OptionsMenu/types';
import styled from 'styled-components';
import { getActiveLogBackground } from 'utils/logs';
@@ -46,6 +46,19 @@ export const Container = styled(Card)<{
getActiveLogBackground($isActiveLog, $isDarkMode, $logType)}
`;
export const Text = styled(Typography.Text)`
&&& {
min-width: 2.5rem;
white-space: nowrap;
}
`;
export const TextContainer = styled.div`
display: flex;
overflow: hidden;
width: 100%;
`;
export const LogContainer = styled.div<LogContainerProps>`
margin-left: 0.5rem;
display: flex;

View File

@@ -300,7 +300,7 @@ function QueryAddOns({
);
return (
<div className="query-add-ons" data-testid="query-add-ons">
<div className="query-add-ons">
{selectedViews.length > 0 && (
<div className="selected-add-ons-content">
{selectedViews.find((view) => view.key === 'group_by') && (

View File

@@ -43,10 +43,7 @@ function QueryAggregationOptions({
};
return (
<div
className="query-aggregation-container"
data-testid="query-aggregation-container"
>
<div className="query-aggregation-container">
<div className="aggregation-container">
<QueryAggregationSelect
onChange={onChange}

View File

@@ -114,9 +114,9 @@ function QuerySearch({
const [isFocused, setIsFocused] = useState(false);
const editorRef = useRef<EditorView | null>(null);
const handleQueryValidation = useCallback((newExpression: string): void => {
const handleQueryValidation = useCallback((newQuery: string): void => {
try {
const validationResponse = validateQuery(newExpression);
const validationResponse = validateQuery(newQuery);
setValidation(validationResponse);
} catch (error) {
setValidation({
@@ -127,7 +127,7 @@ function QuerySearch({
}
}, []);
const getCurrentExpression = useCallback(
const getCurrentQuery = useCallback(
(): string => editorRef.current?.state.doc.toString() || '',
[],
);
@@ -167,14 +167,19 @@ function QuerySearch({
() => {
if (!isEditorReady) return;
const newExpression = queryData.filter?.expression || '';
const currentExpression = getCurrentExpression();
const newQuery = queryData.filter?.expression || '';
const currentQuery = getCurrentQuery();
// Do not update codemirror editor if the expression is the same
if (newExpression !== currentExpression && !isFocused) {
updateEditorValue(newExpression, { skipOnChange: true });
if (newExpression) {
handleQueryValidation(newExpression);
/* eslint-disable-next-line sonarjs/no-collapsible-if */
if (newQuery !== currentQuery && !isFocused) {
// Prevent clearing a non-empty editor when queryData becomes empty temporarily
// Only update if newQuery has a value, or if both are empty (initial state)
if (newQuery || !currentQuery) {
updateEditorValue(newQuery, { skipOnChange: true });
if (newQuery) {
handleQueryValidation(newQuery);
}
}
}
},
@@ -608,8 +613,8 @@ function QuerySearch({
};
const handleBlur = (): void => {
const currentExpression = getCurrentExpression();
handleQueryValidation(currentExpression);
const currentQuery = getCurrentQuery();
handleQueryValidation(currentQuery);
setIsFocused(false);
};
@@ -628,11 +633,11 @@ function QuerySearch({
const handleExampleClick = (exampleQuery: string): void => {
// If there's an existing query, append the example with AND
const currentExpression = getCurrentExpression();
const newExpression = currentExpression
? `${currentExpression} AND ${exampleQuery}`
const currentQuery = getCurrentQuery();
const newQuery = currentQuery
? `${currentQuery} AND ${exampleQuery}`
: exampleQuery;
updateEditorValue(newExpression);
updateEditorValue(newQuery);
};
// Helper function to render a badge for the current context mode
@@ -668,9 +673,9 @@ function QuerySearch({
if (word?.from === word?.to && !context.explicit) return null;
// Get current query from editor
const currentExpression = getCurrentExpression();
const currentQuery = editorRef.current?.state.doc.toString() || '';
// Get the query context at the cursor position
const queryContext = getQueryContextAtCursor(currentExpression, cursorPos.ch);
const queryContext = getQueryContextAtCursor(currentQuery, cursorPos.ch);
// Define autocomplete options based on the context
let options: {
@@ -1166,8 +1171,8 @@ function QuerySearch({
if (queryContext.isInParenthesis) {
// Different suggestions based on the context within parenthesis or bracket
const currentExpression = getCurrentExpression();
const curChar = currentExpression.charAt(cursorPos.ch - 1) || '';
const currentQuery = editorRef.current?.state.doc.toString() || '';
const curChar = currentQuery.charAt(cursorPos.ch - 1) || '';
if (curChar === '(' || curChar === '[') {
// Right after opening parenthesis/bracket
@@ -1316,7 +1321,7 @@ function QuerySearch({
style={{
position: 'absolute',
top: 8,
right: validation.isValid === false && getCurrentExpression() ? 40 : 8, // Move left when error shown
right: validation.isValid === false && getCurrentQuery() ? 40 : 8, // Move left when error shown
cursor: 'help',
zIndex: 10,
transition: 'right 0.2s ease',
@@ -1378,7 +1383,7 @@ function QuerySearch({
// Mod-Enter is usually Ctrl-Enter or Cmd-Enter based on OS
run: (): boolean => {
if (onRun && typeof onRun === 'function') {
onRun(getCurrentExpression());
onRun(getCurrentQuery());
} else {
handleRunQuery();
}
@@ -1404,7 +1409,7 @@ function QuerySearch({
onBlur={handleBlur}
/>
{getCurrentExpression() && validation.isValid === false && !isFocused && (
{getCurrentQuery() && validation.isValid === false && !isFocused && (
<div
className={cx('query-status-container', {
hasErrors: validation.errors.length > 0,

View File

@@ -1,7 +1,6 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable import/named */
import { EditorView } from '@uiw/react-codemirror';
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
import { initialQueriesMap } from 'constants/queryBuilder';
@@ -152,6 +151,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
>;
mockedGetKeys.mockClear();
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<QuerySearch
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
@@ -170,8 +171,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
// Focus and type into the editor
await userEvent.click(editor);
await userEvent.type(editor, SAMPLE_KEY_TYPING);
await user.click(editor);
await user.type(editor, SAMPLE_KEY_TYPING);
// Wait for debounced API call (300ms debounce + some buffer)
await waitFor(() => expect(mockedGetKeys).toHaveBeenCalled(), {
@@ -186,6 +187,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
>;
mockedGetValues.mockClear();
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<QuerySearch
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
@@ -201,8 +204,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await userEvent.click(editor);
await userEvent.type(editor, SAMPLE_VALUE_TYPING_INCOMPLETE);
await user.click(editor);
await user.type(editor, SAMPLE_VALUE_TYPING_INCOMPLETE);
// Wait for debounced API call (300ms debounce + some buffer)
await waitFor(() => expect(mockedGetValues).toHaveBeenCalled(), {
@@ -238,6 +241,7 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
it('calls provided onRun on Mod-Enter', async () => {
const onRun = jest.fn() as jest.MockedFunction<(q: string) => void>;
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<QuerySearch
@@ -255,8 +259,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await userEvent.click(editor);
await userEvent.type(editor, SAMPLE_STATUS_QUERY);
await user.click(editor);
await user.type(editor, SAMPLE_STATUS_QUERY);
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
@@ -276,6 +280,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
>;
mockedHandleRunQuery.mockClear();
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(
<QuerySearch
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
@@ -291,8 +297,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
});
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
await userEvent.click(editor);
await userEvent.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
await user.click(editor);
await user.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
@@ -342,73 +348,4 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
{ timeout: 3000 },
);
});
it('handles queryData.filter.expression changes without triggering onChange', async () => {
// Spy on CodeMirror's EditorView.dispatch, which is invoked when updateEditorValue
// applies a programmatic change to the editor.
const dispatchSpy = jest.spyOn(EditorView.prototype, 'dispatch');
const initialExpression = "service.name = 'frontend'";
const updatedExpression = "service.name = 'backend'";
const onChange = jest.fn() as jest.MockedFunction<(v: string) => void>;
const initialQueryData = {
...initialQueriesMap.logs.builder.queryData[0],
filter: {
expression: initialExpression,
},
};
const { rerender } = render(
<QuerySearch
onChange={onChange}
queryData={initialQueryData}
dataSource={DataSource.LOGS}
/>,
);
// Wait for CodeMirror to initialize with the initial expression
await waitFor(
() => {
const editorContent = document.querySelector(
CM_EDITOR_SELECTOR,
) as HTMLElement;
expect(editorContent).toBeInTheDocument();
const textContent = editorContent.textContent || '';
expect(textContent).toBe(initialExpression);
},
{ timeout: 3000 },
);
// Ensure the editor is explicitly blurred (not focused)
// Blur the actual CodeMirror editor container so that QuerySearch's onBlur handler runs.
// Note: In jsdom + CodeMirror we can't reliably assert the DOM text content changes when
// the expression is updated programmatically, but we can assert that:
// 1) The component continues to render, and
// 2) No onChange is fired for programmatic updates.
const updatedQueryData = {
...initialQueryData,
filter: {
expression: updatedExpression,
},
};
// Re-render with updated queryData.filter.expression
rerender(
<QuerySearch
onChange={onChange}
queryData={updatedQueryData}
dataSource={DataSource.LOGS}
/>,
);
// updateEditorValue should have resulted in a dispatch call + onChange should not have been called
await waitFor(() => {
expect(dispatchSpy).toHaveBeenCalled();
expect(onChange).not.toHaveBeenCalled();
});
dispatchSpy.mockRestore();
});
});

View File

@@ -163,10 +163,6 @@ function formatSingleValueForFilter(
if (trimmed === 'true' || trimmed === 'false') {
return trimmed === 'true';
}
if (isQuoted(value)) {
return unquote(value);
}
}
// Return non-string values as-is, or string values that couldn't be converted

View File

@@ -1,4 +1,3 @@
import userEvent from '@testing-library/user-event';
import { FiltersType, QuickFiltersSource } from 'components/QuickFilters/types';
import { useGetAggregateValues } from 'hooks/queryBuilder/useGetAggregateValues';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
@@ -6,7 +5,7 @@ import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQue
import { quickFiltersAttributeValuesResponse } from 'mocks-server/__mockdata__/customQuickFilters';
import { rest, server } from 'mocks-server/server';
import { UseQueryResult } from 'react-query';
import { render, screen, waitFor } from 'tests/test-utils';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { SuccessResponse } from 'types/api';
import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
@@ -42,15 +41,13 @@ interface MockFilterConfig {
type: FiltersType;
}
const SERVICE_NAME_KEY = 'service.name';
const createMockFilter = (
overrides: Partial<MockFilterConfig> = {},
): MockFilterConfig => ({
// eslint-disable-next-line sonarjs/no-duplicate-string
title: 'Service Name',
attributeKey: {
key: SERVICE_NAME_KEY,
key: 'service.name',
dataType: DataTypes.String,
type: 'resource',
},
@@ -71,7 +68,7 @@ const createMockQueryBuilderData = (hasActiveFilters = false): any => ({
? [
{
key: {
key: SERVICE_NAME_KEY,
key: 'service.name',
dataType: DataTypes.String,
type: 'resource',
},
@@ -191,222 +188,4 @@ describe('CheckboxFilter - User Flows', () => {
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
});
});
it('should update query filters when a checkbox is clicked', async () => {
const redirectWithQueryBuilderData = jest.fn();
// Start with no active filters so clicking a checkbox creates one
mockUseQueryBuilder.mockReturnValue({
...createMockQueryBuilderData(false),
redirectWithQueryBuilderData,
} as any);
const mockFilter = createMockFilter({ defaultOpen: true });
render(
<CheckboxFilter
filter={mockFilter}
source={QuickFiltersSource.LOGS_EXPLORER}
/>,
);
// Wait for checkboxes to render
await waitFor(() => {
expect(screen.getAllByRole('checkbox')).toHaveLength(4);
});
const checkboxes = screen.getAllByRole('checkbox');
// User unchecks the first value (`mq-kafka`)
await userEvent.click(checkboxes[0]);
// Composite query params (query builder data) should be updated via redirectWithQueryBuilderData
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
const updatedFilters = updatedQuery.builder.queryData[0].filters;
expect(updatedFilters.items).toHaveLength(1);
expect(updatedFilters.items[0].key.key).toBe(SERVICE_NAME_KEY);
// When unchecking from an "all selected" state, we use a NOT_IN filter for that value
expect(updatedFilters.items[0].op).toBe('not in');
expect(updatedFilters.items[0].value).toBe('mq-kafka');
});
it('should set an IN filter with only the clicked value when using Only', async () => {
const redirectWithQueryBuilderData = jest.fn();
// Existing filter: service.name IN ['mq-kafka', 'otel-demo']
mockUseQueryBuilder.mockReturnValue({
lastUsedQuery: 0,
currentQuery: {
builder: {
queryData: [
{
filters: {
items: [
{
key: {
key: SERVICE_NAME_KEY,
dataType: DataTypes.String,
type: 'resource',
},
op: 'in',
value: ['mq-kafka', 'otel-demo'],
},
],
op: 'AND',
},
},
],
},
},
redirectWithQueryBuilderData,
} as any);
const mockFilter = createMockFilter({ defaultOpen: true });
render(
<CheckboxFilter
filter={mockFilter}
source={QuickFiltersSource.LOGS_EXPLORER}
/>,
);
// Wait for values to render
await waitFor(() => {
expect(screen.getByText('mq-kafka')).toBeInTheDocument();
});
// Click on the value label to trigger the "Only" behavior
await userEvent.click(screen.getByText('mq-kafka'));
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
const updatedFilters = updatedQuery.builder.queryData[0].filters;
expect(updatedFilters.items).toHaveLength(1);
expect(updatedFilters.items[0].key.key).toBe(SERVICE_NAME_KEY);
expect(updatedFilters.items[0].op).toBe('in');
expect(updatedFilters.items[0].value).toBe('mq-kafka');
});
it('should clear filters for the attribute when using All', async () => {
const redirectWithQueryBuilderData = jest.fn();
// Existing filter: service.name IN ['mq-kafka']
mockUseQueryBuilder.mockReturnValue({
lastUsedQuery: 0,
currentQuery: {
builder: {
queryData: [
{
filters: {
items: [
{
key: {
key: SERVICE_NAME_KEY,
dataType: DataTypes.String,
type: 'resource',
},
op: 'in',
value: ['mq-kafka'],
},
],
op: 'AND',
},
},
],
},
},
redirectWithQueryBuilderData,
} as any);
const mockFilter = createMockFilter({ defaultOpen: true });
render(
<CheckboxFilter
filter={mockFilter}
source={QuickFiltersSource.LOGS_EXPLORER}
/>,
);
await waitFor(() => {
expect(screen.getByText('mq-kafka')).toBeInTheDocument();
});
// Only one value is selected, so clicking it should switch to "All" (no filter for this key)
await userEvent.click(screen.getByText('mq-kafka'));
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
const updatedFilters = updatedQuery.builder.queryData[0].filters;
const filtersForServiceName = updatedFilters.items.filter(
(item: any) => item.key?.key === SERVICE_NAME_KEY,
);
expect(filtersForServiceName).toHaveLength(0);
});
it('should extend an existing IN filter when checking an additional value', async () => {
const redirectWithQueryBuilderData = jest.fn();
// Existing filter: service.name IN 'mq-kafka'
mockUseQueryBuilder.mockReturnValue({
lastUsedQuery: 0,
currentQuery: {
builder: {
queryData: [
{
filters: {
items: [
{
key: {
key: SERVICE_NAME_KEY,
dataType: DataTypes.String,
type: 'resource',
},
op: 'in',
value: 'mq-kafka',
},
],
op: 'AND',
},
},
],
},
},
redirectWithQueryBuilderData,
} as any);
const mockFilter = createMockFilter({ defaultOpen: true });
render(
<CheckboxFilter
filter={mockFilter}
source={QuickFiltersSource.LOGS_EXPLORER}
/>,
);
// Wait for checkboxes to render
await waitFor(() => {
expect(screen.getAllByRole('checkbox')).toHaveLength(4);
});
const checkboxes = screen.getAllByRole('checkbox');
// First checkbox corresponds to 'mq-kafka' (already selected),
// second will be 'otel-demo' which we now select additionally.
await userEvent.click(checkboxes[1]);
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
const updatedFilters = updatedQuery.builder.queryData[0].filters;
const [filterForServiceName] = updatedFilters.items;
expect(filterForServiceName.key.key).toBe(SERVICE_NAME_KEY);
expect(filterForServiceName.op).toBe('in');
expect(filterForServiceName.value).toEqual(['mq-kafka', 'otel-demo']);
});
});

View File

@@ -1,205 +0,0 @@
import { screen } from '@testing-library/react';
import { render } from 'tests/test-utils';
import ValueGraph from '../index';
import { getBackgroundColorAndThresholdCheck } from '../utils';
// Mock the utils module
jest.mock('../utils', () => ({
getBackgroundColorAndThresholdCheck: jest.fn(() => ({
threshold: {} as any,
isConflictingThresholds: false,
})),
}));
const mockGetBackgroundColorAndThresholdCheck = getBackgroundColorAndThresholdCheck as jest.MockedFunction<
typeof getBackgroundColorAndThresholdCheck
>;
const TEST_ID_VALUE_GRAPH_TEXT = 'value-graph-text';
const TEST_ID_VALUE_GRAPH_PREFIX_UNIT = 'value-graph-prefix-unit';
const TEST_ID_VALUE_GRAPH_SUFFIX_UNIT = 'value-graph-suffix-unit';
describe('ValueGraph', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('renders the numeric value correctly', () => {
const { getByTestId } = render(
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('42');
});
it('renders value with suffix unit', () => {
const { getByTestId } = render(
<ValueGraph value="42ms" rawValue={42} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('42');
expect(getByTestId(TEST_ID_VALUE_GRAPH_SUFFIX_UNIT)).toHaveTextContent('ms');
});
it('renders value with prefix unit', () => {
const { getByTestId } = render(
<ValueGraph value="$100" rawValue={100} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('100');
expect(getByTestId(TEST_ID_VALUE_GRAPH_PREFIX_UNIT)).toHaveTextContent('$');
});
it('renders value with both prefix and suffix units', () => {
const { getByTestId } = render(
<ValueGraph value="$100USD" rawValue={100} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('100');
expect(getByTestId(TEST_ID_VALUE_GRAPH_PREFIX_UNIT)).toHaveTextContent('$');
expect(getByTestId(TEST_ID_VALUE_GRAPH_SUFFIX_UNIT)).toHaveTextContent('USD');
});
it('renders value with K suffix', () => {
const { getByTestId } = render(
<ValueGraph value="1.5K" rawValue={1500} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1.5K');
});
it('applies text color when threshold format is Text', () => {
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
threshold: {
thresholdFormat: 'Text',
thresholdColor: 'red',
} as any,
isConflictingThresholds: false,
});
const { getByTestId } = render(
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveStyle({ color: 'red' });
});
it('applies background color when threshold format is Background', () => {
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
threshold: {
thresholdFormat: 'Background',
thresholdColor: 'blue',
} as any,
isConflictingThresholds: false,
});
const { container } = render(
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
);
const containerElement = container.querySelector('.value-graph-container');
expect(containerElement).toHaveStyle({ backgroundColor: 'blue' });
});
it('displays conflicting thresholds indicator when multiple thresholds match', () => {
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
threshold: {
thresholdFormat: 'Text',
thresholdColor: 'red',
} as any,
isConflictingThresholds: true,
});
const { getByTestId } = render(
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
);
expect(getByTestId('conflicting-thresholds')).toBeInTheDocument();
});
it('does not display conflicting thresholds indicator when no conflict', () => {
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
threshold: {} as any,
isConflictingThresholds: false,
});
render(<ValueGraph value="42" rawValue={42} thresholds={[]} />);
expect(
screen.queryByTestId('conflicting-thresholds'),
).not.toBeInTheDocument();
});
it('applies text color to units when threshold format is Text', () => {
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
threshold: {
thresholdFormat: 'Text',
thresholdColor: 'green',
} as any,
isConflictingThresholds: false,
});
render(<ValueGraph value="42ms" rawValue={42} thresholds={[]} />);
const unitElement = screen.getByText('ms');
expect(unitElement).toHaveStyle({ color: 'green' });
});
it('renders decimal values correctly', () => {
const { getByTestId } = render(
<ValueGraph value="42.5" rawValue={42.5} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('42.5');
});
it('handles values with M suffix', () => {
const { getByTestId } = render(
<ValueGraph value="1.2M" rawValue={1200000} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1.2M');
});
it('handles values with B suffix', () => {
const { getByTestId } = render(
<ValueGraph value="2.3B" rawValue={2300000000} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('2.3B');
});
it('handles scientific notation values', () => {
const { getByTestId } = render(
<ValueGraph value="1e-9" rawValue={1e-9} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1e-9');
});
it('handles scientific notation with suffix unit', () => {
const { getByTestId } = render(
<ValueGraph value="1e-9%" rawValue={1e-9} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1e-9');
expect(getByTestId(TEST_ID_VALUE_GRAPH_SUFFIX_UNIT)).toHaveTextContent('%');
});
it('handles scientific notation with uppercase E', () => {
const { getByTestId } = render(
<ValueGraph value="1E-9" rawValue={1e-9} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1E-9');
});
it('handles scientific notation with positive exponent', () => {
const { getByTestId } = render(
<ValueGraph value="1e+9" rawValue={1e9} thresholds={[]} />,
);
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1e+9');
});
});

View File

@@ -3,39 +3,11 @@ import './ValueGraph.styles.scss';
import { ExclamationCircleFilled } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { useEffect, useMemo, useRef, useState } from 'react';
import { useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { getBackgroundColorAndThresholdCheck } from './utils';
function Unit({
type,
unit,
threshold,
fontSize,
}: {
type: 'prefix' | 'suffix';
unit: string;
threshold: ThresholdProps;
fontSize: string;
}): JSX.Element {
return (
<Typography.Text
className="value-graph-unit"
data-testid={`value-graph-${type}-unit`}
style={{
color:
threshold.thresholdFormat === 'Text'
? threshold.thresholdColor
: undefined,
fontSize: `calc(${fontSize} * 0.7)`,
}}
>
{unit}
</Typography.Text>
);
}
function ValueGraph({
value,
rawValue,
@@ -45,16 +17,10 @@ function ValueGraph({
const containerRef = useRef<HTMLDivElement>(null);
const [fontSize, setFontSize] = useState('2.5vw');
const { numericValue, prefixUnit, suffixUnit } = useMemo(() => {
const matches = value.match(
/^([^\d.]*)?([\d.]+(?:[eE][+-]?[\d]+)?[KMB]?)([^\d.]*)?$/,
);
return {
numericValue: matches?.[2] || value,
prefixUnit: matches?.[1]?.trim() || '',
suffixUnit: matches?.[3]?.trim() || '',
};
}, [value]);
// Parse value to separate number and unit (assuming unit is at the end)
const matches = value.match(/([\d.]+[KMB]?)(.*)$/);
const numericValue = matches?.[1] || value;
const unit = matches?.[2]?.trim() || '';
// Adjust font size based on container size
useEffect(() => {
@@ -99,17 +65,8 @@ function ValueGraph({
}}
>
<div className="value-text-container">
{prefixUnit && (
<Unit
type="prefix"
unit={prefixUnit}
threshold={threshold}
fontSize={fontSize}
/>
)}
<Typography.Text
className="value-graph-text"
data-testid="value-graph-text"
style={{
color:
threshold.thresholdFormat === 'Text'
@@ -120,13 +77,19 @@ function ValueGraph({
>
{numericValue}
</Typography.Text>
{suffixUnit && (
<Unit
type="suffix"
unit={suffixUnit}
threshold={threshold}
fontSize={fontSize}
/>
{unit && (
<Typography.Text
className="value-graph-unit"
style={{
color:
threshold.thresholdFormat === 'Text'
? threshold.thresholdColor
: undefined,
fontSize: `calc(${fontSize} * 0.7)`,
}}
>
{unit}
</Typography.Text>
)}
</div>
{isConflictingThresholds && (

View File

@@ -393,21 +393,15 @@ function ExplorerOptions({
backwardCompatibleOptions = omit(options, 'version');
}
// Use the correct default columns based on the current data source
const defaultColumns =
sourcepage === DataSource.TRACES
? defaultTraceSelectedColumns
: defaultLogsSelectedColumns;
if (extraData.selectColumns?.length) {
handleOptionsChange({
...backwardCompatibleOptions,
selectColumns: extraData.selectColumns,
});
} else if (!isEqual(defaultColumns, options.selectColumns)) {
} else if (!isEqual(defaultTraceSelectedColumns, options.selectColumns)) {
handleOptionsChange({
...backwardCompatibleOptions,
selectColumns: defaultColumns,
selectColumns: defaultTraceSelectedColumns,
});
}
};

View File

@@ -26,6 +26,7 @@ import {
SetStateAction,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
@@ -76,6 +77,7 @@ function WidgetGraphComponent({
const isFullViewOpen = params.get(QueryParams.expandedWidgetId) === widget.id;
const lineChartRef = useRef<ToggleGraphProps>();
const [graphVisibility, setGraphVisibility] = useState<boolean[]>(
Array(queryResponse.data?.payload?.data?.result?.length || 0).fill(true),
);
@@ -110,7 +112,7 @@ function WidgetGraphComponent({
const updateDashboardMutation = useUpdateDashboard();
const onDeleteHandler = (): void => {
const onDeleteHandler = useCallback((): void => {
if (!selectedDashboard) return;
const updatedWidgets = selectedDashboard?.data?.widgets?.filter(
@@ -138,9 +140,15 @@ function WidgetGraphComponent({
setDeleteModal(false);
},
});
};
}, [
selectedDashboard,
widget.id,
updateDashboardMutation,
setLayouts,
setSelectedDashboard,
]);
const onCloneHandler = async (): Promise<void> => {
const onCloneHandler = useCallback(async (): Promise<void> => {
if (!selectedDashboard) return;
const uuid = v4();
@@ -204,9 +212,18 @@ function WidgetGraphComponent({
},
},
);
};
}, [
selectedDashboard,
widget,
updateDashboardMutation,
setLayouts,
setSelectedDashboard,
notifications,
safeNavigate,
pathname,
]);
const handleOnView = (): void => {
const handleOnView = useCallback((): void => {
const queryParams = {
[QueryParams.expandedWidgetId]: widget.id,
};
@@ -225,17 +242,17 @@ function WidgetGraphComponent({
pathname,
search: newSearch,
});
};
}, [widget.id, search, pathname, safeNavigate]);
const handleOnDelete = (): void => {
const handleOnDelete = useCallback((): void => {
onToggleModal(setDeleteModal);
};
}, [onToggleModal]);
const onDeleteModelHandler = (): void => {
const onDeleteModelHandler = useCallback((): void => {
onToggleModal(setDeleteModal);
};
}, [onToggleModal]);
const onToggleModelHandler = (): void => {
const onToggleModelHandler = useCallback((): void => {
const existingSearchParams = new URLSearchParams(search);
existingSearchParams.delete(QueryParams.expandedWidgetId);
existingSearchParams.delete(QueryParams.compositeQuery);
@@ -254,63 +271,84 @@ function WidgetGraphComponent({
pathname,
search: createQueryParams(updatedQueryParams),
});
};
}, [search, queryResponse.data?.payload, widget.id, pathname, safeNavigate]);
const [searchTerm, setSearchTerm] = useState<string>('');
// Memoize the isButtonEnabled value to prevent recalculation
const isGraphClickButtonEnabled = useMemo(
() =>
(widget?.query?.builder?.queryData &&
Array.isArray(widget.query.builder.queryData)
? widget.query.builder.queryData
: []
).some(
(q) =>
q.dataSource === DataSource.TRACES || q.dataSource === DataSource.LOGS,
),
[widget?.query?.builder?.queryData],
);
const graphClick = useGraphClickToShowButton({
graphRef: currentGraphRef?.current ? currentGraphRef : graphRef,
isButtonEnabled: (widget?.query?.builder?.queryData &&
Array.isArray(widget.query.builder.queryData)
? widget.query.builder.queryData
: []
).some(
(q) =>
q.dataSource === DataSource.TRACES || q.dataSource === DataSource.LOGS,
),
isButtonEnabled: isGraphClickButtonEnabled,
buttonClassName: 'view-onclick-show-button',
});
const navigateToExplorer = useNavigateToExplorer();
const graphClickHandler = (
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
metric?: { [key: string]: string },
queryData?: { queryName: string; inFocusOrNot: boolean },
): void => {
const customTracesTimeRange = getCustomTimeRangeWindowSweepInMS(
const graphClickHandler = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
metric?: { [key: string]: string },
queryData?: { queryName: string; inFocusOrNot: boolean },
): void => {
const customTracesTimeRange = getCustomTimeRangeWindowSweepInMS(
customTimeRangeWindowForCoRelation,
);
const { start, end } = getStartAndEndTimesInMilliseconds(
xValue,
customTracesTimeRange,
);
handleGraphClick({
xValue,
yValue,
mouseX,
mouseY,
metric,
queryData,
widget,
navigateToExplorerPages,
navigateToExplorer,
notifications,
graphClick,
...(customTimeRangeWindowForCoRelation
? { customTracesTimeRange: { start, end } }
: {}),
});
},
[
customTimeRangeWindowForCoRelation,
);
const { start, end } = getStartAndEndTimesInMilliseconds(
xValue,
customTracesTimeRange,
);
handleGraphClick({
xValue,
yValue,
mouseX,
mouseY,
metric,
queryData,
widget,
navigateToExplorerPages,
navigateToExplorer,
notifications,
graphClick,
...(customTimeRangeWindowForCoRelation
? { customTracesTimeRange: { start, end } }
: {}),
});
};
],
);
const { truncatedText, fullText } = useGetResolvedText({
text: widget.title as string,
maxLength: 100,
});
// Use the provided onClickHandler if available, otherwise use the default graphClickHandler
// Both should be stable references due to useCallback
const clickHandler = onClickHandler ?? graphClickHandler;
return (
<div
style={{
@@ -366,7 +404,7 @@ function WidgetGraphComponent({
yAxisUnit={widget.yAxisUnit}
onToggleModelHandler={onToggleModelHandler}
tableProcessedDataRef={tableProcessedDataRef}
onClickHandler={onClickHandler ?? graphClickHandler}
onClickHandler={clickHandler}
customOnDragSelect={customOnDragSelect}
setCurrentGraphRef={setCurrentGraphRef}
enableDrillDown={
@@ -416,7 +454,7 @@ function WidgetGraphComponent({
setRequestData={setRequestData}
setGraphVisibility={setGraphVisibility}
graphVisibility={graphVisibility}
onClickHandler={onClickHandler ?? graphClickHandler}
onClickHandler={clickHandler}
onDragSelect={onDragSelect}
tableProcessedDataRef={tableProcessedDataRef}
customTooltipElement={customTooltipElement}

View File

@@ -1,469 +0,0 @@
import { render as rtlRender, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { AppContext } from 'providers/App/App';
import { IAppContext } from 'providers/App/types';
import React, { MutableRefObject } from 'react';
import { QueryClient, QueryClientProvider, UseQueryResult } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import configureStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { SuccessResponse, Warning } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { EQueryType } from 'types/common/dashboard';
import { ROLES } from 'types/roles';
import { MenuItemKeys } from '../contants';
import WidgetHeader from '../index';
const TEST_WIDGET_TITLE = 'Test Widget';
const TABLE_WIDGET_TITLE = 'Table Widget';
const WIDGET_HEADER_SEARCH = 'widget-header-search';
const WIDGET_HEADER_SEARCH_INPUT = 'widget-header-search-input';
const TEST_WIDGET_TITLE_RESOLVED = 'Test Widget Title';
const mockStore = configureStore([thunk]);
const createMockStore = (): ReturnType<typeof mockStore> =>
mockStore({
app: {
role: 'ADMIN',
user: {
userId: 'test-user-id',
email: 'test@signoz.io',
name: 'TestUser',
},
isLoggedIn: true,
org: [],
},
globalTime: {
minTime: '2023-01-01T00:00:00Z',
maxTime: '2023-01-02T00:00:00Z',
},
});
const queryClient = new QueryClient({
defaultOptions: {
queries: {
refetchOnWindowFocus: false,
retry: false,
},
},
});
const createMockAppContext = (): Partial<IAppContext> => ({
user: {
accessJwt: '',
refreshJwt: '',
id: '',
email: '',
displayName: '',
createdAt: 0,
organization: '',
orgId: '',
role: 'ADMIN' as ROLES,
},
});
const render = (ui: React.ReactElement): ReturnType<typeof rtlRender> =>
rtlRender(
<MemoryRouter>
<QueryClientProvider client={queryClient}>
<Provider store={createMockStore()}>
<AppContext.Provider value={createMockAppContext() as IAppContext}>
{ui}
</AppContext.Provider>
</Provider>
</QueryClientProvider>
</MemoryRouter>,
);
jest.mock('hooks/queryBuilder/useCreateAlerts', () => ({
__esModule: true,
default: jest.fn(() => jest.fn()),
}));
jest.mock('hooks/dashboard/useGetResolvedText', () => {
// eslint-disable-next-line sonarjs/no-duplicate-string
const TEST_WIDGET_TITLE_RESOLVED = 'Test Widget Title';
return {
__esModule: true,
default: jest.fn(() => ({
truncatedText: TEST_WIDGET_TITLE_RESOLVED,
fullText: TEST_WIDGET_TITLE_RESOLVED,
})),
};
});
jest.mock('lucide-react', () => ({
CircleX: (): JSX.Element => <svg data-testid="lucide-circle-x" />,
TriangleAlert: (): JSX.Element => <svg data-testid="lucide-triangle-alert" />,
X: (): JSX.Element => <svg data-testid="lucide-x" />,
}));
jest.mock('antd', () => ({
...jest.requireActual('antd'),
Spin: (): JSX.Element => <div data-testid="antd-spin" />,
}));
const mockWidget: Widgets = {
id: 'test-widget-id',
title: TEST_WIDGET_TITLE,
description: 'Test Description',
panelTypes: PANEL_TYPES.TIME_SERIES,
query: {
builder: {
queryData: [],
queryFormulas: [],
queryTraceOperator: [],
},
promql: [],
clickhouse_sql: [],
id: 'query-id',
queryType: 'builder' as EQueryType,
},
timePreferance: 'GLOBAL_TIME',
opacity: '',
nullZeroValues: '',
yAxisUnit: '',
fillSpans: false,
softMin: null,
softMax: null,
selectedLogFields: [],
selectedTracesFields: [],
};
const mockQueryResponse = ({
data: {
payload: {
data: {
result: [],
resultType: '',
},
},
statusCode: 200,
message: 'success',
error: null,
},
isLoading: false,
isError: false,
error: null,
isFetching: false,
} as unknown) as UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown> & {
warning?: Warning;
},
Error
>;
describe('WidgetHeader', () => {
const mockOnView = jest.fn();
const mockSetSearchTerm = jest.fn();
const tableProcessedDataRef: MutableRefObject<RowData[]> = {
current: [
{
timestamp: 1234567890,
key: 'key1',
col1: 'val1',
col2: 'val2',
},
],
};
beforeEach(() => {
jest.clearAllMocks();
});
it('renders widget header with title', () => {
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
expect(screen.getByText(TEST_WIDGET_TITLE_RESOLVED)).toBeInTheDocument();
});
it('returns null for empty widget', () => {
const emptyWidget = {
...mockWidget,
id: PANEL_TYPES.EMPTY_WIDGET,
};
const { container } = render(
<WidgetHeader
title="Empty Widget"
widget={emptyWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
expect(container.innerHTML).toBe('');
});
it('shows search input for table panels', async () => {
const tableWidget = {
...mockWidget,
panelTypes: PANEL_TYPES.TABLE,
};
render(
<WidgetHeader
title={TABLE_WIDGET_TITLE}
widget={tableWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
const searchIcon = screen.getByTestId(WIDGET_HEADER_SEARCH);
expect(searchIcon).toBeInTheDocument();
await userEvent.click(searchIcon);
expect(screen.getByTestId(WIDGET_HEADER_SEARCH_INPUT)).toBeInTheDocument();
});
it('handles search input changes and closing', async () => {
const tableWidget = {
...mockWidget,
panelTypes: PANEL_TYPES.TABLE,
};
render(
<WidgetHeader
title={TABLE_WIDGET_TITLE}
widget={tableWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
const searchIcon = screen.getByTestId(`${WIDGET_HEADER_SEARCH}`);
await userEvent.click(searchIcon);
const searchInput = screen.getByTestId(WIDGET_HEADER_SEARCH_INPUT);
await userEvent.type(searchInput, 'test search');
expect(mockSetSearchTerm).toHaveBeenCalledWith('test search');
const closeButton = screen
.getByTestId(WIDGET_HEADER_SEARCH_INPUT)
.parentElement?.querySelector('.search-header-icons');
if (closeButton) {
await userEvent.click(closeButton);
expect(mockSetSearchTerm).toHaveBeenCalledWith('');
}
});
it('shows error icon when query has error', () => {
const errorResponse = {
...mockQueryResponse,
isError: true as const,
error: { message: 'Test error' } as Error,
data: undefined,
} as UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown> & {
warning?: Warning;
},
Error
>;
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={errorResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
// check if CircleX icon is rendered
const circleXIcon = screen.getByTestId('lucide-circle-x');
expect(circleXIcon).toBeInTheDocument();
});
it('shows warning icon when query has warning', () => {
const warningData = mockQueryResponse.data
? {
...mockQueryResponse.data,
warning: {
code: 'WARNING_CODE',
message: 'Test warning',
url: 'https://example.com',
warnings: [{ message: 'Test warning' }],
} as Warning,
}
: undefined;
const warningResponse = {
...mockQueryResponse,
data: warningData,
} as UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown> & {
warning?: Warning;
},
Error
>;
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={warningResponse}
isWarning
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
const triangleAlertIcon = screen.getByTestId('lucide-triangle-alert');
expect(triangleAlertIcon).toBeInTheDocument();
});
it('shows spinner when fetching response', () => {
const fetchingResponse = {
...mockQueryResponse,
isFetching: true,
isLoading: true,
} as UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown> & {
warning?: Warning;
},
Error
>;
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={fetchingResponse}
isWarning={false}
isFetchingResponse
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
const antSpin = screen.getByTestId('antd-spin');
expect(antSpin).toBeInTheDocument();
});
it('renders menu options icon', () => {
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
headerMenuList={[MenuItemKeys.View]}
/>,
);
const moreOptionsIcon = screen.getByTestId('widget-header-options');
expect(moreOptionsIcon).toBeInTheDocument();
});
it('shows search icon for table panels', () => {
const tableWidget = {
...mockWidget,
panelTypes: PANEL_TYPES.TABLE,
};
render(
<WidgetHeader
title={TABLE_WIDGET_TITLE}
widget={tableWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
const searchIcon = screen.getByTestId(WIDGET_HEADER_SEARCH);
expect(searchIcon).toBeInTheDocument();
});
it('does not show search icon for non-table panels', () => {
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
/>,
);
const searchIcon = screen.queryByTestId(WIDGET_HEADER_SEARCH);
expect(searchIcon).not.toBeInTheDocument();
});
it('renders threshold when provided', () => {
const threshold = <div data-testid="threshold">Threshold Component</div>;
render(
<WidgetHeader
title={TEST_WIDGET_TITLE}
widget={mockWidget}
onView={mockOnView}
parentHover={false}
queryResponse={mockQueryResponse}
isWarning={false}
isFetchingResponse={false}
tableProcessedDataRef={tableProcessedDataRef}
setSearchTerm={mockSetSearchTerm}
threshold={threshold}
/>,
);
expect(screen.getByTestId('threshold')).toBeInTheDocument();
});
});

View File

@@ -35,7 +35,6 @@ import { SuccessResponse, Warning } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import APIError from 'types/api/error';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { buildAbsolutePath } from 'utils/app';
import { errorTooltipPosition } from './config';
import { MENUITEM_KEYS_VS_LABELS, MenuItemKeys } from './contants';
@@ -88,10 +87,7 @@ function WidgetHeader({
QueryParams.compositeQuery,
encodeURIComponent(JSON.stringify(widget.query)),
);
const generatedUrl = buildAbsolutePath({
relativePath: 'new',
urlQueryString: urlQuery.toString(),
});
const generatedUrl = `${window.location.pathname}/new?${urlQuery}`;
safeNavigate(generatedUrl);
}, [safeNavigate, urlQuery, widget.id, widget.panelTypes, widget.query]);
@@ -244,7 +240,6 @@ function WidgetHeader({
onClick={(e): void => {
e.stopPropagation();
e.preventDefault();
setSearchTerm('');
setShowGlobalSearch(false);
}}
className="search-header-icons"

View File
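In the WidgetHeader hunk above, the buildAbsolutePath call is replaced by a plain template literal over window.location.pathname. A small sketch, with assumed example values, of the one behavioural difference this implies: the removed helper (its definition appears further down in this diff) normalizes the trailing slash before appending, while the template literal does not.

// Assumed example values; only the trailing-slash handling is of interest here.
const pathname = '/dashboard/abc/';
const urlQuery = new URLSearchParams({ graphType: 'graph' });

// Direct template literal, as in the hunk above: no normalization.
const direct = `${pathname}/new?${urlQuery}`; // '/dashboard/abc//new?graphType=graph'

// Behaviour of the removed buildAbsolutePath helper: ensure exactly one slash first.
const base = pathname.endsWith('/') ? pathname : `${pathname}/`;
const viaHelper = `${base}new?${urlQuery}`; // '/dashboard/abc/new?graphType=graph'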

@@ -5,7 +5,7 @@ import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useCallback, useMemo } from 'react';
import { useCallback, useMemo, useRef } from 'react';
import { useMutation } from 'react-query';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -46,6 +46,11 @@ function useUpdatedQuery(): UseUpdatedQueryResult {
[selectedDashboard],
);
// Use ref to access latest mutateAsync without recreating the callback
// queryRangeMutation object recreates on every render, but mutateAsync is stable
const mutateAsyncRef = useRef(queryRangeMutation.mutateAsync);
mutateAsyncRef.current = queryRangeMutation.mutateAsync;
const getUpdatedQuery = useCallback(
async ({
widgetConfig,
@@ -63,12 +68,12 @@ function useUpdatedQuery(): UseUpdatedQueryResult {
});
// Execute query and process results
const queryResult = await queryRangeMutation.mutateAsync(queryPayload);
const queryResult = await mutateAsyncRef.current(queryPayload);
// Map query data from API response
return mapQueryDataFromApi(queryResult.data.compositeQuery);
},
[dynamicVariables, globalSelectedInterval, queryRangeMutation],
[dynamicVariables, globalSelectedInterval],
);
return {

View File
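The useUpdatedQuery hunk above keeps mutateAsync in a ref so the useCallback no longer has to depend on the mutation object that is recreated on every render. A minimal, generic sketch of that "latest ref" pattern, with hypothetical names:

import { MutableRefObject, useCallback, useRef } from 'react';

// Keep the latest value of an unstable function in a ref so callbacks that call it
// can keep a stable identity without listing it as a dependency.
function useLatestRef<T>(value: T): MutableRefObject<T> {
  const ref = useRef(value);
  ref.current = value; // refreshed on every render; the ref object itself never changes
  return ref;
}

function useStableRunner(run: (input: string) => Promise<string>) {
  const runRef = useLatestRef(run);

  // Created once, yet always calls the latest `run` through the stable ref.
  return useCallback(async (input: string) => runRef.current(input), [runRef]);
}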

@@ -1,366 +0,0 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { ExplorerViews } from 'pages/LogsExplorer/utils';
import { cleanup, render, screen, waitFor } from 'tests/test-utils';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query, QueryState } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import { explorerViewToPanelType } from 'utils/explorerUtils';
import LogExplorerQuerySection from './index';
const CM_EDITOR_SELECTOR = '.cm-editor .cm-content';
const QUERY_AGGREGATION_TEST_ID = 'query-aggregation-container';
const QUERY_ADDON_TEST_ID = 'query-add-ons';
// Mock DOM APIs that CodeMirror needs
beforeAll(() => {
// Mock getClientRects and getBoundingClientRect for Range objects
const mockRect: DOMRect = {
width: 100,
height: 20,
top: 0,
left: 0,
right: 100,
bottom: 20,
x: 0,
y: 0,
toJSON: (): DOMRect => mockRect,
} as DOMRect;
// Create a minimal Range mock with only what CodeMirror actually uses
const createMockRange = (): Range => {
let startContainer: Node = document.createTextNode('');
let endContainer: Node = document.createTextNode('');
let startOffset = 0;
let endOffset = 0;
const rectList = {
length: 1,
item: (index: number): DOMRect | null => (index === 0 ? mockRect : null),
0: mockRect,
};
const mockRange = {
// CodeMirror uses these for text measurement
getClientRects: (): DOMRectList => (rectList as unknown) as DOMRectList,
getBoundingClientRect: (): DOMRect => mockRect,
// CodeMirror calls these to set up text ranges
setStart: (node: Node, offset: number): void => {
startContainer = node;
startOffset = offset;
},
setEnd: (node: Node, offset: number): void => {
endContainer = node;
endOffset = offset;
},
// Minimal Range properties (TypeScript requires these)
get startContainer(): Node {
return startContainer;
},
get endContainer(): Node {
return endContainer;
},
get startOffset(): number {
return startOffset;
},
get endOffset(): number {
return endOffset;
},
get collapsed(): boolean {
return startContainer === endContainer && startOffset === endOffset;
},
commonAncestorContainer: document.body,
};
return (mockRange as unknown) as Range;
};
// Mock document.createRange to return a new Range instance each time
document.createRange = (): Range => createMockRange();
// Mock getBoundingClientRect for elements
Element.prototype.getBoundingClientRect = (): DOMRect => mockRect;
});
jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: (): boolean => false,
}));
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): { selectedDashboard: undefined } => ({
selectedDashboard: undefined,
}),
}));
jest.mock('api/querySuggestions/getKeySuggestions', () => ({
getKeySuggestions: jest.fn().mockResolvedValue({
data: {
data: { keys: {} },
},
}),
}));
jest.mock('api/querySuggestions/getValueSuggestion', () => ({
getValueSuggestions: jest.fn().mockResolvedValue({
data: { data: { values: { stringValues: [], numberValues: [] } } },
}),
}));
// Mock the hooks
jest.mock('hooks/queryBuilder/useGetPanelTypesQueryParam');
jest.mock('hooks/queryBuilder/useShareBuilderUrl');
const mockUseGetPanelTypesQueryParam = jest.mocked(useGetPanelTypesQueryParam);
const mockUseShareBuilderUrl = jest.mocked(useShareBuilderUrl);
const mockUpdateAllQueriesOperators = jest.fn() as jest.MockedFunction<
(query: Query, panelType: PANEL_TYPES, dataSource: DataSource) => Query
>;
const mockResetQuery = jest.fn() as jest.MockedFunction<
(newCurrentQuery?: QueryState) => void
>;
const mockRedirectWithQueryBuilderData = jest.fn() as jest.MockedFunction<
(query: Query) => void
>;
// Create a mock query that we'll use to verify persistence
const createMockQuery = (filterExpression?: string): Query => ({
id: 'test-query-id',
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
{
aggregateAttribute: {
id: 'body--string----false',
dataType: DataTypes.String,
key: 'body',
type: '',
},
aggregateOperator: 'count',
dataSource: DataSource.LOGS,
disabled: false,
expression: 'A',
filters: {
items: [],
op: 'AND',
},
filter: filterExpression
? {
expression: filterExpression,
}
: undefined,
functions: [],
groupBy: [
{
key: 'cloud.account.id',
type: 'tag',
},
],
having: [],
legend: '',
limit: null,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
pageSize: 0,
queryName: 'A',
reduceTo: 'avg',
stepInterval: 60,
},
],
queryFormulas: [],
queryTraceOperator: [],
},
clickhouse_sql: [],
promql: [],
});
// Helper function to verify CodeMirror content
const verifyCodeMirrorContent = async (
expectedFilterExpression: string,
): Promise<void> => {
await waitFor(
() => {
const editorContent = document.querySelector(
CM_EDITOR_SELECTOR,
) as HTMLElement;
expect(editorContent).toBeInTheDocument();
const textContent = editorContent.textContent || '';
expect(textContent).toBe(expectedFilterExpression);
},
{ timeout: 3000 },
);
};
const VIEWS_TO_TEST = [
ExplorerViews.LIST,
ExplorerViews.TIMESERIES,
ExplorerViews.TABLE,
];
describe('LogExplorerQuerySection', () => {
let mockQuery: Query;
let mockQueryBuilderContext: Partial<QueryBuilderContextType>;
beforeEach(() => {
jest.clearAllMocks();
mockQuery = createMockQuery();
// Mock the return value of updateAllQueriesOperators to return the same query
mockUpdateAllQueriesOperators.mockReturnValue(mockQuery);
// Setup query builder context mock
mockQueryBuilderContext = {
currentQuery: mockQuery,
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
resetQuery: mockResetQuery,
redirectWithQueryBuilderData: mockRedirectWithQueryBuilderData,
panelType: PANEL_TYPES.LIST,
initialDataSource: DataSource.LOGS,
addNewBuilderQuery: jest.fn() as jest.MockedFunction<() => void>,
addNewFormula: jest.fn() as jest.MockedFunction<() => void>,
handleSetConfig: jest.fn() as jest.MockedFunction<
(panelType: PANEL_TYPES, dataSource: DataSource | null) => void
>,
addTraceOperator: jest.fn() as jest.MockedFunction<() => void>,
};
// Mock useGetPanelTypesQueryParam
mockUseGetPanelTypesQueryParam.mockReturnValue(PANEL_TYPES.LIST);
// Mock useShareBuilderUrl
mockUseShareBuilderUrl.mockImplementation(() => {});
});
afterEach(() => {
jest.clearAllMocks();
});
it('should maintain query state across multiple view changes', () => {
const { rerender } = render(
<LogExplorerQuerySection selectedView={ExplorerViews.LIST} />,
undefined,
{
queryBuilderOverrides: mockQueryBuilderContext as QueryBuilderContextType,
},
);
const initialQuery = mockQueryBuilderContext.currentQuery;
VIEWS_TO_TEST.forEach((view) => {
rerender(<LogExplorerQuerySection selectedView={view} />);
expect(mockQueryBuilderContext.currentQuery).toEqual(initialQuery);
});
});
it('should persist filter expressions across view changes', async () => {
// Test with a more complex filter expression
const complexFilter =
"(service.name = 'api-gateway' OR service.name = 'backend') AND http.status_code IN [500, 502, 503] AND NOT error = 'timeout'";
const queryWithComplexFilter = createMockQuery(complexFilter);
const contextWithComplexFilter: Partial<QueryBuilderContextType> = {
...mockQueryBuilderContext,
currentQuery: queryWithComplexFilter,
};
const { rerender } = render(
<LogExplorerQuerySection selectedView={ExplorerViews.LIST} />,
undefined,
{
queryBuilderOverrides: contextWithComplexFilter as QueryBuilderContextType,
},
);
VIEWS_TO_TEST.forEach(async (view) => {
rerender(<LogExplorerQuerySection selectedView={view} />);
await verifyCodeMirrorContent(complexFilter);
});
});
it('should render QueryAggregation and QueryAddOns when switching from LIST to TIMESERIES or TABLE view', async () => {
// Helper function to verify components are rendered
const verifyComponentsRendered = async (): Promise<void> => {
await waitFor(
() => {
expect(screen.getByTestId(QUERY_AGGREGATION_TEST_ID)).toBeInTheDocument();
},
{ timeout: 3000 },
);
await waitFor(
() => {
expect(screen.getByTestId(QUERY_ADDON_TEST_ID)).toBeInTheDocument();
},
{ timeout: 3000 },
);
};
// Start with LIST view - QueryAggregation and QueryAddOns should NOT be rendered
mockUseGetPanelTypesQueryParam.mockReturnValue(PANEL_TYPES.LIST);
const contextWithList: Partial<QueryBuilderContextType> = {
...mockQueryBuilderContext,
panelType: PANEL_TYPES.LIST,
};
render(
<LogExplorerQuerySection selectedView={ExplorerViews.LIST} />,
undefined,
{
queryBuilderOverrides: contextWithList as QueryBuilderContextType,
},
);
// Verify QueryAggregation is NOT rendered in LIST view
expect(
screen.queryByTestId(QUERY_AGGREGATION_TEST_ID),
).not.toBeInTheDocument();
// Verify QueryAddOns is NOT rendered in LIST view (check for one of the add-on tabs)
expect(screen.queryByTestId(QUERY_ADDON_TEST_ID)).not.toBeInTheDocument();
cleanup();
// Switch to TIMESERIES view
const timeseriesPanelType = explorerViewToPanelType[ExplorerViews.TIMESERIES];
mockUseGetPanelTypesQueryParam.mockReturnValue(timeseriesPanelType);
const contextWithTimeseries: Partial<QueryBuilderContextType> = {
...mockQueryBuilderContext,
panelType: timeseriesPanelType,
};
render(
<LogExplorerQuerySection selectedView={ExplorerViews.TIMESERIES} />,
undefined,
{
queryBuilderOverrides: contextWithTimeseries as QueryBuilderContextType,
},
);
// Verify QueryAggregation and QueryAddOns are rendered
await verifyComponentsRendered();
cleanup();
// Switch to TABLE view
const tablePanelType = explorerViewToPanelType[ExplorerViews.TABLE];
mockUseGetPanelTypesQueryParam.mockReturnValue(tablePanelType);
const contextWithTable: Partial<QueryBuilderContextType> = {
...mockQueryBuilderContext,
panelType: tablePanelType,
};
render(
<LogExplorerQuerySection selectedView={ExplorerViews.TABLE} />,
undefined,
{
queryBuilderOverrides: contextWithTable as QueryBuilderContextType,
},
);
// Verify QueryAggregation and QueryAddOns are still rendered in TABLE view
await verifyComponentsRendered();
});
});

View File

@@ -238,6 +238,86 @@ function External(): JSX.Element {
setSelectedData,
);
const onErrorPercentageClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data: any,
): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_error_percentage',
data,
);
},
[onGraphClickHandler],
);
const onDurationClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data: any,
): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_duration',
data,
);
},
[onGraphClickHandler],
);
const onRPSByAddressClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data: any,
): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_rps_by_address',
data,
);
},
[onGraphClickHandler],
);
const onDurationByAddressClick = useCallback(
(
xValue: number,
yValue: number,
mouseX: number,
mouseY: number,
data: any,
): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_duration_by_address',
data,
);
},
[onGraphClickHandler],
);
return (
<>
<Row gutter={24}>
@@ -266,16 +346,7 @@ function External(): JSX.Element {
<Graph
headerMenuList={MENU_ITEMS}
widget={externalCallErrorWidget}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_error_percentage',
data,
);
}}
onClickHandler={onErrorPercentageClick}
onDragSelect={onDragSelect}
version={ENTITY_VERSION_V4}
/>
@@ -309,16 +380,7 @@ function External(): JSX.Element {
<Graph
headerMenuList={MENU_ITEMS}
widget={externalCallDurationWidget}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_duration',
data,
);
}}
onClickHandler={onDurationClick}
onDragSelect={onDragSelect}
version={ENTITY_VERSION_V4}
/>
@@ -353,16 +415,7 @@ function External(): JSX.Element {
<Graph
widget={externalCallRPSWidget}
headerMenuList={MENU_ITEMS}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): Promise<void> =>
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_rps_by_address',
data,
)
}
onClickHandler={onRPSByAddressClick}
onDragSelect={onDragSelect}
version={ENTITY_VERSION_V4}
/>
@@ -396,16 +449,7 @@ function External(): JSX.Element {
<Graph
widget={externalCallDurationAddressWidget}
headerMenuList={MENU_ITEMS}
onClickHandler={(xValue, yValue, mouseX, mouseY, data): void => {
onGraphClickHandler(
xValue,
yValue,
mouseX,
mouseY,
'external_call_duration_by_address',
data,
);
}}
onClickHandler={onDurationByAddressClick}
onDragSelect={onDragSelect}
version={ENTITY_VERSION_V4}
/>

View File
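The four useCallback handlers added to External() differ only in the metric key they forward to onGraphClickHandler. A hedged alternative sketch (not the code in this PR) of how a small parameterised hook could produce the same stable handlers:

import { useCallback } from 'react';

type GraphClick = (
  xValue: number,
  yValue: number,
  mouseX: number,
  mouseY: number,
  data?: unknown,
) => void;

// Hypothetical factory: the widget key is the only thing that differs between
// the four handlers above, so one hook call per widget would cover them all.
function useWidgetClickHandler(
  onGraphClickHandler: (
    xValue: number,
    yValue: number,
    mouseX: number,
    mouseY: number,
    widgetKey: string,
    data?: unknown,
  ) => void,
  widgetKey: string,
): GraphClick {
  return useCallback(
    (xValue, yValue, mouseX, mouseY, data): void => {
      onGraphClickHandler(xValue, yValue, mouseX, mouseY, widgetKey, data);
    },
    [onGraphClickHandler, widgetKey],
  );
}

// Usage (assumed):
// const onDurationClick = useWidgetClickHandler(onGraphClickHandler, 'external_call_duration');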

@@ -501,7 +501,7 @@ function NewWidget({
stackedBarChart: selectedWidget?.stackedBarChart || false,
yAxisUnit: selectedWidget?.yAxisUnit,
decimalPrecision:
selectedWidget?.decimalPrecision ?? PrecisionOptionsEnum.TWO,
selectedWidget?.decimalPrecision || PrecisionOptionsEnum.TWO,
panelTypes: graphType,
query: adjustedQueryForV5,
thresholds: selectedWidget?.thresholds,
@@ -532,7 +532,7 @@ function NewWidget({
stackedBarChart: selectedWidget?.stackedBarChart || false,
yAxisUnit: selectedWidget?.yAxisUnit,
decimalPrecision:
selectedWidget?.decimalPrecision ?? PrecisionOptionsEnum.TWO,
selectedWidget?.decimalPrecision || PrecisionOptionsEnum.TWO,
panelTypes: graphType,
query: adjustedQueryForV5,
thresholds: selectedWidget?.thresholds,

View File
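The only change in the NewWidget hunk above is swapping between || and ?? for decimalPrecision. A short sketch, using a hypothetical enum in place of PrecisionOptionsEnum, of why the two operators disagree when a widget stores a precision of zero:

// Hypothetical enum standing in for PrecisionOptionsEnum; only the numeric values matter.
enum Precision {
  ZERO = 0,
  TWO = 2,
}

const stored: number | undefined = Precision.ZERO; // user explicitly chose 0 decimals

const withOr = stored || Precision.TWO; // 2 – 0 is falsy, so the default silently wins
const withNullish = stored ?? Precision.TWO; // 0 – only null/undefined fall back to the default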

@@ -516,8 +516,6 @@
"falcon",
"fastapi",
"flask",
"celery",
"gunicorn",
"monitor python application",
"monitor python backend",
"opentelemetry python",
@@ -542,33 +540,134 @@
],
"id": "python",
"question": {
"desc": "What is your Environment?",
"desc": "Which Python framework do you use?",
"type": "select",
"entityID": "environment",
"entityID": "framework",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
"key": "flask",
"label": "Flask",
"imgUrl": "/Logos/flask.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-flask/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-flask/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-flask/"
}
]
}
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
"key": "django",
"label": "Django",
"imgUrl": "/Logos/django.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-django/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-django/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-django/"
}
]
}
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
"key": "fastapi",
"label": "FastAPI",
"imgUrl": "/Logos/fastapi.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-fastapi/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-fastapi/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-fastapi/"
}
]
}
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
"key": "falcon",
"label": "Falcon",
"imgUrl": "/Logos/falcon.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-falcon/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-falcon/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-falcon/"
}
]
}
},
{
"key": "others",
"label": "Others",
"imgUrl": "/Logos/python-others.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "https://signoz.io/docs/instrumentation/opentelemetry-python/"
}
]
}
}
]
}
@@ -5323,7 +5422,7 @@
"imgUrl": "/Logos/logs.svg",
"tags": [
"Frontend Monitoring",
"logs"
"logs"
],
"module": "logs",
"relatedSearchKeywords": [

View File

@@ -1,4 +1,5 @@
import { FC } from 'react';
import isEqual from 'lodash-es/isEqual';
import { FC, memo } from 'react';
import { PanelTypeVsPanelWrapper } from './constants';
import { PanelWrapperProps } from './panelWrapper.types';
@@ -55,4 +56,36 @@ function PanelWrapper({
);
}
export default PanelWrapper;
function arePropsEqual(
prevProps: PanelWrapperProps,
nextProps: PanelWrapperProps,
): boolean {
// Destructure to separate props that need deep comparison from the rest
const {
widget: prevWidget,
queryResponse: prevQueryResponse,
...prevRest
} = prevProps;
const {
widget: nextWidget,
queryResponse: nextQueryResponse,
...nextRest
} = nextProps;
// Shallow equality check for all other props (primitives, functions, refs, arrays)
const restKeys = Object.keys(prevRest) as Array<
keyof Omit<PanelWrapperProps, 'widget' | 'queryResponse'>
>;
if (restKeys.some((key) => prevRest[key] !== nextRest[key])) {
return false;
}
// Deep equality only for widget config and query response data payload
return (
isEqual(prevWidget, nextWidget) &&
isEqual(prevQueryResponse.data?.payload, nextQueryResponse.data?.payload)
);
}
export default memo(PanelWrapper, arePropsEqual);

View File
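PanelWrapper is now wrapped in memo with the arePropsEqual comparator above: every prop is shallow-compared except widget and queryResponse.data.payload, which are deep-compared. A minimal usage sketch, with assumed prop shapes, of how React.memo consults such a comparator:

import isEqual from 'lodash-es/isEqual';
import { memo } from 'react';

interface PanelProps {
  widget: { id: string; title: string };
  onClick: () => void;
}

function Panel({ widget }: PanelProps): JSX.Element {
  return <div>{widget.title}</div>;
}

// React.memo calls the comparator with previous and next props; returning true skips the
// re-render. Deep-compare the config object, keep everything else (here: onClick) shallow.
export default memo(
  Panel,
  (prev, next) => prev.onClick === next.onClick && isEqual(prev.widget, next.widget),
);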

@@ -132,11 +132,21 @@ function UplotPanelWrapper({
[selectedGraph, widget?.panelTypes, widget?.stackedBarChart],
);
const chartData = getUPlotChartData(
queryResponse?.data?.payload,
widget.fillSpans,
stackedBarChart,
hiddenGraph,
// Memoize chartData to prevent unnecessary recalculations
const chartData = useMemo(
() =>
getUPlotChartData(
queryResponse?.data?.payload,
widget.fillSpans,
stackedBarChart,
hiddenGraph,
),
[
queryResponse?.data?.payload,
widget.fillSpans,
stackedBarChart,
hiddenGraph,
],
);
useEffect(() => {

View File
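The UplotPanelWrapper hunk memoizes the getUPlotChartData result so the series array is rebuilt only when its inputs change and keeps a stable reference otherwise. A small sketch of the same idea, with a hypothetical transform standing in for getUPlotChartData:

import { useMemo } from 'react';

interface SeriesPoint {
  x: number;
  y: number;
}

// Hypothetical transform standing in for getUPlotChartData.
function buildSeries(payload: number[] | undefined, fillSpans: boolean): SeriesPoint[] {
  return (payload ?? []).map((value, index) => ({
    x: index,
    y: fillSpans && Number.isNaN(value) ? 0 : value,
  }));
}

function useChartData(payload: number[] | undefined, fillSpans: boolean): SeriesPoint[] {
  // Rebuild the series only when the inputs change; an unchanged reference lets
  // memoized chart children (e.g. the memo(PanelWrapper) above) skip re-rendering.
  return useMemo(() => buildSeries(payload, fillSpans), [payload, fillSpans]);
}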

@@ -49,14 +49,12 @@ exports[`Value panel wrappper tests should render tooltip when there are conflic
>
<span
class="ant-typography value-graph-text css-dev-only-do-not-override-2i2tap"
data-testid="value-graph-text"
style="color: Blue; font-size: 16px;"
>
295.43
</span>
<span
class="ant-typography value-graph-unit css-dev-only-do-not-override-2i2tap"
data-testid="value-graph-suffix-unit"
style="color: Blue; font-size: calc(16px * 0.7);"
>
ms

View File

@@ -3,7 +3,6 @@ import { Tooltip, Typography } from 'antd';
import AttributeWithExpandablePopover from './AttributeWithExpandablePopover';
const EXPANDABLE_ATTRIBUTE_KEYS = ['exception.stacktrace', 'exception.message'];
const ATTRIBUTE_LENGTH_THRESHOLD = 100;
interface EventAttributeProps {
attributeKey: string;
@@ -16,11 +15,7 @@ function EventAttribute({
attributeValue,
onExpand,
}: EventAttributeProps): JSX.Element {
const shouldExpand =
EXPANDABLE_ATTRIBUTE_KEYS.includes(attributeKey) ||
attributeValue.length > ATTRIBUTE_LENGTH_THRESHOLD;
if (shouldExpand) {
if (EXPANDABLE_ATTRIBUTE_KEYS.includes(attributeKey)) {
return (
<AttributeWithExpandablePopover
attributeKey={attributeKey}

View File

@@ -51,12 +51,9 @@
padding: 10px 12px;
.item {
&,
.attribute-container {
display: flex;
flex-direction: column;
gap: 8px;
}
display: flex;
flex-direction: column;
gap: 8px;
.span-name-wrapper {
display: flex;
@@ -416,7 +413,6 @@
text-transform: uppercase;
}
.attribute-container .wrapper,
.value-wrapper {
display: flex;
align-items: center;

View File

@@ -4,7 +4,6 @@ import {
Button,
Checkbox,
Input,
Modal,
Select,
Skeleton,
Tabs,
@@ -53,7 +52,6 @@ import { formatEpochTimestamp } from 'utils/timeUtils';
import Attributes from './Attributes/Attributes';
import { RelatedSignalsViews } from './constants';
import EventAttribute from './Events/components/EventAttribute';
import Events from './Events/Events';
import LinkedSpans from './LinkedSpans/LinkedSpans';
import SpanRelatedSignals from './SpanRelatedSignals/SpanRelatedSignals';
@@ -168,27 +166,11 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
setShouldUpdateUserPreference,
] = useState<boolean>(false);
const [statusMessageModalContent, setStatusMessageModalContent] = useState<{
title: string;
content: string;
} | null>(null);
const handleTimeRangeChange = useCallback((value: number): void => {
setShouldFetchSpanPercentilesData(true);
setSelectedTimeRange(value);
}, []);
const showStatusMessageModal = useCallback(
(title: string, content: string): void => {
setStatusMessageModalContent({ title, content });
},
[],
);
const handleStatusMessageModalCancel = useCallback((): void => {
setStatusMessageModalContent(null);
}, []);
const color = generateColor(
selectedSpan?.serviceName || '',
themeColors.traceDetailColors,
@@ -886,11 +868,14 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
{selectedSpan.statusMessage && (
<div className="item">
<EventAttribute
attributeKey="status message"
attributeValue={selectedSpan.statusMessage}
onExpand={showStatusMessageModal}
/>
<Typography.Text className="attribute-key">
status message
</Typography.Text>
<div className="value-wrapper">
<Typography.Text className="attribute-value">
{selectedSpan.statusMessage}
</Typography.Text>
</div>
</div>
)}
<div className="item">
@@ -951,19 +936,6 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
key={activeDrawerView}
/>
)}
<Modal
title={statusMessageModalContent?.title}
open={!!statusMessageModalContent}
onCancel={handleStatusMessageModalCancel}
footer={null}
width="80vw"
centered
>
<pre className="attribute-with-expandable-popover__full-view">
{statusMessageModalContent?.content}
</pre>
</Modal>
</>
);
}

View File

@@ -29,8 +29,6 @@ import {
mockEmptyLogsResponse,
mockSpan,
mockSpanLogsResponse,
mockSpanWithLongStatusMessage,
mockSpanWithShortStatusMessage,
} from './mockData';
// Get typed mocks
@@ -130,39 +128,6 @@ jest.mock('lib/uPlotLib/utils/generateColor', () => ({
generateColor: jest.fn().mockReturnValue('#1f77b4'),
}));
jest.mock(
'container/SpanDetailsDrawer/Events/components/AttributeWithExpandablePopover',
() =>
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
function ({
attributeKey,
attributeValue,
onExpand,
}: {
attributeKey: string;
attributeValue: string;
onExpand: (title: string, content: string) => void;
}) {
return (
<div className="attribute-container" key={attributeKey}>
<div className="attribute-key">{attributeKey}</div>
<div className="wrapper">
<div className="attribute-value">{attributeValue}</div>
<div data-testid="popover-content">
<pre>{attributeValue}</pre>
<button
type="button"
onClick={(): void => onExpand(attributeKey, attributeValue)}
>
Expand
</button>
</div>
</div>
</div>
);
},
);
// Mock getSpanPercentiles API
jest.mock('api/trace/getSpanPercentiles', () => ({
__esModule: true,
@@ -1188,112 +1153,3 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
expect(searchInput).toHaveFocus();
});
});
describe('SpanDetailsDrawer - Status Message Truncation User Flows', () => {
beforeEach(() => {
jest.clearAllMocks();
mockSafeNavigate.mockClear();
mockWindowOpen.mockClear();
mockUpdateAllQueriesOperators.mockClear();
(GetMetricQueryRange as jest.Mock).mockImplementation(() =>
Promise.resolve(mockEmptyLogsResponse),
);
});
afterEach(() => {
server.resetHandlers();
});
it('should display expandable popover with Expand button for long status message', () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithLongStatusMessage}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// User sees status message label
expect(screen.getByText('status message')).toBeInTheDocument();
// User sees the status message value (appears in both original element and popover preview)
const statusMessageElements = screen.getAllByText(
mockSpanWithLongStatusMessage.statusMessage,
);
expect(statusMessageElements.length).toBeGreaterThan(0);
// User sees Expand button in popover (popover is mocked to render immediately)
const expandButton = screen.getByRole('button', { name: /expand/i });
expect(expandButton).toBeInTheDocument();
});
it('should open modal with full status message when user clicks Expand button', async () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithLongStatusMessage}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// User clicks the Expand button (popover is mocked to render immediately)
const expandButton = screen.getByRole('button', { name: /expand/i });
await fireEvent.click(expandButton);
// User sees modal with the full status message content
await waitFor(() => {
// Modal should be visible with the title
const modalTitle = document.querySelector('.ant-modal-title');
expect(modalTitle).toBeInTheDocument();
expect(modalTitle?.textContent).toBe('status message');
// Modal content should contain the full message in a pre tag
const preElement = document.querySelector(
'.attribute-with-expandable-popover__full-view',
);
expect(preElement).toBeInTheDocument();
expect(preElement?.textContent).toBe(
mockSpanWithLongStatusMessage.statusMessage,
);
});
});
it('should display short status message as simple text without popover', () => {
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
<SpanDetailsDrawer
isSpanDetailsDocked={false}
setIsSpanDetailsDocked={jest.fn()}
selectedSpan={mockSpanWithShortStatusMessage}
traceStartTime={1640995200000}
traceEndTime={1640995260000}
/>
</QueryBuilderContext.Provider>,
);
// User sees status message label and value
expect(screen.getByText('status message')).toBeInTheDocument();
expect(
screen.getByText(mockSpanWithShortStatusMessage.statusMessage),
).toBeInTheDocument();
// User hovers over the status message value
const statusMessageValue = screen.getByText(
mockSpanWithShortStatusMessage.statusMessage,
);
fireEvent.mouseEnter(statusMessageValue);
// No Expand button should appear (no expandable popover for short messages)
expect(
screen.queryByRole('button', { name: /expand/i }),
).not.toBeInTheDocument();
});
});

View File

@@ -35,19 +35,6 @@ export const mockSpan: Span = {
level: 0,
};
// Mock span with long status message (> 100 characters) for testing truncation
export const mockSpanWithLongStatusMessage: Span = {
...mockSpan,
statusMessage:
'Error: Connection timeout occurred while trying to reach the database server. The connection pool was exhausted and all retry attempts failed after 30 seconds.',
};
// Mock span with short status message (<= 100 characters)
export const mockSpanWithShortStatusMessage: Span = {
...mockSpan,
statusMessage: 'Connection successful',
};
// Mock logs with proper relationships
export const mockSpanLogs: ILog[] = [
{

View File

@@ -77,6 +77,20 @@ function MessagingQueuesGraph(): JSX.Element {
});
}
};
const onClickHandler = useCallback(
(
xValue: number,
_yValue: number,
_mouseX: number,
_mouseY: number,
data?: any,
): void => {
setSelectedTimelineQuery(urlQuery, xValue, location, history, data);
},
[urlQuery, location, history],
);
return (
<Card
isDarkMode={isDarkMode}
@@ -86,9 +100,7 @@ function MessagingQueuesGraph(): JSX.Element {
<GridCard
widget={widgetData}
headerMenuList={[...ViewMenuAction]}
onClickHandler={(xValue, _yValue, _mouseX, _mouseY, data): void => {
setSelectedTimelineQuery(urlQuery, xValue, location, history, data);
}}
onClickHandler={onClickHandler}
onDragSelect={onDragSelect}
customTooltipElement={messagingQueueCustomTooltipText()}
dataAvailable={checkIfDataExists}

View File

@@ -18,16 +18,6 @@ jest.mock('api/browser/localstorage/get', () => ({
default: jest.fn((key: string) => mockLocalStorage[key] || null),
}));
const mockLogsColumns = [
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
{ name: 'body', signal: 'logs', fieldContext: 'log' },
];
const mockTracesColumns = [
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
{ name: 'name', signal: 'traces', fieldContext: 'span' },
];
describe('logsLoaderConfig', () => {
// Save original location object
const originalWindowLocation = window.location;
@@ -167,83 +157,4 @@ describe('logsLoaderConfig', () => {
} as FormattingOptions,
});
});
describe('Column validation - filtering Traces columns', () => {
it('should filter out Traces columns (name with traces signal) from URL', async () => {
const mixedColumns = [...mockLogsColumns, ...mockTracesColumns];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: mixedColumns,
}),
)}`;
const result = await logsLoaderConfig.url();
// Should only keep logs columns
expect(result.columns).toEqual(mockLogsColumns);
});
it('should filter out Traces columns from localStorage', async () => {
const tracesColumns = [...mockTracesColumns];
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
selectColumns: tracesColumns,
});
const result = await logsLoaderConfig.local();
// Should filter out all Traces columns
expect(result.columns).toEqual([]);
});
it('should accept valid Logs columns from URL', async () => {
const logsColumns = [...mockLogsColumns];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: logsColumns,
}),
)}`;
const result = await logsLoaderConfig.url();
expect(result.columns).toEqual(logsColumns);
});
it('should fall back to defaults when all columns are filtered out from URL', async () => {
const tracesColumns = [...mockTracesColumns];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: tracesColumns,
}),
)}`;
const result = await logsLoaderConfig.url();
// Should return empty array, which triggers fallback to defaults in preferencesLoader
expect(result.columns).toEqual([]);
});
it('should handle columns without signal field (legacy data)', async () => {
const columnsWithoutSignal = [
{ name: 'body', fieldContext: 'log' },
{ name: 'service.name', fieldContext: 'resource' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: columnsWithoutSignal,
}),
)}`;
const result = await logsLoaderConfig.url();
// Without signal field, columns pass through validation
// This matches the current implementation behavior where only columns
// with signal !== 'logs' are filtered out
expect(result.columns).toEqual(columnsWithoutSignal);
});
});
});

View File

@@ -1,4 +1,3 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { LOCALSTORAGE } from 'constants/localStorage';
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
import {
@@ -127,112 +126,4 @@ describe('tracesLoaderConfig', () => {
columns: defaultTraceSelectedColumns as TelemetryFieldKey[],
});
});
describe('Column validation - filtering Logs columns', () => {
it('should filter out Logs columns (body) from URL', async () => {
const logsColumns = [
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
{ name: 'body', signal: 'logs', fieldContext: 'log' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: logsColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Should filter out all Logs columns
expect(result.columns).toEqual([]);
});
it('should filter out Logs columns (timestamp with logs signal) from URL', async () => {
const mixedColumns = [
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: mixedColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Should only keep trace columns
expect(result.columns).toEqual([
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
]);
});
it('should filter out Logs columns from localStorage', async () => {
const logsColumns = [
{ name: 'body', signal: 'logs', fieldContext: 'log' },
{ name: 'timestamp', signal: 'logs', fieldContext: 'log' },
];
mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
selectColumns: logsColumns,
});
const result = await tracesLoaderConfig.local();
// Should filter out all Logs columns
expect(result.columns).toEqual([]);
});
it('should accept valid Trace columns from URL', async () => {
const traceColumns = [
{ name: 'service.name', signal: 'traces', fieldContext: 'resource' },
{ name: 'name', signal: 'traces', fieldContext: 'span' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: traceColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
expect(result.columns).toEqual(traceColumns);
});
it('should fall back to defaults when all columns are filtered out from URL', async () => {
const logsColumns = [{ name: 'body', signal: 'logs' }];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: logsColumns,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Should return empty array, which triggers fallback to defaults in preferencesLoader
expect(result.columns).toEqual([]);
});
it('should handle columns without signal field (legacy data)', async () => {
const columnsWithoutSignal = [
{ name: 'service.name', fieldContext: 'resource' },
{ name: 'body', fieldContext: 'log' },
];
mockedLocation.search = `?options=${encodeURIComponent(
JSON.stringify({
selectColumns: columnsWithoutSignal,
}),
)}`;
const result = await tracesLoaderConfig.url();
// Without signal field, columns pass through validation
// This matches the current implementation behavior where only columns
// with signal !== 'traces' are filtered out
expect(result.columns).toEqual(columnsWithoutSignal);
});
});
});

View File

@@ -8,18 +8,6 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
import { FormattingOptions } from '../types';
/**
* Validates if a column is valid for Logs Explorer
* Filters out Traces-specific columns that would cause query failures
*/
const isValidLogColumn = (col: {
name?: string;
signal?: string;
[key: string]: unknown;
}): boolean =>
// If column has signal field, it must be 'logs'
!(col?.signal && col.signal !== 'logs');
// --- LOGS preferences loader config ---
const logsLoaders = {
local: (): {
@@ -30,14 +18,8 @@ const logsLoaders = {
if (local) {
try {
const parsed = JSON.parse(local);
const localColumns = parsed.selectColumns || [];
// Filter out invalid columns (e.g., Traces columns that might have been incorrectly stored)
const validLogColumns = localColumns.filter(isValidLogColumn);
return {
columns: validLogColumns.length > 0 ? validLogColumns : [],
columns: parsed.selectColumns || [],
formatting: {
maxLines: parsed.maxLines ?? 2,
format: parsed.format ?? 'table',
@@ -56,14 +38,8 @@ const logsLoaders = {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
const urlColumns = options.selectColumns || [];
// Filter out invalid columns (e.g., Traces columns that might have been incorrectly stored)
const validLogColumns = urlColumns.filter(isValidLogColumn);
return {
columns: validLogColumns.length > 0 ? validLogColumns : [],
columns: options.selectColumns || [],
formatting: {
maxLines: options.maxLines ?? 2,
format: options.format ?? 'table',

View File

@@ -5,18 +5,6 @@ import { LOCALSTORAGE } from 'constants/localStorage';
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
/**
* Validates if a column is valid for Traces Explorer
* Filters out Logs-specific columns that would cause query failures
*/
const isValidTraceColumn = (col: {
name?: string;
signal?: string;
[key: string]: unknown;
}): boolean =>
// If column has signal field, it must be 'traces'
!(col?.signal && col.signal !== 'traces');
// --- TRACES preferences loader config ---
const tracesLoaders = {
local: (): {
@@ -26,13 +14,8 @@ const tracesLoaders = {
if (local) {
try {
const parsed = JSON.parse(local);
const localColumns = parsed.selectColumns || [];
// Filter out invalid columns (e.g., Logs columns that might have been incorrectly stored)
const validTraceColumns = localColumns.filter(isValidTraceColumn);
return {
columns: validTraceColumns.length > 0 ? validTraceColumns : [],
columns: parsed.selectColumns || [],
};
} catch {}
}
@@ -44,15 +27,8 @@ const tracesLoaders = {
const urlParams = new URLSearchParams(window.location.search);
try {
const options = JSON.parse(urlParams.get('options') || '{}');
const urlColumns = options.selectColumns || [];
// Filter out invalid columns (e.g., Logs columns)
// Only accept columns that are valid for Traces (signal='traces' or columns without signal that aren't logs-specific)
const validTraceColumns = urlColumns.filter(isValidTraceColumn);
// Only return columns if we have valid trace columns, otherwise return empty to fall back to defaults
return {
columns: validTraceColumns.length > 0 ? validTraceColumns : [],
columns: options.selectColumns || [],
};
} catch {}
return { columns: [] };

View File

@@ -29,20 +29,6 @@ import { QueryBuilderContextType } from 'types/common/queryBuilder';
import { ROLES, USER_ROLES } from 'types/roles';
// import { MemoryRouter as V5MemoryRouter } from 'react-router-dom-v5-compat';
// Mock ResizeObserver
class ResizeObserverMock {
// eslint-disable-next-line class-methods-use-this
observe(): void {}
// eslint-disable-next-line class-methods-use-this
unobserve(): void {}
// eslint-disable-next-line class-methods-use-this
disconnect(): void {}
}
global.ResizeObserver = (ResizeObserverMock as unknown) as typeof ResizeObserver;
const queryClient = new QueryClient({
defaultOptions: {
queries: {

View File

@@ -1,126 +0,0 @@
import { buildAbsolutePath } from '../app';
const BASE_PATH = '/some-base-path';
describe('buildAbsolutePath', () => {
const originalLocation = window.location;
afterEach(() => {
Object.defineProperty(window, 'location', {
writable: true,
value: originalLocation,
});
});
const mockLocation = (pathname: string): void => {
Object.defineProperty(window, 'location', {
writable: true,
value: {
pathname,
href: `http://localhost:8080${pathname}`,
origin: 'http://localhost:8080',
protocol: 'http:',
host: 'localhost',
hostname: 'localhost',
port: '',
search: '',
hash: '',
},
});
};
describe('when base path ends with a forward slash', () => {
beforeEach(() => {
mockLocation(`${BASE_PATH}/`);
});
it('should build absolute path without query string', () => {
const result = buildAbsolutePath({ relativePath: 'users' });
expect(result).toBe(`${BASE_PATH}/users`);
});
it('should build absolute path with query string', () => {
const result = buildAbsolutePath({
relativePath: 'users',
urlQueryString: 'id=123&sort=name',
});
expect(result).toBe(`${BASE_PATH}/users?id=123&sort=name`);
});
it('should handle nested relative paths', () => {
const result = buildAbsolutePath({ relativePath: 'users/profile/settings' });
expect(result).toBe(`${BASE_PATH}/users/profile/settings`);
});
});
describe('when base path does not end with a forward slash', () => {
beforeEach(() => {
mockLocation(`${BASE_PATH}`);
});
it('should append forward slash and build absolute path', () => {
const result = buildAbsolutePath({ relativePath: 'users' });
expect(result).toBe(`${BASE_PATH}/users`);
});
it('should append forward slash and build absolute path with query string', () => {
const result = buildAbsolutePath({
relativePath: 'users',
urlQueryString: 'filter=active',
});
expect(result).toBe(`${BASE_PATH}/users?filter=active`);
});
});
describe('edge cases', () => {
it('should handle empty relative path', () => {
mockLocation(`${BASE_PATH}/`);
const result = buildAbsolutePath({ relativePath: '' });
expect(result).toBe(`${BASE_PATH}/`);
});
it('should handle query string with empty relative path', () => {
mockLocation(`${BASE_PATH}/`);
const result = buildAbsolutePath({
relativePath: '',
urlQueryString: 'search=test',
});
expect(result).toBe(`${BASE_PATH}/?search=test`);
});
it('should handle relative path starting with forward slash', () => {
mockLocation(`${BASE_PATH}/`);
const result = buildAbsolutePath({ relativePath: '/users' });
expect(result).toBe(`${BASE_PATH}/users`);
});
it('should handle complex query strings', () => {
mockLocation(`${BASE_PATH}/dashboard`);
const result = buildAbsolutePath({
relativePath: 'reports',
urlQueryString: 'date=2024-01-01&type=summary&format=pdf',
});
expect(result).toBe(
`${BASE_PATH}/dashboard/reports?date=2024-01-01&type=summary&format=pdf`,
);
});
it('should handle undefined query string', () => {
mockLocation(`${BASE_PATH}/`);
const result = buildAbsolutePath({
relativePath: 'users',
urlQueryString: undefined,
});
expect(result).toBe(`${BASE_PATH}/users`);
});
it('should handle empty query string', () => {
mockLocation(`${BASE_PATH}/`);
const result = buildAbsolutePath({
relativePath: 'users',
urlQueryString: '',
});
expect(result).toBe(`${BASE_PATH}/users`);
});
});
});

View File

@@ -61,58 +61,6 @@ describe('extractQueryPairs', () => {
]);
});
test('should test for filter expression with freeText', () => {
const input = "disconnected deployment.env not in ['mq-kafka']";
const result = extractQueryPairs(input);
expect(result).toEqual([
{
key: 'disconnected',
operator: '',
valueList: [],
valuesPosition: [],
hasNegation: false,
isMultiValue: false,
value: undefined,
position: {
keyStart: 0,
keyEnd: 11,
operatorStart: 0,
operatorEnd: 0,
negationStart: 0,
negationEnd: 0,
valueStart: undefined,
valueEnd: undefined,
},
isComplete: false,
},
{
key: 'deployment.env',
operator: 'in',
value: "['mq-kafka']",
valueList: ["'mq-kafka'"],
valuesPosition: [
{
start: 36,
end: 45,
},
],
hasNegation: true,
isMultiValue: true,
position: {
keyStart: 13,
keyEnd: 26,
operatorStart: 32,
operatorEnd: 33,
valueStart: 35,
valueEnd: 46,
negationStart: 28,
negationEnd: 30,
},
isComplete: true,
},
]);
});
test('should extract IN with numeric list inside parentheses', () => {
const input = 'id IN (1, 2, 3)';
const result = extractQueryPairs(input);

View File

@@ -38,33 +38,3 @@ export function isIngestionActive(data: any): boolean {
return parseInt(value, 10) > 0;
}
/**
* Builds an absolute path by combining the current page's pathname with a relative path.
*
* @param {Object} params - The parameters for building the absolute path
* @param {string} params.relativePath - The relative path to append to the current pathname
* @param {string} [params.urlQueryString] - Optional query string to append to the final path (without leading '?')
*
* @returns {string} The constructed absolute path, optionally with query string
*/
export function buildAbsolutePath({
relativePath,
urlQueryString,
}: {
relativePath: string;
urlQueryString?: string;
}): string {
const { pathname } = window.location;
// ensure base path always ends with a forward slash
const basePath = pathname.endsWith('/') ? pathname : `${pathname}/`;
// handle relative path starting with a forward slash
const normalizedRelativePath = relativePath.startsWith('/')
? relativePath.slice(1)
: relativePath;
const absolutePath = basePath + normalizedRelativePath;
return urlQueryString ? `${absolutePath}?${urlQueryString}` : absolutePath;
}

View File

@@ -1339,7 +1339,8 @@ export function extractQueryPairs(query: string): IQueryPair[] {
else if (
currentPair &&
currentPair.key &&
(isConjunctionToken(token.type) || token.type === FilterQueryLexer.KEY)
(isConjunctionToken(token.type) ||
(token.type === FilterQueryLexer.KEY && isQueryPairComplete(currentPair)))
) {
queryPairs.push({
key: currentPair.key,

View File

@@ -1,30 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/gorilla/mux"
)
func (provider *provider) addGlobalRoutes(router *mux.Router) error {
if err := router.Handle("/api/v1/global/config", handler.New(provider.authZ.EditAccess(provider.globalHandler.GetConfig), handler.OpenAPIDef{
ID: "GetGlobalConfig",
Tags: []string{"global"},
Summary: "Get global config",
Description: "This endpoints returns global config",
Request: nil,
RequestContentType: "",
Response: new(types.GettableGlobalConfig),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleEditor),
})).Methods(http.MethodGet).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -6,7 +6,6 @@ import (
"github.com/SigNoz/signoz/pkg/apiserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
@@ -29,7 +28,6 @@ type provider struct {
sessionHandler session.Handler
authDomainHandler authdomain.Handler
preferenceHandler preference.Handler
globalHandler global.Handler
}
func NewFactory(
@@ -40,10 +38,9 @@ func NewFactory(
sessionHandler session.Handler,
authDomainHandler authdomain.Handler,
preferenceHandler preference.Handler,
globalHandler global.Handler,
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler)
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler)
})
}
@@ -58,7 +55,6 @@ func newProvider(
sessionHandler session.Handler,
authDomainHandler authdomain.Handler,
preferenceHandler preference.Handler,
globalHandler global.Handler,
) (apiserver.APIServer, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
router := mux.NewRouter().UseEncodedPath()
@@ -72,7 +68,6 @@ func newProvider(
sessionHandler: sessionHandler,
authDomainHandler: authDomainHandler,
preferenceHandler: preferenceHandler,
globalHandler: globalHandler,
}
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
@@ -101,15 +96,11 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addPreferenceRoutes(router); err != nil {
return err
}
if err := provider.addUserRoutes(router); err != nil {
return err
}
if err := provider.addGlobalRoutes(router); err != nil {
if err := provider.addPreferenceRoutes(router); err != nil {
return err
}

View File

@@ -1,41 +0,0 @@
package global
import (
"net/url"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
)
var (
ErrCodeInvalidGlobalConfig = errors.MustNewCode("invalid_global_config")
)
type Config struct {
ExternalURL *url.URL `mapstructure:"external_url"`
IngestionURL *url.URL `mapstructure:"ingestion_url"`
}
func NewConfigFactory() factory.ConfigFactory {
return factory.NewConfigFactory(factory.MustNewName("global"), newConfig)
}
func newConfig() factory.Config {
return &Config{
ExternalURL: &url.URL{
Scheme: "",
Host: "<unset>",
Path: "",
},
IngestionURL: &url.URL{
Scheme: "",
Host: "<unset>",
Path: "",
},
}
}
func (c Config) Validate() error {
return nil
}

View File

@@ -1,11 +0,0 @@
package global
import "net/http"
type Global interface {
GetConfig() Config
}
type Handler interface {
GetConfig(http.ResponseWriter, *http.Request)
}

View File

@@ -1,23 +0,0 @@
package signozglobal
import (
"net/http"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types"
)
type handler struct {
global global.Global
}
func NewHandler(global global.Global) global.Handler {
return &handler{global: global}
}
func (h *handler) GetConfig(rw http.ResponseWriter, r *http.Request) {
cfg := h.global.GetConfig()
render.Success(rw, http.StatusOK, types.NewGettableGlobalConfig(cfg.ExternalURL, cfg.IngestionURL))
}

View File

@@ -1,31 +0,0 @@
package signozglobal
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/global"
)
type provider struct {
config global.Config
settings factory.ScopedProviderSettings
}
func NewFactory() factory.ProviderFactory[global.Global, global.Config] {
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config global.Config) (global.Global, error) {
return newProvider(ctx, providerSettings, config)
})
}
func newProvider(_ context.Context, providerSettings factory.ProviderSettings, config global.Config) (global.Global, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/global/signozglobal")
return &provider{
config: config,
settings: settings,
}, nil
}
func (provider *provider) GetConfig() global.Config {
return provider.config
}

View File

@@ -1,7 +1,6 @@
package middleware
import (
"context"
"log/slog"
"net/http"
"time"
@@ -12,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"golang.org/x/sync/singleflight"
)
const (
@@ -25,18 +23,10 @@ type APIKey struct {
headers []string
logger *slog.Logger
sharder sharder.Sharder
sfGroup *singleflight.Group
}
func NewAPIKey(store sqlstore.SQLStore, headers []string, logger *slog.Logger, sharder sharder.Sharder) *APIKey {
return &APIKey{
store: store,
uuid: authtypes.NewUUID(),
headers: headers,
logger: logger,
sharder: sharder,
sfGroup: &singleflight.Group{},
}
return &APIKey{store: store, uuid: authtypes.NewUUID(), headers: headers, logger: logger, sharder: sharder}
}
func (a *APIKey) Wrap(next http.Handler) http.Handler {
@@ -119,24 +109,11 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler {
next.ServeHTTP(w, r)
lastUsedCtx := context.WithoutCancel(r.Context())
_, _, _ = a.sfGroup.Do(apiKey.ID.StringValue(), func() (any, error) {
apiKey.LastUsed = time.Now()
_, err = a.
store.
BunDB().
NewUpdate().
Model(&apiKey).
Column("last_used").
Where("token = ?", apiKeyToken).
Where("revoked = false").
Exec(lastUsedCtx)
if err != nil {
a.logger.ErrorContext(lastUsedCtx, "failed to update last used of api key", "error", err)
}
return true, nil
})
apiKey.LastUsed = time.Now()
_, err = a.store.BunDB().NewUpdate().Model(&apiKey).Column("last_used").Where("token = ?", apiKeyToken).Where("revoked = false").Exec(r.Context())
if err != nil {
a.logger.ErrorContext(r.Context(), "failed to update last used of api key", "error", err)
}
})
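The hunk above drops the golang.org/x/sync/singleflight wrapper: the older middleware collapsed concurrent last_used updates for the same API key into a single execution (keyed by the key's ID, on a context detached from the request), while the retained code issues the UPDATE directly on each request's context. A standalone sketch of the deduplication behaviour the dropped code relied on (library pattern only, not code from this repository):

package main

import (
	"fmt"
	"sync"

	"golang.org/x/sync/singleflight"
)

func main() {
	var group singleflight.Group
	executions := 0
	var wg sync.WaitGroup

	for i := 0; i < 5; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// Every goroutine uses the same key, so while one call is in
			// flight the others wait and share its result instead of running
			// the function again (here it stands in for the last_used UPDATE).
			_, _, _ = group.Do("api-key-id", func() (any, error) {
				executions++
				return true, nil
			})
		}()
	}
	wg.Wait()
	fmt.Println("executions:", executions) // usually 1 when the calls overlap
}

The trade-off visible in the diff: the simpler retained version performs one write per request, whereas the singleflight version kept at most one in-flight write per key.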

View File

@@ -3,7 +3,7 @@ package impldashboard
import (
"context"
"maps"
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
@@ -11,34 +11,30 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/role"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type module struct {
store dashboardtypes.Store
settings factory.ScopedProviderSettings
analytics analytics.Analytics
orgGetter organization.Getter
role role.Module
queryParser queryparser.QueryParser
store dashboardtypes.Store
settings factory.ScopedProviderSettings
analytics analytics.Analytics
orgGetter organization.Getter
role role.Module
}
func NewModule(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser) dashboard.Module {
func NewModule(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module) dashboard.Module {
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/modules/impldashboard")
return &module{
store: NewStore(sqlstore),
settings: scopedProviderSettings,
analytics: analytics,
orgGetter: orgGetter,
role: role,
queryParser: queryParser,
store: NewStore(sqlstore),
settings: scopedProviderSettings,
analytics: analytics,
orgGetter: orgGetter,
role: role,
}
}
@@ -273,10 +269,13 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
return nil, err
}
// Initialize result map for each metric
result := make(map[string][]map[string]string)
// Process the JSON data in Go
for _, dashboard := range dashboards {
dashData := dashboard.Data
var dashData = dashboard.Data
dashTitle, _ := dashData["title"].(string)
widgets, ok := dashData["widgets"].([]interface{})
if !ok {
@@ -297,22 +296,44 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
continue
}
// Track which metrics were found in this widget
foundMetrics := make(map[string]bool)
builder, ok := query["builder"].(map[string]interface{})
if !ok {
continue
}
// Check all three query types
module.checkBuilderQueriesForMetricNames(query, metricNames, foundMetrics)
module.checkClickHouseQueriesForMetricNames(ctx, query, metricNames, foundMetrics)
module.checkPromQLQueriesForMetricNames(ctx, query, metricNames, foundMetrics)
queryData, ok := builder["queryData"].([]interface{})
if !ok {
continue
}
// Add widget to results for all found metrics
for metricName := range foundMetrics {
result[metricName] = append(result[metricName], map[string]string{
"dashboard_id": dashboard.ID,
"widget_name": widgetTitle,
"widget_id": widgetID,
"dashboard_name": dashTitle,
})
for _, qd := range queryData {
data, ok := qd.(map[string]interface{})
if !ok {
continue
}
if dataSource, ok := data["dataSource"].(string); !ok || dataSource != "metrics" {
continue
}
aggregateAttr, ok := data["aggregateAttribute"].(map[string]interface{})
if !ok {
continue
}
if key, ok := aggregateAttr["key"].(string); ok {
// Check if this metric is in our list of interest
for _, metricName := range metricNames {
if strings.TrimSpace(key) == metricName {
result[metricName] = append(result[metricName], map[string]string{
"dashboard_id": dashboard.ID,
"widget_name": widgetTitle,
"widget_id": widgetID,
"dashboard_name": dashTitle,
})
}
}
}
}
}
}
@@ -340,120 +361,3 @@ func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
func (module *module) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{dashboardtypes.TypeableMetaResourceDashboard, dashboardtypes.TypeableMetaResourcesDashboards}
}
// checkBuilderQueriesForMetricNames checks builder.queryData[] for aggregations[].metricName
func (module *module) checkBuilderQueriesForMetricNames(query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
builder, ok := query["builder"].(map[string]interface{})
if !ok {
return
}
queryData, ok := builder["queryData"].([]interface{})
if !ok {
return
}
for _, qd := range queryData {
data, ok := qd.(map[string]interface{})
if !ok {
continue
}
// Check dataSource is metrics
if dataSource, ok := data["dataSource"].(string); !ok || dataSource != "metrics" {
continue
}
// Check aggregations[].metricName
aggregations, ok := data["aggregations"].([]interface{})
if !ok {
continue
}
for _, agg := range aggregations {
aggMap, ok := agg.(map[string]interface{})
if !ok {
continue
}
metricName, ok := aggMap["metricName"].(string)
if !ok || metricName == "" {
continue
}
if slices.Contains(metricNames, metricName) {
foundMetrics[metricName] = true
}
}
}
}
// checkClickHouseQueriesForMetricNames checks clickhouse_sql[] array for metric names in query strings
func (module *module) checkClickHouseQueriesForMetricNames(ctx context.Context, query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
clickhouseSQL, ok := query["clickhouse_sql"].([]interface{})
if !ok {
return
}
for _, chQuery := range clickhouseSQL {
chQueryMap, ok := chQuery.(map[string]interface{})
if !ok {
continue
}
queryStr, ok := chQueryMap["query"].(string)
if !ok || queryStr == "" {
continue
}
// Parse query to extract metric names
result, err := module.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypeClickHouseSQL, queryStr)
if err != nil {
// Log warning and continue - parsing errors shouldn't break the search
module.settings.Logger().WarnContext(ctx, "failed to parse ClickHouse query", "query", queryStr, "error", err)
continue
}
// Check if any of the search metric names are in the extracted metric names
for _, metricName := range metricNames {
if slices.Contains(result.MetricNames, metricName) {
foundMetrics[metricName] = true
}
}
}
}
// checkPromQLQueriesForMetricNames checks promql[] array for metric names in query strings
func (module *module) checkPromQLQueriesForMetricNames(ctx context.Context, query map[string]interface{}, metricNames []string, foundMetrics map[string]bool) {
promQL, ok := query["promql"].([]interface{})
if !ok {
return
}
for _, promQuery := range promQL {
promQueryMap, ok := promQuery.(map[string]interface{})
if !ok {
continue
}
queryStr, ok := promQueryMap["query"].(string)
if !ok || queryStr == "" {
continue
}
// Parse query to extract metric names
result, err := module.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, queryStr)
if err != nil {
// Log warning and continue - parsing errors shouldn't break the search
module.settings.Logger().WarnContext(ctx, "failed to parse PromQL query", "query", queryStr, "error", err)
continue
}
// Check if any of the search metric names are in the extracted metric names
for _, metricName := range metricNames {
if slices.Contains(result.MetricNames, metricName) {
foundMetrics[metricName] = true
}
}
}
}
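For reference, the retained inline loop earlier in this file walks the dashboard JSON via builder.queryData[].dataSource and aggregateAttribute.key, while the deleted helpers above additionally matched aggregations[].metricName and parsed clickhouse_sql[] and promql[] query strings through the query parser. A standalone sketch of that untyped walk (the sample JSON and metric name are made up, not taken from the repository):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

func main() {
	const sample = `{
		"builder": {
			"queryData": [
				{"dataSource": "metrics", "aggregateAttribute": {"key": "http_server_duration"}},
				{"dataSource": "logs",    "aggregateAttribute": {"key": "ignored"}}
			]
		}
	}`

	var query map[string]interface{}
	if err := json.Unmarshal([]byte(sample), &query); err != nil {
		panic(err)
	}

	var found []string
	if builder, ok := query["builder"].(map[string]interface{}); ok {
		if queryData, ok := builder["queryData"].([]interface{}); ok {
			for _, qd := range queryData {
				data, ok := qd.(map[string]interface{})
				if !ok {
					continue
				}
				// Only builder queries whose dataSource is "metrics" are considered.
				if ds, ok := data["dataSource"].(string); !ok || ds != "metrics" {
					continue
				}
				if attr, ok := data["aggregateAttribute"].(map[string]interface{}); ok {
					if key, ok := attr["key"].(string); ok {
						found = append(found, strings.TrimSpace(key))
					}
				}
			}
		}
	}
	fmt.Println(found) // [http_server_duration]
}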

View File

@@ -137,50 +137,6 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
render.Success(rw, http.StatusOK, metadata)
}
func (h *handler) GetMetricAlerts(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
if metricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricAlerts(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
if metricName == "" {
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
return
}
orgID := valuer.MustNewUUID(claims.OrgID)
out, err := h.module.GetMetricDashboards(req.Context(), orgID, metricName)
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, out)
}
func (h *handler) GetMetricHighlights(rw http.ResponseWriter, req *http.Request) {
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
@@ -209,6 +165,7 @@ func (h *handler) GetMetricAttributes(rw http.ResponseWriter, req *http.Request)
render.Error(rw, err)
return
}
var in metricsexplorertypes.MetricAttributesRequest
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -20,7 +19,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
sqlbuilder "github.com/huandu/go-sqlbuilder"
@@ -34,13 +32,11 @@ type module struct {
condBuilder qbtypes.ConditionBuilder
logger *slog.Logger
cache cache.Cache
ruleStore ruletypes.RuleStore
dashboardModule dashboard.Module
config metricsexplorer.Config
}
// NewModule constructs the metrics module with the provided dependencies.
func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, ruleStore ruletypes.RuleStore, dashboardModule dashboard.Module, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
fieldMapper := telemetrymetrics.NewFieldMapper()
condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
return &module{
@@ -50,8 +46,6 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
logger: providerSettings.Logger,
telemetryMetadataStore: telemetryMetadataStore,
cache: cache,
ruleStore: ruleStore,
dashboardModule: dashboardModule,
config: cfg,
}
}
@@ -200,55 +194,6 @@ func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, re
return nil
}
func (m *module) GetMetricAlerts(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricAlertsResponse, error) {
if metricName == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName is required")
}
ruleAlerts, err := m.ruleStore.GetStoredRulesByMetricName(ctx, orgID.String(), metricName)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get stored rules by metric name")
}
alerts := make([]metricsexplorertypes.MetricAlert, len(ruleAlerts))
for i, ruleAlert := range ruleAlerts {
alerts[i] = metricsexplorertypes.MetricAlert{
AlertName: ruleAlert.AlertName,
AlertID: ruleAlert.AlertID,
}
}
return &metricsexplorertypes.MetricAlertsResponse{
Alerts: alerts,
}, nil
}
func (m *module) GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error) {
if metricName == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName is required")
}
data, err := m.dashboardModule.GetByMetricNames(ctx, orgID, []string{metricName})
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get dashboards for metric")
}
dashboards := make([]metricsexplorertypes.MetricDashboard, 0)
if dashboardList, ok := data[metricName]; ok {
dashboards = make([]metricsexplorertypes.MetricDashboard, 0, len(dashboardList))
for _, item := range dashboardList {
dashboards = append(dashboards, metricsexplorertypes.MetricDashboard{
DashboardName: item["dashboard_name"],
DashboardID: item["dashboard_id"],
WidgetID: item["widget_id"],
WidgetName: item["widget_name"],
})
}
}
return &metricsexplorertypes.MetricDashboardsResponse{
Dashboards: dashboards,
}, nil
}
// GetMetricHighlights returns highlights for a metric including data points, last received, total time series, and active time series.
func (m *module) GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error) {
if metricName == "" {

View File

@@ -15,8 +15,6 @@ type Handler interface {
GetMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricAttributes(http.ResponseWriter, *http.Request)
UpdateMetricMetadata(http.ResponseWriter, *http.Request)
GetMetricAlerts(http.ResponseWriter, *http.Request)
GetMetricDashboards(http.ResponseWriter, *http.Request)
GetMetricHighlights(http.ResponseWriter, *http.Request)
}
@@ -26,8 +24,6 @@ type Module interface {
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
GetMetricAlerts(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricAlertsResponse, error)
GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error)
GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error)
GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error)
}

View File

@@ -630,8 +630,6 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.EditAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/metric/highlights", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricHighlights)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metric/alerts", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricAlerts)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metric/dashboards", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricDashboards)).Methods(http.MethodGet)
}
func Intersection(a, b []int) (c []int) {

View File

@@ -3,13 +3,13 @@ package app
import (
"context"
"fmt"
"log/slog"
"net"
"net/http"
_ "net/http/pprof" // http profiler
"slices"
"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
@@ -17,7 +17,6 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -31,7 +30,6 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
@@ -39,8 +37,10 @@ import (
"github.com/rs/cors"
"github.com/soheilhy/cmux"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
@@ -107,8 +107,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
signoz.Prometheus,
signoz.Modules.OrgGetter,
signoz.Querier,
signoz.Instrumentation.ToProviderSettings(),
signoz.QueryParser,
signoz.Instrumentation.Logger(),
)
if err != nil {
return nil, err
@@ -340,10 +339,9 @@ func makeRulesManager(
prometheus prometheus.Prometheus,
orgGetter organization.Getter,
querier querier.Querier,
providerSettings factory.ProviderSettings,
queryParser queryparser.QueryParser,
logger *slog.Logger,
) (*rules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
// create manager opts
managerOpts := &rules.ManagerOptions{
@@ -353,7 +351,7 @@ func makeRulesManager(
Logger: zap.L(),
Reader: ch,
Querier: querier,
SLogger: providerSettings.Logger,
SLogger: logger,
Cache: cache,
EvalDelay: constants.GetEvalDelay(),
OrgGetter: orgGetter,

View File

@@ -5,8 +5,6 @@ import (
"regexp"
"github.com/DATA-DOG/go-sqlmock"
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest"
@@ -23,10 +21,7 @@ type MockSQLRuleStore struct {
// NewMockSQLRuleStore creates a new MockSQLRuleStore with sqlmock
func NewMockSQLRuleStore() *MockSQLRuleStore {
sqlStore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherRegexp)
// For tests, we can pass nil for queryParser and use test provider settings
providerSettings := factorytest.NewSettings()
ruleStore := sqlrulestore.NewRuleStore(sqlStore, queryparser.New(providerSettings), providerSettings)
ruleStore := sqlrulestore.NewRuleStore(sqlStore)
return &MockSQLRuleStore{
ruleStore: ruleStore,
@@ -64,11 +59,6 @@ func (m *MockSQLRuleStore) GetStoredRules(ctx context.Context, orgID string) ([]
return m.ruleStore.GetStoredRules(ctx, orgID)
}
// GetStoredRulesByMetricName implements ruletypes.RuleStore - delegates to underlying ruleStore
func (m *MockSQLRuleStore) GetStoredRulesByMetricName(ctx context.Context, orgID string, metricName string) ([]ruletypes.RuleAlert, error) {
return m.ruleStore.GetStoredRulesByMetricName(ctx, orgID, metricName)
}
// ExpectCreateRule sets up SQL expectations for CreateRule operation
func (m *MockSQLRuleStore) ExpectCreateRule(rule *ruletypes.Rule) {
rows := sqlmock.NewRows([]string{"id", "created_at", "updated_at", "created_by", "updated_by", "deleted", "data", "org_id"}).
@@ -114,17 +104,6 @@ func (m *MockSQLRuleStore) ExpectGetStoredRules(orgID string, rules []*ruletypes
WillReturnRows(rows)
}
// ExpectGetStoredRulesByMetricName sets up SQL expectations for GetStoredRulesByMetricName operation
func (m *MockSQLRuleStore) ExpectGetStoredRulesByMetricName(orgID string, metricName string, rules []*ruletypes.Rule) {
rows := sqlmock.NewRows([]string{"id", "created_at", "updated_at", "created_by", "updated_by", "deleted", "data", "org_id"})
for _, rule := range rules {
rows.AddRow(rule.ID, rule.CreatedAt, rule.UpdatedAt, rule.CreatedBy, rule.UpdatedBy, rule.Deleted, rule.Data, rule.OrgID)
}
expectedPattern := `SELECT (.+) FROM "rule".+WHERE \(.+org_id.+'` + orgID + `'\)`
m.mock.ExpectQuery(expectedPattern).
WillReturnRows(rows)
}
// AssertExpectations asserts that all SQL expectations were met
func (m *MockSQLRuleStore) AssertExpectations() error {
return m.mock.ExpectationsWereMet()

View File

@@ -2,31 +2,18 @@ package sqlrulestore
import (
"context"
"encoding/json"
"log/slog"
"slices"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sqlstore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type rule struct {
sqlstore sqlstore.SQLStore
queryParser queryparser.QueryParser
logger *slog.Logger
sqlstore sqlstore.SQLStore
}
func NewRuleStore(store sqlstore.SQLStore, queryParser queryparser.QueryParser, providerSettings factory.ProviderSettings) ruletypes.RuleStore {
return &rule{
sqlstore: store,
queryParser: queryParser,
logger: providerSettings.Logger,
}
func NewRuleStore(store sqlstore.SQLStore) ruletypes.RuleStore {
return &rule{sqlstore: store}
}
func (r *rule) CreateRule(ctx context.Context, storedRule *ruletypes.Rule, cb func(context.Context, valuer.UUID) error) (valuer.UUID, error) {
@@ -114,92 +101,3 @@ func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Ru
}
return rule, nil
}
func (r *rule) GetStoredRulesByMetricName(ctx context.Context, orgID string, metricName string) ([]ruletypes.RuleAlert, error) {
if metricName == "" {
return []ruletypes.RuleAlert{}, nil
}
// Get all stored rules for the organization
storedRules, err := r.GetStoredRules(ctx, orgID)
if err != nil {
return nil, err
}
alerts := make([]ruletypes.RuleAlert, 0)
seen := make(map[string]bool)
for _, storedRule := range storedRules {
var ruleData ruletypes.PostableRule
if err := json.Unmarshal([]byte(storedRule.Data), &ruleData); err != nil {
r.logger.WarnContext(ctx, "failed to unmarshal rule data", "rule_id", storedRule.ID.StringValue(), "error", err)
continue
}
// Check conditions: must be metric-based alert with valid composite query
if ruleData.AlertType != ruletypes.AlertTypeMetric ||
ruleData.RuleCondition == nil ||
ruleData.RuleCondition.CompositeQuery == nil {
continue
}
// Search for metricName in the Queries array (v5 format only)
// TODO check if we need to support v3 query format structs
found := false
for _, queryEnvelope := range ruleData.RuleCondition.CompositeQuery.Queries {
// Check based on query type
switch queryEnvelope.Type {
case qbtypes.QueryTypeBuilder:
// Cast to QueryBuilderQuery[MetricAggregation] for metrics
if spec, ok := queryEnvelope.Spec.(qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]); ok {
// Check if signal is metrics
if spec.Signal == telemetrytypes.SignalMetrics {
for _, agg := range spec.Aggregations {
if agg.MetricName == metricName {
found = true
break
}
}
}
}
case qbtypes.QueryTypePromQL:
if spec, ok := queryEnvelope.Spec.(qbtypes.PromQuery); ok {
result, err := r.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, spec.Query)
if err != nil {
r.logger.WarnContext(ctx, "failed to parse PromQL query", "query", spec.Query, "error", err)
continue
}
if slices.Contains(result.MetricNames, metricName) {
found = true
break
}
}
case qbtypes.QueryTypeClickHouseSQL:
if spec, ok := queryEnvelope.Spec.(qbtypes.ClickHouseQuery); ok {
result, err := r.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypeClickHouseSQL, spec.Query)
if err != nil {
r.logger.WarnContext(ctx, "failed to parse ClickHouse query", "query", spec.Query, "error", err)
continue
}
if slices.Contains(result.MetricNames, metricName) {
found = true
break
}
}
}
if found {
break
}
}
if found && !seen[storedRule.ID.StringValue()] {
seen[storedRule.ID.StringValue()] = true
alerts = append(alerts, ruletypes.RuleAlert{
AlertName: ruleData.AlertName,
AlertID: storedRule.ID.StringValue(),
})
}
}
return alerts, nil
}

View File

@@ -4,7 +4,6 @@ import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -23,8 +22,7 @@ func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[ruler.Ruler,
}
func New(ctx context.Context, settings factory.ProviderSettings, config ruler.Config, sqlstore sqlstore.SQLStore) (ruler.Ruler, error) {
queryParser := queryparser.New(settings)
return &provider{ruleStore: sqlrulestore.NewRuleStore(sqlstore, queryParser, settings)}, nil
return &provider{ruleStore: sqlrulestore.NewRuleStore(sqlstore)}, nil
}
func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {

View File

@@ -18,7 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/prometheus"
@@ -40,9 +39,6 @@ import (
// Config defines the entire input configuration of signoz.
type Config struct {
// Global config
Global global.Config `mapstructure:"global"`
// Version config
Version version.Config `mapstructure:"version"`
@@ -145,7 +141,6 @@ func (df *DeprecatedFlags) RegisterFlags(cmd *cobra.Command) {
func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.ResolverConfig, deprecatedFlags DeprecatedFlags) (Config, error) {
configFactories := []factory.ConfigFactory{
global.NewConfigFactory(),
version.NewConfigFactory(),
instrumentation.NewConfigFactory(),
analytics.NewConfigFactory(),

View File

@@ -2,8 +2,6 @@ package signoz
import (
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/global/signozglobal"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/apdex"
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
@@ -36,10 +34,9 @@ type Handlers struct {
SpanPercentile spanpercentile.Handler
Services services.Handler
MetricsExplorer metricsexplorer.Handler
Global global.Handler
}
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global) Handlers {
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing) Handlers {
return Handlers{
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
@@ -50,6 +47,5 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
Services: implservices.NewHandler(modules.Services),
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
Global: signozglobal.NewHandler(global),
}
}

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/emailing/emailingtest"
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -36,11 +35,10 @@ func TestNewHandlers(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})
handlers := NewHandlers(modules, providerSettings, nil, nil, nil)
handlers := NewHandlers(modules, providerSettings, nil, nil)
reflectVal := reflect.ValueOf(handlers)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -38,8 +38,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/tokenizer"
@@ -81,15 +79,12 @@ func NewModules(
authNs map[authtypes.AuthNProvider]authn.AuthN,
authz authz.AuthZ,
cache cache.Cache,
queryParser queryparser.QueryParser,
config Config,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, analytics)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
dashboard := impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil), queryParser)
return Modules{
OrgGetter: orgGetter,
@@ -97,7 +92,7 @@ func NewModules(
Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()),
SavedView: implsavedview.NewModule(sqlstore),
Apdex: implapdex.NewModule(sqlstore),
Dashboard: dashboard,
Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil)),
User: user,
UserGetter: userGetter,
QuickFilter: quickfilter,
@@ -107,6 +102,6 @@ func NewModules(
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, providerSettings, config.MetricsExplorer),
}
}

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/emailing/emailingtest"
"github.com/SigNoz/signoz/pkg/factory/factorytest"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/sharder"
"github.com/SigNoz/signoz/pkg/sharder/noopsharder"
"github.com/SigNoz/signoz/pkg/sqlstore"
@@ -36,9 +35,8 @@ func TestNewModules(t *testing.T) {
require.NoError(t, err)
tokenizer := tokenizertest.New()
emailing := emailingtest.New()
queryParser := queryparser.New(providerSettings)
require.NoError(t, err)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, Config{})
reflectVal := reflect.ValueOf(modules)
for i := 0; i < reflectVal.NumField(); i++ {

View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/apiserver"
"github.com/SigNoz/signoz/pkg/apiserver/signozapiserver"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
@@ -37,7 +36,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ session.Handler }{},
struct{ authdomain.Handler }{},
struct{ preference.Handler }{},
struct{ global.Handler }{},
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
if err != nil {
return nil, err

View File

@@ -18,8 +18,6 @@ import (
"github.com/SigNoz/signoz/pkg/emailing/noopemailing"
"github.com/SigNoz/signoz/pkg/emailing/smtpemailing"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/global/signozglobal"
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
@@ -223,7 +221,7 @@ func NewQuerierProviderFactories(telemetryStore telemetrystore.TelemetryStore, p
)
}
func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.AuthZ, global global.Global, modules Modules, handlers Handlers) factory.NamedMap[factory.ProviderFactory[apiserver.APIServer, apiserver.Config]] {
func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.AuthZ, modules Modules, handlers Handlers) factory.NamedMap[factory.ProviderFactory[apiserver.APIServer, apiserver.Config]] {
return factory.MustNewNamedMap(
signozapiserver.NewFactory(
orgGetter,
@@ -233,7 +231,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
implsession.NewHandler(modules.Session),
implauthdomain.NewHandler(modules.AuthDomain),
implpreference.NewHandler(modules.Preference),
signozglobal.NewHandler(global),
),
)
}
@@ -245,9 +242,3 @@ func NewTokenizerProviderFactories(cache cache.Cache, sqlstore sqlstore.SQLStore
jwttokenizer.NewFactory(cache, tokenStore),
)
}
func NewGlobalProviderFactories() factory.NamedMap[factory.ProviderFactory[global.Global, global.Config]] {
return factory.MustNewNamedMap(
signozglobal.NewFactory(),
)
}

View File

@@ -83,7 +83,6 @@ func TestNewProviderFactories(t *testing.T) {
NewAPIServerProviderFactories(
implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil),
nil,
nil,
Modules{},
Handlers{},
)

View File

@@ -345,29 +345,18 @@ func New(
telemetrymetadata.AttributesMetadataLocalTableName,
)
global, err := factory.NewProviderFromNamedMap(
ctx,
providerSettings,
config.Global,
NewGlobalProviderFactories(),
"signoz",
)
if err != nil {
return nil, err
}
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config)
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, config)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global)
handlers := NewHandlers(modules, providerSettings, querier, licensing)
// Initialize the API server
apiserver, err := factory.NewProviderFromNamedMap(
ctx,
providerSettings,
config.APIServer,
NewAPIServerProviderFactories(orgGetter, authz, global, modules, handlers),
NewAPIServerProviderFactories(orgGetter, authz, modules, handlers),
"signoz",
)
if err != nil {

View File

@@ -1,15 +0,0 @@
package types
import "net/url"
type GettableGlobalConfig struct {
ExternalURL string `json:"external_url"`
IngestionURL string `json:"ingestion_url"`
}
func NewGettableGlobalConfig(externalURL, ingestionURL *url.URL) *GettableGlobalConfig {
return &GettableGlobalConfig{
ExternalURL: externalURL.String(),
IngestionURL: ingestionURL.String(),
}
}
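This deleted type backed the response of the /api/v1/global/config route removed earlier in this diff. A standalone sketch (with hypothetical URLs) of the JSON it serialized to:

package main

import (
	"encoding/json"
	"fmt"
	"net/url"
)

type GettableGlobalConfig struct {
	ExternalURL  string `json:"external_url"`
	IngestionURL string `json:"ingestion_url"`
}

func main() {
	// Hypothetical values; the defaults in the deleted global.Config used "<unset>" hosts.
	external, _ := url.Parse("https://signoz.example.com")
	ingestion, _ := url.Parse("https://ingest.example.com")
	out, _ := json.Marshal(GettableGlobalConfig{
		ExternalURL:  external.String(),
		IngestionURL: ingestion.String(),
	})
	fmt.Println(string(out))
	// {"external_url":"https://signoz.example.com","ingestion_url":"https://ingest.example.com"}
}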

View File

@@ -221,30 +221,6 @@ type TreemapResponse struct {
Samples []TreemapEntry `json:"samples"`
}
// MetricAlert represents an alert associated with a metric.
type MetricAlert struct {
AlertName string `json:"alertName"`
AlertID string `json:"alertId"`
}
// MetricAlertsResponse represents the response for metric alerts endpoint.
type MetricAlertsResponse struct {
Alerts []MetricAlert `json:"alerts"`
}
// MetricDashboard represents a dashboard/widget referencing a metric.
type MetricDashboard struct {
DashboardName string `json:"dashboardName"`
DashboardID string `json:"dashboardId"`
WidgetID string `json:"widgetId"`
WidgetName string `json:"widgetName"`
}
// MetricDashboardsResponse represents the response for metric dashboards endpoint.
type MetricDashboardsResponse struct {
Dashboards []MetricDashboard `json:"dashboards"`
}
// MetricHighlightsResponse is the output structure for the metric highlights endpoint.
type MetricHighlightsResponse struct {
DataPoints uint64 `json:"dataPoints"`

View File

@@ -47,17 +47,10 @@ func NewStatsFromRules(rules []*Rule) map[string]any {
return stats
}
// RuleAlert represents an alert associated with a rule, used when filtering by metric name
type RuleAlert struct {
AlertName string
AlertID string
}
type RuleStore interface {
CreateRule(context.Context, *Rule, func(context.Context, valuer.UUID) error) (valuer.UUID, error)
EditRule(context.Context, *Rule, func(context.Context) error) error
DeleteRule(context.Context, valuer.UUID, func(context.Context) error) error
GetStoredRules(context.Context, string) ([]*Rule, error)
GetStoredRule(context.Context, valuer.UUID) (*Rule, error)
GetStoredRulesByMetricName(context.Context, string, string) ([]RuleAlert, error)
}

View File

@@ -33,12 +33,6 @@ def pytest_addoption(parser: pytest.Parser):
default=False,
help="Teardown environment. Run pytest --basetemp=./tmp/ -vv --teardown src/bootstrap/setup::test_teardown to teardown your local dev environment.",
)
parser.addoption(
"--with-web",
action="store_true",
default=False,
help="Build and run with web. Run pytest --basetemp=./tmp/ -vv --with-web src/bootstrap/setup::test_setup to setup your local dev environment with web.",
)
parser.addoption(
"--sqlstore-provider",
action="store",

View File

@@ -1,5 +1,6 @@
from typing import Tuple
from http import HTTPStatus
from typing import Callable, List, Tuple
from typing import Callable, List
import pytest
import requests
@@ -133,9 +134,9 @@ def get_tokens(signoz: types.SigNoz) -> Callable[[str, str], Tuple[str, str]]:
)
assert response.status_code == HTTPStatus.OK
access_token = response.json()["data"]["accessToken"]
refresh_token = response.json()["data"]["refreshToken"]
return access_token, refresh_token
accessToken = response.json()["data"]["accessToken"]
refreshToken = response.json()["data"]["refreshToken"]
return accessToken, refreshToken
return _get_tokens

View File

@@ -1,4 +1,4 @@
from typing import Any, Callable, Dict
from typing import Callable, Dict, Any
from urllib.parse import urljoin
from xml.etree import ElementTree
@@ -71,9 +71,7 @@ def create_saml_client(
"saml_signature_canonicalization_method": "http://www.w3.org/2001/10/xml-exc-c14n#",
"saml.onetimeuse.condition": "false",
"saml.server.signature.keyinfo.xmlSigKeyInfoKeyNameTransformer": "NONE",
"saml_assertion_consumer_url_post": urljoin(
f"{signoz.self.host_configs['8080'].base()}", callback_path
),
"saml_assertion_consumer_url_post": urljoin(f"{signoz.self.host_configs['8080'].base()}", callback_path)
},
"authenticationFlowBindingOverrides": {},
"fullScopeAllowed": True,
@@ -126,11 +124,9 @@ def create_saml_client(
@pytest.fixture(name="update_saml_client_attributes", scope="function")
def update_saml_client_attributes(
idp: types.TestContainerIDP,
idp: types.TestContainerIDP
) -> Callable[[str, Dict[str, Any]], None]:
def _update_saml_client_attributes(
client_id: str, attributes: Dict[str, Any]
) -> None:
def _update_saml_client_attributes(client_id: str, attributes: Dict[str, Any]) -> None:
client = KeycloakAdmin(
server_url=idp.container.host_configs["6060"].base(),
username=IDP_ROOT_USERNAME,

View File

@@ -429,11 +429,7 @@ def ttl_legacy_logs_v2_table_setup(request, signoz: types.SigNoz):
).result_rows
assert result is not None
# Add cleanup to restore original table
request.addfinalizer(
lambda: signoz.telemetrystore.conn.query(
"RENAME TABLE signoz_logs.logs_v2_backup TO signoz_logs.logs_v2;"
)
)
request.addfinalizer(lambda: signoz.telemetrystore.conn.query("RENAME TABLE signoz_logs.logs_v2_backup TO signoz_logs.logs_v2;"))
# Create new test tables
result = signoz.telemetrystore.conn.query(
@@ -449,15 +445,10 @@ def ttl_legacy_logs_v2_table_setup(request, signoz: types.SigNoz):
assert result is not None
# Add cleanup to drop test table
request.addfinalizer(
lambda: signoz.telemetrystore.conn.query(
"DROP TABLE IF EXISTS signoz_logs.logs_v2;"
)
)
request.addfinalizer(lambda: signoz.telemetrystore.conn.query("DROP TABLE IF EXISTS signoz_logs.logs_v2;"))
yield # Test runs here
@pytest.fixture(name="ttl_legacy_logs_v2_resource_table_setup", scope="function")
def ttl_legacy_logs_v2_resource_table_setup(request, signoz: types.SigNoz):
"""
@@ -472,11 +463,7 @@ def ttl_legacy_logs_v2_resource_table_setup(request, signoz: types.SigNoz):
).result_rows
assert result is not None
# Add cleanup to restore original table
request.addfinalizer(
lambda: signoz.telemetrystore.conn.query(
"RENAME TABLE signoz_logs.logs_v2_resource_backup TO signoz_logs.logs_v2_resource;"
)
)
request.addfinalizer(lambda: signoz.telemetrystore.conn.query("RENAME TABLE signoz_logs.logs_v2_resource_backup TO signoz_logs.logs_v2_resource;"))
# Create new test tables
result = signoz.telemetrystore.conn.query(
@@ -491,10 +478,6 @@ def ttl_legacy_logs_v2_resource_table_setup(request, signoz: types.SigNoz):
assert result is not None
# Add cleanup to drop test table
request.addfinalizer(
lambda: signoz.telemetrystore.conn.query(
"DROP TABLE IF EXISTS signoz_logs.logs_v2_resource;"
)
)
request.addfinalizer(lambda: signoz.telemetrystore.conn.query("DROP TABLE IF EXISTS signoz_logs.logs_v2_resource;"))
yield # Test runs here

View File

@@ -1,7 +1,7 @@
from os import path
import platform
import time
from http import HTTPStatus
from os import path
import docker
import docker.errors
@@ -34,21 +34,14 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
# Run the migrations for clickhouse
request.getfixturevalue("migrator")
# Get the no-web flag
with_web = pytestconfig.getoption("--with-web")
arch = platform.machine()
if arch == "x86_64":
arch = "amd64"
# Build the image
dockerfile_path = "cmd/enterprise/Dockerfile.integration"
if with_web:
dockerfile_path = "cmd/enterprise/Dockerfile.with-web.integration"
self = DockerImage(
path="../../",
dockerfile_path=dockerfile_path,
dockerfile_path="cmd/enterprise/Dockerfile.integration",
tag="signoz:integration",
buildargs={
"TARGETARCH": arch,
@@ -60,7 +53,7 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
env = (
{
"SIGNOZ_WEB_ENABLED": False,
"SIGNOZ_WEB_ENABLED": True,
"SIGNOZ_WEB_DIRECTORY": "/root/web",
"SIGNOZ_INSTRUMENTATION_LOGS_LEVEL": "debug",
"SIGNOZ_PROMETHEUS_ACTIVE__QUERY__TRACKER_ENABLED": False,
@@ -70,9 +63,6 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
| clickhouse.env
)
if with_web:
env["SIGNOZ_WEB_ENABLED"] = True
container = DockerContainer("signoz:integration")
for k, v in env.items():
container.with_env(k, v)
@@ -81,7 +71,7 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
provider = request.config.getoption("--sqlstore-provider")
if provider == "sqlite":
dir_path = path.dirname(sqlstore.env["SIGNOZ_SQLSTORE_SQLITE_PATH"])
dir_path = path.dirname(sqlstore.env["SIGNOZ_SQLSTORE_SQLITE_PATH"])
container.with_volume_mapping(
dir_path,
dir_path,

View File

@@ -27,6 +27,7 @@ def sqlite(
with engine.connect() as conn:
result = conn.execute(sql.text("SELECT 1"))
assert result.fetchone()[0] == 1
return types.TestContainerSQL(
container=types.TestContainerDocker(
@@ -52,6 +53,7 @@ def sqlite(
result = conn.execute(sql.text("SELECT 1"))
assert result.fetchone()[0] == 1
return types.TestContainerSQL(
container=types.TestContainerDocker(
id="",

View File

@@ -1,5 +1,5 @@
from http import HTTPStatus
from typing import Any, Callable, Dict, List
from typing import Callable, List, Dict, Any
import requests
from selenium import webdriver
@@ -93,11 +93,10 @@ def test_create_auth_domain(
f"{signoz.self.host_configs['8080'].address}:{signoz.self.host_configs['8080'].port}",
{
"saml_idp_initiated_sso_url_name": "idp-initiated-saml-test",
"saml_idp_initiated_sso_relay_state": relay_state_url,
},
"saml_idp_initiated_sso_relay_state": relay_state_url
}
)
def test_saml_authn(
signoz: SigNoz,
idp: TestContainerIDP, # pylint: disable=unused-argument
@@ -164,10 +163,7 @@ def test_idp_initiated_saml_authn(
assert len(session_context["orgs"]) == 1
assert len(session_context["orgs"][0]["authNSupport"]["callback"]) == 1
idp_initiated_login_url = (
idp.container.host_configs["6060"].base()
+ "/realms/master/protocol/saml/clients/idp-initiated-saml-test"
)
idp_initiated_login_url = idp.container.host_configs["6060"].base() + "/realms/master/protocol/saml/clients/idp-initiated-saml-test"
driver.get(idp_initiated_login_url)
idp_login("viewer.idp.initiated@saml.integration.test", "password")

View File

@@ -146,16 +146,16 @@ def test_generate_connection_params(
data = response_data["data"]
# ingestion_key is created by the mocked gateway and should match
assert (
data["ingestion_key"] == "test-ingestion-key-123456"
), "ingestion_key should match the mocked ingestion key"
assert data["ingestion_key"] == "test-ingestion-key-123456", (
"ingestion_key should match the mocked ingestion key"
)
# ingestion_url should be https://ingest.test.signoz.cloud based on the mocked deployment DNS
assert (
data["ingestion_url"] == "https://ingest.test.signoz.cloud"
), "ingestion_url should be https://ingest.test.signoz.cloud"
assert data["ingestion_url"] == "https://ingest.test.signoz.cloud", (
"ingestion_url should be https://ingest.test.signoz.cloud"
)
# signoz_api_url should be https://test-deployment.test.signoz.cloud based on the mocked deployment name and DNS
assert (
data["signoz_api_url"] == "https://test-deployment.test.signoz.cloud"
), "signoz_api_url should be https://test-deployment.test.signoz.cloud"
assert data["signoz_api_url"] == "https://test-deployment.test.signoz.cloud", (
"signoz_api_url should be https://test-deployment.test.signoz.cloud"
)

View File

@@ -1,12 +1,11 @@
from http import HTTPStatus
from typing import Callable, List
from wiremock.resources.mappings import Mapping
import requests
from sqlalchemy import sql
from wiremock.resources.mappings import Mapping
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.types import Operation, SigNoz, TestContainerDocker
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD,add_license
from fixtures.types import Operation, SigNoz,TestContainerDocker
def test_apply_license(
@@ -20,7 +19,6 @@ def test_apply_license(
"""
add_license(signoz, make_http_mocks, get_token)
def test_create_and_get_public_dashboard(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
@@ -30,7 +28,11 @@ def test_create_and_get_public_dashboard(
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/dashboards"),
json={"title": "Sample Title", "uploadedGrafana": False, "version": "v5"},
json={
"title": "Sample Title",
"uploadedGrafana": False,
"version": "v5"
},
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
@@ -38,12 +40,10 @@ def test_create_and_get_public_dashboard(
assert response.status_code == HTTPStatus.CREATED
assert response.json()["status"] == "success"
data = response.json()["data"]
dashboard_id = data["id"]
id = data["id"]
response = requests.post(
signoz.self.host_configs["8080"].get(
f"/api/v1/dashboards/{dashboard_id}/public"
),
signoz.self.host_configs["8080"].get(f"/api/v1/dashboards/{id}/public"),
json={
"timeRangeEnabled": True,
"defaultTimeRange": "10s",
@@ -56,27 +56,25 @@ def test_create_and_get_public_dashboard(
assert "id" in response.json()["data"]
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/dashboards/{dashboard_id}/public"
),
signoz.self.host_configs["8080"].get(f"/api/v1/dashboards/{id}/public"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
assert response.json()["data"]["timeRangeEnabled"] is True
assert response.json()["data"]["timeRangeEnabled"] == True
assert response.json()["data"]["defaultTimeRange"] == "10s"
public_path = response.json()["data"]["publicPath"]
assert public_path.startswith("/public/dashboard/")
public_dashboard_id = public_path.split("/public/dashboard/")[-1]
row = None
row = None
with signoz.sqlstore.conn.connect() as conn:
# verify the role creation
result = conn.execute(
sql.text("SELECT * FROM role WHERE name = :role"),
{"role": "signoz-anonymous"},
{"role": "signoz-anonymous"}
)
row = result.mappings().fetchone()
assert row is not None
@@ -86,18 +84,18 @@ def test_create_and_get_public_dashboard(
tuple_object_id = f"organization/{row["org_id"]}/role/{row["id"]}"
tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
{"object_id": tuple_object_id}
)
tuple_row = tuple_result.fetchone()
assert tuple_row is not None
# verify the tuple creation for public-dashboard
tuple_object_id = (
f"organization/{row["org_id"]}/public-dashboard/{public_dashboard_id}"
)
tuple_object_id = f"organization/{row["org_id"]}/public-dashboard/{public_dashboard_id}"
tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
{"object_id": tuple_object_id}
)
tuple_row = tuple_result.fetchone()
assert tuple_row is not None

View File

@@ -1,7 +1,9 @@
import http
import json
from typing import Callable, List
import requests
from sqlalchemy import sql
from wiremock.client import (
HttpMethods,
Mapping,
@@ -136,7 +138,7 @@ def test_refresh_license(
)
assert response.status_code == http.HTTPStatus.OK
assert response.json()["data"]["valid_from"] == 1732146922
response = requests.post(
url=signoz.zeus.host_configs["8080"].get("/__admin/requests/count"),
json={"method": "GET", "url": "/v2/licenses/me"},

View File

@@ -185,6 +185,7 @@ def test_reset_password(
assert token is not None
def test_reset_password_with_no_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:

View File

@@ -1,16 +1,11 @@
from http import HTTPStatus
from typing import Callable, Tuple
from typing import Tuple
import requests
from http import HTTPStatus
import requests
from typing import Callable
from fixtures import types
def test_change_role(
signoz: types.SigNoz,
get_token: Callable[[str, str], str],
get_tokens: Callable[[str, str], Tuple[str, str]],
):
def test_change_role(signoz: types.SigNoz, get_token: Callable[[str, str], str], get_tokens: Callable[[str, str], Tuple[str, str]]):
admin_token = get_token("admin@integration.test", "password123Z$")
# Create a new user as VIEWER
@@ -39,9 +34,7 @@ def test_change_role(
assert response.status_code == HTTPStatus.CREATED
# Make some API calls as new user
new_user_token, new_user_refresh_token = get_tokens(
"admin+rolechange@integration.test", "password123Z$"
)
new_user_token, new_user_refresh_token = get_tokens("admin+rolechange@integration.test", "password123Z$")
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
@@ -61,7 +54,7 @@ def test_change_role(
)
assert response.status_code == HTTPStatus.FORBIDDEN
# Change the new user's role - move to ADMIN
response = requests.put(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{new_user_id}"),
@@ -98,10 +91,7 @@ def test_change_role(
# Make a protected API call again with the rotated tokens
rotate_response = response.json()["data"]
new_user_token, new_user_refresh_token = (
rotate_response["accessToken"],
rotate_response["refreshToken"],
)
new_user_token, new_user_refresh_token = rotate_response["accessToken"], rotate_response["refreshToken"]
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/org/preferences"),
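In prose, the tail of this test rotates the promoted user's session and replays a protected endpoint with the new access token. A minimal sketch of that pattern, assuming the rotation response shape asserted above and a placeholder base URL:

# Sketch: unpack rotated tokens and replay a protected endpoint.
# BASE_URL is an assumed placeholder; rotate_response mirrors the
# {"accessToken": ..., "refreshToken": ...} shape used above.
import requests

BASE_URL = "http://localhost:8080"

def replay_after_rotation(rotate_response: dict) -> int:
    access_token = rotate_response["accessToken"]
    _refresh_token = rotate_response["refreshToken"]  # kept for parity with the test
    resp = requests.get(
        f"{BASE_URL}/api/v1/org/preferences",
        headers={"Authorization": f"Bearer {access_token}"},
        timeout=2,
    )
    return resp.status_code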

View File

@@ -1,71 +0,0 @@
from http import HTTPStatus
from typing import Callable
import requests
from fixtures import types
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def test_get_user_preference(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token("admin@integration.test", "password123Z$")
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/preferences"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
assert response.json()["data"] is not None
def test_get_set_user_preference_by_name(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token("admin@integration.test", "password123Z$")
# preference does not exist
response = requests.get(
signoz.self.host_configs["8080"].get(
"/api/v1/user/preferences/somenonexistentpreference"
),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
# The API currently returns BAD_REQUEST here; NOT_FOUND would be the more accurate status
assert response.status_code == HTTPStatus.BAD_REQUEST
# exercise the welcome_checklist_do_later preference
response = requests.put(
signoz.self.host_configs["8080"].get(
"/api/v1/user/preferences/welcome_checklist_do_later"
),
headers={"Authorization": f"Bearer {admin_token}"},
json={"value": True},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
# get preference by name
response = requests.get(
signoz.self.host_configs["8080"].get(
"/api/v1/user/preferences/welcome_checklist_do_later"
),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
assert response.json()["data"] is not None
assert response.json()["data"]["value"] is True
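The file deleted above covered the user-preferences API end to end; for reference, the round trip it exercised looks roughly like this (base URL and token are assumed placeholders):

# Round trip exercised by the removed preference tests:
# read all preferences, set one by name, read it back.
import requests

BASE_URL = "http://localhost:8080"  # assumed placeholder
HEADERS = {"Authorization": "Bearer <admin-jwt>"}  # assumed placeholder

requests.get(f"{BASE_URL}/api/v1/user/preferences", headers=HEADERS, timeout=2)

requests.put(
    f"{BASE_URL}/api/v1/user/preferences/welcome_checklist_do_later",
    json={"value": True},
    headers=HEADERS,
    timeout=2,
)  # 204 No Content on success

checked = requests.get(
    f"{BASE_URL}/api/v1/user/preferences/welcome_checklist_do_later",
    headers=HEADERS,
    timeout=2,
).json()["data"]
assert checked["value"] is True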

View File

@@ -83,9 +83,7 @@ def test_set_ttl_traces_success(
assert all("toIntervalSecond(12960000)" in ttl_part for ttl_part in ttl_parts)
def test_set_ttl_traces_with_cold_storage(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
):
def test_set_ttl_traces_with_cold_storage(signoz: types.SigNoz, get_token: Callable[[str, str], str]):
"""Test setting TTL for traces with cold storage configuration."""
payload = {
"type": "traces",
@@ -294,7 +292,10 @@ def test_set_custom_retention_ttl_basic(
retention_col[3] == "100"
), f"Expected default value of _retention_days to be 100 in table {table}, but got {retention_col[3]}"
tables_to_check = ["logs_attribute_keys", "logs_resource_keys"]
tables_to_check = [
"logs_attribute_keys",
"logs_resource_keys"
]
# Query to get table engine info which includes TTL
table_list = "', '".join(tables_to_check)
@@ -315,8 +316,8 @@ def test_set_custom_retention_ttl_basic(
def test_set_custom_retention_ttl_basic_fallback(
signoz: types.SigNoz,
get_token,
ttl_legacy_logs_v2_table_setup, # pylint: disable=unused-argument
ttl_legacy_logs_v2_resource_table_setup, # pylint: disable=unused-argument
ttl_legacy_logs_v2_table_setup, # pylint: disable=unused-argument
ttl_legacy_logs_v2_resource_table_setup, # pylint: disable=unused-argument
):
"""Test setting TTL for logs using the new setTTLLogs method."""
@@ -353,7 +354,7 @@ def test_set_custom_retention_ttl_basic_fallback(
"logs_v2",
"logs_v2_resource",
"logs_attribute_keys",
"logs_resource_keys",
"logs_resource_keys"
]
# Query to get table engine info which includes TTL
@@ -677,8 +678,8 @@ def test_get_custom_retention_ttl(
def test_set_ttl_logs_success(
signoz: types.SigNoz,
get_token,
ttl_legacy_logs_v2_table_setup, # pylint: disable=unused-argument
ttl_legacy_logs_v2_resource_table_setup, # pylint: disable=unused-argument
ttl_legacy_logs_v2_table_setup,# pylint: disable=unused-argument
ttl_legacy_logs_v2_resource_table_setup,# pylint: disable=unused-argument
):
"""Test setting TTL for logs using the new setTTLLogs method."""