mirror of https://github.com/SigNoz/signoz.git
synced 2025-12-27 18:54:27 +00:00

Compare commits
21 Commits
chore/remo...fix/qb-lim

| Author | SHA1 | Date |
|---|---|---|
|  | f4b74eef11 |  |
|  | dba038c6e0 |  |
|  | bca761498a |  |
|  | 0e6bd90fdf |  |
|  | f3256aeac4 |  |
|  | c9f1526e33 |  |
|  | dba536578b |  |
|  | 15ceb228fa |  |
|  | 6b3c6fc722 |  |
|  | edcae53b64 |  |
|  | 72fda90ec2 |  |
|  | 8acfc3c9f7 |  |
|  | 463ae443f9 |  |
|  | f72535a15f |  |
|  | e21e99ce64 |  |
|  | d1559a3262 |  |
|  | 1ccb9bb4c2 |  |
|  | 0c059df327 |  |
|  | 8a5539679c |  |
|  | 89b188f73d |  |
|  | bb4d6117ac |  |
2 .github/CODEOWNERS (vendored)

@@ -2,7 +2,7 @@
# Owners are automatically requested for review for PRs that changes code
# that they own.

/frontend/ @YounixM @aks07
/frontend/ @SigNoz/frontend-maintainers

# Onboarding
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
24 .github/workflows/integrationci.yaml (vendored)

@@ -9,6 +9,29 @@ on:
- labeled

jobs:
fmtlint:
if: |
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@v4
- name: python
uses: actions/setup-python@v5
with:
python-version: 3.13
- name: poetry
run: |
python -m pip install poetry==2.1.2
python -m poetry config virtualenvs.in-project true
cd tests/integration && poetry install --no-root
- name: fmt
run: |
make py-fmt
- name: lint
run: |
make py-lint
test:
strategy:
fail-fast: false
@@ -21,6 +44,7 @@ jobs:
- dashboard
- querier
- ttl
- preference
sqlstore-provider:
- postgres
- sqlite
1 .gitignore (vendored)

@@ -49,6 +49,7 @@ ee/query-service/tests/test-deploy/data/

# local data
*.backup
*.db
**/db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
bin/
6 Makefile

@@ -72,6 +72,12 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
@echo " - ClickHouse: http://localhost:8123"
@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"

.PHONY: devenv-clickhouse-clean
devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
@echo "Removing ClickHouse data..."
@rm -rf .devenv/docker/clickhouse/fs/tmp/*
@echo "ClickHouse data cleaned!"

##############################################################
# go commands
##############################################################
@@ -1,11 +1,3 @@
FROM node:18-bullseye AS build

WORKDIR /opt/
COPY ./frontend/ ./
ENV NODE_OPTIONS=--max-old-space-size=8192
RUN CI=1 yarn install
RUN CI=1 yarn build

FROM golang:1.24-bullseye

ARG OS="linux"
@@ -40,8 +32,6 @@ COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

COPY --from=build /opt/build ./web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz", "server"]
47 cmd/enterprise/Dockerfile.with-web.integration (new file)

@@ -0,0 +1,47 @@
FROM node:18-bullseye AS build

WORKDIR /opt/
COPY ./frontend/ ./
ENV NODE_OPTIONS=--max-old-space-size=8192
RUN CI=1 yarn install
RUN CI=1 yarn build

FROM golang:1.24-bullseye

ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL

# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root

RUN set -eux; \
apt-get update; \
apt-get install -y --no-install-recommends \
g++ \
gcc \
libc6-dev \
make \
pkg-config \
; \
rm -rf /var/lib/apt/lists/*

COPY go.mod go.sum ./

RUN go mod download

COPY ./cmd/ ./cmd/
COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates

COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

COPY --from=build /opt/build ./web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz", "server"]
@@ -3,6 +3,13 @@
# Do not modify this file
#

##################### Global #####################
global:
# the url under which the signoz apiserver is externally reachable.
external_url: <unset>
# the url where the SigNoz backend receives telemetry data (traces, metrics, logs) from instrumented applications.
ingestion_url: <unset>

##################### Version #####################
version:
banner:
@@ -473,6 +473,49 @@ paths:
summary: Get reset password token
tags:
- users
/api/v1/global/config:
get:
deprecated: false
description: This endpoints returns global config
operationId: GetGlobalConfig
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TypesGettableGlobalConfig'
status:
type: string
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- EDITOR
- tokenizer:
- EDITOR
summary: Get global config
tags:
- global
/api/v1/invite:
get:
deprecated: false
@@ -806,6 +849,75 @@ paths:
summary: Deprecated create session by email password
tags:
- sessions
/api/v1/logs/promote_paths:
get:
deprecated: false
description: This endpoints promotes and indexes paths
operationId: PromotePaths
responses:
"200":
content:
application/json:
schema:
properties:
data:
items:
$ref: '#/components/schemas/PromotetypesPromotePath'
nullable: true
type: array
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Promote and index paths
tags:
- promoted_paths
- logs
- json_logs
post:
deprecated: false
description: This endpoints promotes and indexes paths
operationId: PromotePaths
requestBody:
content:
application/json:
schema:
items:
$ref: '#/components/schemas/PromotetypesPromotePath'
nullable: true
type: array
responses:
"201":
description: Created
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Promote and index paths
tags:
- promoted_paths
- logs
- json_logs
/api/v1/org/preferences:
get:
deprecated: false
@@ -2094,6 +2206,26 @@ components:
type: object
PreferencetypesValue:
type: object
PromotetypesPromotePath:
properties:
indexes:
items:
$ref: '#/components/schemas/PromotetypesWrappedIndex'
type: array
path:
type: string
promote:
type: boolean
type: object
PromotetypesWrappedIndex:
properties:
column_type:
type: string
granularity:
type: integer
type:
type: string
type: object
RenderErrorResponse:
properties:
error:
@@ -2145,6 +2277,13 @@ components:
userId:
type: string
type: object
TypesGettableGlobalConfig:
properties:
external_url:
type: string
ingestion_url:
type: string
type: object
TypesInvite:
properties:
createdAt:
179 docs/contributing/go/handler.md (new file)

@@ -0,0 +1,179 @@
# Handler

Handlers in SigNoz are responsible for exposing module functionality over HTTP. They are thin adapters that:

- Decode incoming HTTP requests
- Call the appropriate module layer
- Return structured responses (or errors) in a consistent format
- Describe themselves for OpenAPI generation

They are **not** the place for complex business logic; that belongs in modules (for example, `pkg/modules/user`, `pkg/modules/session`, etc).

## How are handlers structured?

At a high level, a typical flow looks like this:

1. A `Handler` interface is defined in the module (for example, `user.Handler`, `session.Handler`, `organization.Handler`).
2. The `apiserver` provider wires those handlers into HTTP routes using Gorilla `mux.Router`.

Each route wraps a module handler method with the following:

- Authorization middleware (from `pkg/http/middleware`)
- A generic HTTP `handler.Handler` (from `pkg/http/handler`)
- An `OpenAPIDef` that describes the operation for OpenAPI generation

For example, in `pkg/apiserver/signozapiserver`:

```go
if err := router.Handle("/api/v1/invite", handler.New(
provider.authZ.AdminAccess(provider.userHandler.CreateInvite),
handler.OpenAPIDef{
ID: "CreateInvite",
Tags: []string{"users"},
Summary: "Create invite",
Description: "This endpoint creates an invite for a user",
Request: new(types.PostableInvite),
RequestContentType: "application/json",
Response: new(types.Invite),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
},
)).Methods(http.MethodPost).GetError(); err != nil {
return err
}
```

In this pattern:

- `provider.userHandler.CreateInvite` is a handler method.
- `provider.authZ.AdminAccess(...)` wraps that method with authorization checks and context setup.
- `handler.New` converts it into an HTTP handler and wires it to OpenAPI via the `OpenAPIDef`.

## How to write a new handler method?

When adding a new endpoint:

1. Add a method to the appropriate module `Handler` interface.
2. Implement that method in the module.
3. Register the method in `signozapiserver` with the correct route, HTTP method, auth, and `OpenAPIDef`.

### 1. Extend an existing `Handler` interface or create a new one

Find the module in `pkg/modules/<name>` and extend its `Handler` interface with a new method that receives an `http.ResponseWriter` and `*http.Request`. For example:

```go
type Handler interface {
// existing methods...
CreateThing(rw http.ResponseWriter, req *http.Request)
}
```

Keep the method focused on HTTP concerns and delegate business logic to the module.

### 2. Implement the handler method

In the module implementation, implement the new method. A typical implementation:

- Extracts authentication and organization context from `req.Context()`
- Decodes the request body into a `types.*` struct using the `binding` package
- Calls module functions
- Uses the `render` package to write responses or errors

```go
func (h *handler) CreateThing(rw http.ResponseWriter, req *http.Request) {
// Extract authentication and organization context from req.Context()
claims, err := authtypes.ClaimsFromContext(req.Context())
if err != nil {
render.Error(rw, err)
return
}

// Decode the request body into a `types.*` struct using the `binding` package
var in types.PostableThing
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
render.Error(rw, err)
return
}

// Call module functions
out, err := h.module.CreateThing(req.Context(), claims.OrgID, &in)
if err != nil {
render.Error(rw, err)
return
}

// Use the `render` package to write responses or errors
render.Success(rw, http.StatusCreated, out)
}
```

### 3. Register the handler in `signozapiserver`

In `pkg/apiserver/signozapiserver`, add a route in the appropriate `add*Routes` function (`addUserRoutes`, `addSessionRoutes`, `addOrgRoutes`, etc.). The pattern is:

```go
if err := router.Handle("/api/v1/things", handler.New(
provider.authZ.AdminAccess(provider.thingHandler.CreateThing),
handler.OpenAPIDef{
ID: "CreateThing",
Tags: []string{"things"},
Summary: "Create thing",
Description: "This endpoint creates a thing",
Request: new(types.PostableThing),
RequestContentType: "application/json",
Response: new(types.GettableThing),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
},
)).Methods(http.MethodPost).GetError(); err != nil {
return err
}
```

### 4. Update the OpenAPI spec

Run the following command to update the OpenAPI spec:

```bash
go run cmd/enterprise/*.go generate openapi
```

This will update the OpenAPI spec in `docs/api/openapi.yml` to reflect the new endpoint.

## How does OpenAPI integration work?

The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAPIDef`. This drives the generated OpenAPI document.

- **ID**: A unique identifier for the operation (used as the `operationId`).
- **Tags**: Logical grouping for the operation (for example, `"users"`, `"sessions"`, `"orgs"`).
- **Summary / Description**: Human-friendly documentation.
- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
  - `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
- **SuccessStatusCode**: The HTTP status for successful responses (for example, `http.StatusOK`, `http.StatusCreated`, `http.StatusNoContent`).
- **ErrorStatusCodes**: Additional error status codes beyond the standard ones automatically added by `handler.New`.
- **SecuritySchemes**: Auth mechanisms and scopes required by the operation.

The generic handler:

- Automatically appends `401`, `403`, and `500` to `ErrorStatusCodes` when appropriate.
- Registers request and response schemas with the OpenAPI reflector so they appear in `docs/api/openapi.yml`.

See existing examples in:

- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)

## What should I remember?

- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
@@ -3,13 +3,14 @@ package app
import (
"context"
"fmt"
"log/slog"
"net"
"net/http"
_ "net/http/pprof" // http profiler
"slices"

"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
@@ -106,7 +107,8 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
signoz.Prometheus,
signoz.Modules.OrgGetter,
signoz.Querier,
signoz.Instrumentation.Logger(),
signoz.Instrumentation.ToProviderSettings(),
signoz.QueryParser,
)

if err != nil {
@@ -353,8 +355,8 @@ func (s *Server) Stop(ctx context.Context) error {
return nil
}

func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, logger *slog.Logger) (*baserules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
// create manager opts
managerOpts := &baserules.ManagerOptions{
@@ -364,7 +366,7 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
Logger: zap.L(),
Reader: ch,
Querier: querier,
SLogger: logger,
SLogger: providerSettings.Logger,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,
@@ -6,6 +6,7 @@ import logEvent from 'api/common/logEvent';
import AppLoading from 'components/AppLoading/AppLoading';
import { CmdKPalette } from 'components/cmdKPalette/cmdKPalette';
import NotFound from 'components/NotFound';
import { ShiftHoldOverlayController } from 'components/ShiftOverlay/ShiftHoldOverlayController';
import Spinner from 'components/Spinner';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
@@ -368,6 +369,9 @@ function App(): JSX.Element {
<NotificationProvider>
<ErrorModalProvider>
{isLoggedInState && <CmdKPalette userRole={user.role} />}
{isLoggedInState && (
<ShiftHoldOverlayController userRole={user.role} />
)}
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
@@ -50,6 +50,13 @@
color: var(--bg-vanilla-400) !important;
font-size: 12px !important;
}
&[type='number']::-webkit-inner-spin-button,
&[type='number']::-webkit-outer-spin-button {
-webkit-appearance: none;
-moz-appearance: none;
appearance: none;
margin: 0;
}
}

.close-btn {
@@ -1,11 +1,15 @@
.log-field-key {
padding-right: 5px;
.log-field-container {
display: flex;
overflow: hidden;
width: 100%;
align-items: baseline;
}
.log-field-key,
.log-field-key-colon {
color: var(--text-vanilla-400, #c0c1c3);
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;

&.small {
font-size: 11px;
@@ -22,6 +26,20 @@
line-height: 24px;
}
}
.log-field-key {
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
white-space: nowrap;
display: inline-block;
max-width: 20vw;
text-overflow: ellipsis;
overflow: hidden;
margin: 0;
}
.log-field-key-colon {
min-width: 0.8rem;
flex-shrink: 0;
}
.log-value {
color: var(--text-vanilla-400, #c0c1c3);
font-size: 14px;
@@ -158,7 +176,8 @@
}

.lightMode {
.log-field-key {
.log-field-key,
.log-field-key-colon {
color: var(--text-slate-400);
}
.log-value {
@@ -170,3 +189,10 @@
}
}
}

.dark {
.log-field-key,
.log-field-key-colon {
color: rgba(255, 255, 255, 0.45);
}
}
@@ -25,13 +25,7 @@ import LogLinesActionButtons from '../LogLinesActionButtons/LogLinesActionButton
import LogStateIndicator from '../LogStateIndicator/LogStateIndicator';
import { getLogIndicatorType } from '../LogStateIndicator/utils';
// styles
import {
Container,
LogContainer,
LogText,
Text,
TextContainer,
} from './styles';
import { Container, LogContainer, LogText } from './styles';
import { isValidLogField } from './util';

interface LogFieldProps {
@@ -58,16 +52,18 @@ function LogGeneralField({
);

return (
<TextContainer>
<Text ellipsis type="secondary" className={cx('log-field-key', fontSize)}>
{`${fieldKey} : `}
</Text>
<div className="log-field-container">
<p className={cx('log-field-key', fontSize)} title={fieldKey}>
{fieldKey}
</p>
<span className={cx('log-field-key-colon', fontSize)}> : </span>
<LogText
dangerouslySetInnerHTML={html}
className={cx('log-value', fontSize)}
title={fieldValue}
linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
/>
</TextContainer>
</div>
);
}
@@ -1,5 +1,5 @@
/* eslint-disable no-nested-ternary */
import { Card, Typography } from 'antd';
import { Card } from 'antd';
import { FontSize } from 'container/OptionsMenu/types';
import styled from 'styled-components';
import { getActiveLogBackground } from 'utils/logs';
@@ -46,19 +46,6 @@ export const Container = styled(Card)<{
getActiveLogBackground($isActiveLog, $isDarkMode, $logType)}
`;

export const Text = styled(Typography.Text)`
&&& {
min-width: 2.5rem;
white-space: nowrap;
}
`;

export const TextContainer = styled.div`
display: flex;
overflow: hidden;
width: 100%;
`;

export const LogContainer = styled.div<LogContainerProps>`
margin-left: 0.5rem;
display: flex;
@@ -300,7 +300,7 @@ function QueryAddOns({
);

return (
<div className="query-add-ons">
<div className="query-add-ons" data-testid="query-add-ons">
{selectedViews.length > 0 && (
<div className="selected-add-ons-content">
{selectedViews.find((view) => view.key === 'group_by') && (
@@ -375,6 +375,7 @@ function QueryAddOns({
<div className="add-on-content" data-testid="limit-content">
<InputWithLabel
label="Limit"
type="number"
onChange={handleChangeLimit}
initialValue={query?.limit ?? undefined}
placeholder="Enter limit"
@@ -43,7 +43,10 @@ function QueryAggregationOptions({
};

return (
<div className="query-aggregation-container">
<div
className="query-aggregation-container"
data-testid="query-aggregation-container"
>
<div className="aggregation-container">
<QueryAggregationSelect
onChange={onChange}
@@ -114,9 +114,9 @@ function QuerySearch({
const [isFocused, setIsFocused] = useState(false);
const editorRef = useRef<EditorView | null>(null);

const handleQueryValidation = useCallback((newQuery: string): void => {
const handleQueryValidation = useCallback((newExpression: string): void => {
try {
const validationResponse = validateQuery(newQuery);
const validationResponse = validateQuery(newExpression);
setValidation(validationResponse);
} catch (error) {
setValidation({
@@ -127,7 +127,7 @@ function QuerySearch({
}
}, []);

const getCurrentQuery = useCallback(
const getCurrentExpression = useCallback(
(): string => editorRef.current?.state.doc.toString() || '',
[],
);
@@ -167,19 +167,14 @@ function QuerySearch({
() => {
if (!isEditorReady) return;

const newQuery = queryData.filter?.expression || '';
const currentQuery = getCurrentQuery();
const newExpression = queryData.filter?.expression || '';
const currentExpression = getCurrentExpression();

/* eslint-disable-next-line sonarjs/no-collapsible-if */
if (newQuery !== currentQuery && !isFocused) {
// Prevent clearing a non-empty editor when queryData becomes empty temporarily
// Only update if newQuery has a value, or if both are empty (initial state)
if (newQuery || !currentQuery) {
updateEditorValue(newQuery, { skipOnChange: true });

if (newQuery) {
handleQueryValidation(newQuery);
}
// Do not update codemirror editor if the expression is the same
if (newExpression !== currentExpression && !isFocused) {
updateEditorValue(newExpression, { skipOnChange: true });
if (newExpression) {
handleQueryValidation(newExpression);
}
}
},
@@ -613,8 +608,8 @@ function QuerySearch({
};

const handleBlur = (): void => {
const currentQuery = getCurrentQuery();
handleQueryValidation(currentQuery);
const currentExpression = getCurrentExpression();
handleQueryValidation(currentExpression);
setIsFocused(false);
};

@@ -633,11 +628,11 @@ function QuerySearch({

const handleExampleClick = (exampleQuery: string): void => {
// If there's an existing query, append the example with AND
const currentQuery = getCurrentQuery();
const newQuery = currentQuery
? `${currentQuery} AND ${exampleQuery}`
const currentExpression = getCurrentExpression();
const newExpression = currentExpression
? `${currentExpression} AND ${exampleQuery}`
: exampleQuery;
updateEditorValue(newQuery);
updateEditorValue(newExpression);
};

// Helper function to render a badge for the current context mode
@@ -673,9 +668,9 @@ function QuerySearch({
if (word?.from === word?.to && !context.explicit) return null;

// Get current query from editor
const currentQuery = editorRef.current?.state.doc.toString() || '';
const currentExpression = getCurrentExpression();
// Get the query context at the cursor position
const queryContext = getQueryContextAtCursor(currentQuery, cursorPos.ch);
const queryContext = getQueryContextAtCursor(currentExpression, cursorPos.ch);

// Define autocomplete options based on the context
let options: {
@@ -1171,8 +1166,8 @@ function QuerySearch({

if (queryContext.isInParenthesis) {
// Different suggestions based on the context within parenthesis or bracket
const currentQuery = editorRef.current?.state.doc.toString() || '';
const curChar = currentQuery.charAt(cursorPos.ch - 1) || '';
const currentExpression = getCurrentExpression();
const curChar = currentExpression.charAt(cursorPos.ch - 1) || '';

if (curChar === '(' || curChar === '[') {
// Right after opening parenthesis/bracket
@@ -1321,7 +1316,7 @@ function QuerySearch({
style={{
position: 'absolute',
top: 8,
right: validation.isValid === false && getCurrentQuery() ? 40 : 8, // Move left when error shown
right: validation.isValid === false && getCurrentExpression() ? 40 : 8, // Move left when error shown
cursor: 'help',
zIndex: 10,
transition: 'right 0.2s ease',
@@ -1383,7 +1378,7 @@ function QuerySearch({
// Mod-Enter is usually Ctrl-Enter or Cmd-Enter based on OS
run: (): boolean => {
if (onRun && typeof onRun === 'function') {
onRun(getCurrentQuery());
onRun(getCurrentExpression());
} else {
handleRunQuery();
}
@@ -1409,7 +1404,7 @@ function QuerySearch({
onBlur={handleBlur}
/>

{getCurrentQuery() && validation.isValid === false && !isFocused && (
{getCurrentExpression() && validation.isValid === false && !isFocused && (
<div
className={cx('query-status-container', {
hasErrors: validation.errors.length > 0,
@@ -1,6 +1,7 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable import/named */
|
||||
import { EditorView } from '@uiw/react-codemirror';
|
||||
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
|
||||
import { getValueSuggestions } from 'api/querySuggestions/getValueSuggestion';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
@@ -151,8 +152,6 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
>;
|
||||
mockedGetKeys.mockClear();
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<QuerySearch
|
||||
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
@@ -171,8 +170,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
|
||||
|
||||
// Focus and type into the editor
|
||||
await user.click(editor);
|
||||
await user.type(editor, SAMPLE_KEY_TYPING);
|
||||
await userEvent.click(editor);
|
||||
await userEvent.type(editor, SAMPLE_KEY_TYPING);
|
||||
|
||||
// Wait for debounced API call (300ms debounce + some buffer)
|
||||
await waitFor(() => expect(mockedGetKeys).toHaveBeenCalled(), {
|
||||
@@ -187,8 +186,6 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
>;
|
||||
mockedGetValues.mockClear();
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<QuerySearch
|
||||
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
@@ -204,8 +201,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
});
|
||||
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
|
||||
await user.click(editor);
|
||||
await user.type(editor, SAMPLE_VALUE_TYPING_INCOMPLETE);
|
||||
await userEvent.click(editor);
|
||||
await userEvent.type(editor, SAMPLE_VALUE_TYPING_INCOMPLETE);
|
||||
|
||||
// Wait for debounced API call (300ms debounce + some buffer)
|
||||
await waitFor(() => expect(mockedGetValues).toHaveBeenCalled(), {
|
||||
@@ -241,7 +238,6 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
|
||||
it('calls provided onRun on Mod-Enter', async () => {
|
||||
const onRun = jest.fn() as jest.MockedFunction<(q: string) => void>;
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<QuerySearch
|
||||
@@ -259,8 +255,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
});
|
||||
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
|
||||
await user.click(editor);
|
||||
await user.type(editor, SAMPLE_STATUS_QUERY);
|
||||
await userEvent.click(editor);
|
||||
await userEvent.type(editor, SAMPLE_STATUS_QUERY);
|
||||
|
||||
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
|
||||
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
|
||||
@@ -280,8 +276,6 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
>;
|
||||
mockedHandleRunQuery.mockClear();
|
||||
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(
|
||||
<QuerySearch
|
||||
onChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
@@ -297,8 +291,8 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
});
|
||||
|
||||
const editor = document.querySelector(CM_EDITOR_SELECTOR) as HTMLElement;
|
||||
await user.click(editor);
|
||||
await user.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
|
||||
await userEvent.click(editor);
|
||||
await userEvent.type(editor, SAMPLE_VALUE_TYPING_COMPLETE);
|
||||
|
||||
// Use fireEvent for keyboard shortcuts as userEvent might not work well with CodeMirror
|
||||
const modKey = navigator.platform.includes('Mac') ? 'metaKey' : 'ctrlKey';
|
||||
@@ -348,4 +342,73 @@ describe('QuerySearch (Integration with Real CodeMirror)', () => {
|
||||
{ timeout: 3000 },
|
||||
);
|
||||
});
|
||||
|
||||
it('handles queryData.filter.expression changes without triggering onChange', async () => {
|
||||
// Spy on CodeMirror's EditorView.dispatch, which is invoked when updateEditorValue
|
||||
// applies a programmatic change to the editor.
|
||||
const dispatchSpy = jest.spyOn(EditorView.prototype, 'dispatch');
|
||||
const initialExpression = "service.name = 'frontend'";
|
||||
const updatedExpression = "service.name = 'backend'";
|
||||
|
||||
const onChange = jest.fn() as jest.MockedFunction<(v: string) => void>;
|
||||
|
||||
const initialQueryData = {
|
||||
...initialQueriesMap.logs.builder.queryData[0],
|
||||
filter: {
|
||||
expression: initialExpression,
|
||||
},
|
||||
};
|
||||
|
||||
const { rerender } = render(
|
||||
<QuerySearch
|
||||
onChange={onChange}
|
||||
queryData={initialQueryData}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Wait for CodeMirror to initialize with the initial expression
|
||||
await waitFor(
|
||||
() => {
|
||||
const editorContent = document.querySelector(
|
||||
CM_EDITOR_SELECTOR,
|
||||
) as HTMLElement;
|
||||
expect(editorContent).toBeInTheDocument();
|
||||
const textContent = editorContent.textContent || '';
|
||||
expect(textContent).toBe(initialExpression);
|
||||
},
|
||||
{ timeout: 3000 },
|
||||
);
|
||||
|
||||
// Ensure the editor is explicitly blurred (not focused)
|
||||
// Blur the actual CodeMirror editor container so that QuerySearch's onBlur handler runs.
|
||||
// Note: In jsdom + CodeMirror we can't reliably assert the DOM text content changes when
|
||||
// the expression is updated programmatically, but we can assert that:
|
||||
// 1) The component continues to render, and
|
||||
// 2) No onChange is fired for programmatic updates.
|
||||
|
||||
const updatedQueryData = {
|
||||
...initialQueryData,
|
||||
filter: {
|
||||
expression: updatedExpression,
|
||||
},
|
||||
};
|
||||
|
||||
// Re-render with updated queryData.filter.expression
|
||||
rerender(
|
||||
<QuerySearch
|
||||
onChange={onChange}
|
||||
queryData={updatedQueryData}
|
||||
dataSource={DataSource.LOGS}
|
||||
/>,
|
||||
);
|
||||
|
||||
// updateEditorValue should have resulted in a dispatch call + onChange should not have been called
|
||||
await waitFor(() => {
|
||||
expect(dispatchSpy).toHaveBeenCalled();
|
||||
expect(onChange).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
dispatchSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,118 @@
|
||||
/* eslint-disable @typescript-eslint/explicit-function-return-type */
|
||||
/* eslint-disable react/display-name */
|
||||
import '@testing-library/jest-dom';
|
||||
|
||||
import { jest } from '@jest/globals';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { render, screen } from 'tests/test-utils';
|
||||
import { Having, IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { UseQueryOperations } from 'types/common/operations.types';
|
||||
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
|
||||
|
||||
import { QueryV2 } from '../QueryV2';
|
||||
|
||||
// Local mocks for domain-specific heavy child components
|
||||
jest.mock(
|
||||
'../QueryAggregation/QueryAggregation',
|
||||
() =>
|
||||
function () {
|
||||
return <div>QueryAggregation</div>;
|
||||
},
|
||||
);
|
||||
jest.mock(
|
||||
'../MerticsAggregateSection/MetricsAggregateSection',
|
||||
() =>
|
||||
function () {
|
||||
return <div>MetricsAggregateSection</div>;
|
||||
},
|
||||
);
|
||||
// Mock hooks
|
||||
jest.mock('hooks/queryBuilder/useQueryBuilder');
|
||||
jest.mock('hooks/queryBuilder/useQueryBuilderOperations');
|
||||
|
||||
const mockedUseQueryBuilder = jest.mocked(useQueryBuilder);
|
||||
const mockedUseQueryOperations = jest.mocked(
|
||||
useQueryOperations,
|
||||
) as jest.MockedFunction<UseQueryOperations>;
|
||||
|
||||
describe('QueryV2 - base render', () => {
|
||||
beforeEach(() => {
|
||||
const mockCloneQuery = jest.fn() as jest.MockedFunction<
|
||||
(type: string, q: IBuilderQuery) => void
|
||||
>;
|
||||
|
||||
mockedUseQueryBuilder.mockReturnValue(({
|
||||
// Only fields used by QueryV2
|
||||
cloneQuery: mockCloneQuery,
|
||||
panelType: null,
|
||||
} as unknown) as QueryBuilderContextType);
|
||||
|
||||
mockedUseQueryOperations.mockReturnValue({
|
||||
isTracePanelType: false,
|
||||
isMetricsDataSource: false,
|
||||
operators: [],
|
||||
spaceAggregationOptions: [],
|
||||
listOfAdditionalFilters: [],
|
||||
handleChangeOperator: jest.fn(),
|
||||
handleSpaceAggregationChange: jest.fn(),
|
||||
handleChangeAggregatorAttribute: jest.fn(),
|
||||
handleChangeDataSource: jest.fn(),
|
||||
handleDeleteQuery: jest.fn(),
|
||||
handleChangeQueryData: (jest.fn() as unknown) as ReturnType<UseQueryOperations>['handleChangeQueryData'],
|
||||
handleChangeFormulaData: jest.fn(),
|
||||
handleQueryFunctionsUpdates: jest.fn(),
|
||||
listOfAdditionalFormulaFilters: [],
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders limit input when dataSource is logs', () => {
|
||||
const baseQuery: IBuilderQuery = {
|
||||
queryName: 'A',
|
||||
dataSource: DataSource.LOGS,
|
||||
aggregateOperator: '',
|
||||
aggregations: [],
|
||||
timeAggregation: '',
|
||||
spaceAggregation: '',
|
||||
temporality: '',
|
||||
functions: [],
|
||||
filter: undefined,
|
||||
filters: { items: [], op: 'AND' },
|
||||
groupBy: [],
|
||||
expression: '',
|
||||
disabled: false,
|
||||
having: [] as Having[],
|
||||
limit: 10,
|
||||
stepInterval: null,
|
||||
orderBy: [],
|
||||
legend: 'A',
|
||||
};
|
||||
|
||||
render(
|
||||
<QueryV2
|
||||
index={0}
|
||||
isAvailableToDisable
|
||||
query={baseQuery}
|
||||
version="v4"
|
||||
onSignalSourceChange={jest.fn() as jest.MockedFunction<(v: string) => void>}
|
||||
signalSourceChangeEnabled={false}
|
||||
queriesCount={1}
|
||||
showTraceOperator={false}
|
||||
hasTraceOperator={false}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Ensure the Limit add-on input is present and is of type number
|
||||
const limitInput = screen.getByPlaceholderText(
|
||||
'Enter limit',
|
||||
) as HTMLInputElement;
|
||||
expect(limitInput).toBeInTheDocument();
|
||||
expect(limitInput).toHaveAttribute('type', 'number');
|
||||
expect(limitInput).toHaveAttribute('name', 'limit');
|
||||
expect(limitInput).toHaveAttribute('data-testid', 'input-Limit');
|
||||
});
|
||||
});
|
||||
@@ -163,6 +163,10 @@ function formatSingleValueForFilter(
if (trimmed === 'true' || trimmed === 'false') {
return trimmed === 'true';
}

if (isQuoted(value)) {
return unquote(value);
}
}

// Return non-string values as-is, or string values that couldn't be converted
@@ -1,3 +1,4 @@
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { FiltersType, QuickFiltersSource } from 'components/QuickFilters/types';
|
||||
import { useGetAggregateValues } from 'hooks/queryBuilder/useGetAggregateValues';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
@@ -5,7 +6,7 @@ import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQue
|
||||
import { quickFiltersAttributeValuesResponse } from 'mocks-server/__mockdata__/customQuickFilters';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
import { render, screen, waitFor } from 'tests/test-utils';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
@@ -41,13 +42,15 @@ interface MockFilterConfig {
|
||||
type: FiltersType;
|
||||
}
|
||||
|
||||
const SERVICE_NAME_KEY = 'service.name';
|
||||
|
||||
const createMockFilter = (
|
||||
overrides: Partial<MockFilterConfig> = {},
|
||||
): MockFilterConfig => ({
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
title: 'Service Name',
|
||||
attributeKey: {
|
||||
key: 'service.name',
|
||||
key: SERVICE_NAME_KEY,
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
@@ -68,7 +71,7 @@ const createMockQueryBuilderData = (hasActiveFilters = false): any => ({
|
||||
? [
|
||||
{
|
||||
key: {
|
||||
key: 'service.name',
|
||||
key: SERVICE_NAME_KEY,
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
@@ -188,4 +191,222 @@ describe('CheckboxFilter - User Flows', () => {
|
||||
expect(screen.getByPlaceholderText('Filter values')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should update query filters when a checkbox is clicked', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
|
||||
// Start with no active filters so clicking a checkbox creates one
|
||||
mockUseQueryBuilder.mockReturnValue({
|
||||
...createMockQueryBuilderData(false),
|
||||
redirectWithQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const mockFilter = createMockFilter({ defaultOpen: true });
|
||||
|
||||
render(
|
||||
<CheckboxFilter
|
||||
filter={mockFilter}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Wait for checkboxes to render
|
||||
await waitFor(() => {
|
||||
expect(screen.getAllByRole('checkbox')).toHaveLength(4);
|
||||
});
|
||||
|
||||
const checkboxes = screen.getAllByRole('checkbox');
|
||||
|
||||
// User unchecks the first value (`mq-kafka`)
|
||||
await userEvent.click(checkboxes[0]);
|
||||
|
||||
// Composite query params (query builder data) should be updated via redirectWithQueryBuilderData
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
|
||||
|
||||
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
|
||||
const updatedFilters = updatedQuery.builder.queryData[0].filters;
|
||||
|
||||
expect(updatedFilters.items).toHaveLength(1);
|
||||
expect(updatedFilters.items[0].key.key).toBe(SERVICE_NAME_KEY);
|
||||
// When unchecking from an "all selected" state, we use a NOT_IN filter for that value
|
||||
expect(updatedFilters.items[0].op).toBe('not in');
|
||||
expect(updatedFilters.items[0].value).toBe('mq-kafka');
|
||||
});
|
||||
|
||||
it('should set an IN filter with only the clicked value when using Only', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
|
||||
// Existing filter: service.name IN ['mq-kafka', 'otel-demo']
|
||||
mockUseQueryBuilder.mockReturnValue({
|
||||
lastUsedQuery: 0,
|
||||
currentQuery: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
key: {
|
||||
key: SERVICE_NAME_KEY,
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
op: 'in',
|
||||
value: ['mq-kafka', 'otel-demo'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
redirectWithQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const mockFilter = createMockFilter({ defaultOpen: true });
|
||||
|
||||
render(
|
||||
<CheckboxFilter
|
||||
filter={mockFilter}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Wait for values to render
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('mq-kafka')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Click on the value label to trigger the "Only" behavior
|
||||
await userEvent.click(screen.getByText('mq-kafka'));
|
||||
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
|
||||
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
|
||||
const updatedFilters = updatedQuery.builder.queryData[0].filters;
|
||||
|
||||
expect(updatedFilters.items).toHaveLength(1);
|
||||
expect(updatedFilters.items[0].key.key).toBe(SERVICE_NAME_KEY);
|
||||
expect(updatedFilters.items[0].op).toBe('in');
|
||||
expect(updatedFilters.items[0].value).toBe('mq-kafka');
|
||||
});
|
||||
|
||||
it('should clear filters for the attribute when using All', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
|
||||
// Existing filter: service.name IN ['mq-kafka']
|
||||
mockUseQueryBuilder.mockReturnValue({
|
||||
lastUsedQuery: 0,
|
||||
currentQuery: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
key: {
|
||||
key: SERVICE_NAME_KEY,
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
op: 'in',
|
||||
value: ['mq-kafka'],
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
redirectWithQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const mockFilter = createMockFilter({ defaultOpen: true });
|
||||
|
||||
render(
|
||||
<CheckboxFilter
|
||||
filter={mockFilter}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
/>,
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('mq-kafka')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Only one value is selected, so clicking it should switch to "All" (no filter for this key)
|
||||
await userEvent.click(screen.getByText('mq-kafka'));
|
||||
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
|
||||
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
|
||||
const updatedFilters = updatedQuery.builder.queryData[0].filters;
|
||||
|
||||
const filtersForServiceName = updatedFilters.items.filter(
|
||||
(item: any) => item.key?.key === SERVICE_NAME_KEY,
|
||||
);
|
||||
|
||||
expect(filtersForServiceName).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should extend an existing IN filter when checking an additional value', async () => {
|
||||
const redirectWithQueryBuilderData = jest.fn();
|
||||
|
||||
// Existing filter: service.name IN 'mq-kafka'
|
||||
mockUseQueryBuilder.mockReturnValue({
|
||||
lastUsedQuery: 0,
|
||||
currentQuery: {
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
filters: {
|
||||
items: [
|
||||
{
|
||||
key: {
|
||||
key: SERVICE_NAME_KEY,
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
op: 'in',
|
||||
value: 'mq-kafka',
|
||||
},
|
||||
],
|
||||
op: 'AND',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
redirectWithQueryBuilderData,
|
||||
} as any);
|
||||
|
||||
const mockFilter = createMockFilter({ defaultOpen: true });
|
||||
|
||||
render(
|
||||
<CheckboxFilter
|
||||
filter={mockFilter}
|
||||
source={QuickFiltersSource.LOGS_EXPLORER}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Wait for checkboxes to render
|
||||
await waitFor(() => {
|
||||
expect(screen.getAllByRole('checkbox')).toHaveLength(4);
|
||||
});
|
||||
|
||||
const checkboxes = screen.getAllByRole('checkbox');
|
||||
|
||||
// First checkbox corresponds to 'mq-kafka' (already selected),
|
||||
// second will be 'otel-demo' which we now select additionally.
|
||||
await userEvent.click(checkboxes[1]);
|
||||
|
||||
expect(redirectWithQueryBuilderData).toHaveBeenCalledTimes(1);
|
||||
const [updatedQuery] = redirectWithQueryBuilderData.mock.calls[0];
|
||||
const updatedFilters = updatedQuery.builder.queryData[0].filters;
|
||||
const [filterForServiceName] = updatedFilters.items;
|
||||
|
||||
expect(filterForServiceName.key.key).toBe(SERVICE_NAME_KEY);
|
||||
expect(filterForServiceName.op).toBe('in');
|
||||
expect(filterForServiceName.value).toEqual(['mq-kafka', 'otel-demo']);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,27 @@
import { createShortcutActions } from '../../constants/shortcutActions';
import { useCmdK } from '../../providers/cmdKProvider';
import { ShiftOverlay } from './ShiftOverlay';
import { useShiftHoldOverlay } from './useShiftHoldOverlay';

type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';
export function ShiftHoldOverlayController({
userRole,
}: {
userRole: UserRole;
}): JSX.Element | null {
const { open: isCmdKOpen } = useCmdK();
const noop = (): void => undefined;

const actions = createShortcutActions({
navigate: noop,
handleThemeChange: noop,
});

const visible = useShiftHoldOverlay({
isModalOpen: isCmdKOpen,
});

return (
<ShiftOverlay visible={visible} actions={actions} userRole={userRole} />
);
}
77 frontend/src/components/ShiftOverlay/ShiftOverlay.tsx (new file)

@@ -0,0 +1,77 @@
import './shiftOverlay.scss';

import { useMemo } from 'react';
import ReactDOM from 'react-dom';

import { formatShortcut } from './formatShortcut';

export type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';
export type CmdAction = {
id: string;
name: string;
shortcut?: string[];
keywords?: string;
section?: string;
roles?: UserRole[];
perform: () => void;
};

interface ShortcutProps {
label: string;
keyHint: React.ReactNode;
}

function Shortcut({ label, keyHint }: ShortcutProps): JSX.Element {
return (
<div className="shift-overlay__item">
<span className="shift-overlay__label">{label}</span>
<kbd className="shift-overlay__kbd">{keyHint}</kbd>
</div>
);
}

interface ShiftOverlayProps {
visible: boolean;
actions: CmdAction[];
userRole: UserRole;
}

export function ShiftOverlay({
visible,
actions,
userRole,
}: ShiftOverlayProps): JSX.Element | null {
const navigationActions = useMemo(() => {
// RBAC filter: show action if no roles set OR current user role is included
const permitted = actions.filter(
(a) => !a.roles || a.roles.includes(userRole),
);

// Navigation only + must have shortcut
return permitted.filter(
(a) =>
a.section?.toLowerCase() === 'navigation' &&
a.shortcut &&
a.shortcut.length > 0,
);
}, [actions, userRole]);

if (!visible || navigationActions.length === 0) {
return null;
}

return ReactDOM.createPortal(
<div className="shift-overlay">
<div className="shift-overlay__panel">
{navigationActions.map((action) => (
<Shortcut
key={action.id}
label={action.name.replace(/^Go to\s+/i, '')}
keyHint={formatShortcut(action.shortcut)}
/>
))}
</div>
</div>,
document.body,
);
}
@@ -0,0 +1,102 @@
import '@testing-library/jest-dom';

import { render, screen } from '@testing-library/react';

import type { CmdAction } from '../ShiftOverlay';
import { ShiftOverlay } from '../ShiftOverlay';

jest.mock('../formatShortcut', () => ({
	formatShortcut: (shortcut: string[]): string => shortcut.join('+'),
}));

const baseActions: CmdAction[] = [
	{
		id: '1',
		name: 'Go to Traces',
		section: 'navigation',
		shortcut: ['Shift', 'T'],
		perform: jest.fn(),
	},
	{
		id: '2',
		name: 'Go to Metrics',
		section: 'navigation',
		shortcut: ['Shift', 'M'],
		roles: ['ADMIN'],
		perform: jest.fn(),
	},
	{
		id: '3',
		name: 'Create Alert',
		section: 'actions',
		shortcut: ['A'],
		perform: jest.fn(),
	},
	{
		id: '4',
		name: 'Go to Logs',
		section: 'navigation',
		perform: jest.fn(),
	},
];

describe('ShiftOverlay', () => {
	it('renders nothing when not visible', () => {
		const { container } = render(
			<ShiftOverlay visible={false} actions={baseActions} userRole="ADMIN" />,
		);

		expect(container.firstChild).toBeNull();
	});

	it('renders nothing when no navigation shortcuts exist', () => {
		const { container } = render(
			<ShiftOverlay
				visible
				actions={[
					{
						id: 'x',
						name: 'Create Alert',
						section: 'actions',
						perform: jest.fn(),
					},
				]}
				userRole="ADMIN"
			/>,
		);

		expect(container.firstChild).toBeNull();
	});

	it('renders navigation shortcuts in a portal', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

		expect(document.body.querySelector('.shift-overlay')).toBeInTheDocument();

		expect(screen.getByText('Traces')).toBeInTheDocument();
		expect(screen.getByText('Metrics')).toBeInTheDocument();

		expect(screen.getByText('Shift+T')).toBeInTheDocument();
		expect(screen.getByText('Shift+M')).toBeInTheDocument();
	});

	it('applies RBAC filtering correctly', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="VIEWER" />);

		expect(screen.getByText('Traces')).toBeInTheDocument();
		expect(screen.queryByText('Metrics')).not.toBeInTheDocument();
	});

	it('strips "Go to" prefix from labels', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

		expect(screen.getByText('Traces')).toBeInTheDocument();
		expect(screen.queryByText('Go to Traces')).not.toBeInTheDocument();
	});

	it('does not render actions without shortcuts', () => {
		render(<ShiftOverlay visible actions={baseActions} userRole="ADMIN" />);

		expect(screen.queryByText('Logs')).not.toBeInTheDocument();
	});
});
@@ -0,0 +1,144 @@
import { act, renderHook } from '@testing-library/react';

import { useShiftHoldOverlay } from '../useShiftHoldOverlay';

jest.useFakeTimers();

function pressShift(target: EventTarget = window): void {
	const event = new KeyboardEvent('keydown', {
		key: 'Shift',
		bubbles: true,
	});
	Object.defineProperty(event, 'target', { value: target });
	window.dispatchEvent(event);
}

function releaseShift(): void {
	window.dispatchEvent(
		new KeyboardEvent('keyup', {
			key: 'Shift',
			bubbles: true,
		}),
	);
}

describe('useShiftHoldOverlay', () => {
	afterEach(() => {
		jest.clearAllTimers();
	});

	it('shows overlay after holding Shift for 600ms', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);
	});

	it('does not show overlay if Shift is released early', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(300);
			releaseShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);
	});

	it('hides overlay on Shift key release', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);

		act(() => {
			releaseShift();
		});

		expect(result.current).toBe(false);
	});

	it('does not activate when modal is open', () => {
		const { result } = renderHook(() =>
			useShiftHoldOverlay({ isModalOpen: true }),
		);

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);
	});

	it('does not activate in typing context (input)', () => {
		const input = document.createElement('input');
		document.body.appendChild(input);

		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift(input);
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);

		document.body.removeChild(input);
	});

	it('cleans up on window blur', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);

		act(() => {
			window.dispatchEvent(new Event('blur'));
		});

		expect(result.current).toBe(false);
	});

	it('cleans up on document visibility change', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({}));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(true);

		act(() => {
			document.dispatchEvent(new Event('visibilitychange'));
		});

		expect(result.current).toBe(false);
	});

	it('does nothing when disabled', () => {
		const { result } = renderHook(() => useShiftHoldOverlay({ disabled: true }));

		act(() => {
			pressShift();
			jest.advanceTimersByTime(600);
		});

		expect(result.current).toBe(false);
	});
});
frontend/src/components/ShiftOverlay/formatShortcut.tsx (new file, 44 lines)
@@ -0,0 +1,44 @@
import './shiftOverlay.scss';

import { ArrowUp, ChevronUp, Command, Option } from 'lucide-react';
import { ReactNode } from 'react';

export function formatShortcut(shortcut?: string[]): ReactNode {
	if (!shortcut || shortcut.length === 0) return null;

	const combo = shortcut.find((s) => typeof s === 'string' && s.trim());
	if (!combo) return null;

	return combo.split('+').map((key) => {
		const k = key.trim().toLowerCase();

		let node: ReactNode;
		switch (k) {
			case 'shift':
				node = <ArrowUp size={14} />;
				break;
			case 'cmd':
			case 'meta':
				node = <Command size={14} />;
				break;
			case 'alt':
				node = <Option size={14} />;
				break;
			case 'ctrl':
			case 'control':
				node = <ChevronUp size={14} />;
				break;
			case 'arrowup':
				node = <ArrowUp size={14} />;
				break;
			default:
				node = k.toUpperCase();
		}

		return (
			<span key={`shortcut-${k}`} className="shift-overlay__key">
				{node}
			</span>
		);
	});
}
frontend/src/components/ShiftOverlay/shiftOverlay.scss (new file, 75 lines)
@@ -0,0 +1,75 @@
.shift-overlay {
	position: fixed;
	bottom: 20px;
	left: 50%;
	transform: translateX(-50%);
	z-index: 9999;
	pointer-events: none;

	&__panel {
		display: flex;
		gap: 20px;
		padding: 8px 12px;

		background: var(--bg-ink-500);
		color: var(--bg-vanilla-300);

		border-radius: 8px;
		font-size: 13px;
		line-height: 1.2;

		box-shadow: 0 6px 20px var(--bg-ink-500);
		animation: shift-overlay-fade-in 120ms ease-out;
	}

	&__item {
		display: flex;
		align-items: center;
		gap: 6px;
		white-space: nowrap;
	}

	&__label {
		opacity: 0.9;
	}

	&__kbd {
		font-family: monospace;
		font-size: 12px;
		padding: 2px 6px;
		display: flex;

		border-radius: 4px;
		background: var(--bg-slate-100);
	}

	&__key {
		display: inline-flex;
		align-items: center;
		justify-content: center;

		min-width: 15px;
		height: 20px;

		border-radius: 4px;

		background-color: var(--bg-slate-100);

		font-size: 12px;
		font-weight: 500;
		line-height: 1;
		color: var(--bg-vanilla-300);
		flex-shrink: 0;
	}
}

@keyframes shift-overlay-fade-in {
	from {
		opacity: 0;
		transform: translateY(-4px);
	}
	to {
		opacity: 1;
		transform: translateY(0);
	}
}
frontend/src/components/ShiftOverlay/useShiftHoldOverlay.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { useEffect, useRef, useState } from 'react';

const HOLD_DELAY_MS = 500;

function isTypingContext(target: EventTarget | null): boolean {
	if (!(target instanceof HTMLElement)) return false;

	const tag = target.tagName;
	return tag === 'INPUT' || tag === 'TEXTAREA' || target.isContentEditable;
}

interface UseShiftHoldOverlayOptions {
	disabled?: boolean;
	isModalOpen?: boolean;
}

export function useShiftHoldOverlay({
	disabled = false,
	isModalOpen = false,
}: UseShiftHoldOverlayOptions): boolean {
	const [visible, setVisible] = useState<boolean>(false);

	const timerRef = useRef<number | null>(null);
	const isHoldingRef = useRef<boolean>(false);

	useEffect((): (() => void) | void => {
		if (disabled) return;

		function cleanup(): void {
			isHoldingRef.current = false;

			if (timerRef.current !== null) {
				window.clearTimeout(timerRef.current);
				timerRef.current = null;
			}

			setVisible(false);
		}

		function onKeyDown(e: KeyboardEvent): void {
			if (e.key !== 'Shift') return;
			if (e.repeat) return;

			// Suppress in bad contexts
			if (
				isModalOpen ||
				e.metaKey ||
				e.ctrlKey ||
				e.altKey ||
				isTypingContext(e.target)
			) {
				return;
			}

			isHoldingRef.current = true;

			timerRef.current = window.setTimeout(() => {
				if (isHoldingRef.current) {
					setVisible(true);
				}
			}, HOLD_DELAY_MS);
		}

		function onKeyUp(e: KeyboardEvent): void {
			if (e.key !== 'Shift') return;
			cleanup();
		}

		function onBlur(): void {
			cleanup();
		}

		window.addEventListener('keydown', onKeyDown);
		window.addEventListener('keyup', onKeyUp);
		window.addEventListener('blur', onBlur);
		document.addEventListener('visibilitychange', cleanup);

		return (): void => {
			window.removeEventListener('keydown', onKeyDown);
			window.removeEventListener('keyup', onKeyUp);
			window.removeEventListener('blur', onBlur);
			document.removeEventListener('visibilitychange', cleanup);
		};
	}, [disabled, isModalOpen]);

	return visible;
}
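For orientation, a minimal sketch of how the hook and the overlay are presumably wired together at the layout level; the component name, the import paths, and the way actions/userRole are obtained are assumptions, not part of this diff:

// Hypothetical wiring — a sketch only, under assumed import paths.
import history from 'lib/history';

import { ShiftOverlay } from 'components/ShiftOverlay/ShiftOverlay';
import { useShiftHoldOverlay } from 'components/ShiftOverlay/useShiftHoldOverlay';
import { createShortcutActions, UserRole } from 'constants/shortcutActions';

function ShiftShortcutHint({ userRole }: { userRole: UserRole }): JSX.Element {
	// true only after Shift has been held for HOLD_DELAY_MS outside inputs/modals;
	// reset on keyup, window blur, or tab switch.
	const visible = useShiftHoldOverlay({});

	const actions = createShortcutActions({
		navigate: (path: string): void => history.push(path),
		handleThemeChange: (): void => {},
	});

	return <ShiftOverlay visible={visible} actions={actions} userRole={userRole} />;
}

The overlay itself stays purely presentational: visibility is owned by the hook, and the action list is the same one consumed by the ⌘K palette via createShortcutActions.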
frontend/src/components/ValueGraph/__tests__/ValueGraph.test.tsx (new file, 205 lines)
@@ -0,0 +1,205 @@
|
||||
import { screen } from '@testing-library/react';
|
||||
import { render } from 'tests/test-utils';
|
||||
|
||||
import ValueGraph from '../index';
|
||||
import { getBackgroundColorAndThresholdCheck } from '../utils';
|
||||
|
||||
// Mock the utils module
|
||||
jest.mock('../utils', () => ({
|
||||
getBackgroundColorAndThresholdCheck: jest.fn(() => ({
|
||||
threshold: {} as any,
|
||||
isConflictingThresholds: false,
|
||||
})),
|
||||
}));
|
||||
|
||||
const mockGetBackgroundColorAndThresholdCheck = getBackgroundColorAndThresholdCheck as jest.MockedFunction<
|
||||
typeof getBackgroundColorAndThresholdCheck
|
||||
>;
|
||||
|
||||
const TEST_ID_VALUE_GRAPH_TEXT = 'value-graph-text';
|
||||
const TEST_ID_VALUE_GRAPH_PREFIX_UNIT = 'value-graph-prefix-unit';
|
||||
const TEST_ID_VALUE_GRAPH_SUFFIX_UNIT = 'value-graph-suffix-unit';
|
||||
|
||||
describe('ValueGraph', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders the numeric value correctly', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('42');
|
||||
});
|
||||
|
||||
it('renders value with suffix unit', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="42ms" rawValue={42} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('42');
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_SUFFIX_UNIT)).toHaveTextContent('ms');
|
||||
});
|
||||
|
||||
it('renders value with prefix unit', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="$100" rawValue={100} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('100');
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_PREFIX_UNIT)).toHaveTextContent('$');
|
||||
});
|
||||
|
||||
it('renders value with both prefix and suffix units', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="$100USD" rawValue={100} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('100');
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_PREFIX_UNIT)).toHaveTextContent('$');
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_SUFFIX_UNIT)).toHaveTextContent('USD');
|
||||
});
|
||||
|
||||
it('renders value with K suffix', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="1.5K" rawValue={1500} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1.5K');
|
||||
});
|
||||
|
||||
it('applies text color when threshold format is Text', () => {
|
||||
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
|
||||
threshold: {
|
||||
thresholdFormat: 'Text',
|
||||
thresholdColor: 'red',
|
||||
} as any,
|
||||
isConflictingThresholds: false,
|
||||
});
|
||||
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveStyle({ color: 'red' });
|
||||
});
|
||||
|
||||
it('applies background color when threshold format is Background', () => {
|
||||
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
|
||||
threshold: {
|
||||
thresholdFormat: 'Background',
|
||||
thresholdColor: 'blue',
|
||||
} as any,
|
||||
isConflictingThresholds: false,
|
||||
});
|
||||
|
||||
const { container } = render(
|
||||
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
|
||||
);
|
||||
|
||||
const containerElement = container.querySelector('.value-graph-container');
|
||||
expect(containerElement).toHaveStyle({ backgroundColor: 'blue' });
|
||||
});
|
||||
|
||||
it('displays conflicting thresholds indicator when multiple thresholds match', () => {
|
||||
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
|
||||
threshold: {
|
||||
thresholdFormat: 'Text',
|
||||
thresholdColor: 'red',
|
||||
} as any,
|
||||
isConflictingThresholds: true,
|
||||
});
|
||||
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="42" rawValue={42} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId('conflicting-thresholds')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not display conflicting thresholds indicator when no conflict', () => {
|
||||
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
|
||||
threshold: {} as any,
|
||||
isConflictingThresholds: false,
|
||||
});
|
||||
|
||||
render(<ValueGraph value="42" rawValue={42} thresholds={[]} />);
|
||||
|
||||
expect(
|
||||
screen.queryByTestId('conflicting-thresholds'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('applies text color to units when threshold format is Text', () => {
|
||||
mockGetBackgroundColorAndThresholdCheck.mockReturnValue({
|
||||
threshold: {
|
||||
thresholdFormat: 'Text',
|
||||
thresholdColor: 'green',
|
||||
} as any,
|
||||
isConflictingThresholds: false,
|
||||
});
|
||||
|
||||
render(<ValueGraph value="42ms" rawValue={42} thresholds={[]} />);
|
||||
|
||||
const unitElement = screen.getByText('ms');
|
||||
expect(unitElement).toHaveStyle({ color: 'green' });
|
||||
});
|
||||
|
||||
it('renders decimal values correctly', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="42.5" rawValue={42.5} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('42.5');
|
||||
});
|
||||
|
||||
it('handles values with M suffix', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="1.2M" rawValue={1200000} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1.2M');
|
||||
});
|
||||
|
||||
it('handles values with B suffix', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="2.3B" rawValue={2300000000} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('2.3B');
|
||||
});
|
||||
|
||||
it('handles scientific notation values', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="1e-9" rawValue={1e-9} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1e-9');
|
||||
});
|
||||
|
||||
it('handles scientific notation with suffix unit', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="1e-9%" rawValue={1e-9} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1e-9');
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_SUFFIX_UNIT)).toHaveTextContent('%');
|
||||
});
|
||||
|
||||
it('handles scientific notation with uppercase E', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="1E-9" rawValue={1e-9} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1E-9');
|
||||
});
|
||||
|
||||
it('handles scientific notation with positive exponent', () => {
|
||||
const { getByTestId } = render(
|
||||
<ValueGraph value="1e+9" rawValue={1e9} thresholds={[]} />,
|
||||
);
|
||||
|
||||
expect(getByTestId(TEST_ID_VALUE_GRAPH_TEXT)).toHaveTextContent('1e+9');
|
||||
});
|
||||
});
|
||||
@@ -3,11 +3,39 @@ import './ValueGraph.styles.scss';
|
||||
import { ExclamationCircleFilled } from '@ant-design/icons';
|
||||
import { Tooltip, Typography } from 'antd';
|
||||
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
|
||||
import { getBackgroundColorAndThresholdCheck } from './utils';
|
||||
|
||||
function Unit({
|
||||
type,
|
||||
unit,
|
||||
threshold,
|
||||
fontSize,
|
||||
}: {
|
||||
type: 'prefix' | 'suffix';
|
||||
unit: string;
|
||||
threshold: ThresholdProps;
|
||||
fontSize: string;
|
||||
}): JSX.Element {
|
||||
return (
|
||||
<Typography.Text
|
||||
className="value-graph-unit"
|
||||
data-testid={`value-graph-${type}-unit`}
|
||||
style={{
|
||||
color:
|
||||
threshold.thresholdFormat === 'Text'
|
||||
? threshold.thresholdColor
|
||||
: undefined,
|
||||
fontSize: `calc(${fontSize} * 0.7)`,
|
||||
}}
|
||||
>
|
||||
{unit}
|
||||
</Typography.Text>
|
||||
);
|
||||
}
|
||||
|
||||
function ValueGraph({
|
||||
value,
|
||||
rawValue,
|
||||
@@ -17,10 +45,16 @@ function ValueGraph({
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const [fontSize, setFontSize] = useState('2.5vw');
|
||||
|
||||
// Parse value to separate number and unit (assuming unit is at the end)
|
||||
const matches = value.match(/([\d.]+[KMB]?)(.*)$/);
|
||||
const numericValue = matches?.[1] || value;
|
||||
const unit = matches?.[2]?.trim() || '';
|
||||
const { numericValue, prefixUnit, suffixUnit } = useMemo(() => {
|
||||
const matches = value.match(
|
||||
/^([^\d.]*)?([\d.]+(?:[eE][+-]?[\d]+)?[KMB]?)([^\d.]*)?$/,
|
||||
);
|
||||
return {
|
||||
numericValue: matches?.[2] || value,
|
||||
prefixUnit: matches?.[1]?.trim() || '',
|
||||
suffixUnit: matches?.[3]?.trim() || '',
|
||||
};
|
||||
}, [value]);
|
||||
|
||||
// Adjust font size based on container size
|
||||
useEffect(() => {
|
||||
@@ -65,8 +99,17 @@ function ValueGraph({
|
||||
}}
|
||||
>
|
||||
<div className="value-text-container">
|
||||
{prefixUnit && (
|
||||
<Unit
|
||||
type="prefix"
|
||||
unit={prefixUnit}
|
||||
threshold={threshold}
|
||||
fontSize={fontSize}
|
||||
/>
|
||||
)}
|
||||
<Typography.Text
|
||||
className="value-graph-text"
|
||||
data-testid="value-graph-text"
|
||||
style={{
|
||||
color:
|
||||
threshold.thresholdFormat === 'Text'
|
||||
@@ -77,19 +120,13 @@ function ValueGraph({
|
||||
>
|
||||
{numericValue}
|
||||
</Typography.Text>
|
||||
{unit && (
|
||||
<Typography.Text
|
||||
className="value-graph-unit"
|
||||
style={{
|
||||
color:
|
||||
threshold.thresholdFormat === 'Text'
|
||||
? threshold.thresholdColor
|
||||
: undefined,
|
||||
fontSize: `calc(${fontSize} * 0.7)`,
|
||||
}}
|
||||
>
|
||||
{unit}
|
||||
</Typography.Text>
|
||||
{suffixUnit && (
|
||||
<Unit
|
||||
type="suffix"
|
||||
unit={suffixUnit}
|
||||
threshold={threshold}
|
||||
fontSize={fontSize}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
{isConflictingThresholds && (
|
||||
|
||||
@@ -159,7 +159,6 @@ describe('CmdKPalette', () => {

		expect(screen.getByText(HOME_LABEL)).toBeInTheDocument();
		expect(screen.getByText('Go to Dashboards')).toBeInTheDocument();
		expect(screen.getByText('Open Sidebar')).toBeInTheDocument();
		expect(screen.getByText('Switch to Dark Mode')).toBeInTheDocument();
	});

@@ -9,34 +9,12 @@ import {
|
||||
CommandList,
|
||||
CommandShortcut,
|
||||
} from '@signozhq/command';
|
||||
import setLocalStorageApi from 'api/browser/localstorage/set';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import updateUserPreference from 'api/v1/user/preferences/name/update';
|
||||
import { AxiosError } from 'axios';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { USER_PREFERENCES } from 'constants/userPreferences';
|
||||
import { useThemeMode } from 'hooks/useDarkMode';
|
||||
import { THEME_MODE } from 'hooks/useDarkMode/constant';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import {
|
||||
BellDot,
|
||||
BugIcon,
|
||||
DraftingCompass,
|
||||
Expand,
|
||||
HardDrive,
|
||||
Home,
|
||||
LayoutGrid,
|
||||
ListMinus,
|
||||
ScrollText,
|
||||
Settings,
|
||||
} from 'lucide-react';
|
||||
import React, { useEffect } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { UserPreference } from 'types/api/preferences/preference';
|
||||
import { showErrorNotification } from 'utils/error';
|
||||
|
||||
import { useAppContext } from '../../providers/App/App';
|
||||
import { createShortcutActions } from '../../constants/shortcutActions';
|
||||
import { useCmdK } from '../../providers/cmdKProvider';
|
||||
|
||||
type CmdAction = {
|
||||
@@ -58,19 +36,8 @@ export function CmdKPalette({
|
||||
}): JSX.Element | null {
|
||||
const { open, setOpen } = useCmdK();
|
||||
|
||||
const { updateUserPreferenceInContext } = useAppContext();
|
||||
const { notifications } = useNotifications();
|
||||
const { setAutoSwitch, setTheme, theme } = useThemeMode();
|
||||
|
||||
const { mutate: updateUserPreferenceMutation } = useMutation(
|
||||
updateUserPreference,
|
||||
{
|
||||
onError: (error) => {
|
||||
showErrorNotification(notifications, error as AxiosError);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
// toggle palette with ⌘/Ctrl+K
|
||||
function handleGlobalCmdK(
|
||||
e: KeyboardEvent,
|
||||
@@ -111,164 +78,10 @@ export function CmdKPalette({
|
||||
history.push(key);
|
||||
}
|
||||
|
||||
function handleOpenSidebar(): void {
|
||||
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, 'true');
|
||||
const save = { name: USER_PREFERENCES.SIDENAV_PINNED, value: true };
|
||||
updateUserPreferenceInContext(save as UserPreference);
|
||||
updateUserPreferenceMutation({
|
||||
name: USER_PREFERENCES.SIDENAV_PINNED,
|
||||
value: true,
|
||||
});
|
||||
}
|
||||
|
||||
function handleCloseSidebar(): void {
|
||||
setLocalStorageApi(USER_PREFERENCES.SIDENAV_PINNED, 'false');
|
||||
const save = { name: USER_PREFERENCES.SIDENAV_PINNED, value: false };
|
||||
updateUserPreferenceInContext(save as UserPreference);
|
||||
updateUserPreferenceMutation({
|
||||
name: USER_PREFERENCES.SIDENAV_PINNED,
|
||||
value: false,
|
||||
});
|
||||
}
|
||||
|
||||
const actions: CmdAction[] = [
|
||||
{
|
||||
id: 'home',
|
||||
name: 'Go to Home',
|
||||
shortcut: ['shift + h'],
|
||||
keywords: 'home',
|
||||
section: 'Navigation',
|
||||
icon: <Home size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.HOME),
|
||||
},
|
||||
{
|
||||
id: 'dashboards',
|
||||
name: 'Go to Dashboards',
|
||||
shortcut: ['shift + d'],
|
||||
keywords: 'dashboards',
|
||||
section: 'Navigation',
|
||||
icon: <LayoutGrid size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.ALL_DASHBOARD),
|
||||
},
|
||||
{
|
||||
id: 'services',
|
||||
name: 'Go to Services',
|
||||
shortcut: ['shift + s'],
|
||||
keywords: 'services monitoring',
|
||||
section: 'Navigation',
|
||||
icon: <HardDrive size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.APPLICATION),
|
||||
},
|
||||
{
|
||||
id: 'traces',
|
||||
name: 'Go to Traces',
|
||||
shortcut: ['shift + t'],
|
||||
keywords: 'traces',
|
||||
section: 'Navigation',
|
||||
icon: <DraftingCompass size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.TRACES_EXPLORER),
|
||||
},
|
||||
{
|
||||
id: 'logs',
|
||||
name: 'Go to Logs',
|
||||
shortcut: ['shift + l'],
|
||||
keywords: 'logs',
|
||||
section: 'Navigation',
|
||||
icon: <ScrollText size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.LOGS),
|
||||
},
|
||||
{
|
||||
id: 'alerts',
|
||||
name: 'Go to Alerts',
|
||||
shortcut: ['shift + a'],
|
||||
keywords: 'alerts',
|
||||
section: 'Navigation',
|
||||
icon: <BellDot size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.LIST_ALL_ALERT),
|
||||
},
|
||||
{
|
||||
id: 'exceptions',
|
||||
name: 'Go to Exceptions',
|
||||
shortcut: ['shift + e'],
|
||||
keywords: 'exceptions errors',
|
||||
section: 'Navigation',
|
||||
icon: <BugIcon size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.ALL_ERROR),
|
||||
},
|
||||
{
|
||||
id: 'messaging-queues',
|
||||
name: 'Go to Messaging Queues',
|
||||
shortcut: ['shift + m'],
|
||||
keywords: 'messaging queues mq',
|
||||
section: 'Navigation',
|
||||
icon: <ListMinus size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.MESSAGING_QUEUES_OVERVIEW),
|
||||
},
|
||||
{
|
||||
id: 'my-settings',
|
||||
name: 'Go to Account Settings',
|
||||
keywords: 'account settings',
|
||||
section: 'Navigation',
|
||||
icon: <Settings size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => onClickHandler(ROUTES.MY_SETTINGS),
|
||||
},
|
||||
|
||||
// Settings
|
||||
{
|
||||
id: 'open-sidebar',
|
||||
name: 'Open Sidebar',
|
||||
keywords: 'sidebar navigation menu expand',
|
||||
section: 'Settings',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => handleOpenSidebar(),
|
||||
},
|
||||
{
|
||||
id: 'collapse-sidebar',
|
||||
name: 'Collapse Sidebar',
|
||||
keywords: 'sidebar navigation menu collapse',
|
||||
section: 'Settings',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => handleCloseSidebar(),
|
||||
},
|
||||
{
|
||||
id: 'dark-mode',
|
||||
name: 'Switch to Dark Mode',
|
||||
keywords: 'theme dark mode appearance',
|
||||
section: 'Settings',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => handleThemeChange(THEME_MODE.DARK),
|
||||
},
|
||||
{
|
||||
id: 'light-mode',
|
||||
name: 'Switch to Light Mode [Beta]',
|
||||
keywords: 'theme light mode appearance',
|
||||
section: 'Settings',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => handleThemeChange(THEME_MODE.LIGHT),
|
||||
},
|
||||
{
|
||||
id: 'system-theme',
|
||||
name: 'Switch to System Theme',
|
||||
keywords: 'system theme appearance',
|
||||
section: 'Settings',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'AUTHOR', 'VIEWER'],
|
||||
perform: (): void => handleThemeChange(THEME_MODE.SYSTEM),
|
||||
},
|
||||
];
|
||||
const actions = createShortcutActions({
|
||||
navigate: onClickHandler,
|
||||
handleThemeChange,
|
||||
});
|
||||
|
||||
// RBAC filter: show action if no roles set OR current user role is included
|
||||
const permitted = actions.filter(
|
||||
|
||||
frontend/src/constants/shortcutActions.tsx (new file, 263 lines)
@@ -0,0 +1,263 @@
|
||||
import ROUTES from 'constants/routes';
|
||||
import { GlobalShortcutsName } from 'constants/shortcuts/globalShortcuts';
|
||||
import { THEME_MODE } from 'hooks/useDarkMode/constant';
|
||||
import {
|
||||
BarChart2,
|
||||
BellDot,
|
||||
BugIcon,
|
||||
Compass,
|
||||
DraftingCompass,
|
||||
Expand,
|
||||
HardDrive,
|
||||
Home,
|
||||
LayoutGrid,
|
||||
ListMinus,
|
||||
ScrollText,
|
||||
Settings,
|
||||
TowerControl,
|
||||
Workflow,
|
||||
} from 'lucide-react';
|
||||
import React from 'react';
|
||||
|
||||
export type UserRole = 'ADMIN' | 'EDITOR' | 'AUTHOR' | 'VIEWER';
|
||||
|
||||
export type CmdAction = {
|
||||
id: string;
|
||||
name: string;
|
||||
shortcut?: string[];
|
||||
keywords?: string;
|
||||
section?: string;
|
||||
icon?: React.ReactNode;
|
||||
roles?: UserRole[];
|
||||
perform: () => void;
|
||||
};
|
||||
|
||||
type ActionDeps = {
|
||||
navigate: (path: string) => void;
|
||||
handleThemeChange: (mode: string) => void;
|
||||
};
|
||||
|
||||
export function createShortcutActions(deps: ActionDeps): CmdAction[] {
|
||||
const { navigate, handleThemeChange } = deps;
|
||||
|
||||
return [
|
||||
{
|
||||
id: 'home',
|
||||
name: 'Go to Home',
|
||||
shortcut: [GlobalShortcutsName.NavigateToHome],
|
||||
keywords: 'home',
|
||||
section: 'Navigation',
|
||||
icon: <Home size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.HOME),
|
||||
},
|
||||
{
|
||||
id: 'dashboards',
|
||||
name: 'Go to Dashboards',
|
||||
shortcut: [GlobalShortcutsName.NavigateToDashboards],
|
||||
keywords: 'dashboards',
|
||||
section: 'Navigation',
|
||||
icon: <LayoutGrid size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.ALL_DASHBOARD),
|
||||
},
|
||||
{
|
||||
id: 'services',
|
||||
name: 'Go to Services',
|
||||
shortcut: [GlobalShortcutsName.NavigateToServices],
|
||||
keywords: 'services monitoring',
|
||||
section: 'Navigation',
|
||||
icon: <HardDrive size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.APPLICATION),
|
||||
},
|
||||
{
|
||||
id: 'alerts',
|
||||
name: 'Go to Alerts',
|
||||
shortcut: [GlobalShortcutsName.NavigateToAlerts],
|
||||
keywords: 'alerts',
|
||||
section: 'Navigation',
|
||||
icon: <BellDot size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.LIST_ALL_ALERT),
|
||||
},
|
||||
{
|
||||
id: 'exceptions',
|
||||
name: 'Go to Exceptions',
|
||||
shortcut: [GlobalShortcutsName.NavigateToExceptions],
|
||||
keywords: 'exceptions errors',
|
||||
section: 'Navigation',
|
||||
icon: <BugIcon size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.ALL_ERROR),
|
||||
},
|
||||
{
|
||||
id: 'messaging-queues',
|
||||
name: 'Go to Messaging Queues',
|
||||
shortcut: [GlobalShortcutsName.NavigateToMessagingQueues],
|
||||
keywords: 'messaging queues mq',
|
||||
section: 'Navigation',
|
||||
icon: <ListMinus size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.MESSAGING_QUEUES_OVERVIEW),
|
||||
},
|
||||
|
||||
// logs
|
||||
{
|
||||
id: 'logs',
|
||||
name: 'Go to Logs',
|
||||
shortcut: [GlobalShortcutsName.NavigateToLogs],
|
||||
keywords: 'logs',
|
||||
section: 'Logs',
|
||||
icon: <ScrollText size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.LOGS),
|
||||
},
|
||||
{
|
||||
id: 'logs-pipelines',
|
||||
name: 'Go to Logs Pipelines',
|
||||
shortcut: [GlobalShortcutsName.NavigateToLogsPipelines],
|
||||
keywords: 'logs pipelines',
|
||||
section: 'Logs',
|
||||
icon: <Workflow size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.LOGS_PIPELINES),
|
||||
},
|
||||
{
|
||||
id: 'logs-views',
|
||||
name: 'Go to Logs Views',
|
||||
shortcut: [GlobalShortcutsName.NavigateToLogsViews],
|
||||
keywords: 'logs views',
|
||||
section: 'Logs',
|
||||
icon: <TowerControl size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.LOGS_SAVE_VIEWS),
|
||||
},
|
||||
|
||||
// metrics
|
||||
{
|
||||
id: 'metrics-summary',
|
||||
name: 'Go to Metrics Summary',
|
||||
shortcut: [GlobalShortcutsName.NavigateToMetricsSummary],
|
||||
keywords: 'metrics summary',
|
||||
section: 'Metrics',
|
||||
icon: <BarChart2 size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.METRICS_EXPLORER),
|
||||
},
|
||||
{
|
||||
id: 'metrics-explorer',
|
||||
name: 'Go to Metrics Explorer',
|
||||
shortcut: [GlobalShortcutsName.NavigateToMetricsExplorer],
|
||||
keywords: 'metrics explorer',
|
||||
section: 'Metrics',
|
||||
icon: <Compass size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.METRICS_EXPLORER_EXPLORER),
|
||||
},
|
||||
{
|
||||
id: 'metrics-views',
|
||||
name: 'Go to Metrics Views',
|
||||
shortcut: [GlobalShortcutsName.NavigateToMetricsViews],
|
||||
keywords: 'metrics views',
|
||||
section: 'Metrics',
|
||||
icon: <TowerControl size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.METRICS_EXPLORER_VIEWS),
|
||||
},
|
||||
|
||||
// Traces
|
||||
{
|
||||
id: 'traces',
|
||||
name: 'Go to Traces',
|
||||
shortcut: [GlobalShortcutsName.NavigateToTraces],
|
||||
keywords: 'traces',
|
||||
section: 'Traces',
|
||||
icon: <DraftingCompass size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.TRACES_EXPLORER),
|
||||
},
|
||||
{
|
||||
id: 'traces-funnel',
|
||||
name: 'Go to Traces Funnels',
|
||||
shortcut: [GlobalShortcutsName.NavigateToTracesFunnel],
|
||||
keywords: 'traces funnel',
|
||||
section: 'Traces',
|
||||
icon: <DraftingCompass size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.TRACES_FUNNELS),
|
||||
},
|
||||
|
||||
// Common actions
|
||||
{
|
||||
id: 'dark-mode',
|
||||
name: 'Switch to Dark Mode',
|
||||
keywords: 'theme dark mode appearance',
|
||||
section: 'Common',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => handleThemeChange(THEME_MODE.DARK),
|
||||
},
|
||||
{
|
||||
id: 'light-mode',
|
||||
name: 'Switch to Light Mode [Beta]',
|
||||
keywords: 'theme light mode appearance',
|
||||
section: 'Common',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => handleThemeChange(THEME_MODE.LIGHT),
|
||||
},
|
||||
{
|
||||
id: 'system-theme',
|
||||
name: 'Switch to System Theme',
|
||||
keywords: 'system theme appearance',
|
||||
section: 'Common',
|
||||
icon: <Expand size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => handleThemeChange(THEME_MODE.SYSTEM),
|
||||
},
|
||||
|
||||
// settings sub-pages
|
||||
{
|
||||
id: 'my-settings',
|
||||
name: 'Go to Account Settings',
|
||||
shortcut: [GlobalShortcutsName.NavigateToSettings],
|
||||
keywords: 'account settings',
|
||||
section: 'Settings',
|
||||
icon: <Settings size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
perform: (): void => navigate(ROUTES.MY_SETTINGS),
|
||||
},
|
||||
{
|
||||
id: 'my-settings-ingestion',
|
||||
name: 'Go to Account Settings Ingestion',
|
||||
shortcut: [GlobalShortcutsName.NavigateToSettingsIngestion],
|
||||
keywords: 'account settings',
|
||||
section: 'Settings',
|
||||
icon: <Settings size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR'],
|
||||
perform: (): void => navigate(ROUTES.INGESTION_SETTINGS),
|
||||
},
|
||||
|
||||
{
|
||||
id: 'my-settings-billing',
|
||||
name: 'Go to Account Settings Billing',
|
||||
shortcut: [GlobalShortcutsName.NavigateToSettingsBilling],
|
||||
keywords: 'account settings billing',
|
||||
section: 'Settings',
|
||||
icon: <Settings size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR'],
|
||||
perform: (): void => navigate(ROUTES.BILLING),
|
||||
},
|
||||
{
|
||||
id: 'my-settings-api-keys',
|
||||
name: 'Go to Account Settings API Keys',
|
||||
shortcut: [GlobalShortcutsName.NavigateToSettingsAPIKeys],
|
||||
keywords: 'account settings api keys',
|
||||
section: 'Settings',
|
||||
icon: <Settings size={14} />,
|
||||
roles: ['ADMIN', 'EDITOR'],
|
||||
perform: (): void => navigate(ROUTES.API_KEYS),
|
||||
},
|
||||
];
|
||||
}
|
||||
@@ -1,25 +1,57 @@
export const GlobalShortcuts = {
	NavigateToServices: 's+shift',
	NavigateToTraces: 't+shift',
	NavigateToLogs: 'l+shift',
	NavigateToDashboards: 'd+shift',
	NavigateToAlerts: 'a+shift',
	NavigateToExceptions: 'e+shift',
	NavigateToMessagingQueues: 'm+shift',
	ToggleSidebar: 'b+shift',
	NavigateToHome: 'h+shift',
	NavigateToServices: 'shift+s',
	NavigateToDashboards: 'shift+d',
	NavigateToAlerts: 'shift+a',
	NavigateToExceptions: 'shift+e',
	NavigateToMessagingQueues: 'shift+q',
	ToggleSidebar: 'shift+b',
	NavigateToHome: 'shift+h',

	// logs
	NavigateToLogs: 'shift+l',
	NavigateToLogsPipelines: 'shift+l+p',
	NavigateToLogsViews: 'shift+l+v',

	// traces
	NavigateToTraces: 'shift+t',
	NavigateToTracesFunnel: 'shift+t+f',
	NavigateToTracesViews: 'shift+t+v',

	// metrics
	NavigateToMetricsSummary: 'shift+m',
	NavigateToMetricsExplorer: 'shift+m+e',
	NavigateToMetricsViews: 'shift+m+v',

	// settings
	NavigateToSettings: 'shift+g',
	NavigateToSettingsIngestion: 'shift+g+i',
	NavigateToSettingsBilling: 'shift+g+b',
	NavigateToSettingsAPIKeys: 'shift+g+k',
	NavigateToSettingsNotificationChannels: 'shift+g+n',
};

export const GlobalShortcutsName = {
	NavigateToServices: 'shift+s',
	NavigateToTraces: 'shift+t',
	NavigateToLogs: 'shift+l',
	NavigateToDashboards: 'shift+d',
	NavigateToAlerts: 'shift+a',
	NavigateToExceptions: 'shift+e',
	NavigateToMessagingQueues: 'shift+m',
	NavigateToMessagingQueues: 'shift+q',
	ToggleSidebar: 'shift+b',
	NavigateToHome: 'shift+h',
	NavigateToTracesFunnel: 'shift+t+f',
	NavigateToTracesViews: 'shift+t+v',
	NavigateToMetricsSummary: 'shift+m',
	NavigateToMetricsExplorer: 'shift+m+e',
	NavigateToMetricsViews: 'shift+m+v',
	NavigateToSettings: 'shift+g',
	NavigateToSettingsIngestion: 'shift+g+i',
	NavigateToSettingsBilling: 'shift+g+b',
	NavigateToSettingsAPIKeys: 'shift+g+k',
	NavigateToSettingsNotificationChannels: 'shift+g+n',
	NavigateToLogs: 'shift+l',
	NavigateToLogsPipelines: 'shift+l+p',
	NavigateToLogsViews: 'shift+l+v',
};

export const GlobalShortcutsDescription = {
@@ -32,4 +64,17 @@ export const GlobalShortcutsDescription = {
	NavigateToExceptions: 'Navigate to Exceptions List',
	NavigateToMessagingQueues: 'Navigate to Messaging Queues',
	ToggleSidebar: 'Toggle sidebar visibility',
	NavigateToTracesFunnel: 'Navigate to Traces Funnel',
	NavigateToTracesViews: 'Navigate to Traces Views',
	NavigateToMetricsSummary: 'Navigate to Metrics Summary',
	NavigateToMetricsExplorer: 'Navigate to Metrics Explorer',
	NavigateToMetricsViews: 'Navigate to Metrics Views',
	NavigateToSettings: 'Navigate to Settings',
	NavigateToSettingsIngestion: 'Navigate to Ingestion Settings',
	NavigateToSettingsBilling: 'Navigate to Billing Settings',
	NavigateToSettingsAPIKeys: 'Navigate to API Keys Settings',
	NavigateToSettingsNotificationChannels:
		'Navigate to Notification Channels Settings',
	NavigateToLogsPipelines: 'Navigate to Logs Pipelines',
	NavigateToLogsViews: 'Navigate to Logs Views',
};
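The new multi-key values such as 'shift+g+i' describe a chord: hold Shift, then press the letters in sequence. As a rough, framework-free illustration of how such a string could be matched against keydown events (the helper name and the one-second chord timeout are assumptions, not something this diff adds):

// Hypothetical helper — a sketch only, not part of this PR.
export function createChordMatcher(
	chord: string,
	onMatch: () => void,
	timeoutMs = 1000, // assumed time allowed between keys of a chord
): (e: KeyboardEvent) => void {
	const [modifier, ...keys] = chord.split('+'); // e.g. 'shift+g+i' -> ['shift', 'g', 'i']
	let index = 0;
	let timer: number | undefined;

	return (e: KeyboardEvent): void => {
		if (e.key === 'Shift') return; // ignore the modifier's own keydown

		const modifierHeld = modifier !== 'shift' || e.shiftKey;
		if (!modifierHeld || e.key.toLowerCase() !== keys[index]) {
			index = 0; // wrong key: restart the sequence
			return;
		}

		index += 1;
		window.clearTimeout(timer);

		if (index === keys.length) {
			index = 0;
			onMatch(); // full chord pressed
			return;
		}

		// wait for the next key of the chord, then give up
		timer = window.setTimeout(() => {
			index = 0;
		}, timeoutMs);
	};
}

Usage would look roughly like window.addEventListener('keydown', createChordMatcher(GlobalShortcuts.NavigateToSettingsIngestion, goToIngestionSettings)), where goToIngestionSettings is an assumed navigation callback.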
@@ -10,6 +10,20 @@ import {
import { QueryClient, QueryClientProvider } from 'react-query';

// Mock dependencies
jest.mock('providers/cmdKProvider', () => ({
	useCmdK: (): {
		open: boolean;
		setOpen: React.Dispatch<React.SetStateAction<boolean>>;
		openCmdK: () => void;
		closeCmdK: () => void;
	} => ({
		open: false,
		setOpen: jest.fn(),
		openCmdK: jest.fn(),
		closeCmdK: jest.fn(),
	}),
}));

jest.mock('api/common/logEvent', () => jest.fn());

// Mock the AppContext
@@ -63,7 +77,7 @@ describe('Sidebar Toggle Shortcut', () => {

describe('Global Shortcuts Constants', () => {
	it('should have the correct shortcut key combination', () => {
		expect(GlobalShortcuts.ToggleSidebar).toBe('b+shift');
		expect(GlobalShortcuts.ToggleSidebar).toBe('shift+b');
	});
});

@@ -67,7 +67,6 @@ function WidgetGraphComponent({
}: WidgetGraphComponentProps): JSX.Element {
	const { safeNavigate } = useSafeNavigate();
	const [deleteModal, setDeleteModal] = useState(false);
	const [hovered, setHovered] = useState(false);
	const { notifications } = useNotifications();
	const { pathname, search } = useLocation();

@@ -316,18 +315,6 @@
			style={{
				height: '100%',
			}}
			onMouseOver={(): void => {
				setHovered(true);
			}}
			onFocus={(): void => {
				setHovered(true);
			}}
			onMouseOut={(): void => {
				setHovered(false);
			}}
			onBlur={(): void => {
				setHovered(false);
			}}
			id={widget.id}
			className="widget-graph-component-container"
		>
@@ -377,7 +364,6 @@

			<div className="drag-handle">
				<WidgetHeader
					parentHover={hovered}
					title={widget?.title}
					widget={widget}
					onView={handleOnView}

@@ -99,6 +99,12 @@
			height: calc(100% - 30px);
		}
	}

	&:hover {
		.widget-header-more-options {
			visibility: visible;
		}
	}
}

.widget-full-view {

@@ -51,10 +51,6 @@
		visibility: visible;
	}

	.widget-header-hover {
		visibility: visible;
	}

	.widget-api-actions {
		padding-right: 0.25rem;
	}

@@ -0,0 +1,458 @@
|
||||
import { render as rtlRender, screen } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { RowData } from 'lib/query/createTableColumnsFromQuery';
|
||||
import { AppContext } from 'providers/App/App';
|
||||
import { IAppContext } from 'providers/App/types';
|
||||
import React, { MutableRefObject } from 'react';
|
||||
import { QueryClient, QueryClientProvider, UseQueryResult } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import configureStore from 'redux-mock-store';
|
||||
import thunk from 'redux-thunk';
|
||||
import { SuccessResponse, Warning } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { ROLES } from 'types/roles';
|
||||
|
||||
import { MenuItemKeys } from '../contants';
|
||||
import WidgetHeader from '../index';
|
||||
|
||||
const TEST_WIDGET_TITLE = 'Test Widget';
|
||||
const TABLE_WIDGET_TITLE = 'Table Widget';
|
||||
const WIDGET_HEADER_SEARCH = 'widget-header-search';
|
||||
const WIDGET_HEADER_SEARCH_INPUT = 'widget-header-search-input';
|
||||
const TEST_WIDGET_TITLE_RESOLVED = 'Test Widget Title';
|
||||
|
||||
const mockStore = configureStore([thunk]);
|
||||
const createMockStore = (): ReturnType<typeof mockStore> =>
|
||||
mockStore({
|
||||
app: {
|
||||
role: 'ADMIN',
|
||||
user: {
|
||||
userId: 'test-user-id',
|
||||
email: 'test@signoz.io',
|
||||
name: 'TestUser',
|
||||
},
|
||||
isLoggedIn: true,
|
||||
org: [],
|
||||
},
|
||||
globalTime: {
|
||||
minTime: '2023-01-01T00:00:00Z',
|
||||
maxTime: '2023-01-02T00:00:00Z',
|
||||
},
|
||||
});
|
||||
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
refetchOnWindowFocus: false,
|
||||
retry: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const createMockAppContext = (): Partial<IAppContext> => ({
|
||||
user: {
|
||||
accessJwt: '',
|
||||
refreshJwt: '',
|
||||
id: '',
|
||||
email: '',
|
||||
displayName: '',
|
||||
createdAt: 0,
|
||||
organization: '',
|
||||
orgId: '',
|
||||
role: 'ADMIN' as ROLES,
|
||||
},
|
||||
});
|
||||
|
||||
const render = (ui: React.ReactElement): ReturnType<typeof rtlRender> =>
|
||||
rtlRender(
|
||||
<MemoryRouter>
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={createMockStore()}>
|
||||
<AppContext.Provider value={createMockAppContext() as IAppContext}>
|
||||
{ui}
|
||||
</AppContext.Provider>
|
||||
</Provider>
|
||||
</QueryClientProvider>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
jest.mock('hooks/queryBuilder/useCreateAlerts', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(() => jest.fn()),
|
||||
}));
|
||||
|
||||
jest.mock('hooks/dashboard/useGetResolvedText', () => {
|
||||
// eslint-disable-next-line sonarjs/no-duplicate-string
|
||||
const TEST_WIDGET_TITLE_RESOLVED = 'Test Widget Title';
|
||||
return {
|
||||
__esModule: true,
|
||||
default: jest.fn(() => ({
|
||||
truncatedText: TEST_WIDGET_TITLE_RESOLVED,
|
||||
fullText: TEST_WIDGET_TITLE_RESOLVED,
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
jest.mock('lucide-react', () => ({
|
||||
CircleX: (): JSX.Element => <svg data-testid="lucide-circle-x" />,
|
||||
TriangleAlert: (): JSX.Element => <svg data-testid="lucide-triangle-alert" />,
|
||||
X: (): JSX.Element => <svg data-testid="lucide-x" />,
|
||||
}));
|
||||
jest.mock('antd', () => ({
|
||||
...jest.requireActual('antd'),
|
||||
Spin: (): JSX.Element => <div data-testid="antd-spin" />,
|
||||
}));
|
||||
|
||||
const mockWidget: Widgets = {
|
||||
id: 'test-widget-id',
|
||||
title: TEST_WIDGET_TITLE,
|
||||
description: 'Test Description',
|
||||
panelTypes: PANEL_TYPES.TIME_SERIES,
|
||||
query: {
|
||||
builder: {
|
||||
queryData: [],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
promql: [],
|
||||
clickhouse_sql: [],
|
||||
id: 'query-id',
|
||||
queryType: 'builder' as EQueryType,
|
||||
},
|
||||
timePreferance: 'GLOBAL_TIME',
|
||||
opacity: '',
|
||||
nullZeroValues: '',
|
||||
yAxisUnit: '',
|
||||
fillSpans: false,
|
||||
softMin: null,
|
||||
softMax: null,
|
||||
selectedLogFields: [],
|
||||
selectedTracesFields: [],
|
||||
};
|
||||
|
||||
const mockQueryResponse = ({
|
||||
data: {
|
||||
payload: {
|
||||
data: {
|
||||
result: [],
|
||||
resultType: '',
|
||||
},
|
||||
},
|
||||
statusCode: 200,
|
||||
message: 'success',
|
||||
error: null,
|
||||
},
|
||||
isLoading: false,
|
||||
isError: false,
|
||||
error: null,
|
||||
isFetching: false,
|
||||
} as unknown) as UseQueryResult<
|
||||
SuccessResponse<MetricRangePayloadProps, unknown> & {
|
||||
warning?: Warning;
|
||||
},
|
||||
Error
|
||||
>;
|
||||
|
||||
describe('WidgetHeader', () => {
|
||||
const mockOnView = jest.fn();
|
||||
const mockSetSearchTerm = jest.fn();
|
||||
const tableProcessedDataRef: MutableRefObject<RowData[]> = {
|
||||
current: [
|
||||
{
|
||||
timestamp: 1234567890,
|
||||
key: 'key1',
|
||||
col1: 'val1',
|
||||
col2: 'val2',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders widget header with title', () => {
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByText(TEST_WIDGET_TITLE_RESOLVED)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('returns null for empty widget', () => {
|
||||
const emptyWidget = {
|
||||
...mockWidget,
|
||||
id: PANEL_TYPES.EMPTY_WIDGET,
|
||||
};
|
||||
|
||||
const { container } = render(
|
||||
<WidgetHeader
|
||||
title="Empty Widget"
|
||||
widget={emptyWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(container.innerHTML).toBe('');
|
||||
});
|
||||
|
||||
it('shows search input for table panels', async () => {
|
||||
const tableWidget = {
|
||||
...mockWidget,
|
||||
panelTypes: PANEL_TYPES.TABLE,
|
||||
};
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TABLE_WIDGET_TITLE}
|
||||
widget={tableWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
const searchIcon = screen.getByTestId(WIDGET_HEADER_SEARCH);
|
||||
expect(searchIcon).toBeInTheDocument();
|
||||
|
||||
await userEvent.click(searchIcon);
|
||||
|
||||
expect(screen.getByTestId(WIDGET_HEADER_SEARCH_INPUT)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('handles search input changes and closing', async () => {
|
||||
const tableWidget = {
|
||||
...mockWidget,
|
||||
panelTypes: PANEL_TYPES.TABLE,
|
||||
};
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TABLE_WIDGET_TITLE}
|
||||
widget={tableWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
const searchIcon = screen.getByTestId(`${WIDGET_HEADER_SEARCH}`);
|
||||
await userEvent.click(searchIcon);
|
||||
|
||||
const searchInput = screen.getByTestId(WIDGET_HEADER_SEARCH_INPUT);
|
||||
await userEvent.type(searchInput, 'test search');
|
||||
expect(mockSetSearchTerm).toHaveBeenCalledWith('test search');
|
||||
|
||||
const closeButton = screen
|
||||
.getByTestId(WIDGET_HEADER_SEARCH_INPUT)
|
||||
.parentElement?.querySelector('.search-header-icons');
|
||||
if (closeButton) {
|
||||
await userEvent.click(closeButton);
|
||||
expect(mockSetSearchTerm).toHaveBeenCalledWith('');
|
||||
}
|
||||
});
|
||||
|
||||
it('shows error icon when query has error', () => {
|
||||
const errorResponse = {
|
||||
...mockQueryResponse,
|
||||
isError: true as const,
|
||||
error: { message: 'Test error' } as Error,
|
||||
data: undefined,
|
||||
} as UseQueryResult<
|
||||
SuccessResponse<MetricRangePayloadProps, unknown> & {
|
||||
warning?: Warning;
|
||||
},
|
||||
Error
|
||||
>;
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={errorResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
// check if CircleX icon is rendered
|
||||
const circleXIcon = screen.getByTestId('lucide-circle-x');
|
||||
expect(circleXIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows warning icon when query has warning', () => {
|
||||
const warningData = mockQueryResponse.data
|
||||
? {
|
||||
...mockQueryResponse.data,
|
||||
warning: {
|
||||
code: 'WARNING_CODE',
|
||||
message: 'Test warning',
|
||||
url: 'https://example.com',
|
||||
warnings: [{ message: 'Test warning' }],
|
||||
} as Warning,
|
||||
}
|
||||
: undefined;
|
||||
|
||||
const warningResponse = {
|
||||
...mockQueryResponse,
|
||||
data: warningData,
|
||||
} as UseQueryResult<
|
||||
SuccessResponse<MetricRangePayloadProps, unknown> & {
|
||||
warning?: Warning;
|
||||
},
|
||||
Error
|
||||
>;
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={warningResponse}
|
||||
isWarning
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
const triangleAlertIcon = screen.getByTestId('lucide-triangle-alert');
|
||||
expect(triangleAlertIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows spinner when fetching response', () => {
|
||||
const fetchingResponse = {
|
||||
...mockQueryResponse,
|
||||
isFetching: true,
|
||||
isLoading: true,
|
||||
} as UseQueryResult<
|
||||
SuccessResponse<MetricRangePayloadProps, unknown> & {
|
||||
warning?: Warning;
|
||||
},
|
||||
Error
|
||||
>;
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={fetchingResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
const antSpin = screen.getByTestId('antd-spin');
|
||||
expect(antSpin).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders menu options icon', () => {
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
headerMenuList={[MenuItemKeys.View]}
|
||||
/>,
|
||||
);
|
||||
|
||||
const moreOptionsIcon = screen.getByTestId('widget-header-options');
|
||||
expect(moreOptionsIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows search icon for table panels', () => {
|
||||
const tableWidget = {
|
||||
...mockWidget,
|
||||
panelTypes: PANEL_TYPES.TABLE,
|
||||
};
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TABLE_WIDGET_TITLE}
|
||||
widget={tableWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
const searchIcon = screen.getByTestId(WIDGET_HEADER_SEARCH);
|
||||
expect(searchIcon).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not show search icon for non-table panels', () => {
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
/>,
|
||||
);
|
||||
|
||||
const searchIcon = screen.queryByTestId(WIDGET_HEADER_SEARCH);
|
||||
expect(searchIcon).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('renders threshold when provided', () => {
|
||||
const threshold = <div data-testid="threshold">Threshold Component</div>;
|
||||
|
||||
render(
|
||||
<WidgetHeader
|
||||
title={TEST_WIDGET_TITLE}
|
||||
widget={mockWidget}
|
||||
onView={mockOnView}
|
||||
queryResponse={mockQueryResponse}
|
||||
isWarning={false}
|
||||
isFetchingResponse={false}
|
||||
tableProcessedDataRef={tableProcessedDataRef}
|
||||
setSearchTerm={mockSetSearchTerm}
|
||||
threshold={threshold}
|
||||
/>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('threshold')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -35,6 +35,7 @@ import { SuccessResponse, Warning } from 'types/api';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import APIError from 'types/api/error';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { buildAbsolutePath } from 'utils/app';
|
||||
|
||||
import { errorTooltipPosition } from './config';
|
||||
import { MENUITEM_KEYS_VS_LABELS, MenuItemKeys } from './contants';
|
||||
@@ -47,7 +48,6 @@ interface IWidgetHeaderProps {
|
||||
onView: VoidFunction;
|
||||
onDelete?: VoidFunction;
|
||||
onClone?: VoidFunction;
|
||||
parentHover: boolean;
|
||||
queryResponse: UseQueryResult<
|
||||
SuccessResponse<MetricRangePayloadProps, unknown> & {
|
||||
warning?: Warning;
|
||||
@@ -68,7 +68,6 @@ function WidgetHeader({
|
||||
onView,
|
||||
onDelete,
|
||||
onClone,
|
||||
parentHover,
|
||||
queryResponse,
|
||||
threshold,
|
||||
headerMenuList,
|
||||
@@ -87,7 +86,10 @@ function WidgetHeader({
|
||||
QueryParams.compositeQuery,
|
||||
encodeURIComponent(JSON.stringify(widget.query)),
|
||||
);
|
||||
const generatedUrl = `${window.location.pathname}/new?${urlQuery}`;
|
||||
const generatedUrl = buildAbsolutePath({
|
||||
relativePath: 'new',
|
||||
urlQueryString: urlQuery.toString(),
|
||||
});
|
||||
safeNavigate(generatedUrl);
|
||||
}, [safeNavigate, urlQuery, widget.id, widget.panelTypes, widget.query]);
|
||||
|
||||
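The hunk above replaces the hand-built `${window.location.pathname}/new?${urlQuery}` string with buildAbsolutePath, which takes care of the trailing slash on the current pathname and of a leading slash on the relative path. A hedged usage sketch (the composite-query value is simplified here; the real call serialises widget.query):

import { buildAbsolutePath } from 'utils/app';

// With window.location.pathname === '/dashboard/abc' (no trailing slash),
// this is expected to resolve to '/dashboard/abc/new?compositeQuery=...'.
const urlQuery = new URLSearchParams();
urlQuery.set(
	'compositeQuery',
	encodeURIComponent(JSON.stringify({ queryType: 'builder' })),
);

const generatedUrl = buildAbsolutePath({
	relativePath: 'new',
	urlQueryString: urlQuery.toString(),
});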
@@ -240,6 +242,7 @@ function WidgetHeader({
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
setSearchTerm('');
|
||||
setShowGlobalSearch(false);
|
||||
}}
|
||||
className="search-header-icons"
|
||||
@@ -310,8 +313,6 @@ function WidgetHeader({
|
||||
<MoreOutlined
|
||||
data-testid="widget-header-options"
|
||||
className={`widget-header-more-options ${
|
||||
parentHover ? 'widget-header-hover' : ''
|
||||
} ${
|
||||
globalSearchAvailable ? 'widget-header-more-options-visible' : ''
|
||||
}`}
|
||||
/>
|
||||
|
||||
@@ -92,14 +92,14 @@ function BodyTitleRenderer({
|
||||
|
||||
if (isObject) {
|
||||
// For objects/arrays, stringify the entire structure
|
||||
copyText = `"${cleanedKey}": ${JSON.stringify(value, null, 2)}`;
|
||||
copyText = JSON.stringify(value, null, 2);
|
||||
} else if (parentIsArray) {
|
||||
// For array elements, copy just the value
|
||||
copyText = `"${cleanedKey}": ${value}`;
|
||||
// array elements
|
||||
copyText = `${value}`;
|
||||
} else {
|
||||
// For primitive values, format as JSON key-value pair
|
||||
const valueStr = typeof value === 'string' ? `"${value}"` : String(value);
|
||||
copyText = `"${cleanedKey}": ${valueStr}`;
|
||||
// primitive values
|
||||
const valueStr = typeof value === 'string' ? value : String(value);
|
||||
copyText = valueStr;
|
||||
}
|
||||
|
||||
setCopy(copyText);
|
||||
|
||||
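With the change above, the clipboard receives only the raw value instead of a '"key": value' pair. A minimal standalone sketch of the new branching (same three cases as the diff):

// Objects and arrays are pretty-printed; array elements and primitives are
// copied as plain text, without the key prefix or surrounding quotes.
function getCopyText(value: unknown, parentIsArray: boolean): string {
	const isObject = typeof value === 'object' && value !== null;
	if (isObject) {
		return JSON.stringify(value, null, 2);
	}
	if (parentIsArray) {
		return `${value}`;
	}
	return typeof value === 'string' ? value : String(value);
}

// getCopyText('John', false)        -> John
// getCopyText('arrayElement', true) -> arrayElement
// getCopyText({ id: 123 }, false)   -> {\n  "id": 123\n}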
@@ -60,7 +60,8 @@ const BodyContent: React.FC<{
|
||||
fieldData: Record<string, string>;
|
||||
record: DataType;
|
||||
bodyHtml: { __html: string };
|
||||
}> = React.memo(({ fieldData, record, bodyHtml }) => {
|
||||
textToCopy: string;
|
||||
}> = React.memo(({ fieldData, record, bodyHtml, textToCopy }) => {
|
||||
const { isLoading, treeData, error } = useAsyncJSONProcessing(
|
||||
fieldData.value,
|
||||
record.field === 'body',
|
||||
@@ -92,11 +93,13 @@ const BodyContent: React.FC<{
|
||||
|
||||
if (record.field === 'body') {
|
||||
return (
|
||||
<span
|
||||
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
|
||||
>
|
||||
<span dangerouslySetInnerHTML={bodyHtml} />
|
||||
</span>
|
||||
<CopyClipboardHOC entityKey="body" textToCopy={textToCopy}>
|
||||
<span
|
||||
style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
|
||||
>
|
||||
<span dangerouslySetInnerHTML={bodyHtml} />
|
||||
</span>
|
||||
</CopyClipboardHOC>
|
||||
);
|
||||
}
|
||||
|
||||
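With the change above, the rendered body is wrapped in CopyClipboardHOC and the plain textToCopy string travels alongside the highlighted HTML, so a click copies the raw body rather than the markup. A hedged sketch of the wrapper usage (the BodyCell name is illustrative; the props match the diff):

import CopyClipboardHOC from 'components/Logs/CopyClipboardHOC';

// textToCopy carries the plain body string; bodyHtml only drives rendering.
function BodyCell({
	bodyHtml,
	textToCopy,
}: {
	bodyHtml: { __html: string };
	textToCopy: string;
}): JSX.Element {
	return (
		<CopyClipboardHOC entityKey="body" textToCopy={textToCopy}>
			<span style={{ whiteSpace: 'pre-wrap', tabSize: 4 }}>
				<span dangerouslySetInnerHTML={bodyHtml} />
			</span>
		</CopyClipboardHOC>
	);
}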
@@ -172,7 +175,12 @@ export default function TableViewActions(
|
||||
switch (record.field) {
|
||||
case 'body':
|
||||
return (
|
||||
<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
|
||||
<BodyContent
|
||||
fieldData={fieldData}
|
||||
record={record}
|
||||
bodyHtml={bodyHtml}
|
||||
textToCopy={textToCopy}
|
||||
/>
|
||||
);
|
||||
|
||||
case 'timestamp':
|
||||
@@ -194,6 +202,7 @@ export default function TableViewActions(
|
||||
record,
|
||||
fieldData,
|
||||
bodyHtml,
|
||||
textToCopy,
|
||||
formatTimezoneAdjustedTimestamp,
|
||||
cleanTimestamp,
|
||||
]);
|
||||
@@ -202,7 +211,12 @@ export default function TableViewActions(
|
||||
if (record.field === 'body') {
|
||||
return (
|
||||
<div className={cx('value-field', isOpen ? 'open-popover' : '')}>
|
||||
<BodyContent fieldData={fieldData} record={record} bodyHtml={bodyHtml} />
|
||||
<BodyContent
|
||||
fieldData={fieldData}
|
||||
record={record}
|
||||
bodyHtml={bodyHtml}
|
||||
textToCopy={textToCopy}
|
||||
/>
|
||||
{!isListViewPanel && !RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
|
||||
<span className="action-btn">
|
||||
<Tooltip title="Filter for value">
|
||||
|
||||
@@ -1,16 +1,54 @@
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { RESTRICTED_SELECTED_FIELDS } from 'container/LogsFilters/config';
|
||||
|
||||
import TableViewActions from '../TableViewActions';
|
||||
import useAsyncJSONProcessing from '../useAsyncJSONProcessing';
|
||||
|
||||
// Mock data for tests
|
||||
let mockCopyToClipboard: jest.Mock;
|
||||
let mockNotificationsSuccess: jest.Mock;
|
||||
|
||||
// Mock the components and hooks
|
||||
jest.mock('components/Logs/CopyClipboardHOC', () => ({
|
||||
__esModule: true,
|
||||
default: ({ children }: { children: React.ReactNode }): JSX.Element => (
|
||||
<div className="CopyClipboardHOC">{children}</div>
|
||||
default: ({
|
||||
children,
|
||||
textToCopy,
|
||||
entityKey,
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
textToCopy: string;
|
||||
entityKey: string;
|
||||
}): JSX.Element => (
|
||||
// eslint-disable-next-line jsx-a11y/click-events-have-key-events
|
||||
<div
|
||||
className="CopyClipboardHOC"
|
||||
data-testid={`copy-clipboard-${entityKey}`}
|
||||
data-text-to-copy={textToCopy}
|
||||
onClick={(): void => {
|
||||
if (mockCopyToClipboard) {
|
||||
mockCopyToClipboard(textToCopy);
|
||||
}
|
||||
if (mockNotificationsSuccess) {
|
||||
mockNotificationsSuccess({
|
||||
message: `${entityKey} copied to clipboard`,
|
||||
key: `${entityKey} copied to clipboard`,
|
||||
});
|
||||
}
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
jest.mock('../useAsyncJSONProcessing', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('providers/Timezone', () => ({
|
||||
useTimezone: (): {
|
||||
formatTimezoneAdjustedTimestamp: (timestamp: string) => string;
|
||||
@@ -53,6 +91,19 @@ describe('TableViewActions', () => {
|
||||
onGroupByAttribute: jest.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockCopyToClipboard = jest.fn();
|
||||
mockNotificationsSuccess = jest.fn();
|
||||
|
||||
// Default mock for useAsyncJSONProcessing
|
||||
const mockUseAsyncJSONProcessing = jest.mocked(useAsyncJSONProcessing);
|
||||
mockUseAsyncJSONProcessing.mockReturnValue({
|
||||
isLoading: false,
|
||||
treeData: null,
|
||||
error: null,
|
||||
});
|
||||
});
|
||||
|
||||
it('should render without crashing', () => {
|
||||
render(
|
||||
<TableViewActions
|
||||
@@ -127,4 +178,60 @@ describe('TableViewActions', () => {
|
||||
container.querySelector(ACTION_BUTTON_TEST_ID),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should copy non-JSON body text without quotes when user clicks on body', () => {
|
||||
// Setup: body field with surrounding quotes
|
||||
const bodyValueWithQuotes =
|
||||
'"FeatureFlag \'kafkaQueueProblems\' is enabled, sleeping 1 second"';
|
||||
const expectedCopiedText =
|
||||
"FeatureFlag 'kafkaQueueProblems' is enabled, sleeping 1 second";
|
||||
|
||||
const bodyProps = {
|
||||
fieldData: {
|
||||
field: 'body',
|
||||
value: bodyValueWithQuotes,
|
||||
},
|
||||
record: {
|
||||
key: 'body-key',
|
||||
field: 'body',
|
||||
value: bodyValueWithQuotes,
|
||||
},
|
||||
isListViewPanel: false,
|
||||
isfilterInLoading: false,
|
||||
isfilterOutLoading: false,
|
||||
onClickHandler: jest.fn(),
|
||||
onGroupByAttribute: jest.fn(),
|
||||
};
|
||||
|
||||
// Render component with body field
|
||||
render(
|
||||
<TableViewActions
|
||||
fieldData={bodyProps.fieldData}
|
||||
record={bodyProps.record}
|
||||
isListViewPanel={bodyProps.isListViewPanel}
|
||||
isfilterInLoading={bodyProps.isfilterInLoading}
|
||||
isfilterOutLoading={bodyProps.isfilterOutLoading}
|
||||
onClickHandler={bodyProps.onClickHandler}
|
||||
onGroupByAttribute={bodyProps.onGroupByAttribute}
|
||||
/>,
|
||||
);
|
||||
|
||||
// Find the clickable copy area for body
|
||||
const copyArea = screen.getByTestId('copy-clipboard-body');
|
||||
|
||||
// Verify it has the correct text to copy (without quotes)
|
||||
expect(copyArea).toHaveAttribute('data-text-to-copy', expectedCopiedText);
|
||||
|
||||
// Action: User clicks on body content
|
||||
fireEvent.click(copyArea);
|
||||
|
||||
// Assert: Text was copied without surrounding quotes
|
||||
expect(mockCopyToClipboard).toHaveBeenCalledWith(expectedCopiedText);
|
||||
|
||||
// Assert: Success notification shown
|
||||
expect(mockNotificationsSuccess).toHaveBeenCalledWith({
|
||||
message: 'body copied to clipboard',
|
||||
key: 'body copied to clipboard',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -51,7 +51,7 @@ describe('BodyTitleRenderer', () => {
|
||||
await user.click(screen.getByText('name'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockSetCopy).toHaveBeenCalledWith('"user.name": "John"');
|
||||
expect(mockSetCopy).toHaveBeenCalledWith('John');
|
||||
expect(mockNotification).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
message: expect.stringContaining('user.name'),
|
||||
@@ -75,7 +75,7 @@ describe('BodyTitleRenderer', () => {
|
||||
await user.click(screen.getByText('0'));
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockSetCopy).toHaveBeenCalledWith('"items[*].0": arrayElement');
|
||||
expect(mockSetCopy).toHaveBeenCalledWith('arrayElement');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -96,9 +96,8 @@ describe('BodyTitleRenderer', () => {
|
||||
|
||||
await waitFor(() => {
|
||||
const callArg = mockSetCopy.mock.calls[0][0];
|
||||
expect(callArg).toContain('"user.metadata":');
|
||||
expect(callArg).toContain('"id": 123');
|
||||
expect(callArg).toContain('"active": true');
|
||||
const expectedJson = JSON.stringify(testObject, null, 2);
|
||||
expect(callArg).toBe(expectedJson);
|
||||
expect(mockNotification).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
message: expect.stringContaining('object copied'),
|
||||
|
||||
@@ -0,0 +1,366 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
|
||||
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
|
||||
import { ExplorerViews } from 'pages/LogsExplorer/utils';
|
||||
import { cleanup, render, screen, waitFor } from 'tests/test-utils';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { Query, QueryState } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
|
||||
import { explorerViewToPanelType } from 'utils/explorerUtils';
|
||||
|
||||
import LogExplorerQuerySection from './index';
|
||||
|
||||
const CM_EDITOR_SELECTOR = '.cm-editor .cm-content';
|
||||
const QUERY_AGGREGATION_TEST_ID = 'query-aggregation-container';
|
||||
const QUERY_ADDON_TEST_ID = 'query-add-ons';
|
||||
|
||||
// Mock DOM APIs that CodeMirror needs
|
||||
beforeAll(() => {
|
||||
// Mock getClientRects and getBoundingClientRect for Range objects
|
||||
const mockRect: DOMRect = {
|
||||
width: 100,
|
||||
height: 20,
|
||||
top: 0,
|
||||
left: 0,
|
||||
right: 100,
|
||||
bottom: 20,
|
||||
x: 0,
|
||||
y: 0,
|
||||
toJSON: (): DOMRect => mockRect,
|
||||
} as DOMRect;
|
||||
|
||||
// Create a minimal Range mock with only what CodeMirror actually uses
|
||||
const createMockRange = (): Range => {
|
||||
let startContainer: Node = document.createTextNode('');
|
||||
let endContainer: Node = document.createTextNode('');
|
||||
let startOffset = 0;
|
||||
let endOffset = 0;
|
||||
|
||||
const rectList = {
|
||||
length: 1,
|
||||
item: (index: number): DOMRect | null => (index === 0 ? mockRect : null),
|
||||
0: mockRect,
|
||||
};
|
||||
|
||||
const mockRange = {
|
||||
// CodeMirror uses these for text measurement
|
||||
getClientRects: (): DOMRectList => (rectList as unknown) as DOMRectList,
|
||||
getBoundingClientRect: (): DOMRect => mockRect,
|
||||
// CodeMirror calls these to set up text ranges
|
||||
setStart: (node: Node, offset: number): void => {
|
||||
startContainer = node;
|
||||
startOffset = offset;
|
||||
},
|
||||
setEnd: (node: Node, offset: number): void => {
|
||||
endContainer = node;
|
||||
endOffset = offset;
|
||||
},
|
||||
// Minimal Range properties (TypeScript requires these)
|
||||
get startContainer(): Node {
|
||||
return startContainer;
|
||||
},
|
||||
get endContainer(): Node {
|
||||
return endContainer;
|
||||
},
|
||||
get startOffset(): number {
|
||||
return startOffset;
|
||||
},
|
||||
get endOffset(): number {
|
||||
return endOffset;
|
||||
},
|
||||
get collapsed(): boolean {
|
||||
return startContainer === endContainer && startOffset === endOffset;
|
||||
},
|
||||
commonAncestorContainer: document.body,
|
||||
};
|
||||
return (mockRange as unknown) as Range;
|
||||
};
|
||||
|
||||
// Mock document.createRange to return a new Range instance each time
|
||||
document.createRange = (): Range => createMockRange();
|
||||
|
||||
// Mock getBoundingClientRect for elements
|
||||
Element.prototype.getBoundingClientRect = (): DOMRect => mockRect;
|
||||
});
|
||||
|
||||
jest.mock('hooks/useDarkMode', () => ({
|
||||
useIsDarkMode: (): boolean => false,
|
||||
}));
|
||||
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): { selectedDashboard: undefined } => ({
|
||||
selectedDashboard: undefined,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('api/querySuggestions/getKeySuggestions', () => ({
|
||||
getKeySuggestions: jest.fn().mockResolvedValue({
|
||||
data: {
|
||||
data: { keys: {} },
|
||||
},
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('api/querySuggestions/getValueSuggestion', () => ({
|
||||
getValueSuggestions: jest.fn().mockResolvedValue({
|
||||
data: { data: { values: { stringValues: [], numberValues: [] } } },
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock the hooks
|
||||
jest.mock('hooks/queryBuilder/useGetPanelTypesQueryParam');
|
||||
jest.mock('hooks/queryBuilder/useShareBuilderUrl');
|
||||
|
||||
const mockUseGetPanelTypesQueryParam = jest.mocked(useGetPanelTypesQueryParam);
|
||||
const mockUseShareBuilderUrl = jest.mocked(useShareBuilderUrl);
|
||||
|
||||
const mockUpdateAllQueriesOperators = jest.fn() as jest.MockedFunction<
|
||||
(query: Query, panelType: PANEL_TYPES, dataSource: DataSource) => Query
|
||||
>;
|
||||
|
||||
const mockResetQuery = jest.fn() as jest.MockedFunction<
|
||||
(newCurrentQuery?: QueryState) => void
|
||||
>;
|
||||
|
||||
const mockRedirectWithQueryBuilderData = jest.fn() as jest.MockedFunction<
|
||||
(query: Query) => void
|
||||
>;
|
||||
|
||||
// Create a mock query that we'll use to verify persistence
|
||||
const createMockQuery = (filterExpression?: string): Query => ({
|
||||
id: 'test-query-id',
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
builder: {
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
id: 'body--string----false',
|
||||
dataType: DataTypes.String,
|
||||
key: 'body',
|
||||
type: '',
|
||||
},
|
||||
aggregateOperator: 'count',
|
||||
dataSource: DataSource.LOGS,
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
filter: filterExpression
|
||||
? {
|
||||
expression: filterExpression,
|
||||
}
|
||||
: undefined,
|
||||
functions: [],
|
||||
groupBy: [
|
||||
{
|
||||
key: 'cloud.account.id',
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
having: [],
|
||||
legend: '',
|
||||
limit: null,
|
||||
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
|
||||
pageSize: 0,
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
stepInterval: 60,
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
promql: [],
|
||||
});
|
||||
|
||||
// Helper function to verify CodeMirror content
|
||||
const verifyCodeMirrorContent = async (
|
||||
expectedFilterExpression: string,
|
||||
): Promise<void> => {
|
||||
await waitFor(
|
||||
() => {
|
||||
const editorContent = document.querySelector(
|
||||
CM_EDITOR_SELECTOR,
|
||||
) as HTMLElement;
|
||||
expect(editorContent).toBeInTheDocument();
|
||||
const textContent = editorContent.textContent || '';
|
||||
expect(textContent).toBe(expectedFilterExpression);
|
||||
},
|
||||
{ timeout: 3000 },
|
||||
);
|
||||
};
|
||||
|
||||
const VIEWS_TO_TEST = [
|
||||
ExplorerViews.LIST,
|
||||
ExplorerViews.TIMESERIES,
|
||||
ExplorerViews.TABLE,
|
||||
];
|
||||
|
||||
describe('LogExplorerQuerySection', () => {
|
||||
let mockQuery: Query;
|
||||
let mockQueryBuilderContext: Partial<QueryBuilderContextType>;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
mockQuery = createMockQuery();
|
||||
|
||||
// Mock the return value of updateAllQueriesOperators to return the same query
|
||||
mockUpdateAllQueriesOperators.mockReturnValue(mockQuery);
|
||||
|
||||
// Setup query builder context mock
|
||||
mockQueryBuilderContext = {
|
||||
currentQuery: mockQuery,
|
||||
updateAllQueriesOperators: mockUpdateAllQueriesOperators,
|
||||
resetQuery: mockResetQuery,
|
||||
redirectWithQueryBuilderData: mockRedirectWithQueryBuilderData,
|
||||
panelType: PANEL_TYPES.LIST,
|
||||
initialDataSource: DataSource.LOGS,
|
||||
addNewBuilderQuery: jest.fn() as jest.MockedFunction<() => void>,
|
||||
addNewFormula: jest.fn() as jest.MockedFunction<() => void>,
|
||||
handleSetConfig: jest.fn() as jest.MockedFunction<
|
||||
(panelType: PANEL_TYPES, dataSource: DataSource | null) => void
|
||||
>,
|
||||
addTraceOperator: jest.fn() as jest.MockedFunction<() => void>,
|
||||
};
|
||||
|
||||
// Mock useGetPanelTypesQueryParam
|
||||
mockUseGetPanelTypesQueryParam.mockReturnValue(PANEL_TYPES.LIST);
|
||||
|
||||
// Mock useShareBuilderUrl
|
||||
mockUseShareBuilderUrl.mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should maintain query state across multiple view changes', () => {
|
||||
const { rerender } = render(
|
||||
<LogExplorerQuerySection selectedView={ExplorerViews.LIST} />,
|
||||
undefined,
|
||||
{
|
||||
queryBuilderOverrides: mockQueryBuilderContext as QueryBuilderContextType,
|
||||
},
|
||||
);
|
||||
|
||||
const initialQuery = mockQueryBuilderContext.currentQuery;
|
||||
|
||||
VIEWS_TO_TEST.forEach((view) => {
|
||||
rerender(<LogExplorerQuerySection selectedView={view} />);
|
||||
expect(mockQueryBuilderContext.currentQuery).toEqual(initialQuery);
|
||||
});
|
||||
});
|
||||
|
||||
it('should persist filter expressions across view changes', async () => {
|
||||
// Test with a more complex filter expression
|
||||
const complexFilter =
|
||||
"(service.name = 'api-gateway' OR service.name = 'backend') AND http.status_code IN [500, 502, 503] AND NOT error = 'timeout'";
|
||||
const queryWithComplexFilter = createMockQuery(complexFilter);
|
||||
|
||||
const contextWithComplexFilter: Partial<QueryBuilderContextType> = {
|
||||
...mockQueryBuilderContext,
|
||||
currentQuery: queryWithComplexFilter,
|
||||
};
|
||||
|
||||
const { rerender } = render(
|
||||
<LogExplorerQuerySection selectedView={ExplorerViews.LIST} />,
|
||||
undefined,
|
||||
{
|
||||
queryBuilderOverrides: contextWithComplexFilter as QueryBuilderContextType,
|
||||
},
|
||||
);
|
||||
|
||||
		// Iterate sequentially so each rerender is verified before the next view
		// change; an async forEach callback would not be awaited by the test.
		for (const view of VIEWS_TO_TEST) {
			rerender(<LogExplorerQuerySection selectedView={view} />);
			await verifyCodeMirrorContent(complexFilter);
		}
|
||||
});
|
||||
|
||||
it('should render QueryAggregation and QueryAddOns when switching from LIST to TIMESERIES or TABLE view', async () => {
|
||||
// Helper function to verify components are rendered
|
||||
const verifyComponentsRendered = async (): Promise<void> => {
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(screen.getByTestId(QUERY_AGGREGATION_TEST_ID)).toBeInTheDocument();
|
||||
},
|
||||
{ timeout: 3000 },
|
||||
);
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(screen.getByTestId(QUERY_ADDON_TEST_ID)).toBeInTheDocument();
|
||||
},
|
||||
{ timeout: 3000 },
|
||||
);
|
||||
};
|
||||
|
||||
// Start with LIST view - QueryAggregation and QueryAddOns should NOT be rendered
|
||||
mockUseGetPanelTypesQueryParam.mockReturnValue(PANEL_TYPES.LIST);
|
||||
const contextWithList: Partial<QueryBuilderContextType> = {
|
||||
...mockQueryBuilderContext,
|
||||
panelType: PANEL_TYPES.LIST,
|
||||
};
|
||||
|
||||
render(
|
||||
<LogExplorerQuerySection selectedView={ExplorerViews.LIST} />,
|
||||
undefined,
|
||||
{
|
||||
queryBuilderOverrides: contextWithList as QueryBuilderContextType,
|
||||
},
|
||||
);
|
||||
|
||||
// Verify QueryAggregation is NOT rendered in LIST view
|
||||
expect(
|
||||
screen.queryByTestId(QUERY_AGGREGATION_TEST_ID),
|
||||
).not.toBeInTheDocument();
|
||||
|
||||
// Verify QueryAddOns is NOT rendered in LIST view (check for one of the add-on tabs)
|
||||
expect(screen.queryByTestId(QUERY_ADDON_TEST_ID)).not.toBeInTheDocument();
|
||||
|
||||
cleanup();
|
||||
|
||||
// Switch to TIMESERIES view
|
||||
const timeseriesPanelType = explorerViewToPanelType[ExplorerViews.TIMESERIES];
|
||||
mockUseGetPanelTypesQueryParam.mockReturnValue(timeseriesPanelType);
|
||||
const contextWithTimeseries: Partial<QueryBuilderContextType> = {
|
||||
...mockQueryBuilderContext,
|
||||
panelType: timeseriesPanelType,
|
||||
};
|
||||
|
||||
render(
|
||||
<LogExplorerQuerySection selectedView={ExplorerViews.TIMESERIES} />,
|
||||
undefined,
|
||||
{
|
||||
queryBuilderOverrides: contextWithTimeseries as QueryBuilderContextType,
|
||||
},
|
||||
);
|
||||
|
||||
// Verify QueryAggregation and QueryAddOns are rendered
|
||||
await verifyComponentsRendered();
|
||||
|
||||
cleanup();
|
||||
|
||||
// Switch to TABLE view
|
||||
const tablePanelType = explorerViewToPanelType[ExplorerViews.TABLE];
|
||||
mockUseGetPanelTypesQueryParam.mockReturnValue(tablePanelType);
|
||||
const contextWithTable: Partial<QueryBuilderContextType> = {
|
||||
...mockQueryBuilderContext,
|
||||
panelType: tablePanelType,
|
||||
};
|
||||
|
||||
render(
|
||||
<LogExplorerQuerySection selectedView={ExplorerViews.TABLE} />,
|
||||
undefined,
|
||||
{
|
||||
queryBuilderOverrides: contextWithTable as QueryBuilderContextType,
|
||||
},
|
||||
);
|
||||
|
||||
// Verify QueryAggregation and QueryAddOns are still rendered in TABLE view
|
||||
await verifyComponentsRendered();
|
||||
});
|
||||
});
|
||||
@@ -501,7 +501,7 @@ function NewWidget({
|
||||
stackedBarChart: selectedWidget?.stackedBarChart || false,
|
||||
yAxisUnit: selectedWidget?.yAxisUnit,
|
||||
decimalPrecision:
|
||||
selectedWidget?.decimalPrecision || PrecisionOptionsEnum.TWO,
|
||||
selectedWidget?.decimalPrecision ?? PrecisionOptionsEnum.TWO,
|
||||
panelTypes: graphType,
|
||||
query: adjustedQueryForV5,
|
||||
thresholds: selectedWidget?.thresholds,
|
||||
@@ -532,7 +532,7 @@ function NewWidget({
|
||||
stackedBarChart: selectedWidget?.stackedBarChart || false,
|
||||
yAxisUnit: selectedWidget?.yAxisUnit,
|
||||
decimalPrecision:
|
||||
selectedWidget?.decimalPrecision || PrecisionOptionsEnum.TWO,
|
||||
selectedWidget?.decimalPrecision ?? PrecisionOptionsEnum.TWO,
|
||||
panelTypes: graphType,
|
||||
query: adjustedQueryForV5,
|
||||
thresholds: selectedWidget?.thresholds,
|
||||
|
||||
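Both hunks above switch decimalPrecision from || to ??, which matters when a user explicitly picks zero decimal places: || treats 0 as falsy and silently falls back to the default, while ?? only falls back on null or undefined. A small illustration (the enum values are assumed here to be plain numbers, with PrecisionOptionsEnum.TWO === 2):

const stored: number | undefined = 0; // user explicitly chose zero decimals

const withOr = stored || 2; // 2 -> the explicit 0 is lost
const withNullish = stored ?? 2; // 0 -> the explicit 0 is kept

const unset: number | undefined = undefined;
const fallback = unset ?? 2; // 2 -> the default still applies when nothing is stored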
@@ -49,12 +49,14 @@ exports[`Value panel wrappper tests should render tooltip when there are conflic
|
||||
>
|
||||
<span
|
||||
class="ant-typography value-graph-text css-dev-only-do-not-override-2i2tap"
|
||||
data-testid="value-graph-text"
|
||||
style="color: Blue; font-size: 16px;"
|
||||
>
|
||||
295.43
|
||||
</span>
|
||||
<span
|
||||
class="ant-typography value-graph-unit css-dev-only-do-not-override-2i2tap"
|
||||
data-testid="value-graph-suffix-unit"
|
||||
style="color: Blue; font-size: calc(16px * 0.7);"
|
||||
>
|
||||
ms
|
||||
|
||||
@@ -242,6 +242,7 @@ export function Formula({
|
||||
</div>
|
||||
<InputWithLabel
|
||||
label="Limit"
|
||||
type="number"
|
||||
onChange={(value): void => handleChangeLimit(Number(value))}
|
||||
initialValue={formula?.limit ?? undefined}
|
||||
placeholder="Enter limit"
|
||||
|
||||
@@ -363,7 +363,6 @@ export const WidgetHeaderProps: any = {
|
||||
title: 'Table - Panel',
|
||||
yAxisUnit: 'none',
|
||||
},
|
||||
parentHover: false,
|
||||
queryResponse: {
|
||||
status: 'success',
|
||||
isLoading: false,
|
||||
|
||||
@@ -679,7 +679,42 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
|
||||
registerShortcut(GlobalShortcuts.NavigateToExceptions, () =>
|
||||
onClickHandler(ROUTES.ALL_ERROR, null),
|
||||
);
|
||||
|
||||
registerShortcut(GlobalShortcuts.NavigateToTracesFunnel, () =>
|
||||
onClickHandler(ROUTES.TRACES_FUNNELS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToTracesViews, () =>
|
||||
onClickHandler(ROUTES.TRACES_SAVE_VIEWS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToMetricsSummary, () =>
|
||||
onClickHandler(ROUTES.METRICS_EXPLORER, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToMetricsExplorer, () =>
|
||||
onClickHandler(ROUTES.METRICS_EXPLORER_EXPLORER, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToMetricsViews, () =>
|
||||
onClickHandler(ROUTES.METRICS_EXPLORER_VIEWS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToSettings, () =>
|
||||
onClickHandler(ROUTES.SETTINGS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToSettingsIngestion, () =>
|
||||
onClickHandler(ROUTES.INGESTION_SETTINGS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToSettingsBilling, () =>
|
||||
onClickHandler(ROUTES.BILLING, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToSettingsAPIKeys, () =>
|
||||
onClickHandler(ROUTES.API_KEYS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToSettingsNotificationChannels, () =>
|
||||
onClickHandler(ROUTES.ALL_CHANNELS, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToLogsPipelines, () =>
|
||||
onClickHandler(ROUTES.LOGS_PIPELINES, null),
|
||||
);
|
||||
registerShortcut(GlobalShortcuts.NavigateToLogsViews, () =>
|
||||
onClickHandler(ROUTES.LOGS_SAVE_VIEWS, null),
|
||||
);
|
||||
return (): void => {
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToHome);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToServices);
|
||||
@@ -689,6 +724,18 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToAlerts);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToExceptions);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToMessagingQueues);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToTracesFunnel);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToMetricsSummary);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToMetricsExplorer);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToMetricsViews);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToSettings);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToSettingsIngestion);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToSettingsBilling);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToSettingsAPIKeys);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToSettingsNotificationChannels);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToLogsPipelines);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToLogsViews);
|
||||
deregisterShortcut(GlobalShortcuts.NavigateToTracesViews);
|
||||
};
|
||||
}, [deregisterShortcut, onClickHandler, registerShortcut]);
|
||||
|
||||
|
||||
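Every shortcut added in the effect body above has a matching deregisterShortcut in the cleanup below it. Keeping the two lists in sync matters because the provider (reworked later in this diff) refuses duplicate registrations and throws in development. A trimmed sketch of that pairing, using names from the diff:

// Register on mount, deregister on unmount; a stale registration left behind
// would make the next mount of SideNav throw in development mode.
registerShortcut(GlobalShortcuts.NavigateToLogsViews, () =>
	onClickHandler(ROUTES.LOGS_SAVE_VIEWS, null),
);

return (): void => {
	deregisterShortcut(GlobalShortcuts.NavigateToLogsViews);
};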
@@ -3,6 +3,7 @@ import { Tooltip, Typography } from 'antd';
|
||||
import AttributeWithExpandablePopover from './AttributeWithExpandablePopover';
|
||||
|
||||
const EXPANDABLE_ATTRIBUTE_KEYS = ['exception.stacktrace', 'exception.message'];
|
||||
const ATTRIBUTE_LENGTH_THRESHOLD = 100;
|
||||
|
||||
interface EventAttributeProps {
|
||||
attributeKey: string;
|
||||
@@ -15,7 +16,11 @@ function EventAttribute({
|
||||
attributeValue,
|
||||
onExpand,
|
||||
}: EventAttributeProps): JSX.Element {
|
||||
if (EXPANDABLE_ATTRIBUTE_KEYS.includes(attributeKey)) {
|
||||
const shouldExpand =
|
||||
EXPANDABLE_ATTRIBUTE_KEYS.includes(attributeKey) ||
|
||||
attributeValue.length > ATTRIBUTE_LENGTH_THRESHOLD;
|
||||
|
||||
if (shouldExpand) {
|
||||
return (
|
||||
<AttributeWithExpandablePopover
|
||||
attributeKey={attributeKey}
|
||||
|
||||
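After this change an attribute is rendered with the expandable popover either because its key is one of EXPANDABLE_ATTRIBUTE_KEYS or because its value exceeds 100 characters. A standalone sketch of that predicate:

const EXPANDABLE_ATTRIBUTE_KEYS = ['exception.stacktrace', 'exception.message'];
const ATTRIBUTE_LENGTH_THRESHOLD = 100;

const shouldExpand = (attributeKey: string, attributeValue: string): boolean =>
	EXPANDABLE_ATTRIBUTE_KEYS.includes(attributeKey) ||
	attributeValue.length > ATTRIBUTE_LENGTH_THRESHOLD;

console.log(shouldExpand('exception.message', 'boom')); // true (key match)
console.log(shouldExpand('status message', 'ok')); // false
console.log(shouldExpand('status message', 'x'.repeat(150))); // true (length)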
@@ -51,9 +51,12 @@
|
||||
padding: 10px 12px;
|
||||
|
||||
.item {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
&,
|
||||
.attribute-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.span-name-wrapper {
|
||||
display: flex;
|
||||
@@ -413,6 +416,7 @@
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.attribute-container .wrapper,
|
||||
.value-wrapper {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
Button,
|
||||
Checkbox,
|
||||
Input,
|
||||
Modal,
|
||||
Select,
|
||||
Skeleton,
|
||||
Tabs,
|
||||
@@ -52,6 +53,7 @@ import { formatEpochTimestamp } from 'utils/timeUtils';
|
||||
|
||||
import Attributes from './Attributes/Attributes';
|
||||
import { RelatedSignalsViews } from './constants';
|
||||
import EventAttribute from './Events/components/EventAttribute';
|
||||
import Events from './Events/Events';
|
||||
import LinkedSpans from './LinkedSpans/LinkedSpans';
|
||||
import SpanRelatedSignals from './SpanRelatedSignals/SpanRelatedSignals';
|
||||
@@ -166,11 +168,27 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
setShouldUpdateUserPreference,
|
||||
] = useState<boolean>(false);
|
||||
|
||||
const [statusMessageModalContent, setStatusMessageModalContent] = useState<{
|
||||
title: string;
|
||||
content: string;
|
||||
} | null>(null);
|
||||
|
||||
const handleTimeRangeChange = useCallback((value: number): void => {
|
||||
setShouldFetchSpanPercentilesData(true);
|
||||
setSelectedTimeRange(value);
|
||||
}, []);
|
||||
|
||||
const showStatusMessageModal = useCallback(
|
||||
(title: string, content: string): void => {
|
||||
setStatusMessageModalContent({ title, content });
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const handleStatusMessageModalCancel = useCallback((): void => {
|
||||
setStatusMessageModalContent(null);
|
||||
}, []);
|
||||
|
||||
const color = generateColor(
|
||||
selectedSpan?.serviceName || '',
|
||||
themeColors.traceDetailColors,
|
||||
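The new statusMessageModalContent state is the single switch for the Modal rendered near the end of the component: showStatusMessageModal fills it (open), handleStatusMessageModalCancel nulls it (closed). A minimal non-React model of that contract, using the shapes from the diff:

type ModalContent = { title: string; content: string } | null;

let statusMessageModalContent: ModalContent = null;

const showStatusMessageModal = (title: string, content: string): void => {
	statusMessageModalContent = { title, content };
};

const handleStatusMessageModalCancel = (): void => {
	statusMessageModalContent = null;
};

showStatusMessageModal('status message', 'Error: Connection timeout ...');
console.log(Boolean(statusMessageModalContent)); // true  -> <Modal open>
handleStatusMessageModalCancel();
console.log(Boolean(statusMessageModalContent)); // false -> <Modal closed>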
@@ -868,14 +886,11 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
|
||||
{selectedSpan.statusMessage && (
|
||||
<div className="item">
|
||||
<Typography.Text className="attribute-key">
|
||||
status message
|
||||
</Typography.Text>
|
||||
<div className="value-wrapper">
|
||||
<Typography.Text className="attribute-value">
|
||||
{selectedSpan.statusMessage}
|
||||
</Typography.Text>
|
||||
</div>
|
||||
<EventAttribute
|
||||
attributeKey="status message"
|
||||
attributeValue={selectedSpan.statusMessage}
|
||||
onExpand={showStatusMessageModal}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div className="item">
|
||||
@@ -936,6 +951,19 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
|
||||
key={activeDrawerView}
|
||||
/>
|
||||
)}
|
||||
|
||||
<Modal
|
||||
title={statusMessageModalContent?.title}
|
||||
open={!!statusMessageModalContent}
|
||||
onCancel={handleStatusMessageModalCancel}
|
||||
footer={null}
|
||||
width="80vw"
|
||||
centered
|
||||
>
|
||||
<pre className="attribute-with-expandable-popover__full-view">
|
||||
{statusMessageModalContent?.content}
|
||||
</pre>
|
||||
</Modal>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -5,16 +5,20 @@
|
||||
&-virtuoso {
|
||||
background: rgba(171, 189, 255, 0.04);
|
||||
}
|
||||
&-list-container .logs-loading-skeleton {
|
||||
&-list-container {
|
||||
height: 100%;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
border-top: none;
|
||||
color: var(--bg-vanilla-400);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 8px 0;
|
||||
|
||||
.logs-loading-skeleton {
|
||||
height: 100%;
|
||||
border: 1px solid var(--bg-slate-500);
|
||||
border-top: none;
|
||||
color: var(--bg-vanilla-400);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
padding: 8px 0;
|
||||
}
|
||||
}
|
||||
|
||||
&-empty-content {
|
||||
|
||||
@@ -29,6 +29,8 @@ import {
|
||||
mockEmptyLogsResponse,
|
||||
mockSpan,
|
||||
mockSpanLogsResponse,
|
||||
mockSpanWithLongStatusMessage,
|
||||
mockSpanWithShortStatusMessage,
|
||||
} from './mockData';
|
||||
|
||||
// Get typed mocks
|
||||
@@ -128,6 +130,39 @@ jest.mock('lib/uPlotLib/utils/generateColor', () => ({
|
||||
generateColor: jest.fn().mockReturnValue('#1f77b4'),
|
||||
}));
|
||||
|
||||
jest.mock(
|
||||
'container/SpanDetailsDrawer/Events/components/AttributeWithExpandablePopover',
|
||||
() =>
|
||||
// eslint-disable-next-line func-names, @typescript-eslint/explicit-function-return-type, react/display-name
|
||||
function ({
|
||||
attributeKey,
|
||||
attributeValue,
|
||||
onExpand,
|
||||
}: {
|
||||
attributeKey: string;
|
||||
attributeValue: string;
|
||||
onExpand: (title: string, content: string) => void;
|
||||
}) {
|
||||
return (
|
||||
<div className="attribute-container" key={attributeKey}>
|
||||
<div className="attribute-key">{attributeKey}</div>
|
||||
<div className="wrapper">
|
||||
<div className="attribute-value">{attributeValue}</div>
|
||||
<div data-testid="popover-content">
|
||||
<pre>{attributeValue}</pre>
|
||||
<button
|
||||
type="button"
|
||||
onClick={(): void => onExpand(attributeKey, attributeValue)}
|
||||
>
|
||||
Expand
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
// Mock getSpanPercentiles API
|
||||
jest.mock('api/trace/getSpanPercentiles', () => ({
|
||||
__esModule: true,
|
||||
@@ -1153,3 +1188,112 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
|
||||
expect(searchInput).toHaveFocus();
|
||||
});
|
||||
});
|
||||
|
||||
describe('SpanDetailsDrawer - Status Message Truncation User Flows', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockSafeNavigate.mockClear();
|
||||
mockWindowOpen.mockClear();
|
||||
mockUpdateAllQueriesOperators.mockClear();
|
||||
|
||||
(GetMetricQueryRange as jest.Mock).mockImplementation(() =>
|
||||
Promise.resolve(mockEmptyLogsResponse),
|
||||
);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
});
|
||||
|
||||
it('should display expandable popover with Expand button for long status message', () => {
|
||||
render(
|
||||
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
|
||||
<SpanDetailsDrawer
|
||||
isSpanDetailsDocked={false}
|
||||
setIsSpanDetailsDocked={jest.fn()}
|
||||
selectedSpan={mockSpanWithLongStatusMessage}
|
||||
traceStartTime={1640995200000}
|
||||
traceEndTime={1640995260000}
|
||||
/>
|
||||
</QueryBuilderContext.Provider>,
|
||||
);
|
||||
|
||||
// User sees status message label
|
||||
expect(screen.getByText('status message')).toBeInTheDocument();
|
||||
|
||||
// User sees the status message value (appears in both original element and popover preview)
|
||||
const statusMessageElements = screen.getAllByText(
|
||||
mockSpanWithLongStatusMessage.statusMessage,
|
||||
);
|
||||
expect(statusMessageElements.length).toBeGreaterThan(0);
|
||||
|
||||
// User sees Expand button in popover (popover is mocked to render immediately)
|
||||
const expandButton = screen.getByRole('button', { name: /expand/i });
|
||||
expect(expandButton).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should open modal with full status message when user clicks Expand button', async () => {
|
||||
render(
|
||||
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
|
||||
<SpanDetailsDrawer
|
||||
isSpanDetailsDocked={false}
|
||||
setIsSpanDetailsDocked={jest.fn()}
|
||||
selectedSpan={mockSpanWithLongStatusMessage}
|
||||
traceStartTime={1640995200000}
|
||||
traceEndTime={1640995260000}
|
||||
/>
|
||||
</QueryBuilderContext.Provider>,
|
||||
);
|
||||
|
||||
// User clicks the Expand button (popover is mocked to render immediately)
|
||||
const expandButton = screen.getByRole('button', { name: /expand/i });
|
||||
await fireEvent.click(expandButton);
|
||||
|
||||
// User sees modal with the full status message content
|
||||
await waitFor(() => {
|
||||
// Modal should be visible with the title
|
||||
const modalTitle = document.querySelector('.ant-modal-title');
|
||||
expect(modalTitle).toBeInTheDocument();
|
||||
expect(modalTitle?.textContent).toBe('status message');
|
||||
// Modal content should contain the full message in a pre tag
|
||||
const preElement = document.querySelector(
|
||||
'.attribute-with-expandable-popover__full-view',
|
||||
);
|
||||
expect(preElement).toBeInTheDocument();
|
||||
expect(preElement?.textContent).toBe(
|
||||
mockSpanWithLongStatusMessage.statusMessage,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should display short status message as simple text without popover', () => {
|
||||
render(
|
||||
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue as any}>
|
||||
<SpanDetailsDrawer
|
||||
isSpanDetailsDocked={false}
|
||||
setIsSpanDetailsDocked={jest.fn()}
|
||||
selectedSpan={mockSpanWithShortStatusMessage}
|
||||
traceStartTime={1640995200000}
|
||||
traceEndTime={1640995260000}
|
||||
/>
|
||||
</QueryBuilderContext.Provider>,
|
||||
);
|
||||
|
||||
// User sees status message label and value
|
||||
expect(screen.getByText('status message')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText(mockSpanWithShortStatusMessage.statusMessage),
|
||||
).toBeInTheDocument();
|
||||
|
||||
// User hovers over the status message value
|
||||
const statusMessageValue = screen.getByText(
|
||||
mockSpanWithShortStatusMessage.statusMessage,
|
||||
);
|
||||
fireEvent.mouseEnter(statusMessageValue);
|
||||
|
||||
// No Expand button should appear (no expandable popover for short messages)
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /expand/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -35,6 +35,19 @@ export const mockSpan: Span = {
|
||||
level: 0,
|
||||
};
|
||||
|
||||
// Mock span with long status message (> 100 characters) for testing truncation
|
||||
export const mockSpanWithLongStatusMessage: Span = {
|
||||
...mockSpan,
|
||||
statusMessage:
|
||||
'Error: Connection timeout occurred while trying to reach the database server. The connection pool was exhausted and all retry attempts failed after 30 seconds.',
|
||||
};
|
||||
|
||||
// Mock span with short status message (<= 100 characters)
|
||||
export const mockSpanWithShortStatusMessage: Span = {
|
||||
...mockSpan,
|
||||
statusMessage: 'Connection successful',
|
||||
};
|
||||
|
||||
// Mock logs with proper relationships
|
||||
export const mockSpanLogs: ILog[] = [
|
||||
{
|
||||
|
||||
@@ -1,11 +1,18 @@
|
||||
import { render } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { useEffect } from 'react';
|
||||
|
||||
import {
|
||||
KeyboardHotkeysProvider,
|
||||
useKeyboardHotkeys,
|
||||
} from '../useKeyboardHotkeys';
|
||||
|
||||
jest.mock('../../../providers/cmdKProvider', () => ({
|
||||
useCmdK: (): { open: boolean } => ({
|
||||
open: false,
|
||||
}),
|
||||
}));
|
||||
|
||||
function TestComponentWithRegister({
|
||||
handleShortcut,
|
||||
}: {
|
||||
@@ -13,14 +20,13 @@ function TestComponentWithRegister({
|
||||
}): JSX.Element {
|
||||
const { registerShortcut } = useKeyboardHotkeys();
|
||||
|
||||
registerShortcut('a', handleShortcut);
|
||||
useEffect(() => {
|
||||
registerShortcut('a', handleShortcut);
|
||||
}, [registerShortcut, handleShortcut]);
|
||||
|
||||
return (
|
||||
<div>
|
||||
<span>Test Component</span>
|
||||
</div>
|
||||
);
|
||||
return <span>Test Component</span>;
|
||||
}
|
||||
|
||||
function TestComponentWithDeRegister({
|
||||
handleShortcut,
|
||||
}: {
|
||||
@@ -28,21 +34,18 @@ function TestComponentWithDeRegister({
|
||||
}): JSX.Element {
|
||||
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
|
||||
|
||||
registerShortcut('b', handleShortcut);
|
||||
useEffect(() => {
|
||||
registerShortcut('b', handleShortcut);
|
||||
deregisterShortcut('b');
|
||||
}, [registerShortcut, deregisterShortcut, handleShortcut]);
|
||||
|
||||
// Deregister the shortcut before triggering it
|
||||
deregisterShortcut('b');
|
||||
|
||||
return (
|
||||
<div>
|
||||
<span>Test Component</span>
|
||||
</div>
|
||||
);
|
||||
return <span>Test Component</span>;
|
||||
}
|
||||
|
||||
describe('KeyboardHotkeysProvider', () => {
|
||||
it('registers and triggers shortcuts correctly', async () => {
|
||||
const handleShortcut = jest.fn();
|
||||
const user = userEvent.setup();
|
||||
|
||||
render(
|
||||
<KeyboardHotkeysProvider>
|
||||
@@ -50,15 +53,15 @@ describe('KeyboardHotkeysProvider', () => {
|
||||
</KeyboardHotkeysProvider>,
|
||||
);
|
||||
|
||||
// Trigger the registered shortcut
|
||||
await userEvent.keyboard('a');
|
||||
// fires on keyup
|
||||
await user.keyboard('{a}');
|
||||
|
||||
// Assert that the handleShortcut function has been called
|
||||
expect(handleShortcut).toHaveBeenCalled();
|
||||
expect(handleShortcut).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('deregisters shortcuts correctly', () => {
|
||||
it('does not trigger deregistered shortcuts', async () => {
|
||||
const handleShortcut = jest.fn();
|
||||
const user = userEvent.setup();
|
||||
|
||||
render(
|
||||
<KeyboardHotkeysProvider>
|
||||
@@ -66,10 +69,8 @@ describe('KeyboardHotkeysProvider', () => {
|
||||
</KeyboardHotkeysProvider>,
|
||||
);
|
||||
|
||||
// Try to trigger the deregistered shortcut
|
||||
userEvent.keyboard('b');
|
||||
await user.keyboard('{b}');
|
||||
|
||||
// Assert that the handleShortcut function has NOT been called
|
||||
expect(handleShortcut).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,20 +8,21 @@ import {
|
||||
useRef,
|
||||
} from 'react';
|
||||
|
||||
import { useCmdK } from '../../providers/cmdKProvider';
|
||||
|
||||
interface KeyboardHotkeysContextReturnValue {
|
||||
/**
|
||||
* @param keyCombination provide the string for which the subsequent callback should be triggered. Example 'ctrl+a'
|
||||
* @param keyCombo provide the string for which the subsequent callback should be triggered. Example 'ctrl+a'
|
||||
* @param callback the callback that should be triggered when the above key combination is being pressed
|
||||
* @returns void
|
||||
*/
|
||||
registerShortcut: (keyCombination: string, callback: () => void) => void;
|
||||
|
||||
registerShortcut: (keyCombo: string, callback: () => void) => void;
|
||||
/**
|
||||
*
|
||||
* @param keyCombination provide the string for which we want to deregister the callback
|
||||
* @param keyCombo provide the string for which we want to deregister the callback
|
||||
* @returns void
|
||||
*/
|
||||
deregisterShortcut: (keyCombination: string) => void;
|
||||
deregisterShortcut: (keyCombo: string) => void;
|
||||
}
|
||||
|
||||
const KeyboardHotkeysContext = createContext<KeyboardHotkeysContextReturnValue>(
|
||||
@@ -33,7 +34,7 @@ const KeyboardHotkeysContext = createContext<KeyboardHotkeysContextReturnValue>(
|
||||
|
||||
const IGNORE_INPUTS = ['input', 'textarea', 'cm-editor']; // Inputs in which hotkey events will be ignored
|
||||
|
||||
const useKeyboardHotkeys = (): KeyboardHotkeysContextReturnValue => {
|
||||
export function useKeyboardHotkeys(): KeyboardHotkeysContextReturnValue {
|
||||
const context = useContext(KeyboardHotkeysContext);
|
||||
if (!context) {
|
||||
throw new Error(
|
||||
@@ -42,21 +43,45 @@ const useKeyboardHotkeys = (): KeyboardHotkeysContextReturnValue => {
|
||||
}
|
||||
|
||||
return context;
|
||||
};
|
||||
}
|
||||
|
||||
function KeyboardHotkeysProvider({
|
||||
/**
|
||||
* Normalize a set of keys into a stable combo
|
||||
* { shift, m, e } → "e+m+shift"
|
||||
*/
|
||||
function normalizeChord(keys: Set<string>): string {
|
||||
return Array.from(keys).sort().join('+');
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize registration strings
|
||||
* "shift+m+e" → "e+m+shift"
|
||||
*/
|
||||
function normalizeComboString(combo: string): string {
|
||||
return normalizeChord(new Set(combo.split('+')));
|
||||
}
|
||||
|
||||
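Because registration strings and live chords go through the same normalisation, 'shift+m+e' and 'e+shift+m' resolve to the same lookup key. A standalone sketch of the same idea (a local helper, not the provider's export):

const normalize = (combo: string): string =>
	Array.from(new Set(combo.split('+'))).sort().join('+');

console.log(normalize('shift+m+e')); // e+m+shift
console.log(normalize('e+shift+m')); // e+m+shift
console.log(normalize('meta+k')); // k+meta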
export function KeyboardHotkeysProvider({
|
||||
children,
|
||||
}: {
|
||||
children: JSX.Element;
|
||||
}): JSX.Element {
|
||||
const { open: cmdKOpen } = useCmdK();
|
||||
const shortcuts = useRef<Record<string, () => void>>({});
|
||||
const pressedKeys = useRef<Set<string>>(new Set());
|
||||
|
||||
const handleKeyPress = (event: KeyboardEvent): void => {
|
||||
const { key, ctrlKey, altKey, shiftKey, metaKey, target } = event;
|
||||
// A detected valid shortcut waiting to fire
|
||||
const pendingCombo = useRef<string | null>(null);
|
||||
|
||||
// Tracks whether user extended the combo
|
||||
const wasExtended = useRef(false);
|
||||
|
||||
const handleKeyDown = (event: KeyboardEvent): void => {
|
||||
if (event.repeat) return;
|
||||
|
||||
const target = event.target as HTMLElement;
|
||||
const isCodeMirrorEditor =
|
||||
(target as HTMLElement).closest('.cm-editor') !== null;
|
||||
|
||||
if (
|
||||
IGNORE_INPUTS.includes((target as HTMLElement).tagName.toLowerCase()) ||
|
||||
isCodeMirrorEditor
|
||||
@@ -64,61 +89,110 @@ function KeyboardHotkeysProvider({
|
||||
return;
|
||||
}
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/KeyboardEvent/metaKey
|
||||
const modifiers = { ctrlKey, altKey, shiftKey, metaKey };
|
||||
const key = event.key?.toLowerCase();
|
||||
if (!key) return; // Skip if key is undefined
|
||||
|
||||
let shortcutKey = `${key.toLowerCase()}`;
|
||||
// If a pending combo exists and a new key is pressed → extension
|
||||
if (pendingCombo.current && !pressedKeys.current.has(key)) {
|
||||
wasExtended.current = true;
|
||||
}
|
||||
|
||||
const isAltKey = `${modifiers.altKey ? '+alt' : ''}`;
|
||||
const isShiftKey = `${modifiers.shiftKey ? '+shift' : ''}`;
|
||||
pressedKeys.current.add(key);
|
||||
|
||||
// ctrl and cmd have the same functionality for mac and windows parity
|
||||
const isMetaKey = `${modifiers.metaKey || modifiers.ctrlKey ? '+meta' : ''}`;
|
||||
if (event.shiftKey) pressedKeys.current.add('shift');
|
||||
if (event.metaKey || event.ctrlKey) pressedKeys.current.add('meta');
|
||||
if (event.altKey) pressedKeys.current.add('alt');
|
||||
|
||||
shortcutKey = shortcutKey + isAltKey + isShiftKey + isMetaKey;
|
||||
const combo = normalizeChord(pressedKeys.current);
|
||||
|
||||
if (shortcuts.current[shortcutKey]) {
|
||||
if (shortcuts.current[combo]) {
|
||||
event.preventDefault();
|
||||
event.stopImmediatePropagation();
|
||||
|
||||
shortcuts.current[shortcutKey]();
|
||||
event.stopPropagation();
|
||||
pendingCombo.current = combo;
|
||||
wasExtended.current = false;
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
document.addEventListener('keydown', handleKeyPress);
|
||||
const handleKeyUp = (event: KeyboardEvent): void => {
|
||||
const key = event.key?.toLowerCase();
|
||||
if (!key) return; // Skip if key is undefined
|
||||
|
||||
pressedKeys.current.delete(key);
|
||||
|
||||
if (!event.shiftKey) pressedKeys.current.delete('shift');
|
||||
if (!event.metaKey && !event.ctrlKey) pressedKeys.current.delete('meta');
|
||||
if (!event.altKey) pressedKeys.current.delete('alt');
|
||||
|
||||
if (!pendingCombo.current) return;
|
||||
|
||||
// Fire only if user did NOT extend the combo
|
||||
if (!wasExtended.current) {
|
||||
event.preventDefault();
|
||||
try {
|
||||
shortcuts.current[pendingCombo.current]?.();
|
||||
} catch (error) {
|
||||
console.error('Error executing hotkey callback:', error);
|
||||
}
|
||||
}
|
||||
|
||||
pendingCombo.current = null;
|
||||
wasExtended.current = false;
|
||||
};
|
||||
|
||||
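Putting the two handlers together: keydown only records a pending combo, and keyup fires it only if no further key was pressed in between. A hedged walk-through derived from the code above, assuming both "g" and "g+i" are registered:

// keydown g -> pressedKeys {g}; "g" is registered, so pendingCombo = "g"
// keydown i -> a combo is pending and "i" is new, so wasExtended = true;
//              pressedKeys {g, i}; "g+i" is registered, so pendingCombo = "g+i"
//              and wasExtended is reset to false
// keyup i   -> wasExtended is false, so shortcuts["g+i"] fires and state clears
//
// If only "g" were registered, the extra keydown would leave wasExtended = true
// and the keyup would fire nothing: extending a pending combo cancels the
// shorter shortcut instead of firing it eagerly.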
useEffect((): (() => void) => {
|
||||
document.addEventListener('keydown', handleKeyDown);
|
||||
document.addEventListener('keyup', handleKeyUp);
|
||||
|
||||
const reset = (): void => {
|
||||
pressedKeys.current.clear();
|
||||
pendingCombo.current = null;
|
||||
wasExtended.current = false;
|
||||
};
|
||||
|
||||
window.addEventListener('blur', reset);
|
||||
|
||||
return (): void => {
|
||||
document.removeEventListener('keydown', handleKeyPress);
|
||||
document.removeEventListener('keydown', handleKeyDown);
|
||||
document.removeEventListener('keyup', handleKeyUp);
|
||||
window.removeEventListener('blur', reset);
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!cmdKOpen) {
|
||||
// Reset when palette closes
|
||||
pressedKeys.current.clear();
|
||||
pendingCombo.current = null;
|
||||
wasExtended.current = false;
|
||||
}
|
||||
}, [cmdKOpen]);
|
||||
|
||||
const registerShortcut = useCallback(
|
||||
(keyCombination: string, callback: () => void): void => {
|
||||
if (!shortcuts.current[keyCombination]) {
|
||||
shortcuts.current[keyCombination] = callback;
|
||||
} else if (process.env.NODE_ENV === 'development') {
|
||||
throw new Error(
|
||||
`This shortcut is already present in current scope :- ${keyCombination}`,
|
||||
);
|
||||
(keyCombo: string, callback: () => void): void => {
|
||||
const normalized = normalizeComboString(keyCombo);
|
||||
|
||||
if (!shortcuts.current[normalized]) {
|
||||
shortcuts.current[normalized] = callback;
|
||||
return;
|
||||
}
|
||||
|
||||
const message = `This shortcut is already present in current scope :- ${keyCombo}`;
|
||||
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
throw new Error(message);
|
||||
} else {
|
||||
console.error(
|
||||
`This shortcut is already present in current scope :- ${keyCombination}`,
|
||||
);
|
||||
console.error(message);
|
||||
}
|
||||
},
|
||||
[shortcuts],
|
||||
[],
|
||||
);
|
||||
|
||||
const deregisterShortcut = useCallback(
|
||||
(keyCombination: string): void => {
|
||||
if (shortcuts.current[keyCombination]) {
|
||||
unset(shortcuts.current, keyCombination);
|
||||
}
|
||||
},
|
||||
[shortcuts],
|
||||
);
|
||||
const deregisterShortcut = useCallback((keyCombo: string) => {
|
||||
const normalized = normalizeComboString(keyCombo);
|
||||
unset(shortcuts.current, normalized);
|
||||
}, []);
|
||||
|
||||
const contextValue = useMemo(
|
||||
const ctxValue = useMemo(
|
||||
() => ({
|
||||
registerShortcut,
|
||||
deregisterShortcut,
|
||||
@@ -127,10 +201,8 @@ function KeyboardHotkeysProvider({
|
||||
);
|
||||
|
||||
return (
|
||||
<KeyboardHotkeysContext.Provider value={contextValue}>
|
||||
<KeyboardHotkeysContext.Provider value={ctxValue}>
|
||||
{children}
|
||||
</KeyboardHotkeysContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
export { KeyboardHotkeysProvider, useKeyboardHotkeys };
|
||||
|
||||
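A hedged usage sketch of the reworked provider API; the import path is an assumption based on the test file's relative import, and the component name is illustrative:

import { useEffect } from 'react';

import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';

function ExampleShortcutConsumer(): JSX.Element {
	const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();

	useEffect(() => {
		registerShortcut('shift+m+e', () => console.log('open metrics explorer'));

		// "e+shift+m" normalises to the same key as "shift+m+e", so this
		// cleanup removes the registration made above.
		return (): void => deregisterShortcut('e+shift+m');
	}, [registerShortcut, deregisterShortcut]);

	return <span>shortcut registered</span>;
}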
@@ -29,6 +29,20 @@ import { QueryBuilderContextType } from 'types/common/queryBuilder';
|
||||
import { ROLES, USER_ROLES } from 'types/roles';
|
||||
// import { MemoryRouter as V5MemoryRouter } from 'react-router-dom-v5-compat';
|
||||
|
||||
// Mock ResizeObserver
|
||||
class ResizeObserverMock {
|
||||
// eslint-disable-next-line class-methods-use-this
|
||||
observe(): void {}
|
||||
|
||||
// eslint-disable-next-line class-methods-use-this
|
||||
unobserve(): void {}
|
||||
|
||||
// eslint-disable-next-line class-methods-use-this
|
||||
disconnect(): void {}
|
||||
}
|
||||
|
||||
global.ResizeObserver = (ResizeObserverMock as unknown) as typeof ResizeObserver;
|
||||
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
|
||||
126
frontend/src/utils/__tests__/app.test.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import { buildAbsolutePath } from '../app';
|
||||
|
||||
const BASE_PATH = '/some-base-path';
|
||||
|
||||
describe('buildAbsolutePath', () => {
|
||||
const originalLocation = window.location;
|
||||
|
||||
afterEach(() => {
|
||||
Object.defineProperty(window, 'location', {
|
||||
writable: true,
|
||||
value: originalLocation,
|
||||
});
|
||||
});
|
||||
|
||||
const mockLocation = (pathname: string): void => {
|
||||
Object.defineProperty(window, 'location', {
|
||||
writable: true,
|
||||
value: {
|
||||
pathname,
|
||||
href: `http://localhost:8080${pathname}`,
|
||||
origin: 'http://localhost:8080',
|
||||
protocol: 'http:',
|
||||
host: 'localhost',
|
||||
hostname: 'localhost',
|
||||
port: '',
|
||||
search: '',
|
||||
hash: '',
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
describe('when base path ends with a forward slash', () => {
|
||||
beforeEach(() => {
|
||||
mockLocation(`${BASE_PATH}/`);
|
||||
});
|
||||
|
||||
it('should build absolute path without query string', () => {
|
||||
const result = buildAbsolutePath({ relativePath: 'users' });
|
||||
expect(result).toBe(`${BASE_PATH}/users`);
|
||||
});
|
||||
|
||||
it('should build absolute path with query string', () => {
|
||||
const result = buildAbsolutePath({
|
||||
relativePath: 'users',
|
||||
urlQueryString: 'id=123&sort=name',
|
||||
});
|
||||
expect(result).toBe(`${BASE_PATH}/users?id=123&sort=name`);
|
||||
});
|
||||
|
||||
it('should handle nested relative paths', () => {
|
||||
const result = buildAbsolutePath({ relativePath: 'users/profile/settings' });
|
||||
expect(result).toBe(`${BASE_PATH}/users/profile/settings`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when base path does not end with a forward slash', () => {
|
||||
beforeEach(() => {
|
||||
mockLocation(`${BASE_PATH}`);
|
||||
});
|
||||
|
||||
it('should append forward slash and build absolute path', () => {
|
||||
const result = buildAbsolutePath({ relativePath: 'users' });
|
||||
expect(result).toBe(`${BASE_PATH}/users`);
|
||||
});
|
||||
|
||||
it('should append forward slash and build absolute path with query string', () => {
|
||||
const result = buildAbsolutePath({
|
||||
relativePath: 'users',
|
||||
urlQueryString: 'filter=active',
|
||||
});
|
||||
expect(result).toBe(`${BASE_PATH}/users?filter=active`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty relative path', () => {
|
||||
mockLocation(`${BASE_PATH}/`);
|
||||
const result = buildAbsolutePath({ relativePath: '' });
|
||||
expect(result).toBe(`${BASE_PATH}/`);
|
||||
});
|
||||
|
||||
it('should handle query string with empty relative path', () => {
|
||||
mockLocation(`${BASE_PATH}/`);
|
||||
const result = buildAbsolutePath({
|
||||
relativePath: '',
|
||||
urlQueryString: 'search=test',
|
||||
});
|
||||
expect(result).toBe(`${BASE_PATH}/?search=test`);
|
||||
});
|
||||
|
||||
it('should handle relative path starting with forward slash', () => {
|
||||
mockLocation(`${BASE_PATH}/`);
|
||||
const result = buildAbsolutePath({ relativePath: '/users' });
|
||||
expect(result).toBe(`${BASE_PATH}/users`);
|
||||
});
|
||||
|
||||
it('should handle complex query strings', () => {
|
||||
mockLocation(`${BASE_PATH}/dashboard`);
|
||||
const result = buildAbsolutePath({
|
||||
relativePath: 'reports',
|
||||
urlQueryString: 'date=2024-01-01&type=summary&format=pdf',
|
||||
});
|
||||
expect(result).toBe(
|
||||
`${BASE_PATH}/dashboard/reports?date=2024-01-01&type=summary&format=pdf`,
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle undefined query string', () => {
|
||||
mockLocation(`${BASE_PATH}/`);
|
||||
const result = buildAbsolutePath({
|
||||
relativePath: 'users',
|
||||
urlQueryString: undefined,
|
||||
});
|
||||
expect(result).toBe(`${BASE_PATH}/users`);
|
||||
});
|
||||
|
||||
it('should handle empty query string', () => {
|
||||
mockLocation(`${BASE_PATH}/`);
|
||||
const result = buildAbsolutePath({
|
||||
relativePath: 'users',
|
||||
urlQueryString: '',
|
||||
});
|
||||
expect(result).toBe(`${BASE_PATH}/users`);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -61,6 +61,58 @@ describe('extractQueryPairs', () => {
|
||||
]);
|
||||
});
|
||||
|
||||
test('should test for filter expression with freeText', () => {
|
||||
const input = "disconnected deployment.env not in ['mq-kafka']";
|
||||
const result = extractQueryPairs(input);
|
||||
expect(result).toEqual([
|
||||
{
|
||||
key: 'disconnected',
|
||||
operator: '',
|
||||
valueList: [],
|
||||
valuesPosition: [],
|
||||
hasNegation: false,
|
||||
isMultiValue: false,
|
||||
value: undefined,
|
||||
position: {
|
||||
keyStart: 0,
|
||||
keyEnd: 11,
|
||||
operatorStart: 0,
|
||||
operatorEnd: 0,
|
||||
negationStart: 0,
|
||||
negationEnd: 0,
|
||||
valueStart: undefined,
|
||||
valueEnd: undefined,
|
||||
},
|
||||
isComplete: false,
|
||||
},
|
||||
{
|
||||
key: 'deployment.env',
|
||||
operator: 'in',
|
||||
value: "['mq-kafka']",
|
||||
valueList: ["'mq-kafka'"],
|
||||
valuesPosition: [
|
||||
{
|
||||
start: 36,
|
||||
end: 45,
|
||||
},
|
||||
],
|
||||
hasNegation: true,
|
||||
isMultiValue: true,
|
||||
position: {
|
||||
keyStart: 13,
|
||||
keyEnd: 26,
|
||||
operatorStart: 32,
|
||||
operatorEnd: 33,
|
||||
valueStart: 35,
|
||||
valueEnd: 46,
|
||||
negationStart: 28,
|
||||
negationEnd: 30,
|
||||
},
|
||||
isComplete: true,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('should extract IN with numeric list inside parentheses', () => {
|
||||
const input = 'id IN (1, 2, 3)';
|
||||
const result = extractQueryPairs(input);
|
||||
|
||||
@@ -38,3 +38,33 @@ export function isIngestionActive(data: any): boolean {
|
||||
|
||||
	return parseInt(value, 10) > 0;
}

/**
 * Builds an absolute path by combining the current page's pathname with a relative path.
 *
 * @param {Object} params - The parameters for building the absolute path
 * @param {string} params.relativePath - The relative path to append to the current pathname
 * @param {string} [params.urlQueryString] - Optional query string to append to the final path (without leading '?')
 *
 * @returns {string} The constructed absolute path, optionally with query string
 */
export function buildAbsolutePath({
	relativePath,
	urlQueryString,
}: {
	relativePath: string;
	urlQueryString?: string;
}): string {
	const { pathname } = window.location;
	// ensure base path always ends with a forward slash
	const basePath = pathname.endsWith('/') ? pathname : `${pathname}/`;

	// handle relative path starting with a forward slash
	const normalizedRelativePath = relativePath.startsWith('/')
		? relativePath.slice(1)
		: relativePath;

	const absolutePath = basePath + normalizedRelativePath;

	return urlQueryString ? `${absolutePath}?${urlQueryString}` : absolutePath;
}
@@ -1339,8 +1339,7 @@ export function extractQueryPairs(query: string): IQueryPair[] {
|
||||
else if (
|
||||
currentPair &&
|
||||
currentPair.key &&
|
||||
(isConjunctionToken(token.type) ||
|
||||
(token.type === FilterQueryLexer.KEY && isQueryPairComplete(currentPair)))
|
||||
(isConjunctionToken(token.type) || token.type === FilterQueryLexer.KEY)
|
||||
) {
|
||||
queryPairs.push({
|
||||
key: currentPair.key,
|
||||
|
||||
pkg/apiserver/signozapiserver/global.go (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
package signozapiserver

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/gorilla/mux"
)

func (provider *provider) addGlobalRoutes(router *mux.Router) error {
	if err := router.Handle("/api/v1/global/config", handler.New(provider.authZ.EditAccess(provider.globalHandler.GetConfig), handler.OpenAPIDef{
		ID:                  "GetGlobalConfig",
		Tags:                []string{"global"},
		Summary:             "Get global config",
		Description:         "This endpoint returns the global config",
		Request:             nil,
		RequestContentType:  "",
		Response:            new(types.GettableGlobalConfig),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleEditor),
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}

	return nil
}
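For orientation, a minimal client sketch for the route registered above. The path and method come from the OpenAPIDef; the response envelope and field names (external_url, ingestion_url) are assumptions about GettableGlobalConfig, not something this diff confirms.

	package main

	import (
		"encoding/json"
		"fmt"
		"log"
		"net/http"
	)

	func main() {
		// Hypothetical call to GET /api/v1/global/config; the endpoint requires editor access.
		resp, err := http.Get("http://localhost:8080/api/v1/global/config")
		if err != nil {
			log.Fatal(err)
		}
		defer resp.Body.Close()

		var body struct {
			Data struct {
				ExternalURL  string `json:"external_url"`  // assumed field name
				IngestionURL string `json:"ingestion_url"` // assumed field name
			} `json:"data"` // assumed response envelope
		}
		if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
			log.Fatal(err)
		}
		fmt.Println(body.Data.ExternalURL, body.Data.IngestionURL)
	}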
|
||||
pkg/apiserver/signozapiserver/promote.go (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
package signozapiserver

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	"github.com/SigNoz/signoz/pkg/types/promotetypes"
	"github.com/gorilla/mux"
)

func (provider *provider) addPromoteRoutes(router *mux.Router) error {
	if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.EditAccess(provider.promoteHandler.HandlePromoteAndIndexPaths), handler.OpenAPIDef{
		ID:                  "PromotePaths",
		Tags:                []string{"promoted_paths", "logs", "json_logs"},
		Summary:             "Promote and index paths",
		Description:         "This endpoint promotes and indexes paths",
		Request:             new([]*promotetypes.PromotePath),
		RequestContentType:  "application/json",
		Response:            nil,
		ResponseContentType: "",
		SuccessStatusCode:   http.StatusCreated,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}

	if err := router.Handle("/api/v1/logs/promote_paths", handler.New(provider.authZ.ViewAccess(provider.promoteHandler.ListPromotedAndIndexedPaths), handler.OpenAPIDef{
		ID:                  "ListPromotedPaths",
		Tags:                []string{"promoted_paths", "logs", "json_logs"},
		Summary:             "List promoted and indexed paths",
		Description:         "This endpoint lists promoted and indexed paths",
		Request:             nil,
		RequestContentType:  "",
		Response:            new([]*promotetypes.PromotePath),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}

	return nil
}
|
||||
@@ -6,11 +6,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/apiserver"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
@@ -28,6 +30,8 @@ type provider struct {
|
||||
sessionHandler session.Handler
|
||||
authDomainHandler authdomain.Handler
|
||||
preferenceHandler preference.Handler
|
||||
globalHandler global.Handler
|
||||
promoteHandler promote.Handler
|
||||
}
|
||||
|
||||
func NewFactory(
|
||||
@@ -38,9 +42,11 @@ func NewFactory(
|
||||
sessionHandler session.Handler,
|
||||
authDomainHandler authdomain.Handler,
|
||||
preferenceHandler preference.Handler,
|
||||
globalHandler global.Handler,
|
||||
promoteHandler promote.Handler,
|
||||
) factory.ProviderFactory[apiserver.APIServer, apiserver.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config apiserver.Config) (apiserver.APIServer, error) {
|
||||
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler)
|
||||
return newProvider(ctx, providerSettings, config, orgGetter, authz, orgHandler, userHandler, sessionHandler, authDomainHandler, preferenceHandler, globalHandler, promoteHandler)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -55,6 +61,8 @@ func newProvider(
|
||||
sessionHandler session.Handler,
|
||||
authDomainHandler authdomain.Handler,
|
||||
preferenceHandler preference.Handler,
|
||||
globalHandler global.Handler,
|
||||
promoteHandler promote.Handler,
|
||||
) (apiserver.APIServer, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/apiserver/signozapiserver")
|
||||
router := mux.NewRouter().UseEncodedPath()
|
||||
@@ -68,6 +76,8 @@ func newProvider(
|
||||
sessionHandler: sessionHandler,
|
||||
authDomainHandler: authDomainHandler,
|
||||
preferenceHandler: preferenceHandler,
|
||||
globalHandler: globalHandler,
|
||||
promoteHandler: promoteHandler,
|
||||
}
|
||||
|
||||
provider.authZ = middleware.NewAuthZ(settings.Logger(), orgGetter, authz)
|
||||
@@ -104,6 +114,14 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addGlobalRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := provider.addPromoteRoutes(router); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -209,6 +209,11 @@ func NewUnexpectedf(code Code, format string, args ...any) *base {
|
||||
	return Newf(TypeInvalidInput, code, format, args...)
}

// NewMethodNotAllowedf is a wrapper around Newf with TypeMethodNotAllowed.
func NewMethodNotAllowedf(code Code, format string, args ...any) *base {
	return Newf(TypeMethodNotAllowed, code, format, args...)
}

// WrapTimeoutf is a wrapper around Wrapf with TypeTimeout.
func WrapTimeoutf(cause error, code Code, format string, args ...any) *base {
	return Wrapf(cause, TypeTimeout, code, format, args...)
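A quick, hedged illustration of the new NewMethodNotAllowedf helper in a handler; the error code and message are invented for the example and are not part of this change.

	// Illustrative only: reject an unsupported method with a typed error.
	var errCodeMethodNotAllowed = errors.MustNewCode("method_not_allowed") // hypothetical code

	func rejectMethod(w http.ResponseWriter, r *http.Request) {
		render.Error(w, errors.NewMethodNotAllowedf(errCodeMethodNotAllowed, "method %s is not allowed on this resource", r.Method))
	}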
|
||||
|
||||
pkg/global/config.go (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
package global

import (
	"net/url"

	"github.com/SigNoz/signoz/pkg/errors"

	"github.com/SigNoz/signoz/pkg/factory"
)

var (
	ErrCodeInvalidGlobalConfig = errors.MustNewCode("invalid_global_config")
)

type Config struct {
	ExternalURL  *url.URL `mapstructure:"external_url"`
	IngestionURL *url.URL `mapstructure:"ingestion_url"`
}

func NewConfigFactory() factory.ConfigFactory {
	return factory.NewConfigFactory(factory.MustNewName("global"), newConfig)
}

func newConfig() factory.Config {
	return &Config{
		ExternalURL: &url.URL{
			Scheme: "",
			Host:   "<unset>",
			Path:   "",
		},
		IngestionURL: &url.URL{
			Scheme: "",
			Host:   "<unset>",
			Path:   "",
		},
	}
}

func (c Config) Validate() error {
	return nil
}
|
||||
pkg/global/global.go (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
package global

import "net/http"

type Global interface {
	GetConfig() Config
}

type Handler interface {
	GetConfig(http.ResponseWriter, *http.Request)
}
|
||||
pkg/global/signozglobal/handler.go (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
package signozglobal

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/global"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types"
)

type handler struct {
	global global.Global
}

func NewHandler(global global.Global) global.Handler {
	return &handler{global: global}
}

func (h *handler) GetConfig(rw http.ResponseWriter, r *http.Request) {
	cfg := h.global.GetConfig()

	render.Success(rw, http.StatusOK, types.NewGettableGlobalConfig(cfg.ExternalURL, cfg.IngestionURL))
}
|
||||
pkg/global/signozglobal/provider.go (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
package signozglobal

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/global"
)

type provider struct {
	config   global.Config
	settings factory.ScopedProviderSettings
}

func NewFactory() factory.ProviderFactory[global.Global, global.Config] {
	return factory.NewProviderFactory(factory.MustNewName("signoz"), func(ctx context.Context, providerSettings factory.ProviderSettings, config global.Config) (global.Global, error) {
		return newProvider(ctx, providerSettings, config)
	})
}

func newProvider(_ context.Context, providerSettings factory.ProviderSettings, config global.Config) (global.Global, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/global/signozglobal")
	return &provider{
		config:   config,
		settings: settings,
	}, nil
}

func (provider *provider) GetConfig() global.Config {
	return provider.config
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"time"
|
||||
@@ -11,6 +12,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"golang.org/x/sync/singleflight"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -23,10 +25,18 @@ type APIKey struct {
|
||||
headers []string
|
||||
logger *slog.Logger
|
||||
sharder sharder.Sharder
|
||||
sfGroup *singleflight.Group
|
||||
}
|
||||
|
||||
func NewAPIKey(store sqlstore.SQLStore, headers []string, logger *slog.Logger, sharder sharder.Sharder) *APIKey {
|
||||
return &APIKey{store: store, uuid: authtypes.NewUUID(), headers: headers, logger: logger, sharder: sharder}
|
||||
return &APIKey{
|
||||
store: store,
|
||||
uuid: authtypes.NewUUID(),
|
||||
headers: headers,
|
||||
logger: logger,
|
||||
sharder: sharder,
|
||||
sfGroup: &singleflight.Group{},
|
||||
}
|
||||
}
|
||||
|
||||
func (a *APIKey) Wrap(next http.Handler) http.Handler {
|
||||
@@ -109,11 +119,24 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler {
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
|
||||
apiKey.LastUsed = time.Now()
|
||||
_, err = a.store.BunDB().NewUpdate().Model(&apiKey).Column("last_used").Where("token = ?", apiKeyToken).Where("revoked = false").Exec(r.Context())
|
||||
if err != nil {
|
||||
a.logger.ErrorContext(r.Context(), "failed to update last used of api key", "error", err)
|
||||
}
|
||||
lastUsedCtx := context.WithoutCancel(r.Context())
|
||||
_, _, _ = a.sfGroup.Do(apiKey.ID.StringValue(), func() (any, error) {
|
||||
apiKey.LastUsed = time.Now()
|
||||
_, err = a.
|
||||
store.
|
||||
BunDB().
|
||||
NewUpdate().
|
||||
Model(&apiKey).
|
||||
Column("last_used").
|
||||
Where("token = ?", apiKeyToken).
|
||||
Where("revoked = false").
|
||||
Exec(lastUsedCtx)
|
||||
if err != nil {
|
||||
a.logger.ErrorContext(lastUsedCtx, "failed to update last used of api key", "error", err)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
})
|
||||
|
||||
})
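The hunk above replaces the per-request "last_used" UPDATE with one deduplicated by golang.org/x/sync/singleflight, keyed by the API key ID, and detaches it from request cancellation via context.WithoutCancel. A minimal, standalone sketch of the deduplication behaviour the middleware now relies on (not SigNoz code):

	package main

	import (
		"fmt"
		"sync"
		"sync/atomic"

		"golang.org/x/sync/singleflight"
	)

	func main() {
		var group singleflight.Group
		var writes atomic.Int64

		var wg sync.WaitGroup
		for i := 0; i < 10; i++ {
			wg.Add(1)
			go func() {
				defer wg.Done()
				// Concurrent callers sharing the same key collapse into a single execution.
				_, _, _ = group.Do("api-key-id", func() (any, error) {
					writes.Add(1) // stands in for the "last_used" UPDATE
					return true, nil
				})
			}()
		}
		wg.Wait()
		fmt.Println("updates executed:", writes.Load()) // typically far fewer than 10
	}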
|
||||
|
||||
|
||||
@@ -137,6 +137,28 @@ func (h *handler) GetMetricMetadata(rw http.ResponseWriter, req *http.Request) {
|
||||
render.Success(rw, http.StatusOK, metadata)
|
||||
}
|
||||
|
||||
func (h *handler) GetMetricAlerts(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
metricName := strings.TrimSpace(req.URL.Query().Get("metricName"))
|
||||
if metricName == "" {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName query parameter is required"))
|
||||
return
|
||||
}
|
||||
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
out, err := h.module.GetMetricAlerts(req.Context(), orgID, metricName)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
render.Success(rw, http.StatusOK, out)
|
||||
}
|
||||
|
||||
func (h *handler) GetMetricDashboards(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
|
||||
@@ -20,6 +20,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/metricsexplorertypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
sqlbuilder "github.com/huandu/go-sqlbuilder"
|
||||
@@ -33,12 +34,13 @@ type module struct {
|
||||
condBuilder qbtypes.ConditionBuilder
|
||||
logger *slog.Logger
|
||||
cache cache.Cache
|
||||
ruleStore ruletypes.RuleStore
|
||||
dashboardModule dashboard.Module
|
||||
config metricsexplorer.Config
|
||||
}
|
||||
|
||||
// NewModule constructs the metrics module with the provided dependencies.
|
||||
func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, dashboardModule dashboard.Module, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
|
||||
func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetrytypes.MetadataStore, cache cache.Cache, ruleStore ruletypes.RuleStore, dashboardModule dashboard.Module, providerSettings factory.ProviderSettings, cfg metricsexplorer.Config) metricsexplorer.Module {
|
||||
fieldMapper := telemetrymetrics.NewFieldMapper()
|
||||
condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
|
||||
return &module{
|
||||
@@ -48,6 +50,7 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
|
||||
logger: providerSettings.Logger,
|
||||
telemetryMetadataStore: telemetryMetadataStore,
|
||||
cache: cache,
|
||||
ruleStore: ruleStore,
|
||||
dashboardModule: dashboardModule,
|
||||
config: cfg,
|
||||
}
|
||||
@@ -197,11 +200,32 @@ func (m *module) UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, re
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *module) GetMetricAlerts(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricAlertsResponse, error) {
|
||||
if metricName == "" {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName is required")
|
||||
}
|
||||
ruleAlerts, err := m.ruleStore.GetStoredRulesByMetricName(ctx, orgID.String(), metricName)
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get stored rules by metric name")
|
||||
}
|
||||
|
||||
alerts := make([]metricsexplorertypes.MetricAlert, len(ruleAlerts))
|
||||
for i, ruleAlert := range ruleAlerts {
|
||||
alerts[i] = metricsexplorertypes.MetricAlert{
|
||||
AlertName: ruleAlert.AlertName,
|
||||
AlertID: ruleAlert.AlertID,
|
||||
}
|
||||
}
|
||||
|
||||
return &metricsexplorertypes.MetricAlertsResponse{
|
||||
Alerts: alerts,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (m *module) GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error) {
|
||||
if metricName == "" {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricName is required")
|
||||
}
|
||||
|
||||
data, err := m.dashboardModule.GetByMetricNames(ctx, orgID, []string{metricName})
|
||||
if err != nil {
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get dashboards for metric")
|
||||
|
||||
@@ -15,6 +15,7 @@ type Handler interface {
|
||||
GetMetricMetadata(http.ResponseWriter, *http.Request)
|
||||
GetMetricAttributes(http.ResponseWriter, *http.Request)
|
||||
UpdateMetricMetadata(http.ResponseWriter, *http.Request)
|
||||
GetMetricAlerts(http.ResponseWriter, *http.Request)
|
||||
GetMetricDashboards(http.ResponseWriter, *http.Request)
|
||||
GetMetricHighlights(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
@@ -25,6 +26,7 @@ type Module interface {
|
||||
GetTreemap(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.TreemapRequest) (*metricsexplorertypes.TreemapResponse, error)
|
||||
GetMetricMetadataMulti(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string]*metricsexplorertypes.MetricMetadata, error)
|
||||
UpdateMetricMetadata(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.UpdateMetricMetadataRequest) error
|
||||
GetMetricAlerts(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricAlertsResponse, error)
|
||||
GetMetricDashboards(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricDashboardsResponse, error)
|
||||
GetMetricHighlights(ctx context.Context, orgID valuer.UUID, metricName string) (*metricsexplorertypes.MetricHighlightsResponse, error)
|
||||
GetMetricAttributes(ctx context.Context, orgID valuer.UUID, req *metricsexplorertypes.MetricAttributesRequest) (*metricsexplorertypes.MetricAttributesResponse, error)
|
||||
|
||||
pkg/modules/promote/implpromote/handler.go (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
package implpromote
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/binding"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/promotetypes"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
module promote.Module
|
||||
}
|
||||
|
||||
func NewHandler(module promote.Module) promote.Handler {
|
||||
return &handler{module: module}
|
||||
}
|
||||
|
||||
func (h *handler) HandlePromoteAndIndexPaths(w http.ResponseWriter, r *http.Request) {
|
||||
// TODO(Nitya): Use in multi tenant setup
|
||||
_, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
|
||||
return
|
||||
}
|
||||
|
||||
var req []*promotetypes.PromotePath
|
||||
if err := binding.JSON.BindBody(r.Body, &req); err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = h.module.PromoteAndIndexPaths(r.Context(), req...)
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusCreated, nil)
|
||||
}
|
||||
|
||||
func (h *handler) ListPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request) {
|
||||
// TODO(Nitya): Use in multi tenant setup
|
||||
_, err := authtypes.ClaimsFromContext(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
|
||||
return
|
||||
}
|
||||
|
||||
paths, err := h.module.ListPromotedAndIndexedPaths(r.Context())
|
||||
if err != nil {
|
||||
render.Error(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(w, http.StatusOK, paths)
|
||||
}
|
||||
pkg/modules/promote/implpromote/module.go (new file, 201 lines)
@@ -0,0 +1,201 @@
|
||||
package implpromote
|
||||
|
||||
import (
|
||||
"context"
|
||||
"maps"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/promotetypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
var (
|
||||
CodeFailedToCreateIndex = errors.MustNewCode("failed_to_create_index_promoted_paths")
|
||||
CodeFailedToQueryPromotedPaths = errors.MustNewCode("failed_to_query_promoted_paths")
|
||||
)
|
||||
|
||||
type module struct {
|
||||
metadataStore telemetrytypes.MetadataStore
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
}
|
||||
|
||||
func NewModule(metadataStore telemetrytypes.MetadataStore, telemetrystore telemetrystore.TelemetryStore) promote.Module {
|
||||
return &module{metadataStore: metadataStore, telemetryStore: telemetrystore}
|
||||
}
|
||||
|
||||
func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error) {
|
||||
logsIndexes, err := m.metadataStore.ListLogsJSONIndexes(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Flatten the map values (which are slices) into a single slice
|
||||
indexes := slices.Concat(slices.Collect(maps.Values(logsIndexes))...)
|
||||
|
||||
aggr := map[string][]promotetypes.WrappedIndex{}
|
||||
for _, index := range indexes {
|
||||
path, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// clean backticks from the path
|
||||
path = strings.ReplaceAll(path, "`", "")
|
||||
|
||||
aggr[path] = append(aggr[path], promotetypes.WrappedIndex{
|
||||
ColumnType: columnType,
|
||||
Type: index.Type,
|
||||
Granularity: index.Granularity,
|
||||
})
|
||||
}
|
||||
promotedPaths, err := m.listPromotedPaths(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response := []promotetypes.PromotePath{}
|
||||
for _, path := range promotedPaths {
|
||||
fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
|
||||
path = telemetrylogs.BodyJSONStringSearchPrefix + path
|
||||
item := promotetypes.PromotePath{
|
||||
Path: path,
|
||||
Promote: true,
|
||||
}
|
||||
indexes, ok := aggr[fullPath]
|
||||
if ok {
|
||||
item.Indexes = indexes
|
||||
delete(aggr, fullPath)
|
||||
}
|
||||
response = append(response, item)
|
||||
}
|
||||
|
||||
// add the paths that are not promoted but have indexes
|
||||
for path, indexes := range aggr {
|
||||
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
|
||||
path = telemetrylogs.BodyJSONStringSearchPrefix + path
|
||||
response = append(response, promotetypes.PromotePath{
|
||||
Path: path,
|
||||
Indexes: indexes,
|
||||
})
|
||||
}
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (m *module) listPromotedPaths(ctx context.Context) ([]string, error) {
|
||||
paths, err := m.metadataStore.ListPromotedPaths(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return slices.Collect(maps.Keys(paths)), nil
|
||||
}
|
||||
|
||||
// PromotePaths inserts provided JSON paths into the promoted paths table for logs queries.
|
||||
func (m *module) PromotePaths(ctx context.Context, paths []string) error {
|
||||
if len(paths) == 0 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
|
||||
}
|
||||
|
||||
return m.metadataStore.PromotePaths(ctx, paths...)
|
||||
}
|
||||
|
||||
// createIndexes creates string ngram + token filter indexes on JSON path subcolumns for LIKE queries.
|
||||
func (m *module) createIndexes(ctx context.Context, indexes []schemamigrator.Index) error {
|
||||
if len(indexes) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, index := range indexes {
|
||||
alterStmt := schemamigrator.AlterTableAddIndex{
|
||||
Database: telemetrylogs.DBName,
|
||||
Table: telemetrylogs.LogsV2LocalTableName,
|
||||
Index: index,
|
||||
}
|
||||
op := alterStmt.OnCluster(m.telemetryStore.Cluster())
|
||||
if err := m.telemetryStore.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
|
||||
return errors.WrapInternalf(err, CodeFailedToCreateIndex, "failed to create index")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// PromoteAndIndexPaths handles promoting paths and creating indexes in one call.
|
||||
func (m *module) PromoteAndIndexPaths(
|
||||
ctx context.Context,
|
||||
paths ...*promotetypes.PromotePath,
|
||||
) error {
|
||||
if len(paths) == 0 {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
|
||||
}
|
||||
|
||||
pathsStr := []string{}
|
||||
// validate the paths
|
||||
for _, path := range paths {
|
||||
if err := path.ValidateAndSetDefaults(); err != nil {
|
||||
return err
|
||||
}
|
||||
pathsStr = append(pathsStr, path.Path)
|
||||
}
|
||||
|
||||
existingPromotedPaths, err := m.metadataStore.ListPromotedPaths(ctx, pathsStr...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var toInsert []string
|
||||
indexes := []schemamigrator.Index{}
|
||||
for _, it := range paths {
|
||||
if it.Promote {
|
||||
if _, promoted := existingPromotedPaths[it.Path]; !promoted {
|
||||
toInsert = append(toInsert, it.Path)
|
||||
}
|
||||
}
|
||||
if len(it.Indexes) > 0 {
|
||||
parentColumn := telemetrylogs.LogsV2BodyJSONColumn
|
||||
// if the path is already promoted or is being promoted, add it to the promoted column
|
||||
if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
|
||||
parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
|
||||
}
|
||||
|
||||
for _, index := range it.Indexes {
|
||||
var typeIndex schemamigrator.IndexType
|
||||
switch {
|
||||
case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
|
||||
typeIndex = schemamigrator.IndexTypeNGramBF
|
||||
case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
|
||||
typeIndex = schemamigrator.IndexTypeTokenBF
|
||||
case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
|
||||
typeIndex = schemamigrator.IndexTypeMinMax
|
||||
default:
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
|
||||
}
|
||||
indexes = append(indexes, schemamigrator.Index{
|
||||
Name: schemamigrator.JSONSubColumnIndexName(parentColumn, it.Path, index.JSONDataType.StringValue(), typeIndex),
|
||||
Expression: schemamigrator.JSONSubColumnIndexExpr(parentColumn, it.Path, index.JSONDataType.StringValue()),
|
||||
Type: index.Type,
|
||||
Granularity: index.Granularity,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(toInsert) > 0 {
|
||||
err := m.PromotePaths(ctx, toInsert)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(indexes) > 0 {
|
||||
if err := m.createIndexes(ctx, indexes); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
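A hedged usage sketch of the module above, wired by hand; the stores, context, and the concrete path value are placeholders, and the sketch skips index creation so only the promoted-paths insert runs.

	// Illustrative only: promote a single body JSON path without adding indexes.
	mod := implpromote.NewModule(metadataStore, telemetryStore) // stores assumed to be wired elsewhere

	paths := []*promotetypes.PromotePath{
		{
			Path:    "body.user.id", // hypothetical path
			Promote: true,
			// Indexes left empty, so PromoteAndIndexPaths only records the promotion.
		},
	}
	if err := mod.PromoteAndIndexPaths(ctx, paths...); err != nil {
		log.Fatal(err)
	}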
|
||||
pkg/modules/promote/promote.go (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
package promote

import (
	"context"
	"net/http"

	"github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type Module interface {
	ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error)
	PromoteAndIndexPaths(ctx context.Context, paths ...*promotetypes.PromotePath) error
}

type Handler interface {
	HandlePromoteAndIndexPaths(w http.ResponseWriter, r *http.Request)
	ListPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request)
}
|
||||
@@ -43,6 +43,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/traces/tracedetail"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
|
||||
chErrors "github.com/SigNoz/signoz/pkg/query-service/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/metrics"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
|
||||
@@ -555,6 +555,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/settings/ttl", am.AdminAccess(aH.setCustomRetentionTTL)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v2/settings/ttl", am.ViewAccess(aH.getCustomRetentionTTL)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.Signoz.Handlers.Apdex.Set)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/settings/apdex", am.ViewAccess(aH.Signoz.Handlers.Apdex.Get)).Methods(http.MethodGet)
|
||||
|
||||
@@ -630,6 +631,7 @@ func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.Au
|
||||
router.HandleFunc("/api/v2/metrics/metadata", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricMetadata)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/metrics/{metric_name}/metadata", am.EditAccess(ah.Signoz.Handlers.MetricsExplorer.UpdateMetricMetadata)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v2/metric/highlights", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricHighlights)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/metric/alerts", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricAlerts)).Methods(http.MethodGet)
|
||||
router.HandleFunc("/api/v2/metric/dashboards", am.ViewAccess(ah.Signoz.Handlers.MetricsExplorer.GetMetricDashboards)).Methods(http.MethodGet)
|
||||
}
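The two routes added above are plain GET endpoints keyed by a metricName query parameter (see the metrics explorer handler earlier in this diff). A hedged client sketch; host, credential header, and metric name are placeholders.

	// Illustrative request to the new metric alerts endpoint.
	req, _ := http.NewRequest(http.MethodGet, "http://localhost:8080/api/v2/metric/alerts?metricName=http_server_duration", nil)
	req.Header.Set("SIGNOZ-API-KEY", "<api-key>") // header name assumed; any view-access credential works
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	_, _ = io.Copy(os.Stdout, resp.Body) // JSON body listing matching alert names and IDs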
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_getClickhouseKey(t *testing.T) {
|
||||
@@ -1210,9 +1211,8 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
t.Errorf("PrepareLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if got != tt.want {
|
||||
t.Errorf("PrepareLogsQuery() = %v, want %v", got, tt.want)
|
||||
}
|
||||
|
||||
assert.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,13 +3,13 @@ package app
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net"
|
||||
"net/http"
|
||||
_ "net/http/pprof" // http profiler
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
@@ -30,6 +31,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
@@ -37,10 +39,8 @@ import (
|
||||
"github.com/rs/cors"
|
||||
"github.com/soheilhy/cmux"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
|
||||
@@ -107,7 +107,8 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
signoz.Prometheus,
|
||||
signoz.Modules.OrgGetter,
|
||||
signoz.Querier,
|
||||
signoz.Instrumentation.Logger(),
|
||||
signoz.Instrumentation.ToProviderSettings(),
|
||||
signoz.QueryParser,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -339,9 +340,10 @@ func makeRulesManager(
|
||||
prometheus prometheus.Prometheus,
|
||||
orgGetter organization.Getter,
|
||||
querier querier.Querier,
|
||||
logger *slog.Logger,
|
||||
providerSettings factory.ProviderSettings,
|
||||
queryParser queryparser.QueryParser,
|
||||
) (*rules.Manager, error) {
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
|
||||
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
|
||||
// create manager opts
|
||||
managerOpts := &rules.ManagerOptions{
|
||||
@@ -351,7 +353,7 @@ func makeRulesManager(
|
||||
Logger: zap.L(),
|
||||
Reader: ch,
|
||||
Querier: querier,
|
||||
SLogger: logger,
|
||||
SLogger: providerSettings.Logger,
|
||||
Cache: cache,
|
||||
EvalDelay: constants.GetEvalDelay(),
|
||||
OrgGetter: orgGetter,
|
||||
|
||||
@@ -5,6 +5,8 @@ import (
|
||||
"regexp"
|
||||
|
||||
"github.com/DATA-DOG/go-sqlmock"
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest"
|
||||
@@ -21,7 +23,10 @@ type MockSQLRuleStore struct {
|
||||
// NewMockSQLRuleStore creates a new MockSQLRuleStore with sqlmock
|
||||
func NewMockSQLRuleStore() *MockSQLRuleStore {
|
||||
sqlStore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherRegexp)
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlStore)
|
||||
// For tests, build the query parser from test provider settings
|
||||
providerSettings := factorytest.NewSettings()
|
||||
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlStore, queryparser.New(providerSettings), providerSettings)
|
||||
|
||||
return &MockSQLRuleStore{
|
||||
ruleStore: ruleStore,
|
||||
@@ -59,6 +64,11 @@ func (m *MockSQLRuleStore) GetStoredRules(ctx context.Context, orgID string) ([]
|
||||
return m.ruleStore.GetStoredRules(ctx, orgID)
|
||||
}
|
||||
|
||||
// GetStoredRulesByMetricName implements ruletypes.RuleStore - delegates to underlying ruleStore
|
||||
func (m *MockSQLRuleStore) GetStoredRulesByMetricName(ctx context.Context, orgID string, metricName string) ([]ruletypes.RuleAlert, error) {
|
||||
return m.ruleStore.GetStoredRulesByMetricName(ctx, orgID, metricName)
|
||||
}
|
||||
|
||||
// ExpectCreateRule sets up SQL expectations for CreateRule operation
|
||||
func (m *MockSQLRuleStore) ExpectCreateRule(rule *ruletypes.Rule) {
|
||||
rows := sqlmock.NewRows([]string{"id", "created_at", "updated_at", "created_by", "updated_by", "deleted", "data", "org_id"}).
|
||||
@@ -104,6 +114,17 @@ func (m *MockSQLRuleStore) ExpectGetStoredRules(orgID string, rules []*ruletypes
|
||||
WillReturnRows(rows)
|
||||
}
|
||||
|
||||
// ExpectGetStoredRulesByMetricName sets up SQL expectations for GetStoredRulesByMetricName operation
|
||||
func (m *MockSQLRuleStore) ExpectGetStoredRulesByMetricName(orgID string, metricName string, rules []*ruletypes.Rule) {
|
||||
rows := sqlmock.NewRows([]string{"id", "created_at", "updated_at", "created_by", "updated_by", "deleted", "data", "org_id"})
|
||||
for _, rule := range rules {
|
||||
rows.AddRow(rule.ID, rule.CreatedAt, rule.UpdatedAt, rule.CreatedBy, rule.UpdatedBy, rule.Deleted, rule.Data, rule.OrgID)
|
||||
}
|
||||
expectedPattern := `SELECT (.+) FROM "rule".+WHERE \(.+org_id.+'` + orgID + `'\)`
|
||||
m.mock.ExpectQuery(expectedPattern).
|
||||
WillReturnRows(rows)
|
||||
}
|
||||
|
||||
// AssertExpectations asserts that all SQL expectations were met
|
||||
func (m *MockSQLRuleStore) AssertExpectations() error {
|
||||
return m.mock.ExpectationsWereMet()
|
||||
|
||||
@@ -2,18 +2,31 @@ package sqlrulestore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log/slog"
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type rule struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
sqlstore sqlstore.SQLStore
|
||||
queryParser queryparser.QueryParser
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
func NewRuleStore(store sqlstore.SQLStore) ruletypes.RuleStore {
|
||||
return &rule{sqlstore: store}
|
||||
func NewRuleStore(store sqlstore.SQLStore, queryParser queryparser.QueryParser, providerSettings factory.ProviderSettings) ruletypes.RuleStore {
|
||||
return &rule{
|
||||
sqlstore: store,
|
||||
queryParser: queryParser,
|
||||
logger: providerSettings.Logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *rule) CreateRule(ctx context.Context, storedRule *ruletypes.Rule, cb func(context.Context, valuer.UUID) error) (valuer.UUID, error) {
|
||||
@@ -101,3 +114,92 @@ func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Ru
|
||||
}
|
||||
return rule, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetStoredRulesByMetricName(ctx context.Context, orgID string, metricName string) ([]ruletypes.RuleAlert, error) {
|
||||
if metricName == "" {
|
||||
return []ruletypes.RuleAlert{}, nil
|
||||
}
|
||||
|
||||
// Get all stored rules for the organization
|
||||
storedRules, err := r.GetStoredRules(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
alerts := make([]ruletypes.RuleAlert, 0)
|
||||
seen := make(map[string]bool)
|
||||
|
||||
for _, storedRule := range storedRules {
|
||||
var ruleData ruletypes.PostableRule
|
||||
if err := json.Unmarshal([]byte(storedRule.Data), &ruleData); err != nil {
|
||||
r.logger.WarnContext(ctx, "failed to unmarshal rule data", "rule_id", storedRule.ID.StringValue(), "error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check conditions: must be metric-based alert with valid composite query
|
||||
if ruleData.AlertType != ruletypes.AlertTypeMetric ||
|
||||
ruleData.RuleCondition == nil ||
|
||||
ruleData.RuleCondition.CompositeQuery == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Search for metricName in the Queries array (v5 format only)
|
||||
// TODO check if we need to support v3 query format structs
|
||||
found := false
|
||||
for _, queryEnvelope := range ruleData.RuleCondition.CompositeQuery.Queries {
|
||||
// Check based on query type
|
||||
switch queryEnvelope.Type {
|
||||
case qbtypes.QueryTypeBuilder:
|
||||
// Cast to QueryBuilderQuery[MetricAggregation] for metrics
|
||||
if spec, ok := queryEnvelope.Spec.(qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]); ok {
|
||||
// Check if signal is metrics
|
||||
if spec.Signal == telemetrytypes.SignalMetrics {
|
||||
for _, agg := range spec.Aggregations {
|
||||
if agg.MetricName == metricName {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
case qbtypes.QueryTypePromQL:
|
||||
if spec, ok := queryEnvelope.Spec.(qbtypes.PromQuery); ok {
|
||||
result, err := r.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, spec.Query)
|
||||
if err != nil {
|
||||
r.logger.WarnContext(ctx, "failed to parse PromQL query", "query", spec.Query, "error", err)
|
||||
continue
|
||||
}
|
||||
if slices.Contains(result.MetricNames, metricName) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
if spec, ok := queryEnvelope.Spec.(qbtypes.ClickHouseQuery); ok {
|
||||
result, err := r.queryParser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypeClickHouseSQL, spec.Query)
|
||||
if err != nil {
|
||||
r.logger.WarnContext(ctx, "failed to parse ClickHouse query", "query", spec.Query, "error", err)
|
||||
continue
|
||||
}
|
||||
if slices.Contains(result.MetricNames, metricName) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if found {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if found && !seen[storedRule.ID.StringValue()] {
|
||||
seen[storedRule.ID.StringValue()] = true
|
||||
alerts = append(alerts, ruletypes.RuleAlert{
|
||||
AlertName: ruleData.AlertName,
|
||||
AlertID: storedRule.ID.StringValue(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return alerts, nil
|
||||
}
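The matcher above relies on queryparser.AnalyzeQueryFilter to pull metric names out of raw PromQL and ClickHouse SQL rule specs. A hedged sketch of that call in isolation, assuming a parser built the same way the ruler provider below builds it; the example expression and metric name are invented.

	// Illustrative only: extract metric names referenced by a PromQL alert expression.
	parser := queryparser.New(providerSettings) // providerSettings assumed available
	result, err := parser.AnalyzeQueryFilter(ctx, qbtypes.QueryTypePromQL, "sum(rate(http_server_duration_count[5m])) > 100")
	if err != nil {
		log.Fatal(err)
	}
	if slices.Contains(result.MetricNames, "http_server_duration_count") {
		fmt.Println("rule references the metric")
	}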
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/ruler"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
@@ -22,7 +23,8 @@ func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[ruler.Ruler,
|
||||
}
|
||||
|
||||
func New(ctx context.Context, settings factory.ProviderSettings, config ruler.Config, sqlstore sqlstore.SQLStore) (ruler.Ruler, error) {
|
||||
return &provider{ruleStore: sqlrulestore.NewRuleStore(sqlstore)}, nil
|
||||
queryParser := queryparser.New(settings)
|
||||
return &provider{ruleStore: sqlrulestore.NewRuleStore(sqlstore, queryParser, settings)}, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
|
||||
|
||||
@@ -18,6 +18,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
@@ -39,6 +40,9 @@ import (
|
||||
|
||||
// Config defines the entire input configuration of signoz.
|
||||
type Config struct {
|
||||
// Global config
|
||||
Global global.Config `mapstructure:"global"`
|
||||
|
||||
// Version config
|
||||
Version version.Config `mapstructure:"version"`
|
||||
|
||||
@@ -141,6 +145,7 @@ func (df *DeprecatedFlags) RegisterFlags(cmd *cobra.Command) {
|
||||
|
||||
func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.ResolverConfig, deprecatedFlags DeprecatedFlags) (Config, error) {
|
||||
configFactories := []factory.ConfigFactory{
|
||||
global.NewConfigFactory(),
|
||||
version.NewConfigFactory(),
|
||||
instrumentation.NewConfigFactory(),
|
||||
analytics.NewConfigFactory(),
|
||||
|
||||
@@ -2,6 +2,8 @@ package signoz
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/global/signozglobal"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/apdex"
|
||||
"github.com/SigNoz/signoz/pkg/modules/apdex/implapdex"
|
||||
@@ -34,9 +36,10 @@ type Handlers struct {
|
||||
SpanPercentile spanpercentile.Handler
|
||||
Services services.Handler
|
||||
MetricsExplorer metricsexplorer.Handler
|
||||
Global global.Handler
|
||||
}
|
||||
|
||||
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing) Handlers {
|
||||
func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, querier querier.Querier, licensing licensing.Licensing, global global.Global) Handlers {
|
||||
return Handlers{
|
||||
SavedView: implsavedview.NewHandler(modules.SavedView),
|
||||
Apdex: implapdex.NewHandler(modules.Apdex),
|
||||
@@ -47,5 +50,6 @@ func NewHandlers(modules Modules, providerSettings factory.ProviderSettings, que
|
||||
Services: implservices.NewHandler(modules.Services),
|
||||
MetricsExplorer: implmetricsexplorer.NewHandler(modules.MetricsExplorer),
|
||||
SpanPercentile: implspanpercentile.NewHandler(modules.SpanPercentile),
|
||||
Global: signozglobal.NewHandler(global),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ func TestNewHandlers(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{})
|
||||
|
||||
handlers := NewHandlers(modules, providerSettings, nil, nil)
|
||||
handlers := NewHandlers(modules, providerSettings, nil, nil, nil)
|
||||
|
||||
reflectVal := reflect.ValueOf(handlers)
|
||||
for i := 0; i < reflectVal.NumField(); i++ {
|
||||
|
||||
@@ -20,6 +20,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter"
|
||||
"github.com/SigNoz/signoz/pkg/modules/rawdataexport"
|
||||
@@ -39,6 +41,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/tokenizer"
|
||||
@@ -64,6 +67,7 @@ type Modules struct {
|
||||
Services services.Module
|
||||
SpanPercentile spanpercentile.Module
|
||||
MetricsExplorer metricsexplorer.Module
|
||||
Promote promote.Module
|
||||
}
|
||||
|
||||
func NewModules(
|
||||
@@ -87,6 +91,7 @@ func NewModules(
|
||||
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
|
||||
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, analytics)
|
||||
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
|
||||
dashboard := impldashboard.NewModule(sqlstore, providerSettings, analytics, orgGetter, implrole.NewModule(implrole.NewStore(sqlstore), authz, nil), queryParser)
|
||||
|
||||
return Modules{
|
||||
@@ -105,6 +110,7 @@ func NewModules(
|
||||
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
|
||||
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
|
||||
Services: implservices.NewModule(querier, telemetryStore),
|
||||
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, dashboard, providerSettings, config.MetricsExplorer),
|
||||
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
|
||||
Promote: implpromote.NewModule(telemetryMetadataStore, telemetryStore),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,11 +8,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/apiserver"
|
||||
"github.com/SigNoz/signoz/pkg/apiserver/signozapiserver"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/handler"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||
@@ -36,6 +38,8 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
|
||||
struct{ session.Handler }{},
|
||||
struct{ authdomain.Handler }{},
|
||||
struct{ preference.Handler }{},
|
||||
struct{ global.Handler }{},
|
||||
struct{ promote.Handler }{},
|
||||
).New(ctx, instrumentation.ToProviderSettings(), apiserver.Config{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -18,10 +18,13 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/emailing/noopemailing"
|
||||
"github.com/SigNoz/signoz/pkg/emailing/smtpemailing"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/global/signozglobal"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
|
||||
"github.com/SigNoz/signoz/pkg/modules/promote/implpromote"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session/implsession"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
@@ -221,7 +224,7 @@ func NewQuerierProviderFactories(telemetryStore telemetrystore.TelemetryStore, p
|
||||
)
|
||||
}
|
||||
|
||||
func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.AuthZ, modules Modules, handlers Handlers) factory.NamedMap[factory.ProviderFactory[apiserver.APIServer, apiserver.Config]] {
|
||||
func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.AuthZ, global global.Global, modules Modules, handlers Handlers) factory.NamedMap[factory.ProviderFactory[apiserver.APIServer, apiserver.Config]] {
|
||||
return factory.MustNewNamedMap(
|
||||
signozapiserver.NewFactory(
|
||||
orgGetter,
|
||||
@@ -231,6 +234,8 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
|
||||
implsession.NewHandler(modules.Session),
|
||||
implauthdomain.NewHandler(modules.AuthDomain),
|
||||
implpreference.NewHandler(modules.Preference),
|
||||
signozglobal.NewHandler(global),
|
||||
implpromote.NewHandler(modules.Promote),
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -242,3 +247,9 @@ func NewTokenizerProviderFactories(cache cache.Cache, sqlstore sqlstore.SQLStore
|
||||
jwttokenizer.NewFactory(cache, tokenStore),
|
||||
)
|
||||
}
|
||||
|
||||
func NewGlobalProviderFactories() factory.NamedMap[factory.ProviderFactory[global.Global, global.Config]] {
|
||||
return factory.MustNewNamedMap(
|
||||
signozglobal.NewFactory(),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -83,6 +83,7 @@ func TestNewProviderFactories(t *testing.T) {
|
||||
NewAPIServerProviderFactories(
|
||||
implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil),
|
||||
nil,
|
||||
nil,
|
||||
Modules{},
|
||||
Handlers{},
|
||||
)
|
||||
|
||||
@@ -345,18 +345,29 @@ func New(
		telemetrymetadata.AttributesMetadataLocalTableName,
	)

	global, err := factory.NewProviderFromNamedMap(
		ctx,
		providerSettings,
		config.Global,
		NewGlobalProviderFactories(),
		"signoz",
	)
	if err != nil {
		return nil, err
	}

	// Initialize all modules
	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config)

	// Initialize all handlers for the modules
	handlers := NewHandlers(modules, providerSettings, querier, licensing)
	handlers := NewHandlers(modules, providerSettings, querier, licensing, global)

	// Initialize the API server
	apiserver, err := factory.NewProviderFromNamedMap(
		ctx,
		providerSettings,
		config.APIServer,
		NewAPIServerProviderFactories(orgGetter, authz, modules, handlers),
		NewAPIServerProviderFactories(orgGetter, authz, global, modules, handlers),
		"signoz",
	)
	if err != nil {
@@ -7,7 +7,6 @@ import (
	"strings"
	"time"

	"github.com/ClickHouse/clickhouse-go/v2"
	"github.com/ClickHouse/clickhouse-go/v2/lib/chcol"
	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
	"github.com/SigNoz/signoz-otel-collector/constants"
@@ -15,7 +14,6 @@ import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)
@@ -34,6 +32,10 @@ var (
	CodeFailScanVariant = errors.MustNewCode("fail_scan_variant")
	CodeFailBuildJSONPathsQuery = errors.MustNewCode("fail_build_json_paths_query")
	CodeNoPathsToQueryIndexes = errors.MustNewCode("no_paths_to_query_indexes_provided")

	CodeFailedToPrepareBatch = errors.MustNewCode("failed_to_prepare_batch_promoted_paths")
	CodeFailedToSendBatch = errors.MustNewCode("failed_to_send_batch_promoted_paths")
	CodeFailedToAppendPath = errors.MustNewCode("failed_to_append_path_promoted_paths")
)

// GetBodyJSONPaths extracts body JSON paths from the path_types table
@@ -48,7 +50,7 @@ var (
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.TelemetryStore,
func (t *telemetryMetaStore) getBodyJSONPaths(ctx context.Context,
	fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {

	query, args, limit, err := buildGetBodyJSONPathsQuery(fieldKeySelectors)
@@ -56,7 +58,7 @@ func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.Telemet
		return nil, false, err
	}

	rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, false, errors.WrapInternalf(err, CodeFailExtractBodyJSONKeys, "failed to extract body JSON keys")
	}
@@ -96,12 +98,12 @@ func getBodyJSONPaths(ctx context.Context, telemetryStore telemetrystore.Telemet
		return nil, false, errors.WrapInternalf(rows.Err(), CodeFailIterateBodyJSONKeys, "error iterating body JSON keys")
	}

	promoted, err := GetPromotedPaths(ctx, telemetryStore.ClickhouseDB(), paths...)
	promoted, err := t.GetPromotedPaths(ctx, paths...)
	if err != nil {
		return nil, false, err
	}

	indexes, err := getJSONPathIndexes(ctx, telemetryStore, paths...)
	indexes, err := t.getJSONPathIndexes(ctx, paths...)
	if err != nil {
		return nil, false, err
	}
@@ -163,7 +165,7 @@ func buildGetBodyJSONPathsQuery(fieldKeySelectors []*telemetrytypes.FieldKeySele
// TODO(Piyush): Remove this lint skip
//
// nolint:unused
func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
	filteredPaths := []string{}
	for _, path := range paths {
		if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
@@ -176,7 +178,7 @@ func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.Telem
	}

	// list indexes for the paths
	indexesMap, err := ListLogsJSONIndexes(ctx, telemetryStore, filteredPaths...)
	indexesMap, err := t.ListLogsJSONIndexes(ctx, filteredPaths...)
	if err != nil {
		return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to list JSON path indexes")
	}
@@ -215,7 +217,6 @@ func getJSONPathIndexes(ctx context.Context, telemetryStore telemetrystore.Telem
}

func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, []any) {
	// This aggregates all types per path and gets the max last_seen, then applies LIMIT
	sb := sqlbuilder.Select(
		"name", "type_full", "expr", "granularity",
	).From(fmt.Sprintf("clusterAllReplicas('%s', %s)", cluster, SkipIndexTableName))
@@ -236,9 +237,9 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
	return sb.BuildWithFlavor(sqlbuilder.ClickHouse)
}

func ListLogsJSONIndexes(ctx context.Context, telemetryStore telemetrystore.TelemetryStore, filters ...string) (map[string][]schemamigrator.Index, error) {
	query, args := buildListLogsJSONIndexesQuery(telemetryStore.Cluster(), filters...)
	rows, err := telemetryStore.ClickhouseDB().Query(ctx, query, args...)
func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error) {
	query, args := buildListLogsJSONIndexesQuery(t.telemetrystore.Cluster(), filters...)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to load string indexed columns")
	}
@@ -264,9 +265,16 @@ func ListLogsJSONIndexes(ctx context.Context, telemetryStore telemetrystore.Tele
	return indexesMap, nil
}

func ListPromotedPaths(ctx context.Context, conn clickhouse.Conn) (map[string]struct{}, error) {
	query := fmt.Sprintf("SELECT path FROM %s.%s", DBName, PromotedPathsTableName)
	rows, err := conn.Query(ctx, query)
func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
	sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
	pathConditions := []string{}
	for _, path := range paths {
		pathConditions = append(pathConditions, sb.Equal("path", path))
	}
	sb.Where(sb.Or(pathConditions...))
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to load promoted paths")
	}
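The rewritten ListPromotedPaths above now filters by the requested paths using huandu/go-sqlbuilder instead of selecting every promoted path. A standalone sketch of what that builder call produces; the table name is a placeholder, since the real query is assembled from the package's DBName and PromotedPathsTableName constants:

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	// Placeholder table name standing in for DBName.PromotedPathsTableName.
	sb := sqlbuilder.Select("path").From("signoz_logs.promoted_paths")

	pathConditions := []string{}
	for _, path := range []string{"order.id", "user.email"} {
		pathConditions = append(pathConditions, sb.Equal("path", path))
	}
	sb.Where(sb.Or(pathConditions...))

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(query) // roughly: SELECT path FROM signoz_logs.promoted_paths WHERE (path = ? OR path = ?)
	fmt.Println(args)  // [order.id user.email]
}

If no paths are supplied, no equality conditions are built at all, so callers presumably always pass at least one path to the real method.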
@@ -285,14 +293,14 @@ func ListPromotedPaths(ctx context.Context, conn clickhouse.Conn) (map[string]st
}

// TODO(Piyush): Remove this if not used in future
func ListJSONValues(ctx context.Context, conn clickhouse.Conn, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
	path = CleanPathPrefixes(path)

	if strings.Contains(path, telemetrylogs.ArraySep) || strings.Contains(path, telemetrylogs.ArrayAnyIndex) {
		return nil, false, errors.NewInvalidInputf(errors.CodeInvalidInput, "array paths are not supported")
	}

	promoted, err := IsPathPromoted(ctx, conn, path)
	promoted, err := t.IsPathPromoted(ctx, path)
	if err != nil {
		return nil, false, err
	}
@@ -325,7 +333,7 @@ func ListJSONValues(ctx context.Context, conn clickhouse.Conn, path string, limi
	contextWithTimeout, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	rows, err := conn.Query(contextWithTimeout, query, args...)
	rows, err := t.telemetrystore.ClickhouseDB().Query(contextWithTimeout, query, args...)
	if err != nil {
		if errors.Is(err, context.DeadlineExceeded) {
			return nil, false, errors.WrapTimeoutf(err, errors.CodeTimeout, "query timed out").WithAdditional("failed to list JSON values")
@@ -447,10 +455,10 @@ func derefValue(v any) any {
}

// IsPathPromoted checks if a specific path is promoted
func IsPathPromoted(ctx context.Context, conn clickhouse.Conn, path string) (bool, error) {
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
	split := strings.Split(path, telemetrylogs.ArraySep)
	query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
	rows, err := conn.Query(ctx, query, split[0])
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
	if err != nil {
		return false, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to check if path %s is promoted", path)
	}
@@ -460,7 +468,7 @@ func IsPathPromoted(ctx context.Context, conn clickhouse.Conn, path string) (boo
}

// GetPromotedPaths checks if a specific path is promoted
func GetPromotedPaths(ctx context.Context, conn clickhouse.Conn, paths ...string) (*utils.ConcurrentSet[string], error) {
func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...string) (*utils.ConcurrentSet[string], error) {
	sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
	pathConditions := []string{}
	for _, path := range paths {
@@ -469,7 +477,7 @@ func GetPromotedPaths(ctx context.Context, conn clickhouse.Conn, paths ...string
	sb.Where(sb.Or(pathConditions...))

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	rows, err := conn.Query(ctx, query, args...)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to get promoted paths")
	}
@@ -494,3 +502,29 @@ func CleanPathPrefixes(path string) string {
	path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
	return path
}

func (t *telemetryMetaStore) PromotePaths(ctx context.Context, paths ...string) error {
	batch, err := t.telemetrystore.ClickhouseDB().PrepareBatch(ctx,
		fmt.Sprintf("INSERT INTO %s.%s (path, created_at) VALUES", DBName,
			PromotedPathsTableName))
	if err != nil {
		return errors.WrapInternalf(err, CodeFailedToPrepareBatch, "failed to prepare batch")
	}

	nowMs := uint64(time.Now().UnixMilli())
	for _, p := range paths {
		trimmed := strings.TrimSpace(p)
		if trimmed == "" {
			continue
		}
		if err := batch.Append(trimmed, nowMs); err != nil {
			_ = batch.Abort()
			return errors.WrapInternalf(err, CodeFailedToAppendPath, "failed to append path")
		}
	}

	if err := batch.Send(); err != nil {
		return errors.WrapInternalf(err, CodeFailedToSendBatch, "failed to send batch")
	}
	return nil
}
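PromotePaths above uses clickhouse-go's batch API: PrepareBatch buffers rows client-side, Append stages each row, Abort discards a half-built batch on error, and Send flushes everything as one insert. A minimal standalone sketch of the same flow; the address and table name are placeholders for a local ClickHouse instance and the package's DBName/PromotedPathsTableName constants:

package main

import (
	"context"
	"log"
	"time"

	"github.com/ClickHouse/clickhouse-go/v2"
)

func main() {
	ctx := context.Background()

	// Placeholder connection details for a local ClickHouse instance.
	conn, err := clickhouse.Open(&clickhouse.Options{Addr: []string{"localhost:9000"}})
	if err != nil {
		log.Fatal(err)
	}

	// Rows are buffered client-side until Send.
	batch, err := conn.PrepareBatch(ctx, "INSERT INTO signoz_logs.promoted_paths (path, created_at) VALUES")
	if err != nil {
		log.Fatal(err)
	}

	nowMs := uint64(time.Now().UnixMilli())
	for _, path := range []string{"order.id", "user.email"} {
		if err := batch.Append(path, nowMs); err != nil {
			_ = batch.Abort() // drop the partially built batch
			log.Fatal(err)
		}
	}

	if err := batch.Send(); err != nil {
		log.Fatal(err)
	}
}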
@@ -349,7 +349,7 @@ func (dashboard *Dashboard) GetWidgetQuery(startTime, endTime uint64, widgetInde
		return nil, errors.Wrapf(err, errors.TypeInvalidInput, ErrCodeDashboardInvalidData, "invalid dashboard data")
	}

	if len(data.Widgets) < int(widgetIndex)+1 {
	if widgetIndex < 0 || int(widgetIndex) >= len(data.Widgets) {
		return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeDashboardInvalidInput, "widget with index %v doesn't exist", widgetIndex)
	}
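The one-line change above tightens the bounds check in GetWidgetQuery: the old guard, len(data.Widgets) < int(widgetIndex)+1, only catches indexes past the end of the slice and lets a negative index through, which would panic when the widget is later looked up. The new guard rejects both ends of the range. A tiny worked example; plain int is used here, whereas the real code converts widgetIndex explicitly:

package main

import "fmt"

func main() {
	widgets := []string{"w0", "w1"} // stand-in for data.Widgets
	widgetIndex := -1

	// Old guard: 2 < -1+1 is false, so the invalid index is not rejected.
	oldGuardRejects := len(widgets) < widgetIndex+1
	// New guard: -1 < 0 is true, so the invalid index is rejected up front.
	newGuardRejects := widgetIndex < 0 || widgetIndex >= len(widgets)

	fmt.Println(oldGuardRejects, newGuardRejects) // false true
	// With only the old guard in place, widgets[widgetIndex] would panic:
	// "index out of range [-1]".
}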
15
pkg/types/global.go
Normal file
@@ -0,0 +1,15 @@
package types

import "net/url"

type GettableGlobalConfig struct {
	ExternalURL string `json:"external_url"`
	IngestionURL string `json:"ingestion_url"`
}

func NewGettableGlobalConfig(externalURL, ingestionURL *url.URL) *GettableGlobalConfig {
	return &GettableGlobalConfig{
		ExternalURL: externalURL.String(),
		IngestionURL: ingestionURL.String(),
	}
}
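The new GettableGlobalConfig in pkg/types/global.go is a small JSON-facing wrapper around the deployment's external and ingestion URLs. A standalone usage sketch; the URLs are hypothetical and the struct is mirrored locally so the snippet compiles on its own:

package main

import (
	"encoding/json"
	"fmt"
	"net/url"
)

// Local mirror of GettableGlobalConfig so the sketch is self-contained.
type GettableGlobalConfig struct {
	ExternalURL  string `json:"external_url"`
	IngestionURL string `json:"ingestion_url"`
}

func main() {
	// Hypothetical deployment URLs; error handling elided for brevity.
	external, _ := url.Parse("https://signoz.example.com")
	ingestion, _ := url.Parse("https://ingest.example.com:4318")

	cfg := GettableGlobalConfig{
		ExternalURL:  external.String(),
		IngestionURL: ingestion.String(),
	}

	out, _ := json.Marshal(cfg)
	fmt.Println(string(out)) // {"external_url":"https://signoz.example.com","ingestion_url":"https://ingest.example.com:4318"}
}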
@@ -221,6 +221,17 @@ type TreemapResponse struct {
	Samples []TreemapEntry `json:"samples"`
}

// MetricAlert represents an alert associated with a metric.
type MetricAlert struct {
	AlertName string `json:"alertName"`
	AlertID string `json:"alertId"`
}

// MetricAlertsResponse represents the response for metric alerts endpoint.
type MetricAlertsResponse struct {
	Alerts []MetricAlert `json:"alerts"`
}

// MetricDashboard represents a dashboard/widget referencing a metric.
type MetricDashboard struct {
	DashboardName string `json:"dashboardName"`
76
pkg/types/promotetypes/types.go
Normal file
@@ -0,0 +1,76 @@
package promotetypes

import (
	"strings"

	"github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/SigNoz/signoz-otel-collector/pkg/keycheck"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

type WrappedIndex struct {
	JSONDataType telemetrytypes.JSONDataType `json:"-"`
	ColumnType string `json:"column_type"`
	Type string `json:"type"`
	Granularity int `json:"granularity"`
}

type PromotePath struct {
	Path string `json:"path"`
	Promote bool `json:"promote,omitempty"`

	Indexes []WrappedIndex `json:"indexes,omitempty"`
}

func (i *PromotePath) ValidateAndSetDefaults() error {
	if i.Path == "" {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path is required")
	}

	if strings.Contains(i.Path, " ") {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path cannot contain spaces")
	}

	if strings.Contains(i.Path, telemetrylogs.ArraySep) || strings.Contains(i.Path, telemetrylogs.ArrayAnyIndex) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
	}

	if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
	}

	if !strings.HasPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix) {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "path must start with `body.`")
	}

	// remove the "body." prefix from the path
	i.Path = strings.TrimPrefix(i.Path, telemetrylogs.BodyJSONStringSearchPrefix)

	isCardinal := keycheck.IsCardinal(i.Path)
	if isCardinal {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cardinal paths can not be promoted or indexed")
	}

	for idx, index := range i.Indexes {
		if index.Type == "" {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index type is required")
		}
		if index.Granularity <= 0 {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index granularity must be greater than 0")
		}

		jsonDataType, ok := telemetrytypes.MappingStringToJSONDataType[index.ColumnType]
		if !ok {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid column type: %s", index.ColumnType)
		}
		if !jsonDataType.IndexSupported {
			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index is not supported for column type: %s", index.ColumnType)
		}

		i.Indexes[idx].JSONDataType = jsonDataType
	}

	return nil
}
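PromotePath and WrappedIndex also describe the JSON shape a promote request would carry: a body.-prefixed path, an optional promote flag, and optional index definitions that ValidateAndSetDefaults checks against telemetrytypes.MappingStringToJSONDataType. A rough sketch of such a payload, built from locally mirrored structs so it runs standalone; the column_type and type values are illustrative guesses, since the accepted values live in telemetrytypes and the collector's skip-index definitions:

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of the promotetypes structs so the sketch compiles on its own.
type WrappedIndex struct {
	ColumnType  string `json:"column_type"`
	Type        string `json:"type"`
	Granularity int    `json:"granularity"`
}

type PromotePath struct {
	Path    string         `json:"path"`
	Promote bool           `json:"promote,omitempty"`
	Indexes []WrappedIndex `json:"indexes,omitempty"`
}

func main() {
	req := PromotePath{
		Path:    "body.order.id", // must start with "body."; validation strips the prefix
		Promote: true,
		Indexes: []WrappedIndex{{
			ColumnType:  "String",       // hypothetical key into MappingStringToJSONDataType
			Type:        "bloom_filter", // hypothetical skip-index type
			Granularity: 4,
		}},
	}

	out, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(out))
}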
@@ -47,10 +47,17 @@ func NewStatsFromRules(rules []*Rule) map[string]any {
	return stats
}

// RuleAlert represents an alert associated with a rule, used when filtering by metric name
type RuleAlert struct {
	AlertName string
	AlertID string
}

type RuleStore interface {
	CreateRule(context.Context, *Rule, func(context.Context, valuer.UUID) error) (valuer.UUID, error)
	EditRule(context.Context, *Rule, func(context.Context) error) error
	DeleteRule(context.Context, valuer.UUID, func(context.Context) error) error
	GetStoredRules(context.Context, string) ([]*Rule, error)
	GetStoredRule(context.Context, valuer.UUID) (*Rule, error)
	GetStoredRulesByMetricName(context.Context, string, string) ([]RuleAlert, error)
}
Some files were not shown because too many files have changed in this diff.