Compare commits
75 commits: enh/conver...json-plan
| Author | SHA1 | Date |
|---|---|---|
| | dfde78ec47 | |
| | 3580832117 | |
| | db100b13ea | |
| | a9e3bc3e0c | |
| | 739bb2b3fe | |
| | 68ad5a8344 | |
| | a3a679a17d | |
| | c108d21fa2 | |
| | 922f8cb722 | |
| | 0c5c2a00d9 | |
| | 5667793d7f | |
| | 92a79bbdce | |
| | 1887ddd49c | |
| | 57aac8b800 | |
| | e4c1b2ce50 | |
| | bc4b65dbb9 | |
| | e716a2a7b1 | |
| | 3c564b6809 | |
| | d149e53f70 | |
| | 220c78e72b | |
| | 1ff971dac4 | |
| | 1dc03eebd4 | |
| | 9f71a6423f | |
| | 891c56b059 | |
| | 0a3e2e6215 | |
| | 12476b719f | |
| | 193f35ba17 | |
| | de28d6ba15 | |
| | 7a3f9b963d | |
| | aedf61c8e0 | |
| | f12f16f996 | |
| | ad61e8f700 | |
| | d01e6fc891 | |
| | 17f8c1040f | |
| | 286129c7a0 | |
| | 78a3cc69ee | |
| | c2790258a3 | |
| | 4c70b44230 | |
| | ffa5a9725e | |
| | 92cab8e049 | |
| | 7b9e6e3cbb | |
| | 4837ddb601 | |
| | 9c818955af | |
| | 134a051196 | |
| | c904ab5d99 | |
| | d53f9a7e16 | |
| | 1b01b61026 | |
| | 95a26cecba | |
| | 15af828005 | |
| | e5b99703ac | |
| | f0941c7b2e | |
| | 12c9b921a7 | |
| | 52228bc6c4 | |
| | 79988b448f | |
| | 4bfd7ba3d7 | |
| | 6ea517f530 | |
| | c1789e7921 | |
| | 4b67a1c52f | |
| | beb4dc060d | |
| | 92e5abed6e | |
| | 8ab44fd846 | |
| | f0c405f068 | |
| | 1bb9386ea1 | |
| | 38146ae364 | |
| | 28b1656d4c | |
| | fe28290c76 | |
| | 1b5738cdae | |
| | 0c61174506 | |
| | b0d52ee87a | |
| | 97ead5c5b7 | |
| | 255b39f43c | |
| | 441d328976 | |
| | 93ea44ff62 | |
| | d31cce3a1f | |
| | 917345ddf6 | |
@@ -42,7 +42,7 @@ services:
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.12
     container_name: schema-migrator-sync
     command:
       - sync
@@ -55,7 +55,7 @@ services:
         condition: service_healthy
     restart: on-failure
   schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.12
     container_name: schema-migrator-async
     command:
       - async
.github/CODEOWNERS (vendored, 4 changes)
@@ -6,6 +6,10 @@
 /frontend/src/container/MetricsApplication @srikanthccv
 /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
 
+# Onboarding
+/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
+/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
+
 # Dashboard, Alert, Metrics, Service Map, Services
 /frontend/src/container/ListOfDashboard/ @srikanthccv
 /frontend/src/container/NewDashboard/ @srikanthccv
.github/workflows/build-enterprise.yaml (vendored, 1 change)
@@ -69,6 +69,7 @@ jobs:
           echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
           echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
           echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
+          echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
       - name: cache-dotenv
         uses: actions/cache@v4
         with:
.github/workflows/build-staging.yaml (vendored, 1 change)
@@ -68,6 +68,7 @@ jobs:
           echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
           echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
           echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
+          echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
       - name: cache-dotenv
         uses: actions/cache@v4
         with:
.github/workflows/gor-signoz.yaml (vendored, 1 change)
@@ -35,6 +35,7 @@ jobs:
           echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
           echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
           echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
+          echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
       - name: build-frontend
         run: make js-build
       - name: upload-frontend-artifact
.gitignore (vendored, 1 change)
@@ -49,6 +49,7 @@ ee/query-service/tests/test-deploy/data/
 # local data
+*.backup
 *.db
 **/db
 /deploy/docker/clickhouse-setup/data/
 /deploy/docker-swarm/clickhouse-setup/data/
 bin/
Makefile (8 changes)
@@ -72,6 +72,12 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
 	@echo "  - ClickHouse: http://localhost:8123"
 	@echo "  - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"
 
+.PHONY: devenv-clickhouse-clean
+devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
+	@echo "Removing ClickHouse data..."
+	@rm -rf .devenv/docker/clickhouse/fs/tmp/*
+	@echo "ClickHouse data cleaned!"
+
 ##############################################################
 # go commands
 ##############################################################
@@ -86,7 +92,7 @@ go-run-enterprise: ## Runs the enterprise go backend server
 	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
 	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
 	go run -race \
-	$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go
+	$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server
 
 .PHONY: go-test
 go-test: ## Runs go unit tests
@@ -47,10 +47,10 @@ cache:
   provider: memory
   # memory: Uses in-memory caching.
   memory:
-    # Time-to-live for cache entries in memory. Specify the duration in ns
-    ttl: 60000000000
-    # The interval at which the cache will be cleaned up
-    cleanup_interval: 1m
+    # Max items for the in-memory cache (10x the entries)
+    num_counters: 100000
+    # Total cost in bytes allocated bounded cache
+    max_cost: 67108864
   # redis: Uses Redis as the caching backend.
   redis:
     # The hostname or IP address of the Redis server.
@@ -176,7 +176,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.102.1
+    image: signoz/signoz:v0.103.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -209,7 +209,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.11
+    image: signoz/signoz-otel-collector:v0.129.12
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.12
     deploy:
       restart_policy:
         condition: on-failure
@@ -117,7 +117,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.102.1
+    image: signoz/signoz:v0.103.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -150,7 +150,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.11
+    image: signoz/signoz-otel-collector:v0.129.12
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.11
+    image: signoz/signoz-schema-migrator:v0.129.12
     deploy:
       restart_policy:
         condition: on-failure
@@ -179,7 +179,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.102.1}
+    image: signoz/signoz:${VERSION:-v0.103.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -250,7 +250,7 @@ services:
         condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
     container_name: schema-migrator-async
     command:
       - async
@@ -111,7 +111,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.102.1}
+    image: signoz/signoz:${VERSION:-v0.103.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
         condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +178,7 @@ services:
     restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.11}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
     container_name: schema-migrator-async
     command:
       - async
@@ -9,6 +9,7 @@ import (
 	_ "net/http/pprof" // http profiler
 	"slices"
 
+	"github.com/SigNoz/signoz/pkg/cache/memorycache"
 	"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
 	"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
 	"go.opentelemetry.io/otel/propagation"
@@ -74,13 +75,26 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
 		return nil, err
 	}
 
+	cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
+		Provider: "memory",
+		Memory: cache.Memory{
+			NumCounters: 10 * 10000,
+			MaxCost:     1 << 27, // 128 MB
+		},
+	})
+	if err != nil {
+		return nil, err
+	}
+
 	reader := clickhouseReader.NewReader(
 		signoz.SQLStore,
 		signoz.TelemetryStore,
 		signoz.Prometheus,
 		signoz.TelemetryStore.Cluster(),
 		config.Querier.FluxInterval,
+		cacheForTraceDetail,
 		signoz.Cache,
 		nil,
 	)
 
 	rm, err := makeRulesManager(
@@ -9,6 +9,7 @@ var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
|
||||
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
|
||||
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
|
||||
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
|
||||
var BodyJSONQueryEnabled = GetOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true"
|
||||
|
||||
func GetOrDefaultEnv(key string, fallback string) string {
|
||||
v := os.Getenv(key)
|
||||
|
||||
@@ -246,7 +246,9 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
 				continue
 			}
 		}
-		results, err := r.Threshold.ShouldAlert(*series, r.Unit())
+		results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
+			ActiveAlerts: r.ActiveAlertsLabelFP(),
+		})
 		if err != nil {
 			return nil, err
 		}
@@ -296,7 +298,9 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
 				continue
 			}
 		}
-		results, err := r.Threshold.ShouldAlert(*series, r.Unit())
+		results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
+			ActiveAlerts: r.ActiveAlertsLabelFP(),
+		})
 		if err != nil {
 			return nil, err
 		}
@@ -410,6 +414,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 			GeneratorURL: r.GeneratorURL(),
 			Receivers:    ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
 			Missing:      smpl.IsMissing,
+			IsRecovering: smpl.IsRecovering,
 		}
 	}
@@ -422,6 +427,9 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 
 			alert.Value = a.Value
 			alert.Annotations = a.Annotations
+			// Update the recovering and missing state of existing alert
+			alert.IsRecovering = a.IsRecovering
+			alert.Missing = a.Missing
 			if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
 				alert.Receivers = ruleReceiverMap[v]
 			}
@@ -480,6 +488,30 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
 				Value:        a.Value,
 			})
 		}
+
+		// We need to change firing alert to recovering if the returned sample meets recovery threshold
+		changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
+		// We need to change recovering alerts to firing if the returned sample meets target threshold
+		changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
+		// in any of the above case we need to update the status of alert
+		if changeFiringToRecovering || changeRecoveringToFiring {
+			state := model.StateRecovering
+			if changeRecoveringToFiring {
+				state = model.StateFiring
+			}
+			a.State = state
+			r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
+			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
+				RuleID:       r.ID(),
+				RuleName:     r.Name(),
+				State:        state,
+				StateChanged: true,
+				UnixMilli:    ts.UnixMilli(),
+				Labels:       model.LabelsString(labelsJSON),
+				Fingerprint:  a.QueryResultLables.Hash(),
+				Value:        a.Value,
+			})
+		}
 	}
 
 	currentState := r.State()
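For readers skimming the hunk above: the new recovering/firing handling reduces to two transition rules. A TypeScript paraphrase of the Go logic, purely illustrative (the names here are ad hoc, not from the codebase):

```typescript
type AlertState = 'firing' | 'recovering';

interface EvalSample {
	isRecovering: boolean; // sample met the recovery threshold
	missing: boolean; // no data came back for this series
}

// Paraphrase of the Go logic above: a firing alert flips to recovering when
// the latest sample meets the recovery threshold; a recovering alert flips
// back to firing when it meets the target threshold again and data exists.
function nextAlertState(current: AlertState, sample: EvalSample): AlertState {
	if (current === 'firing' && sample.isRecovering) return 'recovering';
	if (current === 'recovering' && !sample.isRecovering && !sample.missing) {
		return 'firing';
	}
	return current;
}
```

Either transition also appends a state-change entry to the rule's history, as the `RuleStateHistory` append in the hunk shows.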
@@ -3,5 +3,6 @@ BUNDLE_ANALYSER="true"
|
||||
FRONTEND_API_ENDPOINT="http://localhost:8080/"
|
||||
PYLON_APP_ID="pylon-app-id"
|
||||
APPCUES_APP_ID="appcess-app-id"
|
||||
PYLON_IDENTITY_SECRET="pylon-identity-secret"
|
||||
|
||||
CI="1"
|
||||
@@ -38,7 +38,7 @@
     "@mdx-js/loader": "2.3.0",
     "@mdx-js/react": "2.3.0",
     "@monaco-editor/react": "^4.3.1",
-    "@playwright/test": "1.54.1",
+    "@playwright/test": "1.55.1",
     "@radix-ui/react-tabs": "1.0.4",
     "@radix-ui/react-tooltip": "1.0.7",
     "@sentry/react": "8.41.0",
@@ -83,6 +83,7 @@
     "color": "^4.2.1",
     "color-alpha": "1.1.3",
     "cross-env": "^7.0.3",
+    "crypto-js": "4.2.0",
     "css-loader": "5.0.0",
     "css-minimizer-webpack-plugin": "5.0.1",
     "d3-hierarchy": "3.1.2",
@@ -112,7 +113,7 @@
     "overlayscrollbars": "^2.8.1",
     "overlayscrollbars-react": "^0.5.6",
     "papaparse": "5.4.1",
-    "posthog-js": "1.215.5",
+    "posthog-js": "1.298.0",
     "rc-tween-one": "3.0.6",
     "react": "18.2.0",
     "react-addons-update": "15.6.3",
@@ -149,7 +150,6 @@
     "tsconfig-paths-webpack-plugin": "^3.5.1",
     "typescript": "^4.0.5",
     "uplot": "1.6.31",
-    "userpilot": "1.3.9",
     "uuid": "^8.3.2",
     "web-vitals": "^0.2.4",
     "webpack": "5.94.0",
@@ -186,6 +186,7 @@
     "@types/color": "^3.0.3",
     "@types/compression-webpack-plugin": "^9.0.0",
     "@types/copy-webpack-plugin": "^8.0.1",
+    "@types/crypto-js": "4.2.2",
     "@types/dompurify": "^2.4.0",
     "@types/event-source-polyfill": "^1.0.0",
     "@types/fontfaceobserver": "2.1.0",
@@ -7,11 +7,12 @@ import AppLoading from 'components/AppLoading/AppLoading';
 import KBarCommandPalette from 'components/KBarCommandPalette/KBarCommandPalette';
 import NotFound from 'components/NotFound';
 import Spinner from 'components/Spinner';
-import UserpilotRouteTracker from 'components/UserpilotRouteTracker/UserpilotRouteTracker';
 import { FeatureKeys } from 'constants/features';
 import { LOCALSTORAGE } from 'constants/localStorage';
 import ROUTES from 'constants/routes';
 import AppLayout from 'container/AppLayout';
+import Hex from 'crypto-js/enc-hex';
+import HmacSHA256 from 'crypto-js/hmac-sha256';
 import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
 import { useThemeConfig } from 'hooks/useDarkMode';
 import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -33,7 +34,6 @@ import { Suspense, useCallback, useEffect, useState } from 'react';
 import { Route, Router, Switch } from 'react-router-dom';
 import { CompatRouter } from 'react-router-dom-v5-compat';
 import { LicenseStatus } from 'types/api/licensesV3/getActive';
-import { Userpilot } from 'userpilot';
 import { extractDomain } from 'utils/app';
 
 import { Home } from './pageComponents';
@@ -84,9 +84,9 @@ function App(): JSX.Element {
 			email,
 			name: displayName,
 			company_name: orgName,
-			tenant_id: hostNameParts[0],
+			deployment_name: hostNameParts[0],
 			data_region: hostNameParts[1],
-			tenant_url: hostname,
+			deployment_url: hostname,
 			company_domain: domain,
 			source: 'signoz-ui',
 			role,
@@ -94,9 +94,9 @@ function App(): JSX.Element {
 
 		const groupTraits = {
 			name: orgName,
-			tenant_id: hostNameParts[0],
+			deployment_name: hostNameParts[0],
 			data_region: hostNameParts[1],
-			tenant_url: hostname,
+			deployment_url: hostname,
 			company_domain: domain,
 			source: 'signoz-ui',
 		};
@@ -111,37 +111,23 @@ function App(): JSX.Element {
 		if (window && window.Appcues) {
 			window.Appcues.identify(id, {
 				name: displayName,
 
-				tenant_id: hostNameParts[0],
+				deployment_name: hostNameParts[0],
 				data_region: hostNameParts[1],
-				tenant_url: hostname,
+				deployment_url: hostname,
 				company_domain: domain,
 
 				companyName: orgName,
 				email,
 				paidUser: !!trialInfo?.trialConvertedToSubscription,
 			});
 		}
 
-		Userpilot.identify(email, {
-			email,
-			name: displayName,
-			orgName,
-			tenant_id: hostNameParts[0],
-			data_region: hostNameParts[1],
-			tenant_url: hostname,
-			company_domain: domain,
-			source: 'signoz-ui',
-			isPaidUser: !!trialInfo?.trialConvertedToSubscription,
-		});
-
 		posthog?.identify(id, {
 			email,
 			name: displayName,
 			orgName,
-			tenant_id: hostNameParts[0],
+			deployment_name: hostNameParts[0],
 			data_region: hostNameParts[1],
-			tenant_url: hostname,
+			deployment_url: hostname,
 			company_domain: domain,
 			source: 'signoz-ui',
 			isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -149,9 +135,9 @@ function App(): JSX.Element {
 
 		posthog?.group('company', orgId, {
 			name: orgName,
-			tenant_id: hostNameParts[0],
+			deployment_name: hostNameParts[0],
 			data_region: hostNameParts[1],
-			tenant_url: hostname,
+			deployment_url: hostname,
 			company_domain: domain,
 			source: 'signoz-ui',
 			isPaidUser: !!trialInfo?.trialConvertedToSubscription,
@@ -270,11 +256,20 @@ function App(): JSX.Element {
 			!showAddCreditCardModal &&
 			(isCloudUser || isEnterpriseSelfHostedUser)
 		) {
+			const email = user.email || '';
+			const secret = process.env.PYLON_IDENTITY_SECRET || '';
+			let emailHash = '';
+
+			if (email && secret) {
+				emailHash = HmacSHA256(email, Hex.parse(secret)).toString(Hex);
+			}
+
 			window.pylon = {
 				chat_settings: {
 					app_id: process.env.PYLON_APP_ID,
 					email: user.email,
 					name: user.displayName || user.email,
+					email_hash: emailHash,
 				},
 			};
 		}
@@ -308,10 +303,6 @@ function App(): JSX.Element {
 			});
 		}
 
-		if (process.env.USERPILOT_KEY) {
-			Userpilot.initialize(process.env.USERPILOT_KEY);
-		}
-
 		if (!isSentryInitialized) {
 			Sentry.init({
 				dsn: process.env.SENTRY_DSN,
@@ -372,7 +363,6 @@ function App(): JSX.Element {
 		<Router history={history}>
 			<CompatRouter>
 				<KBarCommandPaletteProvider>
-					<UserpilotRouteTracker />
 					<KBarCommandPalette />
 					<NotificationProvider>
 						<ErrorModalProvider>
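The `email_hash` introduced above is Pylon's identity-verification signature: an HMAC-SHA256 of the user's email, keyed with the hex-decoded identity secret. A minimal standalone sketch of the same computation, using only the `crypto-js` modules the diff imports (the function name here is ours):

```typescript
import Hex from 'crypto-js/enc-hex';
import HmacSHA256 from 'crypto-js/hmac-sha256';

// Mirrors the App.tsx change: HMAC-SHA256 over the email, keyed with the
// hex-decoded PYLON_IDENTITY_SECRET, emitted as a hex string.
function computePylonEmailHash(email: string, hexSecret: string): string {
	if (!email || !hexSecret) {
		return ''; // same guard as the diff: skip hashing when either is absent
	}
	return HmacSHA256(email, Hex.parse(hexSecret)).toString(Hex);
}
```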
@@ -1,6 +1,8 @@
 import { LogEventAxiosInstance as axios } from 'api';
+import getLocalStorageApi from 'api/browser/localstorage/get';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError } from 'axios';
+import { LOCALSTORAGE } from 'constants/localStorage';
 import { ErrorResponse, SuccessResponse } from 'types/api';
 import { EventSuccessPayloadProps } from 'types/api/events/types';
 
@@ -11,9 +13,14 @@ const logEvent = async (
 	rateLimited?: boolean,
 ): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
 	try {
-		// add tenant_url to attributes
+		// add deployment_url and user_email to attributes
 		const { hostname } = window.location;
-		const updatedAttributes = { ...attributes, tenant_url: hostname };
+		const userEmail = getLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
+		const updatedAttributes = {
+			...attributes,
+			deployment_url: hostname,
+			user_email: userEmail,
+		};
 		const response = await axios.post('/event', {
 			eventName,
 			attributes: updatedAttributes,
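Condensed, the logEvent change is a small enrichment step that runs before the POST; a sketch under the same imports (the helper name is ours, not from the codebase):

```typescript
import getLocalStorageApi from 'api/browser/localstorage/get';
import { LOCALSTORAGE } from 'constants/localStorage';

// Every event now carries the deployment URL and the logged-in user's email
// alongside the caller-supplied attributes.
function enrichEventAttributes(
	attributes: Record<string, unknown>,
): Record<string, unknown> {
	const { hostname } = window.location;
	const userEmail = getLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL);
	return { ...attributes, deployment_url: hostname, user_email: userEmail };
}
```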
@@ -232,7 +232,7 @@ describe('getYAxisFormattedValue - units (full precision legacy assertions)', ()
 		).toBe('1%');
 		expect(
 			testFullPrecisionGetYAxisFormattedValue('1.00555555559595876', 'percent'),
-		).toBe('1.005555555595958%');
+		).toBe('1.005555555595959%');
 	});
 
 	test('ratio', () => {
@@ -359,7 +359,7 @@ describe('getYAxisFormattedValue - precision option tests', () => {
 				's',
 				PrecisionOptionsEnum.FULL,
 			),
-		).toBe('26254299141484417000000 µs');
+		).toBe('26.254299141484417 µs');
 
 		expect(
 			getYAxisFormattedValue('4353.81', 'ms', PrecisionOptionsEnum.FULL),
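The updated percent expectation is plain round-half-up at the 16th significant digit, which native number formatting reproduces:

```typescript
// '1.00555555559595876' has digits ...595958|76 at the 16th significant
// place; the trailing 76 rounds the 8 up to 9, giving ...595959.
const rounded = Number('1.00555555559595876').toPrecision(16);
console.log(rounded); // "1.005555555595959"
```

The second change fixes a unit-scaling bug where a sub-millisecond seconds value was blown up by the µs conversion instead of scaled down to it.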
@@ -149,6 +149,7 @@ export const getGraphOptions = (
 		scales: {
 			x: {
 				stacked: isStacked,
+				offset: false,
 				grid: {
 					display: true,
 					color: getGridColor(),
@@ -101,19 +101,10 @@ export const getYAxisFormattedValue = (
 	if (numValue === Infinity) return '∞';
 	if (numValue === -Infinity) return '-∞';
 
-	const decimalPlaces = value.split('.')[1]?.length || undefined;
-
-	// Use custom formatter for the 'none' format honoring precision
-	if (format === 'none') {
-		return formatDecimalWithLeadingZeros(numValue, precision);
-	}
-
-	// For all other standard formats, delegate to grafana/data's built-in formatter.
 	const computeDecimals = (): number | undefined => {
 		if (precision === PrecisionOptionsEnum.FULL) {
-			return decimalPlaces && decimalPlaces >= DEFAULT_SIGNIFICANT_DIGITS
-				? decimalPlaces
-				: DEFAULT_SIGNIFICANT_DIGITS;
+			return DEFAULT_SIGNIFICANT_DIGITS;
 		}
 		return precision;
 	};
@@ -130,6 +121,11 @@
 	};
 
 	try {
+		// Use custom formatter for the 'none' format honoring precision
+		if (format === 'none') {
+			return formatDecimalWithLeadingZeros(numValue, precision);
+		}
+
 		const formatter = getValueFormat(format);
 		const formattedValue = formatter(numValue, computeDecimals(), undefined);
 		if (formattedValue.text && formattedValue.text.includes('.')) {
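Net effect of the computeDecimals change: FULL precision no longer echoes the input's own decimal count and always asks the formatter for the default significant digits. A before/after sketch — the constant's value here is an assumption for illustration, not taken from the module:

```typescript
// Hypothetical value; the real constant lives elsewhere in the module.
const DEFAULT_SIGNIFICANT_DIGITS = 15;

type Precision = number | 'full';

// Before: 'full' could return the input's own decimal count when it was larger.
function computeDecimalsBefore(
	precision: Precision,
	decimalPlaces?: number,
): number | undefined {
	if (precision === 'full') {
		return decimalPlaces && decimalPlaces >= DEFAULT_SIGNIFICANT_DIGITS
			? decimalPlaces
			: DEFAULT_SIGNIFICANT_DIGITS;
	}
	return precision;
}

// After: 'full' always maps to the default significant digits.
function computeDecimalsAfter(precision: Precision): number | undefined {
	return precision === 'full' ? DEFAULT_SIGNIFICANT_DIGITS : precision;
}
```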
@@ -471,11 +471,13 @@ function LogsFormatOptionsMenu({
 			rootClassName="format-options-popover"
 			destroyTooltipOnHide
 		>
-			<Button
-				className="periscope-btn ghost"
-				icon={<Sliders size={14} />}
-				data-testid="periscope-btn-format-options"
-			/>
+			<Tooltip title="Options">
+				<Button
+					className="periscope-btn ghost"
+					icon={<Sliders size={14} />}
+					data-testid="periscope-btn-format-options"
+				/>
+			</Tooltip>
 		</Popover>
 	);
 }
@@ -300,6 +300,10 @@
 		}
 	}
 	.qb-trace-operator-button-container {
+		display: flex;
+		align-items: center;
+		gap: 8px;
+
 		&-text {
 			display: flex;
 			align-items: center;
@@ -2,8 +2,74 @@ import './QueryFooter.styles.scss';
 
 /* eslint-disable react/require-default-props */
 import { Button, Tooltip, Typography } from 'antd';
+import WarningPopover from 'components/WarningPopover/WarningPopover';
+import { PANEL_TYPES } from 'constants/queryBuilder';
+import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
 import { DraftingCompass, Plus, Sigma } from 'lucide-react';
 import BetaTag from 'periscope/components/BetaTag/BetaTag';
+import { useMemo } from 'react';
+
+function TraceOperatorSection({
+	addTraceOperator,
+}: {
+	addTraceOperator?: () => void;
+}): JSX.Element {
+	const { currentQuery, panelType } = useQueryBuilder();
+
+	const showTraceOperatorWarning = useMemo(() => {
+		const isListViewPanel =
+			panelType === PANEL_TYPES.LIST || panelType === PANEL_TYPES.TRACE;
+		const hasMultipleQueries = currentQuery.builder.queryData.length > 1;
+		const hasTraceOperator =
+			currentQuery.builder.queryTraceOperator &&
+			currentQuery.builder.queryTraceOperator.length > 0;
+		return isListViewPanel && hasMultipleQueries && !hasTraceOperator;
+	}, [
+		currentQuery?.builder?.queryData,
+		currentQuery?.builder?.queryTraceOperator,
+		panelType,
+	]);
+
+	const traceOperatorWarning = useMemo(() => {
+		if (currentQuery.builder.queryData.length === 0) return '';
+		const firstQuery = currentQuery.builder.queryData[0];
+		return `Currently, you are only seeing results from query ${firstQuery.queryName}. Add a trace operator to combine results of multiple queries.`;
+	}, [currentQuery]);
+	return (
+		<div className="qb-trace-operator-button-container">
+			<Tooltip
+				title={
+					<div style={{ textAlign: 'center' }}>
+						Add Trace Matching
+						<Typography.Link
+							href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
+							target="_blank"
+							style={{ textDecoration: 'underline' }}
+						>
+							{' '}
+							<br />
+							Learn more
+						</Typography.Link>
+					</div>
+				}
+			>
+				<Button
+					className="add-trace-operator-button periscope-btn"
+					icon={<DraftingCompass size={16} />}
+					onClick={(): void => addTraceOperator?.()}
+				>
+					<div className="qb-trace-operator-button-container-text">
+						Add Trace Matching
+						<BetaTag />
+					</div>
+				</Button>
+			</Tooltip>
+			{showTraceOperatorWarning && (
+				<WarningPopover message={traceOperatorWarning} />
+			)}
+		</div>
+	);
+}
 
 export default function QueryFooter({
 	addNewBuilderQuery,
@@ -60,35 +126,7 @@ export default function QueryFooter({
 				</div>
 			)}
 			{showAddTraceOperator && (
-				<div className="qb-trace-operator-button-container">
-					<Tooltip
-						title={
-							<div style={{ textAlign: 'center' }}>
-								Add Trace Matching
-								<Typography.Link
-									href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
-									target="_blank"
-									style={{ textDecoration: 'underline' }}
-								>
-									{' '}
-									<br />
-									Learn more
-								</Typography.Link>
-							</div>
-						}
-					>
-						<Button
-							className="add-trace-operator-button periscope-btn "
-							icon={<DraftingCompass size={16} />}
-							onClick={(): void => addTraceOperator?.()}
-						>
-							<div className="qb-trace-operator-button-container-text">
-								Add Trace Matching
-								<BetaTag />
-							</div>
-						</Button>
-					</Tooltip>
-				</div>
+				<TraceOperatorSection addTraceOperator={addTraceOperator} />
 			)}
 		</div>
 	</div>
@@ -13,6 +13,7 @@ import {
 	convertAggregationToExpression,
 	convertFiltersToExpression,
 	convertFiltersToExpressionWithExistingQuery,
+	formatValueForExpression,
 	removeKeysFromExpression,
 } from '../utils';
@@ -1193,3 +1194,220 @@ describe('removeKeysFromExpression', () => {
 		});
 	});
 });
+
+describe('formatValueForExpression', () => {
+	beforeEach(() => {
+		jest.clearAllMocks();
+	});
+
+	describe('Variable values', () => {
+		it('should return variable values as-is', () => {
+			expect(formatValueForExpression('$variable')).toBe('$variable');
+			expect(formatValueForExpression('$env')).toBe('$env');
+			expect(formatValueForExpression(' $variable ')).toBe(' $variable ');
+		});
+
+		it('should return variable arrays as-is', () => {
+			expect(formatValueForExpression(['$var1', '$var2'])).toBe('$var1,$var2');
+		});
+	});
+
+	describe('Numeric string values', () => {
+		it('should return numeric strings with quotes', () => {
+			expect(formatValueForExpression('123')).toBe("'123'");
+			expect(formatValueForExpression('0')).toBe("'0'");
+			expect(formatValueForExpression('100000')).toBe("'100000'");
+			expect(formatValueForExpression('-42')).toBe("'-42'");
+			expect(formatValueForExpression('3.14')).toBe("'3.14'");
+			expect(formatValueForExpression(' 456 ')).toBe("' 456 '");
+		});
+
+		it('should handle numeric strings with IN operator', () => {
+			expect(formatValueForExpression('123', 'IN')).toBe("['123']");
+			expect(formatValueForExpression(['123', '456'], 'IN')).toBe(
+				"['123', '456']",
+			);
+		});
+	});
+
+	describe('Quoted string values', () => {
+		it('should return already quoted strings as-is', () => {
+			expect(formatValueForExpression("'quoted'")).toBe("'quoted'");
+			expect(formatValueForExpression('"double-quoted"')).toBe('"double-quoted"');
+			expect(formatValueForExpression('`backticked`')).toBe('`backticked`');
+			expect(formatValueForExpression("'100000'")).toBe("'100000'");
+		});
+
+		it('should preserve quoted strings in arrays', () => {
+			expect(formatValueForExpression(["'value1'", "'value2'"])).toBe(
+				"['value1', 'value2']",
+			);
+			expect(formatValueForExpression(["'100000'", "'200000'"], 'IN')).toBe(
+				"['100000', '200000']",
+			);
+		});
+	});
+
+	describe('Regular string values', () => {
+		it('should wrap regular strings in single quotes', () => {
+			expect(formatValueForExpression('hello')).toBe("'hello'");
+			expect(formatValueForExpression('api-gateway')).toBe("'api-gateway'");
+			expect(formatValueForExpression('test value')).toBe("'test value'");
+		});
+
+		it('should escape single quotes in strings', () => {
+			expect(formatValueForExpression("user's data")).toBe("'user\\'s data'");
+			expect(formatValueForExpression("John's")).toBe("'John\\'s'");
+			expect(formatValueForExpression("it's a test")).toBe("'it\\'s a test'");
+		});
+
+		it('should handle empty strings', () => {
+			expect(formatValueForExpression('')).toBe("''");
+		});
+
+		it('should handle strings with special characters', () => {
+			expect(formatValueForExpression('/api/v1/users')).toBe("'/api/v1/users'");
+			expect(formatValueForExpression('user@example.com')).toBe(
+				"'user@example.com'",
+			);
+			expect(formatValueForExpression('Contains "quotes"')).toBe(
+				'\'Contains "quotes"\'',
+			);
+		});
+	});
+
+	describe('Number values', () => {
+		it('should convert numbers to strings without quotes', () => {
+			expect(formatValueForExpression(123)).toBe('123');
+			expect(formatValueForExpression(0)).toBe('0');
+			expect(formatValueForExpression(-42)).toBe('-42');
+			expect(formatValueForExpression(100000)).toBe('100000');
+			expect(formatValueForExpression(3.14)).toBe('3.14');
+		});
+
+		it('should handle numbers with IN operator', () => {
+			expect(formatValueForExpression(123, 'IN')).toBe('[123]');
+			expect(formatValueForExpression([100, 200] as any, 'IN')).toBe('[100, 200]');
+		});
+	});
+
+	describe('Boolean values', () => {
+		it('should convert booleans to strings without quotes', () => {
+			expect(formatValueForExpression(true)).toBe('true');
+			expect(formatValueForExpression(false)).toBe('false');
+		});
+
+		it('should handle booleans with IN operator', () => {
+			expect(formatValueForExpression(true, 'IN')).toBe('[true]');
+			expect(formatValueForExpression([true, false] as any, 'IN')).toBe(
+				'[true, false]',
+			);
+		});
+	});
+
+	describe('Array values', () => {
+		it('should format array of strings', () => {
+			expect(formatValueForExpression(['a', 'b', 'c'])).toBe("['a', 'b', 'c']");
+			expect(formatValueForExpression(['service1', 'service2'])).toBe(
+				"['service1', 'service2']",
+			);
+		});
+
+		it('should format array of numeric strings', () => {
+			expect(formatValueForExpression(['123', '456', '789'])).toBe(
+				"['123', '456', '789']",
+			);
+		});
+
+		it('should format array of numbers', () => {
+			expect(formatValueForExpression([1, 2, 3] as any)).toBe('[1, 2, 3]');
+			expect(formatValueForExpression([100, 200, 300] as any)).toBe(
+				'[100, 200, 300]',
+			);
+		});
+
+		it('should format mixed array types', () => {
+			expect(formatValueForExpression(['hello', 123, true] as any)).toBe(
+				"['hello', 123, true]",
+			);
+		});
+
+		it('should format array with quoted values', () => {
+			expect(formatValueForExpression(["'quoted'", 'regular'])).toBe(
+				"['quoted', 'regular']",
+			);
+		});
+
+		it('should format array with empty strings', () => {
+			expect(formatValueForExpression(['', 'value'])).toBe("['', 'value']");
+		});
+	});
+
+	describe('IN and NOT IN operators', () => {
+		it('should format single value as array for IN operator', () => {
+			expect(formatValueForExpression('value', 'IN')).toBe("['value']");
+			expect(formatValueForExpression(123, 'IN')).toBe('[123]');
+			expect(formatValueForExpression('123', 'IN')).toBe("['123']");
+		});
+
+		it('should format array for IN operator', () => {
+			expect(formatValueForExpression(['a', 'b'], 'IN')).toBe("['a', 'b']");
+			expect(formatValueForExpression(['123', '456'], 'IN')).toBe(
+				"['123', '456']",
+			);
+		});
+
+		it('should format single value as array for NOT IN operator', () => {
+			expect(formatValueForExpression('value', 'NOT IN')).toBe("['value']");
+			expect(formatValueForExpression('value', 'not in')).toBe("['value']");
+		});
+
+		it('should format array for NOT IN operator', () => {
+			expect(formatValueForExpression(['a', 'b'], 'NOT IN')).toBe("['a', 'b']");
+		});
+	});
+
+	describe('Edge cases', () => {
+		it('should handle strings that look like numbers but have quotes', () => {
+			expect(formatValueForExpression("'123'")).toBe("'123'");
+			expect(formatValueForExpression('"456"')).toBe('"456"');
+			expect(formatValueForExpression('`789`')).toBe('`789`');
+		});
+
+		it('should handle strings with leading/trailing whitespace', () => {
+			expect(formatValueForExpression(' hello ')).toBe("' hello '");
+			expect(formatValueForExpression(' 123 ')).toBe("' 123 '");
+		});
+
+		it('should handle very large numbers', () => {
+			expect(formatValueForExpression('999999999')).toBe("'999999999'");
+			expect(formatValueForExpression(999999999)).toBe('999999999');
+		});
+
+		it('should handle decimal numbers', () => {
+			expect(formatValueForExpression('123.456')).toBe("'123.456'");
+			expect(formatValueForExpression(123.456)).toBe('123.456');
+		});
+
+		it('should handle negative numbers', () => {
+			expect(formatValueForExpression('-100')).toBe("'-100'");
+			expect(formatValueForExpression(-100)).toBe('-100');
+		});
+
+		it('should handle strings that are not valid numbers', () => {
+			expect(formatValueForExpression('123abc')).toBe("'123abc'");
+			expect(formatValueForExpression('abc123')).toBe("'abc123'");
+			expect(formatValueForExpression('12.34.56')).toBe("'12.34.56'");
+		});
+
+		it('should handle empty array', () => {
+			expect(formatValueForExpression([])).toBe('[]');
+			expect(formatValueForExpression([], 'IN')).toBe('[]');
+		});
+
+		it('should handle array with single element', () => {
+			expect(formatValueForExpression(['single'])).toBe("['single']");
+			expect(formatValueForExpression([123] as any)).toBe('[123]');
+		});
+	});
+});
@@ -24,7 +24,7 @@ import {
 import { EQueryType } from 'types/common/dashboard';
 import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
 import { extractQueryPairs } from 'utils/queryContextUtils';
-import { unquote } from 'utils/stringUtils';
+import { isQuoted, unquote } from 'utils/stringUtils';
 import { isFunctionOperator, isNonValueOperator } from 'utils/tokenUtils';
 import { v4 as uuid } from 'uuid';
 
@@ -38,49 +38,57 @@ const isArrayOperator = (operator: string): boolean => {
 	return arrayOperators.includes(operator);
 };
 
-const isVariable = (value: string | string[] | number | boolean): boolean => {
+const isVariable = (
+	value: (string | number | boolean)[] | string | number | boolean,
+): boolean => {
 	if (Array.isArray(value)) {
 		return value.some((v) => typeof v === 'string' && v.trim().startsWith('$'));
 	}
 	return typeof value === 'string' && value.trim().startsWith('$');
 };
 
+/**
+ * Formats a single value for use in expression strings.
+ * Strings are quoted and escaped, while numbers and booleans are converted to strings.
+ */
+const formatSingleValue = (v: string | number | boolean): string => {
+	if (typeof v === 'string') {
+		// Preserve already-quoted strings
+		if (isQuoted(v)) {
+			return v;
+		}
+		// Quote and escape single quotes in strings
+		return `'${v.replace(/'/g, "\\'")}'`;
+	}
+	// Convert numbers and booleans to strings without quotes
+	return String(v);
+};
+
 /**
  * Format a value for the expression string
  * @param value - The value to format
  * @param operator - The operator being used (to determine if array is needed)
  * @returns Formatted value string
  */
-const formatValueForExpression = (
-	value: string[] | string | number | boolean,
+export const formatValueForExpression = (
+	value: (string | number | boolean)[] | string | number | boolean,
 	operator?: string,
 ): string => {
 	if (isVariable(value)) {
 		return String(value);
 	}
 
 	// For IN operators, ensure value is always an array
 	if (isArrayOperator(operator || '')) {
 		const arrayValue = Array.isArray(value) ? value : [value];
-		return `[${arrayValue
-			.map((v) =>
-				typeof v === 'string' ? `'${v.replace(/'/g, "\\'")}'` : String(v),
-			)
-			.join(', ')}]`;
+		return `[${arrayValue.map(formatSingleValue).join(', ')}]`;
 	}
 
 	if (Array.isArray(value)) {
 		// Handle array values (e.g., for IN operations)
-		return `[${value
-			.map((v) =>
-				typeof v === 'string' ? `'${v.replace(/'/g, "\\'")}'` : String(v),
-			)
-			.join(', ')}]`;
+		return `[${value.map(formatSingleValue).join(', ')}]`;
 	}
 
 	if (typeof value === 'string') {
-		// Add single quotes around all string values and escape internal single quotes
-		return `'${value.replace(/'/g, "\\'")}'`;
+		return formatSingleValue(value);
 	}
 
 	return String(value);
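A few concrete input/output pairs for the refactored helper, mirroring the unit-test expectations added earlier in this diff:

```typescript
formatValueForExpression('123'); // "'123'" — numeric strings stay quoted
formatValueForExpression("'123'"); // "'123'" — already-quoted values pass through
formatValueForExpression(123, 'IN'); // "[123]" — scalars become arrays for IN
formatValueForExpression("user's data"); // "'user\\'s data'" — quotes escaped
formatValueForExpression(['a', 'b'], 'NOT IN'); // "['a', 'b']"
```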
@@ -136,14 +144,43 @@ export const convertFiltersToExpression = (
 	};
 };
 
-const formatValuesForFilter = (value: string | string[]): string | string[] => {
-	if (Array.isArray(value)) {
-		return value.map((v) => (typeof v === 'string' ? unquote(v) : String(v)));
-	}
+/**
+ * Converts a string value to its appropriate type (number, boolean, or string)
+ * for use in filter objects. This is the inverse of formatSingleValue.
+ */
+function formatSingleValueForFilter(
+	value: string | number | boolean,
+): string | number | boolean {
 	if (typeof value === 'string') {
-		return unquote(value);
+		const trimmed = value.trim();
+
+		// Try to convert numeric strings to numbers
+		if (trimmed !== '' && !Number.isNaN(Number(trimmed))) {
+			return Number(trimmed);
+		}
+
+		// Convert boolean strings to booleans
+		if (trimmed === 'true' || trimmed === 'false') {
+			return trimmed === 'true';
+		}
 	}
-	return String(value);
+
+	// Return non-string values as-is, or string values that couldn't be converted
+	return value;
+}
+
+/**
+ * Formats values for filter objects, converting string representations
+ * to their proper types (numbers, booleans) when appropriate.
+ */
+const formatValuesForFilter = (
+	value: (string | number | boolean)[] | number | boolean | string,
+): (string | number | boolean)[] | number | boolean | string => {
+	if (Array.isArray(value)) {
+		return value.map(formatSingleValueForFilter);
+	}
+
+	return formatSingleValueForFilter(value);
+};
 
 export const convertExpressionToFilters = (
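And the inverse direction, as exercised by formatSingleValueForFilter above — expression-side strings come back as typed filter values (note the numeric check runs before the boolean one):

```typescript
formatValuesForFilter('123'); // 123 — numeric string becomes a number
formatValuesForFilter('true'); // true — boolean string becomes a boolean
formatValuesForFilter('api-gateway'); // 'api-gateway' — other strings unchanged
formatValuesForFilter(['1', 'false', 'x']); // [1, false, 'x'] — element-wise
```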
@@ -178,7 +178,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
 			if (SELECTED_OPERATORS.includes(filterSync.op)) {
 				if (isArray(filterSync.value)) {
 					filterSync.value.forEach((val) => {
-						filterState[val] = true;
+						filterState[String(val)] = true;
 					});
 				} else if (typeof filterSync.value === 'string') {
 					filterState[filterSync.value] = true;
@@ -191,7 +191,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
 				filterState = setDefaultValues(attributeValues, true);
 				if (isArray(filterSync.value)) {
 					filterSync.value.forEach((val) => {
-						filterState[val] = false;
+						filterState[String(val)] = false;
 					});
 				} else if (typeof filterSync.value === 'string') {
 					filterState[filterSync.value] = false;
@@ -1,223 +0,0 @@
-import { render } from '@testing-library/react';
-import { act } from 'react-dom/test-utils';
-import { MemoryRouter } from 'react-router-dom';
-import { Userpilot } from 'userpilot';
-
-import UserpilotRouteTracker from './UserpilotRouteTracker';
-
-// Mock constants
-const INITIAL_PATH = '/initial';
-const TIMER_DELAY = 100;
-
-// Mock the userpilot module
-jest.mock('userpilot', () => ({
-	Userpilot: {
-		reload: jest.fn(),
-	},
-}));
-
-// Mock location state
-let mockLocation = {
-	pathname: INITIAL_PATH,
-	search: '',
-	hash: '',
-	state: null,
-};
-
-// Mock react-router-dom
-jest.mock('react-router-dom', () => {
-	const originalModule = jest.requireActual('react-router-dom');
-
-	return {
-		...originalModule,
-		useLocation: jest.fn(() => mockLocation),
-	};
-});
-
-describe('UserpilotRouteTracker', () => {
-	beforeEach(() => {
-		jest.clearAllMocks();
-		// Reset timers
-		jest.useFakeTimers();
-		// Reset error mock implementation
-		(Userpilot.reload as jest.Mock).mockImplementation(() => {});
-		// Reset location to initial state
-		mockLocation = {
-			pathname: INITIAL_PATH,
-			search: '',
-			hash: '',
-			state: null,
-		};
-	});
-
-	afterEach(() => {
-		jest.useRealTimers();
-	});
-
-	it('calls Userpilot.reload on initial render', () => {
-		render(
-			<MemoryRouter>
-				<UserpilotRouteTracker />
-			</MemoryRouter>,
-		);
-
-		// Fast-forward timer to trigger the setTimeout in reloadUserpilot
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-
-		expect(Userpilot.reload).toHaveBeenCalledTimes(1);
-	});
-
-	it('calls Userpilot.reload when pathname changes', () => {
-		const { rerender } = render(
-			<MemoryRouter>
-				<UserpilotRouteTracker />
-			</MemoryRouter>,
-		);
-
-		// Fast-forward initial render timer
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-		jest.clearAllMocks();
-
-		// Create a new location object with different pathname
-		const newLocation = {
-			...mockLocation,
-			pathname: '/new-path',
-		};
-
-		// Update the mock location with new path and trigger re-render
-		act(() => {
-			mockLocation = newLocation;
-			// Force a component update with the new location
-			rerender(
-				<MemoryRouter>
-					<UserpilotRouteTracker />
-				</MemoryRouter>,
-			);
-		});
-
-		// Fast-forward timer to allow the setTimeout to execute
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-
-		expect(Userpilot.reload).toHaveBeenCalledTimes(1);
-	});
-
-	it('calls Userpilot.reload when search parameters change', () => {
-		const { rerender } = render(
-			<MemoryRouter>
-				<UserpilotRouteTracker />
-			</MemoryRouter>,
-		);
-
-		// Fast-forward initial render timer
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-		jest.clearAllMocks();
-
-		// Create a new location object with different search params
-		const newLocation = {
-			...mockLocation,
-			search: '?param=value',
-		};
-
-		// Update the mock location with new search and trigger re-render
-		// eslint-disable-next-line sonarjs/no-identical-functions
-		act(() => {
-			mockLocation = newLocation;
-			// Force a component update with the new location
-			rerender(
-				<MemoryRouter>
-					<UserpilotRouteTracker />
-				</MemoryRouter>,
-			);
-		});
-
-		// Fast-forward timer to allow the setTimeout to execute
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-
-		expect(Userpilot.reload).toHaveBeenCalledTimes(1);
-	});
-
-	it('handles errors in Userpilot.reload gracefully', () => {
-		// Mock console.error to prevent test output noise and capture calls
-		const consoleErrorSpy = jest
-			.spyOn(console, 'error')
-			.mockImplementation(() => {});
-
-		// Instead of using the component, we test the error handling behavior directly
-		const errorMsg = 'Error message';
-
-		// Set up a function that has the same error handling behavior as in component
-		const testErrorHandler = (): void => {
-			try {
-				if (typeof Userpilot !== 'undefined' && Userpilot.reload) {
-					Userpilot.reload();
-				}
-			} catch (error) {
-				console.error('[Userpilot] Error reloading on route change:', error);
-			}
-		};
-
-		// Make Userpilot.reload throw an error
-		(Userpilot.reload as jest.Mock).mockImplementation(() => {
-			throw new Error(errorMsg);
-		});
-
-		// Execute the function that should handle errors
-		testErrorHandler();
-
-		// Verify error was logged
-		expect(consoleErrorSpy).toHaveBeenCalledWith(
-			'[Userpilot] Error reloading on route change:',
-			expect.any(Error),
-		);
-
-		// Restore console mock
-		consoleErrorSpy.mockRestore();
-	});
-
-	it('does not call Userpilot.reload when same route is rendered again', () => {
-		const { rerender } = render(
-			<MemoryRouter>
-				<UserpilotRouteTracker />
-			</MemoryRouter>,
-		);
-
-		// Fast-forward initial render timer
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-		jest.clearAllMocks();
-
-		act(() => {
-			mockLocation = {
-				pathname: mockLocation.pathname,
-				search: mockLocation.search,
-				hash: mockLocation.hash,
-				state: mockLocation.state,
-			};
-			// Force a component update with the same location
-			rerender(
-				<MemoryRouter>
-					<UserpilotRouteTracker />
-				</MemoryRouter>,
-			);
-		});
-
-		// Fast-forward timer
-		act(() => {
-			jest.advanceTimersByTime(TIMER_DELAY);
-		});
-
-		// Should not call reload since path and search are the same
-		expect(Userpilot.reload).not.toHaveBeenCalled();
-	});
-});
@@ -1,60 +0,0 @@
-import { useCallback, useEffect, useRef } from 'react';
-import { useLocation } from 'react-router-dom';
-import { Userpilot } from 'userpilot';
-
-/**
- * UserpilotRouteTracker - A component that tracks route changes and calls Userpilot.reload
- * on actual page changes (pathname changes or significant query parameter changes).
- *
- * This component renders nothing and is designed to be placed once high in the component tree.
- */
-function UserpilotRouteTracker(): null {
-	const location = useLocation();
-	const prevPathRef = useRef<string>(location.pathname);
-	const prevSearchRef = useRef<string>(location.search);
-	const isFirstRenderRef = useRef<boolean>(true);
-
-	// Function to reload Userpilot safely - using useCallback to avoid dependency issues
-	const reloadUserpilot = useCallback((): void => {
-		try {
-			if (typeof Userpilot !== 'undefined' && Userpilot.reload) {
-				setTimeout(() => {
-					Userpilot.reload();
-				}, 100);
-			}
-		} catch (error) {
-			console.error('[Userpilot] Error reloading on route change:', error);
-		}
-	}, []);
-
-	// Handle first render
-	useEffect(() => {
-		if (isFirstRenderRef.current) {
-			isFirstRenderRef.current = false;
-			reloadUserpilot();
-		}
-	}, [reloadUserpilot]);
-
-	// Handle route/query changes
-	useEffect(() => {
-		// Skip first render as it's handled by the effect above
-		if (isFirstRenderRef.current) {
-			return;
-		}
-
-		// Check if the path has changed or if significant query params have changed
-		const pathChanged = location.pathname !== prevPathRef.current;
-		const searchChanged = location.search !== prevSearchRef.current;
-
-		if (pathChanged || searchChanged) {
-			// Update refs
-			prevPathRef.current = location.pathname;
-			prevSearchRef.current = location.search;
-			reloadUserpilot();
-		}
-	}, [location.pathname, location.search, reloadUserpilot]);
-
-	return null;
-}
-
-export default UserpilotRouteTracker;
@@ -7,7 +7,7 @@ import ErrorIcon from 'assets/Error';
 import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
 import { BookOpenText, ChevronsDown, TriangleAlert } from 'lucide-react';
 import KeyValueLabel from 'periscope/components/KeyValueLabel';
-import { ReactNode } from 'react';
+import { ReactNode, useMemo } from 'react';
 import { Warning } from 'types/api';
 
 interface WarningContentProps {
@@ -106,19 +106,51 @@ export function WarningContent({ warning }: WarningContentProps): JSX.Element {
 	);
 }
 
+function PopoverMessage({
+	message,
+}: {
+	message: string | ReactNode;
+}): JSX.Element {
+	return (
+		<section className="warning-content">
+			<section className="warning-content__summary-section">
+				<header className="warning-content__summary">
+					<div className="warning-content__summary-left">
+						<div className="warning-content__summary-text">
+							<p className="warning-content__warning-message">{message}</p>
+						</div>
+					</div>
+				</header>
+			</section>
+		</section>
+	);
+}
+
 interface WarningPopoverProps extends PopoverProps {
 	children?: ReactNode;
-	warningData: Warning;
+	warningData?: Warning;
+	message?: string | ReactNode;
 }
 
 function WarningPopover({
 	children,
 	warningData,
+	message = '',
 	...popoverProps
 }: WarningPopoverProps): JSX.Element {
+	const content = useMemo(() => {
+		if (message) {
+			return <PopoverMessage message={message} />;
+		}
+		if (warningData) {
+			return <WarningContent warning={warningData} />;
+		}
+		return null;
+	}, [message, warningData]);
+
 	return (
 		<Popover
-			content={<WarningContent warning={warningData} />}
+			content={content}
 			overlayStyle={{ padding: 0, maxWidth: '600px' }}
 			overlayInnerStyle={{ padding: 0 }}
 			autoAdjustOverflow
@@ -137,6 +169,8 @@ function WarningPopover({
 
 WarningPopover.defaultProps = {
 	children: undefined,
+	warningData: null,
+	message: null,
};
 
 export default WarningPopover;
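Usage-wise, the popover now accepts either structured warning data or a plain message (message takes precedence), which is what QueryFooter's new TraceOperatorSection relies on. A minimal sketch, assuming the same import paths as the diff:

```tsx
import WarningPopover from 'components/WarningPopover/WarningPopover';
import { Warning } from 'types/api';

// Illustrative only: both invocation styles now compile and render.
function Examples({ warning }: { warning: Warning }): JSX.Element {
	return (
		<>
			<WarningPopover warningData={warning} />
			<WarningPopover message="Add a trace operator to combine results of multiple queries." />
		</>
	);
}
```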
@@ -244,6 +244,10 @@
      }
    }
  }
  // Add border-bottom to table cells when pagination is not present
  .ant-spin-container:not(:has(.ant-pagination)) .ant-table-cell {
    border-bottom: 1px solid var(--bg-slate-500) !important;
  }

  .endpoints-table-container {
    display: flex;
@@ -422,30 +426,28 @@
    gap: 8px;
    .endpoint-meta-data-pill {
      display: flex;
      align-items: flex-start;
      border-radius: 4px;
      border: 1px solid var(--bg-slate-300);
      width: fit-content;
      overflow: hidden;
      box-sizing: content-box;
      .endpoint-meta-data-label {
        display: flex;
        padding: 6px 8px;
        align-items: center;
        gap: 4px;
        border-right: 1px solid var(--bg-slate-300);
        color: var(--text-vanilla-100);
        font-size: 14px;
        line-height: 18px; /* 128.571% */
        letter-spacing: -0.07px;
        padding: 6px 8px;
        background: var(--bg-slate-500);
        height: calc(100% - 12px);
      }

      .endpoint-meta-data-value {
        display: flex;
        padding: 6px 8px;
        justify-content: center;
        align-items: center;
        gap: 10px;
        color: var(--text-vanilla-400);
        background: var(--bg-slate-400);
        height: calc(100% - 12px);
        font-size: 14px;
        line-height: 18px;
        letter-spacing: -0.07px;
      }
    }
  }
@@ -453,9 +455,23 @@
  .endpoint-details-filters-container {
    display: flex;
    flex-direction: row;
    align-items: center;
    border: 1px solid var(--bg-slate-500);
    height: 36px;
    box-sizing: content-box;
    .ant-select-selector {
      border: none !important;
    }

    .endpoint-details-filters-container-dropdown {
      width: 120px;
      border-right: 1px solid var(--bg-slate-500);
      height: 36px;
      display: flex;
      align-items: center;
      .ant-select-single {
        height: 32px;
      }
    }

    .endpoint-details-filters-container-search {
@@ -996,7 +1012,6 @@

.lightMode {
  .ant-drawer-header {
    border-bottom: 1px solid var(--bg-vanilla-400);
    background: var(--bg-vanilla-100);
  }

@@ -1007,6 +1022,25 @@
  }

  .domain-detail-drawer {
    .endpoint-details-card,
    .status-code-table-container,
    .endpoint-details-filters-container,
    .endpoint-details-filters-container-dropdown,
    .ant-radio-button-wrapper,
    .views-tabs-container,
    .ant-btn-default.tab,
    .tab::before,
    .endpoint-meta-data-pill,
    .endpoint-meta-data-label,
    .endpoints-table-container,
    .group-by-label,
    .ant-select-selector,
    .ant-drawer-header {
      border-color: var(--bg-vanilla-300) !important;
    }
    .views-tabs .tab::before {
      background: var(--bg-vanilla-300);
    }
    .title {
      color: var(--text-ink-300);
    }
@@ -1031,7 +1065,6 @@

    .selected_view {
      background: var(--bg-vanilla-300);
      border: 1px solid var(--bg-slate-300);
      color: var(--text-ink-400);
    }

@@ -1160,7 +1193,11 @@
    }
  }

  .top-services-content {
    border-color: var(--bg-vanilla-300);
  }
  .dependent-services-container {
    border: none;
    padding: 10px 12px;
    .top-services-item {
      display: flex;
@@ -1187,11 +1224,31 @@
      }

      .top-services-item-progress-bar {
        background-color: var(--bg-vanilla-300);
        border: 1px solid var(--bg-slate-300);
        background-color: var(--bg-vanilla-200);
        border: 1px solid var(--bg-vanilla-300);
      }
    }
  }
  .ant-table {
    .ant-table-thead > tr > th {
      color: var(--text-ink-300);
    }

    .ant-table-cell {
      &,
      &:has(.top-services-item-latency) {
        background: var(--bg-vanilla-100);
      }
      color: var(--text-ink-300);
    }

    .ant-table-tbody > tr:hover > td {
      background: var(--bg-vanilla-200);
    }
    .table-row-dark {
      background: var(--bg-vanilla-300);
    }
  }

  .top-services-item-percentage {
    color: var(--text-ink-300);
@@ -1225,4 +1282,8 @@
      }
    }
  }
  // Add border-bottom to table cells when pagination is not present
  .ant-spin-container:not(:has(.ant-pagination)) .ant-table-cell {
    border-bottom: 1px solid var(--bg-vanilla-300) !important;
  }
}
@@ -57,7 +57,8 @@ describe('Request AWS integration', () => {
    expect(capturedPayload.attributes).toEqual({
      screen: 'AWS integration details',
      integration: 's3 sync',
      tenant_url: 'localhost',
      deployment_url: 'localhost',
      user_email: null,
    });
  });
});
@@ -274,6 +274,7 @@ function Login(): JSX.Element {
          autoFocus
          disabled={versionLoading}
          className="login-form-input"
          onPressEnter={onNextHandler}
        />
      </FormContainer.Item>
    </ParentContainer>
@@ -8,11 +8,7 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
import { LOCALSTORAGE } from 'constants/localStorage';
import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes';
import { QueryParams } from 'constants/query';
import {
  initialFilters,
  initialQueriesMap,
  PANEL_TYPES,
} from 'constants/queryBuilder';
import { initialFilters, PANEL_TYPES } from 'constants/queryBuilder';
import { DEFAULT_PER_PAGE_VALUE } from 'container/Controls/config';
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
@@ -101,12 +97,7 @@ function LogsExplorerViewsContainer({
  const currentMinTimeRef = useRef<number>(minTime);

  // Context
  const {
    currentQuery,
    stagedQuery,
    panelType,
    updateAllQueriesOperators,
  } = useQueryBuilder();
  const { stagedQuery, panelType } = useQueryBuilder();

  const selectedPanelType = panelType || PANEL_TYPES.LIST;

@@ -136,13 +127,8 @@ function LogsExplorerViewsContainer({
  }, [stagedQuery, activeLogId]);

  const exportDefaultQuery = useMemo(
    () =>
      updateAllQueriesOperators(
        currentQuery || initialQueriesMap.logs,
        selectedPanelType,
        DataSource.LOGS,
      ),
    [currentQuery, selectedPanelType, updateAllQueriesOperators],
    () => getExportQueryData(requestData, selectedPanelType),
    [selectedPanelType, requestData],
  );

  const {
@@ -279,9 +265,7 @@ function LogsExplorerViewsContainer({

      const widgetId = v4();

      const query = getExportQueryData(requestData, selectedPanelType);

      if (!query) return;
      if (!exportDefaultQuery) return;

      logEvent('Logs Explorer: Add to dashboard successful', {
        panelType: selectedPanelType,
@@ -290,7 +274,7 @@ function LogsExplorerViewsContainer({
      });

      const dashboardEditView = generateExportToDashboardLink({
        query,
        query: exportDefaultQuery,
        panelType: panelTypeParam,
        dashboardId: dashboard.id,
        widgetId,
@@ -298,7 +282,7 @@

      safeNavigate(dashboardEditView);
    },
    [safeNavigate, requestData, selectedPanelType],
    [safeNavigate, exportDefaultQuery, selectedPanelType],
  );

  useEffect(() => {
@@ -74,7 +74,7 @@ export interface ITag {
  id?: string;
  key: BaseAutocompleteData;
  op: string;
  value: string[] | string | number | boolean;
  value: (string | number | boolean)[] | string | number | boolean;
}

interface CustomTagProps {
@@ -300,7 +300,8 @@ function QueryBuilderSearchV2(
        currentFilterItem?.key?.dataType ?? DataTypes.EMPTY,
      tagType: currentFilterItem?.key?.type ?? '',
      searchText: isArray(currentFilterItem?.value)
        ? currentFilterItem?.value?.[currentFilterItem.value.length - 1] || ''
        ? String(currentFilterItem?.value?.[currentFilterItem.value.length - 1]) ||
          ''
        : currentFilterItem?.value?.toString() || '',
    },
    {
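Note: the String(...) wrapper is needed because the widened value type now allows number and boolean elements, while searchText must stay a string. Illustrative values:

const value: (string | number | boolean)[] = ['GET', 200, true];
const searchText = String(value[value.length - 1]); // 'true', not the boolean true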
@@ -6,6 +6,8 @@
  font-weight: 400;
  line-height: 20px;
  letter-spacing: -0.07px;
  overflow: hidden;
  text-overflow: ellipsis;
}

.dot {
@@ -25,16 +27,21 @@
.left-section {
  display: flex;
  align-items: center;
  width: 90%;
  flex: 1;
  min-width: 0;
  gap: 8px;

  .value {
    overflow: hidden;
    text-overflow: ellipsis;
    white-space: nowrap;
  }
}
.right-section {
  display: flex;
  align-items: center;
  gap: 4px;
  flex-shrink: 0;

  .data-type {
    display: flex;
@@ -45,6 +52,7 @@
    gap: 4px;
    border-radius: 20px;
    background: rgba(255, 255, 255, 0.08);
    white-space: nowrap;
  }

  .type-tag {
@@ -56,6 +64,7 @@
    gap: 4px;
    border-radius: 50px;
    text-transform: capitalize;
    white-space: nowrap;

    &.tag {
      border-radius: 50px;
@@ -5,7 +5,13 @@ import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { Dispatch, SetStateAction, useEffect, useMemo } from 'react';
import {
  Dispatch,
  MutableRefObject,
  SetStateAction,
  useEffect,
  useMemo,
} from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { Warning } from 'types/api';
@@ -21,6 +27,7 @@ function TimeSeriesViewContainer({
  isFilterApplied,
  setWarning,
  setIsLoadingQueries,
  queryKeyRef,
}: TimeSeriesViewProps): JSX.Element {
  const { stagedQuery, currentQuery, panelType } = useQueryBuilder();

@@ -48,6 +55,22 @@
    return isValid.every(Boolean);
  }, [currentQuery]);

  const queryKey = useMemo(
    () => [
      REACT_QUERY_KEY.GET_QUERY_RANGE,
      globalSelectedTime,
      maxTime,
      minTime,
      stagedQuery,
    ],
    [globalSelectedTime, maxTime, minTime, stagedQuery],
  );

  if (queryKeyRef) {
    // eslint-disable-next-line no-param-reassign
    queryKeyRef.current = queryKey;
  }

  const { data, isLoading, isFetching, isError, error } = useGetQueryRange(
    {
      query: stagedQuery || initialQueriesMap[dataSource],
@@ -61,13 +84,7 @@
    // ENTITY_VERSION_V4,
    ENTITY_VERSION_V5,
    {
      queryKey: [
        REACT_QUERY_KEY.GET_QUERY_RANGE,
        globalSelectedTime,
        maxTime,
        minTime,
        stagedQuery,
      ],
      queryKey,
      enabled: !!stagedQuery && panelType === PANEL_TYPES.TIME_SERIES,
    },
  );
@@ -111,10 +128,12 @@ interface TimeSeriesViewProps {
  isFilterApplied: boolean;
  setWarning: Dispatch<SetStateAction<Warning | undefined>>;
  setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
  queryKeyRef?: MutableRefObject<any>;
}

TimeSeriesViewContainer.defaultProps = {
  dataSource: DataSource.TRACES,
  queryKeyRef: undefined,
};

export default TimeSeriesViewContainer;
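Note: queryKeyRef follows a simple contract: the view memoizes its react-query key and mirrors it into a MutableRefObject owned by the parent, so the parent can refetch exactly the query the active view is showing. A sketch of the consuming side, assuming react-query's useQueryClient; the button and omitted props are illustrative, not how SigNoz wires it:

import { useRef } from 'react';
import { useQueryClient } from 'react-query';

function ParentSketch(): JSX.Element {
  const listQueryKeyRef = useRef<any>();
  const queryClient = useQueryClient();

  return (
    <>
      {/* other required props omitted for brevity */}
      <TimeSeriesViewContainer queryKeyRef={listQueryKeyRef} />
      <button
        type="button"
        onClick={(): void => {
          // The child wrote its current query key into the ref during render.
          if (listQueryKeyRef.current) {
            queryClient.refetchQueries(listQueryKeyRef.current);
          }
        }}
      >
        Refetch current view
      </button>
    </>
  );
}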
@@ -14,6 +14,7 @@ import NoLogs from 'container/NoLogs/NoLogs';
import { useOptionsMenu } from 'container/OptionsMenu';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/config';
import TraceExplorerControls from 'container/TracesExplorer/Controls';
import { getListViewQuery } from 'container/TracesExplorer/explorerUtils';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { Pagination } from 'hooks/queryPagination';
@@ -22,12 +23,12 @@ import useDragColumns from 'hooks/useDragColumns';
import { getDraggedColumns } from 'hooks/useDragColumns/utils';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { cloneDeep } from 'lodash-es';
import { ArrowUp10, Minus } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import {
  Dispatch,
  memo,
  MutableRefObject,
  SetStateAction,
  useCallback,
  useEffect,
@@ -50,12 +51,14 @@ interface ListViewProps {
  isFilterApplied: boolean;
  setWarning: Dispatch<SetStateAction<Warning | undefined>>;
  setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
  queryKeyRef?: MutableRefObject<any>;
}

function ListView({
  isFilterApplied,
  setWarning,
  setIsLoadingQueries,
  queryKeyRef,
}: ListViewProps): JSX.Element {
  const {
    stagedQuery,
@@ -92,35 +95,10 @@ function ListView({
  const paginationConfig =
    paginationQueryData ?? getDefaultPaginationConfig(PER_PAGE_OPTIONS);

  const requestQuery = useMemo(() => {
    const query = stagedQuery
      ? cloneDeep(stagedQuery)
      : cloneDeep(initialQueriesMap.traces);

    if (query.builder.queryData[0]) {
      query.builder.queryData[0].orderBy = [
        {
          columnName: orderBy.split(':')[0],
          order: orderBy.split(':')[1] as 'asc' | 'desc',
        },
      ];
    }

    // add order by to trace operator
    if (
      query.builder.queryTraceOperator &&
      query.builder.queryTraceOperator.length > 0
    ) {
      query.builder.queryTraceOperator[0].orderBy = [
        {
          columnName: orderBy.split(':')[0],
          order: orderBy.split(':')[1] as 'asc' | 'desc',
        },
      ];
    }

    return query;
  }, [stagedQuery, orderBy]);
  const requestQuery = useMemo(
    () => getListViewQuery(stagedQuery || initialQueriesMap.traces, orderBy),
    [stagedQuery, orderBy],
  );

  const queryKey = useMemo(
    () => [
@@ -146,6 +124,11 @@ function ListView({
    ],
  );

  if (queryKeyRef) {
    // eslint-disable-next-line no-param-reassign
    queryKeyRef.current = queryKey;
  }

  const { data, isFetching, isLoading, isError, error } = useGetQueryRange(
    {
      query: requestQuery,
@@ -293,4 +276,8 @@ function ListView({
  );
}

ListView.defaultProps = {
  queryKeyRef: undefined,
};

export default memo(ListView);
@@ -6,7 +6,14 @@ import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { QueryTable } from 'container/QueryTable';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { Dispatch, memo, SetStateAction, useEffect, useMemo } from 'react';
import {
  Dispatch,
  memo,
  MutableRefObject,
  SetStateAction,
  useEffect,
  useMemo,
} from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { Warning } from 'types/api';
@@ -17,9 +24,11 @@ import { GlobalReducer } from 'types/reducer/globalTime';
function TableView({
  setWarning,
  setIsLoadingQueries,
  queryKeyRef,
}: {
  setWarning: Dispatch<SetStateAction<Warning | undefined>>;
  setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
  queryKeyRef?: MutableRefObject<any>;
}): JSX.Element {
  const { stagedQuery, panelType } = useQueryBuilder();

@@ -28,6 +37,22 @@ function TableView({
    GlobalReducer
  >((state) => state.globalTime);

  const queryKey = useMemo(
    () => [
      REACT_QUERY_KEY.GET_QUERY_RANGE,
      globalSelectedTime,
      maxTime,
      minTime,
      stagedQuery,
    ],
    [globalSelectedTime, maxTime, minTime, stagedQuery],
  );

  if (queryKeyRef) {
    // eslint-disable-next-line no-param-reassign
    queryKeyRef.current = queryKey;
  }

  const { data, isLoading, isFetching, isError, error } = useGetQueryRange(
    {
      query: stagedQuery || initialQueriesMap.traces,
@@ -40,13 +65,7 @@
    },
    ENTITY_VERSION_V5,
    {
      queryKey: [
        REACT_QUERY_KEY.GET_QUERY_RANGE,
        globalSelectedTime,
        maxTime,
        minTime,
        stagedQuery,
      ],
      queryKey,
      enabled: !!stagedQuery && panelType === PANEL_TYPES.TABLE,
    },
  );
@@ -89,4 +108,8 @@ function TableView({
  );
}

TableView.defaultProps = {
  queryKeyRef: undefined,
};

export default memo(TableView);
@@ -1,7 +1,7 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { Typography } from 'antd';
import logEvent from 'api/common/logEvent';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import ListViewOrderBy from 'components/OrderBy/ListViewOrderBy';
import { ResizeTable } from 'components/ResizeTable';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { QueryParams } from 'constants/query';
@@ -9,19 +9,18 @@ import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
import NoLogs from 'container/NoLogs/NoLogs';
import { getListViewQuery } from 'container/TracesExplorer/explorerUtils';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { Pagination } from 'hooks/queryPagination';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { ArrowUp10, Minus } from 'lucide-react';
import {
  Dispatch,
  memo,
  MutableRefObject,
  SetStateAction,
  useCallback,
  useEffect,
  useMemo,
  useState,
} from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
@@ -30,7 +29,6 @@ import APIError from 'types/api/error';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import DOCLINKS from 'utils/docLinks';
import { transformBuilderQueryFields } from 'utils/queryTransformers';

import TraceExplorerControls from '../Controls';
import { TracesLoading } from '../TraceLoading/TraceLoading';
@@ -41,15 +39,16 @@ interface TracesViewProps {
  isFilterApplied: boolean;
  setWarning: Dispatch<SetStateAction<Warning | undefined>>;
  setIsLoadingQueries: Dispatch<SetStateAction<boolean>>;
  queryKeyRef?: MutableRefObject<any>;
}

function TracesView({
  isFilterApplied,
  setWarning,
  setIsLoadingQueries,
  queryKeyRef,
}: TracesViewProps): JSX.Element {
  const { stagedQuery, panelType } = useQueryBuilder();
  const [orderBy, setOrderBy] = useState<string>('timestamp:desc');

  const { selectedTime: globalSelectedTime, maxTime, minTime } = useSelector<
    AppState,
@@ -61,21 +60,34 @@ function TracesView({
  );

  const transformedQuery = useMemo(
    () =>
      transformBuilderQueryFields(stagedQuery || initialQueriesMap.traces, {
        orderBy: [
          {
            columnName: orderBy.split(':')[0],
            order: orderBy.split(':')[1] as 'asc' | 'desc',
          },
        ],
      }),
    [stagedQuery, orderBy],
    () => getListViewQuery(stagedQuery || initialQueriesMap.traces),
    [stagedQuery],
  );

  const handleOrderChange = useCallback((value: string) => {
    setOrderBy(value);
  }, []);
  const queryKey = useMemo(
    () => [
      REACT_QUERY_KEY.GET_QUERY_RANGE,
      globalSelectedTime,
      maxTime,
      minTime,
      stagedQuery,
      panelType,
      paginationQueryData,
    ],
    [
      globalSelectedTime,
      maxTime,
      minTime,
      stagedQuery,
      panelType,
      paginationQueryData,
    ],
  );

  if (queryKeyRef) {
    // eslint-disable-next-line no-param-reassign
    queryKeyRef.current = queryKey;
  }

  const { data, isLoading, isFetching, isError, error } = useGetQueryRange(
    {
@@ -92,16 +104,7 @@
    },
    ENTITY_VERSION_V5,
    {
      queryKey: [
        REACT_QUERY_KEY.GET_QUERY_RANGE,
        globalSelectedTime,
        maxTime,
        minTime,
        stagedQuery,
        panelType,
        paginationQueryData,
        orderBy,
      ],
      queryKey,
      enabled: !!stagedQuery && panelType === PANEL_TYPES.TRACE,
    },
  );
@@ -148,18 +151,6 @@ function TracesView({
      </Typography>

      <div className="trace-explorer-controls">
        <div className="order-by-container">
          <div className="order-by-label">
            Order by <Minus size={14} /> <ArrowUp10 size={14} />
          </div>

          <ListViewOrderBy
            value={orderBy}
            onChange={handleOrderChange}
            dataSource={DataSource.TRACES}
          />
        </div>

        <TraceExplorerControls
          isLoading={isLoading}
          totalCount={responseData?.length || 0}
@@ -203,4 +194,8 @@ function TracesView({
  );
}

TracesView.defaultProps = {
  queryKeyRef: undefined,
};

export default memo(TracesView);
75 frontend/src/container/TracesExplorer/explorerUtils.ts Normal file
@@ -0,0 +1,75 @@
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { OptionsQuery } from 'container/OptionsMenu/types';
import { cloneDeep, set } from 'lodash-es';
import { OrderByPayload, Query } from 'types/api/queryBuilder/queryBuilderData';

export const getListViewQuery = (
  stagedQuery: Query,
  orderBy?: string,
): Query => {
  const query = stagedQuery
    ? cloneDeep(stagedQuery)
    : cloneDeep(initialQueriesMap.traces);

  const orderByPayload: OrderByPayload[] = orderBy
    ? [
        {
          columnName: orderBy.split(':')[0],
          order: orderBy.split(':')[1] as 'asc' | 'desc',
        },
      ]
    : [];

  for (let i = 0; i < query.builder.queryData.length; i++) {
    const queryData = query.builder.queryData[i];
    queryData.groupBy = [];
    queryData.having = {
      expression: '',
    };
    queryData.orderBy = orderByPayload;
  }

  if (
    query.builder.queryTraceOperator &&
    query.builder.queryTraceOperator.length > 0
  ) {
    for (let i = 0; i < query.builder.queryTraceOperator.length; i++) {
      const queryTraceOperator = query.builder.queryTraceOperator[i];
      queryTraceOperator.groupBy = [];
      queryTraceOperator.having = {
        expression: '',
      };
      queryTraceOperator.orderBy = orderByPayload;
    }
  }

  return query;
};

export const getQueryByPanelType = (
  stagedQuery: Query,
  panelType: PANEL_TYPES,
): Query => {
  if (panelType === PANEL_TYPES.LIST || panelType === PANEL_TYPES.TRACE) {
    return getListViewQuery(stagedQuery);
  }
  return stagedQuery;
};

export const getExportQueryData = (
  query: Query,
  panelType: PANEL_TYPES,
  options: OptionsQuery,
): Query => {
  if (panelType === PANEL_TYPES.LIST) {
    const updatedQuery = cloneDeep(query);
    set(
      updatedQuery,
      'builder.queryData[0].selectColumns',
      options.selectColumns,
    );

    return updatedQuery;
  }
  return query;
};
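Note: together these helpers centralize the query shaping that was previously duplicated across views. A usage sketch; the stagedQuery and options values are assumed to come from the query builder context and options menu:

import {
  getExportQueryData,
  getListViewQuery,
  getQueryByPanelType,
} from 'container/TracesExplorer/explorerUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';

// Strip groupBy/having everywhere and apply a 'column:direction' sort:
const listQuery = getListViewQuery(stagedQuery, 'timestamp:desc');

// Same stripping, but only for list-style panels (LIST or TRACE):
const exportBase = getQueryByPanelType(stagedQuery, PANEL_TYPES.TRACE);

// For LIST exports, also copy the user's visible columns into the query:
const exportQuery = getExportQueryData(exportBase, PANEL_TYPES.LIST, options);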
@@ -63,7 +63,8 @@ export function convertOperatorLabelForExceptions(
export function formatStringValuesForTrace(
  val: TagFilterItem['value'] = [],
): string[] {
  return !Array.isArray(val) ? [String(val)] : val;
  // In QB v5 the value can be an array of mixed (boolean, number, string) values.
  // To stay compatible with the old version, convert every element to a string.
  return !Array.isArray(val) ? [String(val)] : val.map((item) => String(item));
}

export const convertCompositeQueryToTraceSelectedTags = (
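Note: concretely, the new mapping makes mixed-type arrays safe for the string-only consumers downstream:

formatStringValuesForTrace('frontend');       // ['frontend']
formatStringValuesForTrace([200, true, 'a']); // ['200', 'true', 'a']
formatStringValuesForTrace();                 // []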
@@ -451,6 +451,12 @@ export const getUPlotChartOptions = ({
  (self): void => {
    const selection = self.select;
    if (selection) {
      // Cleanup any visible "View Traces" buttons when drag selection occurs
      const activeButtons = document.querySelectorAll(
        '.view-onclick-show-button',
      );
      activeButtons.forEach((btn) => btn.remove());

      const startTime = self.posToVal(selection.left, 'x');
      const endTime = self.posToVal(selection.left + selection.width, 'x');
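Note: posToVal is the uPlot API that maps a pixel offset inside the plot area to a value on a given scale, so the selection's left edge and width become a start/end pair on the x (time) scale. A self-contained sketch of the same conversion; the hook wiring shown is an assumption:

import uPlot from 'uplot';

// e.g. registered as hooks: { setSelect: [onSelect] }
function onSelect(self: uPlot): void {
  const { left, width } = self.select;
  const startTime = self.posToVal(left, 'x');
  const endTime = self.posToVal(left + width, 'x');
  console.log('selected x range:', startTime, endTime);
}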
36 frontend/src/pages/TracesExplorer/constants.ts Normal file
@@ -0,0 +1,36 @@
export const TOOLBAR_VIEWS = {
  list: {
    name: 'list',
    label: 'List',
    show: true,
    key: 'list',
  },
  timeseries: {
    name: 'timeseries',
    label: 'Timeseries',
    disabled: false,
    show: true,
    key: 'timeseries',
  },
  trace: {
    name: 'trace',
    label: 'Trace',
    disabled: false,
    show: true,
    key: 'trace',
  },
  table: {
    name: 'table',
    label: 'Table',
    disabled: false,
    show: true,
    key: 'table',
  },
  clickhouse: {
    name: 'clickhouse',
    label: 'Clickhouse',
    disabled: false,
    show: false,
    key: 'clickhouse',
  },
};
@@ -1,7 +1,6 @@
import './TracesExplorer.styles.scss';

import * as Sentry from '@sentry/react';
import { Callout } from '@signozhq/callout';
import { Card } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
@@ -13,12 +12,15 @@ import { LOCALSTORAGE } from 'constants/localStorage';
import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes';
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import ExplorerOptionWrapper from 'container/ExplorerOptions/ExplorerOptionWrapper';
import ExportPanel from 'container/ExportPanel';
import { useOptionsMenu } from 'container/OptionsMenu';
import LeftToolbarActions from 'container/QueryBuilder/components/ToolbarActions/LeftToolbarActions';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import TimeSeriesView from 'container/TimeSeriesView';
import Toolbar from 'container/Toolbar/Toolbar';
import {
  getExportQueryData,
  getQueryByPanelType,
} from 'container/TracesExplorer/explorerUtils';
import ListView from 'container/TracesExplorer/ListView';
import { defaultSelectedColumns } from 'container/TracesExplorer/ListView/configs';
import QuerySection from 'container/TracesExplorer/QuerySection';
@@ -27,36 +29,35 @@ import TracesView from 'container/TracesExplorer/TracesView';
import { useGetPanelTypesQueryParam } from 'hooks/queryBuilder/useGetPanelTypesQueryParam';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { useHandleExplorerTabChange } from 'hooks/useHandleExplorerTabChange';
import {
  ICurrentQueryData,
  useHandleExplorerTabChange,
} from 'hooks/useHandleExplorerTabChange';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { cloneDeep, isEmpty, set } from 'lodash-es';
import { isEmpty } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { ExplorerViews } from 'pages/LogsExplorer/utils';
import { TOOLBAR_VIEWS } from 'pages/TracesExplorer/constants';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { Warning } from 'types/api';
import { Dashboard } from 'types/api/dashboard/getAll';
import {
  IBuilderTraceOperator,
  Query,
} from 'types/api/queryBuilder/queryBuilderData';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink';
import {
  getExplorerViewForPanelType,
  explorerViewToPanelType,
  getExplorerViewFromUrl,
} from 'utils/explorerUtils';
import { v4 } from 'uuid';

function TracesExplorer(): JSX.Element {
  const {
    currentQuery,
    panelType,
    updateAllQueriesOperators,
    handleRunQuery,
    stagedQuery,
    handleSetConfig,
    updateQueriesData,
  } = useQueryBuilder();

  const { options } = useOptionsMenu({
@@ -69,6 +70,7 @@ function TracesExplorer(): JSX.Element {
  });

  const [searchParams] = useSearchParams();
  const listQueryKeyRef = useRef<any>();

  // Get panel type from URL
  const panelTypesFromUrl = useGetPanelTypesQueryParam(PANEL_TYPES.LIST);
@@ -78,103 +80,45 @@ function TracesExplorer(): JSX.Element {
    getExplorerViewFromUrl(searchParams, panelTypesFromUrl),
  );

  const [warning, setWarning] = useState<Warning | undefined>(undefined);
  const [isOpen, setOpen] = useState<boolean>(true);

  const defaultQuery = useMemo(
    (): Query =>
      updateAllQueriesOperators(
        initialQueriesMap.traces,
        PANEL_TYPES.LIST,
        DataSource.TRACES,
      ),
    [updateAllQueriesOperators],
  );

  const { handleExplorerTabChange } = useHandleExplorerTabChange();
  const { safeNavigate } = useSafeNavigate();

  // Update selected view when panel type from URL changes
  useEffect(() => {
    if (panelTypesFromUrl) {
      const newView = getExplorerViewForPanelType(panelTypesFromUrl);
      if (newView && newView !== selectedView) {
        setSelectedView(newView);
      }
    }
  }, [panelTypesFromUrl, selectedView]);

  const [shouldReset, setShouldReset] = useState(false);

  const [defaultQuery, setDefaultQuery] = useState<Query>(() =>
    updateAllQueriesOperators(
      initialQueriesMap.traces,
      PANEL_TYPES.LIST,
      DataSource.TRACES,
    ),
  );

  const handleChangeSelectedView = useCallback(
    (view: ExplorerViews): void => {
      if (selectedView === ExplorerViews.LIST) {
        handleSetConfig(PANEL_TYPES.LIST, DataSource.TRACES);
      }

      if (
        (selectedView === ExplorerViews.TRACE ||
          selectedView === ExplorerViews.LIST) &&
        stagedQuery?.builder?.queryTraceOperator &&
        stagedQuery.builder.queryTraceOperator.length > 0
      ) {
        // remove order by from trace operator
        set(stagedQuery, 'builder.queryTraceOperator[0].orderBy', []);
      }

      if (view === ExplorerViews.LIST || view === ExplorerViews.TRACE) {
        // loop through all the queries and remove the group by

        const updateQuery = updateQueriesData(
          currentQuery,
          'queryData',
          (item) => ({ ...item, groupBy: [], orderBy: [] }),
        );

        setDefaultQuery(updateQuery);

        setShouldReset(true);
      }
    (view: ExplorerViews, querySearchParameters?: ICurrentQueryData): void => {
      handleSetConfig(explorerViewToPanelType[view], DataSource.TRACES);

      setSelectedView(view);

      handleExplorerTabChange(
        view === ExplorerViews.TIMESERIES ? PANEL_TYPES.TIME_SERIES : view,
        explorerViewToPanelType[view],
        querySearchParameters,
      );
    },
    [
      selectedView,
      currentQuery,
      stagedQuery,
      handleExplorerTabChange,
      handleSetConfig,
      updateQueriesData,
    ],
    [handleExplorerTabChange, handleSetConfig],
  );

  const listQuery = useMemo(() => {
    if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;

    return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
  }, [stagedQuery]);

  const exportDefaultQuery = useMemo(
    () =>
      updateAllQueriesOperators(
        currentQuery || initialQueriesMap.traces,
        PANEL_TYPES.TIME_SERIES,
        DataSource.TRACES,
      getQueryByPanelType(
        stagedQuery || initialQueriesMap.traces,
        panelType || PANEL_TYPES.LIST,
      ),
    [currentQuery, updateAllQueriesOperators],
    [stagedQuery, panelType],
  );

  const getUpdatedQueryForExport = useCallback((): Query => {
    const updatedQuery = cloneDeep(currentQuery);

    set(
      updatedQuery,
      'builder.queryData[0].selectColumns',
      options.selectColumns,
    );

    return updatedQuery;
  }, [currentQuery, options.selectColumns]);

  const handleExport = useCallback(
    (dashboard: Dashboard | null, isNewDashboard?: boolean): void => {
      if (!dashboard || !panelType) return;
@@ -185,10 +129,11 @@ function TracesExplorer(): JSX.Element {

      const widgetId = v4();

      const query =
        panelType === PANEL_TYPES.LIST
          ? getUpdatedQueryForExport()
          : exportDefaultQuery;
      const query = getExportQueryData(
        exportDefaultQuery,
        panelTypeParam,
        options,
      );

      logEvent('Traces Explorer: Add to dashboard successful', {
        panelType,
@@ -205,56 +150,11 @@ function TracesExplorer(): JSX.Element {

      safeNavigate(dashboardEditView);
    },
    [exportDefaultQuery, panelType, safeNavigate, getUpdatedQueryForExport],
    [exportDefaultQuery, panelType, safeNavigate, options],
  );

  useShareBuilderUrl({ defaultValue: defaultQuery, forceReset: shouldReset });
  useShareBuilderUrl({ defaultValue: defaultQuery });

  const hasMultipleQueries = useMemo(
    () => currentQuery?.builder?.queryData?.length > 1,
    [currentQuery],
  );

  const traceOperator = useMemo((): IBuilderTraceOperator | undefined => {
    if (
      currentQuery.builder.queryTraceOperator &&
      currentQuery.builder.queryTraceOperator.length > 0
    ) {
      return currentQuery.builder.queryTraceOperator[0];
    }

    return undefined;
  }, [currentQuery.builder.queryTraceOperator]);

  const showTraceOperatorCallout = useMemo(
    () =>
      (selectedView === ExplorerViews.LIST ||
        selectedView === ExplorerViews.TRACE) &&
      hasMultipleQueries &&
      !traceOperator,
    [selectedView, hasMultipleQueries, traceOperator],
  );

  const traceOperatorCalloutDescription = useMemo(() => {
    if (currentQuery.builder.queryData.length === 0) return '';
    const firstQuery = currentQuery.builder.queryData[0];
    return `Please use a Trace Operator to combine the results of multiple span queries. Otherwise you'll only see the results from query "${firstQuery.queryName}"`;
  }, [currentQuery]);

  useEffect(() => {
    if (shouldReset) {
      setShouldReset(false);
      setDefaultQuery(
        updateAllQueriesOperators(
          initialQueriesMap.traces,
          PANEL_TYPES.LIST,
          DataSource.TRACES,
        ),
      );
    }
  }, [shouldReset, updateAllQueriesOperators]);

  const [isOpen, setOpen] = useState<boolean>(true);
  const logEventCalledRef = useRef(false);

  useEffect(() => {
@@ -264,51 +164,13 @@ function TracesExplorer(): JSX.Element {
    }
  }, []);

  const toolbarViews = useMemo(
    () => ({
      list: {
        name: 'list',
        label: 'List',
        show: true,
        key: 'list',
      },
      timeseries: {
        name: 'timeseries',
        label: 'Timeseries',
        disabled: false,
        show: true,
        key: 'timeseries',
      },
      trace: {
        name: 'trace',
        label: 'Trace',
        disabled: false,
        show: true,
        key: 'trace',
      },
      table: {
        name: 'table',
        label: 'Table',
        disabled: false,
        show: true,
        key: 'table',
      },
      clickhouse: {
        name: 'clickhouse',
        label: 'Clickhouse',
        disabled: false,
        show: false,
        key: 'clickhouse',
      },
    }),
    [],
  );

  const isFilterApplied = useMemo(() => !isEmpty(listQuery?.filters?.items), [
    listQuery,
  ]);

  const [warning, setWarning] = useState<Warning | undefined>(undefined);
  const isFilterApplied = useMemo(() => {
    // if any of the non-disabled queries has filters applied, return true
    const result = stagedQuery?.builder?.queryData?.filter(
      (item) => !isEmpty(item.filters?.items) && !item.disabled,
    );
    return !!result?.length;
  }, [stagedQuery]);

  return (
    <Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
@@ -335,7 +197,7 @@ function TracesExplorer(): JSX.Element {
          <LeftToolbarActions
            showFilter={isOpen}
            handleFilterVisibilityChange={(): void => setOpen(!isOpen)}
            items={toolbarViews}
            items={TOOLBAR_VIEWS}
            selectedView={selectedView}
            onChangeSelectedView={handleChangeSelectedView}
          />
@@ -347,6 +209,7 @@ function TracesExplorer(): JSX.Element {
            <RightToolbarActions
              onStageRunQuery={(): void => handleRunQuery()}
              isLoadingQueries={isLoadingQueries}
              listQueryKeyRef={listQueryKeyRef}
            />
          }
        />
@@ -358,29 +221,13 @@ function TracesExplorer(): JSX.Element {
      </ExplorerCard>

      <div className="traces-explorer-views">
        <div className="traces-explorer-export-panel">
          <ExportPanel
            query={exportDefaultQuery}
            isLoading={false}
            onExport={handleExport}
          />
        </div>

        {showTraceOperatorCallout && (
          <Callout
            type="info"
            size="small"
            showIcon
            description={traceOperatorCalloutDescription}
          />
        )}

        {selectedView === ExplorerViews.LIST && (
          <div className="trace-explorer-list-view">
            <ListView
              isFilterApplied={isFilterApplied}
              setWarning={setWarning}
              setIsLoadingQueries={setIsLoadingQueries}
              queryKeyRef={listQueryKeyRef}
            />
          </div>
        )}
@@ -391,6 +238,7 @@ function TracesExplorer(): JSX.Element {
              isFilterApplied={isFilterApplied}
              setWarning={setWarning}
              setIsLoadingQueries={setIsLoadingQueries}
              queryKeyRef={listQueryKeyRef}
            />
          </div>
        )}
@@ -402,6 +250,7 @@ function TracesExplorer(): JSX.Element {
              isFilterApplied={isFilterApplied}
              setWarning={setWarning}
              setIsLoadingQueries={setIsLoadingQueries}
              queryKeyRef={listQueryKeyRef}
            />
          </div>
        )}
@@ -411,6 +260,7 @@ function TracesExplorer(): JSX.Element {
          <TableView
            setWarning={setWarning}
            setIsLoadingQueries={setIsLoadingQueries}
            queryKeyRef={listQueryKeyRef}
          />
        </div>
      )}
@@ -421,6 +271,7 @@ function TracesExplorer(): JSX.Element {
            query={exportDefaultQuery}
            sourcepage={DataSource.TRACES}
            onExport={handleExport}
            handleChangeSelectedView={handleChangeSelectedView}
          />
        </div>
      </div>
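Note: the simplified handler leans on a single view-to-panel mapping instead of per-view branching. The exact contents of explorerViewToPanelType live in utils/explorerUtils and are not shown in this diff; a plausible shape, purely as an assumption:

import { PANEL_TYPES } from 'constants/queryBuilder';
import { ExplorerViews } from 'pages/LogsExplorer/utils';

// Assumed: every explorer view resolves to exactly one panel type.
export const explorerViewToPanelType: Partial<Record<ExplorerViews, PANEL_TYPES>> = {
  [ExplorerViews.LIST]: PANEL_TYPES.LIST,
  [ExplorerViews.TIMESERIES]: PANEL_TYPES.TIME_SERIES,
  [ExplorerViews.TRACE]: PANEL_TYPES.TRACE,
  [ExplorerViews.TABLE]: PANEL_TYPES.TABLE,
};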
@@ -1,4 +1,5 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import listOrgPreferences from 'api/v1/org/preferences/list';
import get from 'api/v1/user/me/get';
import listUserPreferences from 'api/v1/user/preferences/list';
@@ -77,6 +78,7 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {

  useEffect(() => {
    if (!isFetchingUser && userData && userData.data) {
      setLocalStorageApi(LOCALSTORAGE.LOGGED_IN_USER_EMAIL, userData.data.email);
      setUser((prev) => ({
        ...prev,
        ...userData.data,
@@ -35,7 +35,7 @@ export interface TagFilterItem {
  id: string;
  key?: BaseAutocompleteData;
  op: string;
  value: string[] | string | number | boolean;
  value: (string | number | boolean)[] | string | number | boolean;
}

export interface TagFilter {
@@ -1,28 +0,0 @@
import { cloneDeep } from 'lodash-es';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';

/**
 * Transforms a query by modifying specific fields in the builder queries
 * @param query - The original query object
 * @param fieldOverrides - Partial object containing fields to override in each builder query
 * @returns A new query object with the modified fields
 */
export const transformBuilderQueryFields = (
  query: Query,
  fieldOverrides: Partial<IBuilderQuery>,
): Query => {
  // Create a deep copy of the query
  const transformedQuery: Query = cloneDeep(query);

  // Update the specified fields for each query in the builder
  if (transformedQuery.builder?.queryData) {
    transformedQuery.builder.queryData = transformedQuery.builder.queryData.map(
      (queryItem) => ({
        ...queryItem,
        ...fieldOverrides,
      }),
    );
  }

  return transformedQuery;
};
@@ -11,3 +11,8 @@ export function unquote(str: string): string {

  return trimmed;
}

export function isQuoted(str: string): boolean {
  const trimmed = str.trim();
  return trimmed.length >= 2 && /^(["'`])(.*)\1$/.test(trimmed);
}
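Note: isQuoted pairs with the existing unquote helper: the \1 backreference requires the first and last characters to be the same quote character (double, single, or backtick). Illustrative calls:

isQuoted('"hello"');  // true  - matching double quotes
isQuoted("'world'");  // true  - matching single quotes
isQuoted('"mixed\''); // false - opening and closing quotes differ
isQuoted('plain');    // false - not quoted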
@@ -23,7 +23,6 @@ const plugins = [
    PYLON_APP_ID: process.env.PYLON_APP_ID,
    APPCUES_APP_ID: process.env.APPCUES_APP_ID,
    POSTHOG_KEY: process.env.POSTHOG_KEY,
    USERPILOT_KEY: process.env.USERPILOT_KEY,
    SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
    SENTRY_ORG: process.env.SENTRY_ORG,
    SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,
@@ -40,9 +39,9 @@ const plugins = [
    FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
    WEBSOCKET_API_ENDPOINT: process.env.WEBSOCKET_API_ENDPOINT,
    PYLON_APP_ID: process.env.PYLON_APP_ID,
    PYLON_IDENTITY_SECRET: process.env.PYLON_IDENTITY_SECRET,
    APPCUES_APP_ID: process.env.APPCUES_APP_ID,
    POSTHOG_KEY: process.env.POSTHOG_KEY,
    USERPILOT_KEY: process.env.USERPILOT_KEY,
    SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
    SENTRY_ORG: process.env.SENTRY_ORG,
    SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,

@@ -28,7 +28,6 @@ const plugins = [
    PYLON_APP_ID: process.env.PYLON_APP_ID,
    APPCUES_APP_ID: process.env.APPCUES_APP_ID,
    POSTHOG_KEY: process.env.POSTHOG_KEY,
    USERPILOT_KEY: process.env.USERPILOT_KEY,
    SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
    SENTRY_ORG: process.env.SENTRY_ORG,
    SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,
@@ -50,9 +49,9 @@ const plugins = [
    FRONTEND_API_ENDPOINT: process.env.FRONTEND_API_ENDPOINT,
    WEBSOCKET_API_ENDPOINT: process.env.WEBSOCKET_API_ENDPOINT,
    PYLON_APP_ID: process.env.PYLON_APP_ID,
    PYLON_IDENTITY_SECRET: process.env.PYLON_IDENTITY_SECRET,
    APPCUES_APP_ID: process.env.APPCUES_APP_ID,
    POSTHOG_KEY: process.env.POSTHOG_KEY,
    USERPILOT_KEY: process.env.USERPILOT_KEY,
    SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
    SENTRY_ORG: process.env.SENTRY_ORG,
    SENTRY_PROJECT_ID: process.env.SENTRY_PROJECT_ID,
@@ -3385,30 +3385,6 @@
    strict-event-emitter "^0.2.4"
    web-encoding "^1.1.5"

"@ndhoule/each@^2.0.1":
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/@ndhoule/each/-/each-2.0.1.tgz#bbed372a603e0713a3193c706a73ddebc5b426a9"
  integrity sha512-wHuJw6x+rF6Q9Skgra++KccjBozCr9ymtna0FhxmV/8xT/hZ2ExGYR8SV8prg8x4AH/7mzDYErNGIVHuzHeybw==
  dependencies:
    "@ndhoule/keys" "^2.0.0"

"@ndhoule/includes@^2.0.1":
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/@ndhoule/includes/-/includes-2.0.1.tgz#051ff5eb042c8fa17e7158f0a8a70172e1affaa5"
  integrity sha512-Q8zN6f3yIhxgBwZ5ldLozHqJlc/fRQ5+hFFsPMFeC9SJvz0nq8vG9hoRXL1c1iaNFQd7yAZIy2igQpERoFqxqg==
  dependencies:
    "@ndhoule/each" "^2.0.1"

"@ndhoule/keys@^2.0.0":
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/@ndhoule/keys/-/keys-2.0.0.tgz#3d64ae677c65a261747bf3a457c62eb292a4e0ce"
  integrity sha512-vtCqKBC1Av6dsBA8xpAO+cgk051nfaI+PnmTZep2Px0vYrDvpUmLxv7z40COlWH5yCpu3gzNhepk+02yiQiZNw==

"@ndhoule/pick@^2.0.0":
  version "2.0.0"
  resolved "https://registry.yarnpkg.com/@ndhoule/pick/-/pick-2.0.0.tgz#e1eb1a6ca3243eef56daa095c3a1612c74a52156"
  integrity sha512-xkYtpf1pRd8egwvl5tJcdGu+GBd6ZZH3S/zoIQ9txEI+pHF9oTIlxMC9G4CB3sRugAeLgu8qYJGl3tnxWq74Qw==

"@nodelib/fs.scandir@2.1.5":
  version "2.1.5"
  resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz"
@@ -3529,18 +3505,25 @@
  resolved "https://registry.npmjs.org/@petamoriken/float16/-/float16-3.8.0.tgz"
  integrity sha512-AhVAm6SQ+zgxIiOzwVdUcDmKlu/qU39FiYD2UD6kQQaVenrn0dGZewIghWAENGQsvC+1avLCuT+T2/3Gsp/W3w==

"@playwright/test@1.54.1":
  version "1.54.1"
  resolved "https://registry.yarnpkg.com/@playwright/test/-/test-1.54.1.tgz#a76333e5c2cba5f12f96a6da978bba3ab073c7e6"
  integrity sha512-FS8hQ12acieG2dYSksmLOF7BNxnVf2afRJdCuM1eMSxj6QTSE6G4InGF7oApGgDb65MX7AwMVlIkpru0yZA4Xw==
"@playwright/test@1.55.1":
  version "1.55.1"
  resolved "https://registry.yarnpkg.com/@playwright/test/-/test-1.55.1.tgz#80f775d5f948cd3ef550fcc45ef99986d3ffb36c"
  integrity sha512-IVAh/nOJaw6W9g+RJVlIQJ6gSiER+ae6mKQ5CX1bERzQgbC1VSeBlwdvczT7pxb0GWiyrxH4TGKbMfDb4Sq/ig==
  dependencies:
    playwright "1.54.1"
    playwright "1.55.1"

"@polka/url@^1.0.0-next.20":
  version "1.0.0-next.21"
  resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz"
  integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==

"@posthog/core@1.6.0":
  version "1.6.0"
  resolved "https://registry.yarnpkg.com/@posthog/core/-/core-1.6.0.tgz#a5b63a30950a8dfe87d4bf335ab24005c7ce1278"
  integrity sha512-Tbh8UACwbb7jFdDC7wwXHtfNzO+4wKh3VbyMHmp2UBe6w1jliJixexTJNfkqdGZm+ht3M10mcKvGGPnoZ2zLBg==
  dependencies:
    cross-spawn "^7.0.6"

"@radix-ui/primitive@1.0.1":
  version "1.0.1"
  resolved "https://registry.yarnpkg.com/@radix-ui/primitive/-/primitive-1.0.1.tgz#e46f9958b35d10e9f6dc71c497305c22e3e55dbd"
@@ -4685,6 +4668,11 @@
    tapable "^2.0.0"
    webpack "^5.1.0"

"@types/crypto-js@4.2.2":
  version "4.2.2"
  resolved "https://registry.yarnpkg.com/@types/crypto-js/-/crypto-js-4.2.2.tgz#771c4a768d94eb5922cc202a3009558204df0cea"
  integrity sha512-sDOLlVbHhXpAUAL0YHDUUwDZf3iN4Bwi4W6a0W0b+QcAezUbRtH4FVb+9J4h+XFPW7l/gQ9F8qC7P+Ec4k8QVQ==

"@types/d3-array@3.0.3":
  version "3.0.3"
  resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.3.tgz#87d990bf504d14ad6b16766979d04e943c046dac"
@@ -7518,11 +7506,6 @@ compare-func@^2.0.0:
    array-ify "^1.0.0"
    dot-prop "^5.1.0"

component-indexof@0.0.3:
  version "0.0.3"
  resolved "https://registry.yarnpkg.com/component-indexof/-/component-indexof-0.0.3.tgz#11d091312239eb8f32c8f25ae9cb002ffe8d3c24"
  integrity sha512-puDQKvx/64HZXb4hBwIcvQLaLgux8o1CbWl39s41hrIIZDl1lJiD5jc22gj3RBeGK0ovxALDYpIbyjqDUUl0rw==

compressible@~2.0.16:
  version "2.0.18"
  resolved "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz"
@@ -7733,7 +7716,7 @@ cross-fetch@3.1.5:
  dependencies:
    node-fetch "2.6.7"

cross-spawn@7.0.5, cross-spawn@^6.0.5, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
cross-spawn@7.0.5, cross-spawn@^6.0.5, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3, cross-spawn@^7.0.6:
  version "7.0.5"
  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.5.tgz#910aac880ff5243da96b728bc6521a5f6c2f2f82"
  integrity sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==
@@ -7742,6 +7725,11 @@ cross-spawn@7.0.5, cross-spawn@^6.0.5, cross-spawn@^7.0.1, cross-spawn@^7.0.2, c
    shebang-command "^2.0.0"
    which "^2.0.1"

crypto-js@4.2.0:
  version "4.2.0"
  resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-4.2.0.tgz#4d931639ecdfd12ff80e8186dba6af2c2e856631"
  integrity sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==

css-box-model@^1.2.0:
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/css-box-model/-/css-box-model-1.2.1.tgz#59951d3b81fd6b2074a62d49444415b0d2b4d7c1"
@@ -11241,11 +11229,6 @@ is-wsl@^3.1.0:
  dependencies:
    is-inside-container "^1.0.0"

is@^3.1.0:
  version "3.3.0"
  resolved "https://registry.yarnpkg.com/is/-/is-3.3.0.tgz#61cff6dd3c4193db94a3d62582072b44e5645d79"
  integrity sha512-nW24QBoPcFGGHJGUwnfpI7Yc5CdqWNdsyHQszVE/z2pKHXzh7FZ5GWhJqSyaQ9wMkQnsTx+kAI8bHlCX4tKdbg==

isarray@0.0.1:
  version "0.0.1"
  resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf"
@@ -13616,11 +13599,6 @@ nwsapi@^2.2.0:
  resolved "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.4.tgz"
  integrity sha512-NHj4rzRo0tQdijE9ZqAx6kYDcoRwYwSYzCA8MY3JzfxlrvEU0jhnhJT9BhqhJs7I/dKcrDm6TyulaRqZPIhN5g==

obj-case@^0.2.0:
  version "0.2.1"
  resolved "https://registry.yarnpkg.com/obj-case/-/obj-case-0.2.1.tgz#13a554d04e5ca32dfd9d566451fd2b0e11007f1a"
  integrity sha512-PquYBBTy+Y6Ob/O2574XHhDtHJlV1cJHMCgW+rDRc9J5hhmRelJB3k5dTK/3cVmFVtzvAKuENeuLpoyTzMzkOg==

object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
  version "4.1.1"
  resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz"
@@ -14197,17 +14175,17 @@ pkg-dir@^7.0.0:
  dependencies:
    find-up "^6.3.0"

playwright-core@1.54.1:
  version "1.54.1"
  resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.54.1.tgz#d32edcce048c9d83ceac31e294a7b60ef586960b"
  integrity sha512-Nbjs2zjj0htNhzgiy5wu+3w09YetDx5pkrpI/kZotDlDUaYk0HVA5xrBVPdow4SAUIlhgKcJeJg4GRKW6xHusA==
playwright-core@1.55.1:
  version "1.55.1"
  resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.55.1.tgz#5d3bb1846bc4289d364ea1a9dcb33f14545802e9"
  integrity sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==

playwright@1.54.1:
  version "1.54.1"
  resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.54.1.tgz#128d66a8d5182b5330e6440be3a72ca313362788"
  integrity sha512-peWpSwIBmSLi6aW2auvrUtf2DqY16YYcCMO8rTVx486jKmDTJg7UAhyrraP98GB8BoPURZP8+nxO7TSd4cPr5g==
playwright@1.55.1:
  version "1.55.1"
  resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.55.1.tgz#8a9954e9e61ed1ab479212af9be336888f8b3f0e"
  integrity sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==
  dependencies:
    playwright-core "1.54.1"
    playwright-core "1.55.1"
  optionalDependencies:
    fsevents "2.3.2"

@@ -14530,15 +14508,16 @@ postcss@^8.4.35:
    picocolors "^1.1.1"
    source-map-js "^1.2.1"

posthog-js@1.215.5:
  version "1.215.5"
  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.215.5.tgz#0512cfdb919da960b809c5f686ca147f9c2922ba"
  integrity sha512-42lPur+xvkp51pHz2FQ7Y+KHdZ4eQSNIhUO03EECvc2UsmnM0FiVTrF1bcLwHZMaWfR26gOeuOAAjTUV9tinJg==
posthog-js@1.298.0:
  version "1.298.0"
  resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.298.0.tgz#54730988a753220aef54af0c4e69960633917450"
  integrity sha512-Zwzsf7TO8qJ6DFLuUlQSsT/5OIOcxSBZlKOSk3satkEnwKdmnBXUuxgVXRHrvq1kj7OB2PVAPgZiQ8iHHj9DRA==
  dependencies:
    "@posthog/core" "1.6.0"
    core-js "^3.38.1"
    fflate "^0.4.8"
    preact "^10.19.3"
    web-vitals "^4.2.0"
    web-vitals "^4.2.4"

preact@^10.19.3:
  version "10.22.0"
@@ -17907,17 +17886,6 @@ use-sync-external-store@^1.0.0:
  resolved "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz"
  integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==

userpilot@1.3.9:
  version "1.3.9"
  resolved "https://registry.yarnpkg.com/userpilot/-/userpilot-1.3.9.tgz#6374083f3e84cbf1fc825133588b5b499054271b"
  integrity sha512-V0QIuIlAJPB8s3j+qtv7BW7NKSXthlZWuowIu+IZOMGLgUbqQTaSW5m1Ct4wJviPKUNOi8kbhCXN4c4b3zcJzg==
  dependencies:
    "@ndhoule/includes" "^2.0.1"
    "@ndhoule/pick" "^2.0.0"
    component-indexof "0.0.3"
    is "^3.1.0"
    obj-case "^0.2.0"

util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1:
  version "1.0.2"
  resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"
@@ -18124,7 +18092,7 @@ web-vitals@^0.2.4:
  resolved "https://registry.npmjs.org/web-vitals/-/web-vitals-0.2.4.tgz"
  integrity sha512-6BjspCO9VriYy12z356nL6JBS0GYeEcA457YyRzD+dD6XYCQ75NKhcOHUMHentOE7OcVCIXXDvOm0jKFfQG2Gg==

web-vitals@^4.2.0:
web-vitals@^4.2.4:
  version "4.2.4"
  resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-4.2.4.tgz#1d20bc8590a37769bd0902b289550936069184b7"
  integrity sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==
10 go.mod
@@ -8,7 +8,7 @@ require (
	github.com/ClickHouse/clickhouse-go/v2 v2.40.1
	github.com/DATA-DOG/go-sqlmock v1.5.2
	github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
	github.com/SigNoz/signoz-otel-collector v0.129.4
	github.com/SigNoz/signoz-otel-collector v0.129.10-rc.7
	github.com/antlr4-go/antlr/v4 v4.13.1
	github.com/antonmedv/expr v1.15.3
	github.com/cespare/xxhash/v2 v2.3.0
@@ -37,7 +37,6 @@ require (
	github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
	github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
	github.com/opentracing/opentracing-go v1.2.0
	github.com/patrickmn/go-cache v2.1.0+incompatible
	github.com/pkg/errors v0.9.1
	github.com/prometheus/alertmanager v0.28.1
	github.com/prometheus/client_golang v1.23.2
@@ -87,12 +86,19 @@ require (
)

require (
	github.com/bytedance/gopkg v0.1.3 // indirect
	github.com/bytedance/sonic v1.14.1 // indirect
	github.com/bytedance/sonic/loader v0.3.0 // indirect
	github.com/cloudwego/base64x v0.1.6 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/ncruces/go-strftime v0.1.9 // indirect
	github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
	github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
	go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
	go.yaml.in/yaml/v2 v2.4.2 // indirect
	golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
	modernc.org/libc v1.66.10 // indirect
	modernc.org/mathutil v1.7.1 // indirect
	modernc.org/memory v1.11.0 // indirect
18 go.sum
@@ -106,8 +106,8 @@ github.com/SigNoz/expr v1.17.7-beta h1:FyZkleM5dTQ0O6muQfwGpoH5A2ohmN/XTasRCO72g
github.com/SigNoz/expr v1.17.7-beta/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/signoz-otel-collector v0.129.4 h1:DGDu9y1I1FU+HX4eECPGmfhnXE4ys4yr7LL6znbf6to=
github.com/SigNoz/signoz-otel-collector v0.129.4/go.mod h1:xyR+coBzzO04p6Eu+ql2RVYUl/jFD+8hD9lArcc9U7g=
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.7 h1:r8/+t3ARWek9+X5aH05qavdA9ATbkssfssHh/zjzsEM=
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.7/go.mod h1:4eJCRUd/P4OiCHXvGYZK8q6oyBVGJFVj/G6qKSoN/TQ=
github.com/Yiling-J/theine-go v0.6.2 h1:1GeoXeQ0O0AUkiwj2S9Jc0Mzx+hpqzmqsJ4kIC4M9AY=
github.com/Yiling-J/theine-go v0.6.2/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU=
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
@@ -162,6 +162,12 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
github.com/bytedance/sonic v1.14.1 h1:FBMC0zVz5XUmE4z9wF4Jey0An5FueFvOsTKKKtwIl7w=
github.com/bytedance/sonic v1.14.1/go.mod h1:gi6uhQLMbTdeP0muCnrjHLeCUPyb70ujhnNlhOylAFc=
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
@@ -178,6 +184,8 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
@@ -786,8 +794,6 @@ github.com/ovh/go-ovh v1.7.0/go.mod h1:cTVDnl94z4tl8pP1uZ/8jlVxntjSIf09bNcQ5TJSC
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/paulmach/orb v0.11.1 h1:3koVegMC4X/WeiXYz9iswopaTwMem53NzTJuTF20JzU=
github.com/paulmach/orb v0.11.1/go.mod h1:5mULz1xQfs3bmQm63QEJA6lNGujuRafwA5S/EnuLaLU=
github.com/paulmach/protoscan v0.2.1/go.mod h1:SpcSwydNLrxUGSDvXvO0P7g7AuhJ7lcKfDlhJCDw2gY=
@@ -993,6 +999,8 @@ github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GH
github.com/trivago/tgo v1.0.7 h1:uaWH/XIy9aWYWpjm2CU3RpcqZXmX2ysQ9/Go+d9gyrM=
github.com/trivago/tgo v1.0.7/go.mod h1:w4dpD+3tzNIIiIfkWWa85w5/B77tlvdZckQ+6PkFnhc=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/uptrace/bun v1.2.9 h1:OOt2DlIcRUMSZPr6iXDFg/LaQd59kOxbAjpIVHddKRs=
github.com/uptrace/bun v1.2.9/go.mod h1:r2ZaaGs9Ru5bpGTr8GQfp8jp+TlCav9grYCPOu2CJSg=
github.com/uptrace/bun/dialect/pgdialect v1.2.9 h1:caf5uFbOGiXvadV6pA5gn87k0awFFxL1kuuY3SpxnWk=
@@ -1237,6 +1245,8 @@ go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=

@@ -24,28 +24,26 @@ var (
var _ authn.CallbackAuthN = (*AuthN)(nil)

type AuthN struct {
    oidcProvider *oidc.Provider
    store        authtypes.AuthNStore
    store authtypes.AuthNStore
}

func New(ctx context.Context, store authtypes.AuthNStore) (*AuthN, error) {
    oidcProvider, err := oidc.NewProvider(ctx, issuerURL)
    if err != nil {
        return nil, err
    }

    return &AuthN{
        oidcProvider: oidcProvider,
        store:        store,
        store: store,
    }, nil
}

func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
    oidcProvider, err := oidc.NewProvider(ctx, issuerURL)
    if err != nil {
        return "", err
    }

    if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderGoogleAuth {
        return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "domain type is not google")
    }

    oauth2Config := a.oauth2Config(siteURL, authDomain)
    oauth2Config := a.oauth2Config(siteURL, authDomain, oidcProvider)

    return oauth2Config.AuthCodeURL(
        authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String(),
@@ -54,6 +52,11 @@ func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *auth
}

func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtypes.CallbackIdentity, error) {
    oidcProvider, err := oidc.NewProvider(ctx, issuerURL)
    if err != nil {
        return nil, err
    }

    if err := query.Get("error"); err != "" {
        return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: error while authenticating").WithAdditional(query.Get("error_description"))
    }
@@ -68,7 +71,7 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
        return nil, err
    }

    oauth2Config := a.oauth2Config(state.URL, authDomain)
    oauth2Config := a.oauth2Config(state.URL, authDomain, oidcProvider)
    token, err := oauth2Config.Exchange(ctx, query.Get("code"))
    if err != nil {
        var retrieveError *oauth2.RetrieveError
@@ -84,7 +87,7 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
        return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "google: no id_token in token response")
    }

    verifier := a.oidcProvider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().Google.ClientID})
    verifier := oidcProvider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().Google.ClientID})
    idToken, err := verifier.Verify(ctx, rawIDToken)
    if err != nil {
        return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "google: failed to verify token").WithAdditional(err.Error())
@@ -114,11 +117,11 @@

}

func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain) *oauth2.Config {
func (a *AuthN) oauth2Config(siteURL *url.URL, authDomain *authtypes.AuthDomain, provider *oidc.Provider) *oauth2.Config {
    return &oauth2.Config{
        ClientID:     authDomain.AuthDomainConfig().Google.ClientID,
        ClientSecret: authDomain.AuthDomainConfig().Google.ClientSecret,
        Endpoint:     a.oidcProvider.Endpoint(),
        Endpoint:     provider.Endpoint(),
        Scopes:       scopes,
        RedirectURL: (&url.URL{
            Scheme: siteURL.Scheme,

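The hunks above shift Google OIDC provider construction into each request path: `LoginURL` and `HandleCallback` now call `oidc.NewProvider` themselves and thread the fresh provider into `oauth2Config` and the token verifier, so stale discovery metadata cached at startup can no longer break later logins. A minimal sketch of the per-request pattern, assuming the `github.com/coreos/go-oidc/v3` package that the `oidc` identifiers here suggest (client values and redirect URL are placeholders):

```go
package main

import (
	"context"
	"fmt"

	"github.com/coreos/go-oidc/v3/oidc"
	"golang.org/x/oauth2"
)

// verifyGoogleIDToken mirrors the diff: discovery metadata is fetched per call
// instead of being cached on a struct field at construction time.
func verifyGoogleIDToken(ctx context.Context, clientID, clientSecret, redirectURL, code string) error {
	provider, err := oidc.NewProvider(ctx, "https://accounts.google.com") // issuerURL in the diff
	if err != nil {
		return err
	}

	conf := &oauth2.Config{
		ClientID:     clientID,
		ClientSecret: clientSecret,
		Endpoint:     provider.Endpoint(), // endpoint comes from the fresh provider
		RedirectURL:  redirectURL,
		Scopes:       []string{oidc.ScopeOpenID, "email", "profile"},
	}

	token, err := conf.Exchange(ctx, code)
	if err != nil {
		return err
	}

	rawIDToken, ok := token.Extra("id_token").(string)
	if !ok {
		return fmt.Errorf("no id_token in token response")
	}

	// The verifier is derived from the same per-request provider.
	_, err = provider.Verifier(&oidc.Config{ClientID: clientID}).Verify(ctx, rawIDToken)
	return err
}
```

The trade-off is one extra discovery round-trip per login, in exchange for never serving endpoints or keys that have rotated since process start.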
@@ -27,7 +27,7 @@ func (a *AuthN) Authenticate(ctx context.Context, email string, password string,
    }

    if !factorPassword.Equals(password) {
        return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email orpassword")
        return nil, errors.New(errors.TypeUnauthenticated, types.ErrCodeIncorrectPassword, "invalid email or password")
    }

    return authtypes.NewIdentity(user.ID, orgID, user.Email, user.Role), nil

10 pkg/cache/config.go vendored
@@ -1,14 +1,12 @@
package cache

import (
    "time"

    "github.com/SigNoz/signoz/pkg/factory"
)

type Memory struct {
    TTL time.Duration `mapstructure:"ttl"`
    CleanupInterval time.Duration `mapstructure:"cleanup_interval"`
    NumCounters int64 `mapstructure:"num_counters"`
    MaxCost int64 `mapstructure:"max_cost"`
}

type Redis struct {
@@ -32,8 +30,8 @@ func newConfig() factory.Config {
    return &Config{
        Provider: "memory",
        Memory: Memory{
            TTL: time.Hour * 168,
            CleanupInterval: 10 * time.Minute,
            NumCounters: 10 * 10000, // 10k cache entries * 10x as per ristretto
            MaxCost: 1 << 27, // 128 MB
        },
        Redis: Redis{
            Host: "localhost",

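The memory cache config swaps its operative sizing knobs: go-cache's `TTL`/`CleanupInterval` defaults give way to ristretto's `NumCounters`/`MaxCost`, where NumCounters follows ristretto's rule of thumb of roughly 10x the expected number of live entries and MaxCost caps the summed cost of resident entries. A minimal sizing sketch using the types from this diff; the target figures (~50k entries, 256 MB) are illustrative, not defaults from this PR:

```go
// Sketch: sizing the memory cache for ~50k entries within a 256 MB budget,
// following ristretto's NumCounters ≈ 10x expected-entries guidance.
cfg := cache.Config{
	Provider: "memory",
	Memory: cache.Memory{
		TTL:             time.Hour * 168, // one week, matching the default above
		CleanupInterval: 10 * time.Minute,
		NumCounters:     50_000 * 10, // admission counters, 10x expected entries
		MaxCost:         1 << 28,     // 256 MB total cost budget
	},
}
```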
90 pkg/cache/memorycache/provider.go vendored
@@ -11,14 +11,15 @@ import (
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/types/cachetypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    gocache "github.com/patrickmn/go-cache"
    "github.com/dgraph-io/ristretto/v2"
    semconv "go.opentelemetry.io/collector/semconv/v1.6.1"
    "go.opentelemetry.io/otel/attribute"
    "go.opentelemetry.io/otel/metric"
    "go.opentelemetry.io/otel/trace"
)

type provider struct {
    cc *gocache.Cache
    cc *ristretto.Cache[string, any]
    config cache.Config
    settings factory.ScopedProviderSettings
}
@@ -30,8 +31,62 @@ func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] {
func New(ctx context.Context, settings factory.ProviderSettings, config cache.Config) (cache.Cache, error) {
    scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/cache/memorycache")

    cc, err := ristretto.NewCache(&ristretto.Config[string, any]{
        NumCounters: config.Memory.NumCounters,
        MaxCost:     config.Memory.MaxCost,
        BufferItems: 64,
        Metrics:     true,
    })
    if err != nil {
        return nil, err
    }

    meter := scopedProviderSettings.Meter()
    telemetry, err := newMetrics(meter)
    if err != nil {
        return nil, err
    }

    _, err = meter.RegisterCallback(func(ctx context.Context, o metric.Observer) error {
        metrics := cc.Metrics
        attributes := []attribute.KeyValue{
            attribute.String("provider", "memorycache"),
        }
        o.ObserveFloat64(telemetry.cacheRatio, metrics.Ratio(), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.cacheHits, int64(metrics.Hits()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.cacheMisses, int64(metrics.Misses()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.costAdded, int64(metrics.CostAdded()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.costEvicted, int64(metrics.CostEvicted()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.keysAdded, int64(metrics.KeysAdded()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.keysEvicted, int64(metrics.KeysEvicted()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.keysUpdated, int64(metrics.KeysUpdated()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.setsDropped, int64(metrics.SetsDropped()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.setsRejected, int64(metrics.SetsRejected()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.getsDropped, int64(metrics.GetsDropped()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.getsKept, int64(metrics.GetsKept()), metric.WithAttributes(attributes...))
        o.ObserveInt64(telemetry.totalCost, int64(cc.MaxCost()), metric.WithAttributes(attributes...))
        return nil
    },
        telemetry.cacheRatio,
        telemetry.cacheHits,
        telemetry.cacheMisses,
        telemetry.costAdded,
        telemetry.costEvicted,
        telemetry.keysAdded,
        telemetry.keysEvicted,
        telemetry.keysUpdated,
        telemetry.setsDropped,
        telemetry.setsRejected,
        telemetry.getsDropped,
        telemetry.getsKept,
        telemetry.totalCost,
    )
    if err != nil {
        return nil, err
    }

    return &provider{
        cc: gocache.New(config.Memory.TTL, config.Memory.CleanupInterval),
        cc: cc,
        settings: scopedProviderSettings,
        config: config,
    }, nil
@@ -51,19 +106,32 @@ func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey s
    }

    if cloneable, ok := data.(cachetypes.Cloneable); ok {
        span.SetAttributes(attribute.Bool("db.cloneable", true))
        span.SetAttributes(attribute.Bool("memory.cloneable", true))
        span.SetAttributes(attribute.Int64("memory.cost", 1))
        toCache := cloneable.Clone()
        provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, ttl)
        // In case of contention we are choosing to evict the cloneable entries first hence cost is set to 1
        if ok := provider.cc.SetWithTTL(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, 1, ttl); !ok {
            return errors.New(errors.TypeInternal, errors.CodeInternal, "error writing to cache")
        }

        provider.cc.Wait()
        return nil
    }

    span.SetAttributes(attribute.Bool("db.cloneable", false))
    toCache, err := data.MarshalBinary()
    cost := int64(len(toCache))
    if err != nil {
        return err
    }

    provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, ttl)
    span.SetAttributes(attribute.Bool("memory.cloneable", false))
    span.SetAttributes(attribute.Int64("memory.cost", cost))

    if ok := provider.cc.SetWithTTL(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), toCache, 1, ttl); !ok {
        return errors.New(errors.TypeInternal, errors.CodeInternal, "error writing to cache")
    }

    provider.cc.Wait()
    return nil
}
@@ -86,7 +154,7 @@ func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, cacheKey s
    }

    if cloneable, ok := cachedData.(cachetypes.Cloneable); ok {
        span.SetAttributes(attribute.Bool("db.cloneable", true))
        span.SetAttributes(attribute.Bool("memory.cloneable", true))
        // check if the destination value is settable
        dstv := reflect.ValueOf(dest)
        if !dstv.Elem().CanSet() {
@@ -107,7 +175,7 @@ func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, cacheKey s
    }

    if fromCache, ok := cachedData.([]byte); ok {
        span.SetAttributes(attribute.Bool("db.cloneable", false))
        span.SetAttributes(attribute.Bool("memory.cloneable", false))
        if err = dest.UnmarshalBinary(fromCache); err != nil {
            return err
        }
@@ -126,11 +194,11 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, cacheKe
    ))
    defer span.End()

    provider.cc.Delete(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
    provider.cc.Del(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
}

func (provider *provider) DeleteMany(_ context.Context, orgID valuer.UUID, cacheKeys []string) {
    for _, cacheKey := range cacheKeys {
        provider.cc.Delete(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
        provider.cc.Del(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"))
    }
}

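Unlike go-cache's `Set`, ristretto's `SetWithTTL` takes an explicit cost, can reject a write under buffer contention (hence the error on `!ok` above), and applies writes asynchronously, which is why the provider calls `Wait()` before returning so an immediate `Get` is not racy. A standalone sketch of those semantics, using the same `github.com/dgraph-io/ristretto/v2` import as the diff (key and sizes are illustrative):

```go
package main

import (
	"fmt"
	"time"

	"github.com/dgraph-io/ristretto/v2"
)

func main() {
	cc, err := ristretto.NewCache(&ristretto.Config[string, any]{
		NumCounters: 10_000,  // ~10x the expected 1k entries
		MaxCost:     1 << 20, // 1 MB cost budget
		BufferItems: 64,
	})
	if err != nil {
		panic(err)
	}

	// Cost 1: cheap entries are the first candidates for eviction under
	// pressure, matching the comment on the cloneable branch above.
	if ok := cc.SetWithTTL("org::key", []byte("value"), 1, time.Minute); !ok {
		fmt.Println("write dropped by ristretto's buffers")
		return
	}

	// Writes are buffered; Wait flushes them so the Get below sees the value.
	cc.Wait()

	if v, found := cc.Get("org::key"); found {
		fmt.Printf("cached: %s\n", v)
	}
}
```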
28 pkg/cache/memorycache/provider_test.go vendored
@@ -55,8 +55,8 @@ func (cacheable *CacheableB) UnmarshalBinary(data []byte) error {

func TestCloneableSetWithNilPointer(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)
@@ -66,8 +66,8 @@

func TestCacheableSetWithNilPointer(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)
@@ -77,8 +77,8 @@

func TestCloneableSetGet(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)
@@ -106,8 +106,8 @@

func TestCacheableSetGet(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)
@@ -135,8 +135,8 @@

func TestGetWithNilPointer(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)
@@ -146,8 +146,8 @@

func TestSetGetWithDifferentTypes(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)
@@ -167,8 +167,8 @@

func TestCloneableConcurrentSetGet(t *testing.T) {
    cache, err := New(context.Background(), factorytest.NewSettings(), cache.Config{Provider: "memory", Memory: cache.Memory{
        TTL: 10 * time.Second,
        CleanupInterval: 10 * time.Second,
        NumCounters: 10 * 1000,
        MaxCost: 1 << 26,
    }})
    require.NoError(t, err)

110 pkg/cache/memorycache/telemetry.go vendored Normal file
@@ -0,0 +1,110 @@
package memorycache

import (
    "github.com/SigNoz/signoz/pkg/errors"
    "go.opentelemetry.io/otel/metric"
)

type telemetry struct {
    cacheRatio   metric.Float64ObservableGauge
    cacheHits    metric.Int64ObservableGauge
    cacheMisses  metric.Int64ObservableGauge
    costAdded    metric.Int64ObservableGauge
    costEvicted  metric.Int64ObservableGauge
    keysAdded    metric.Int64ObservableGauge
    keysEvicted  metric.Int64ObservableGauge
    keysUpdated  metric.Int64ObservableGauge
    setsDropped  metric.Int64ObservableGauge
    setsRejected metric.Int64ObservableGauge
    getsDropped  metric.Int64ObservableGauge
    getsKept     metric.Int64ObservableGauge
    totalCost    metric.Int64ObservableGauge
}

func newMetrics(meter metric.Meter) (*telemetry, error) {
    var errs error
    cacheRatio, err := meter.Float64ObservableGauge("signoz.cache.ratio", metric.WithDescription("Ratio is the number of Hits over all accesses (Hits + Misses). This is the percentage of successful Get calls."), metric.WithUnit("1"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    cacheHits, err := meter.Int64ObservableGauge("signoz.cache.hits", metric.WithDescription("Hits is the number of Get calls where a value was found for the corresponding key."))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    cacheMisses, err := meter.Int64ObservableGauge("signoz.cache.misses", metric.WithDescription("Misses is the number of Get calls where a value was not found for the corresponding key"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    costAdded, err := meter.Int64ObservableGauge("signoz.cache.cost.added", metric.WithDescription("CostAdded is the sum of costs that have been added (successful Set calls)"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    costEvicted, err := meter.Int64ObservableGauge("signoz.cache.cost.evicted", metric.WithDescription("CostEvicted is the sum of all costs that have been evicted"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    keysAdded, err := meter.Int64ObservableGauge("signoz.cache.keys.added", metric.WithDescription("KeysAdded is the total number of Set calls where a new key-value item was added"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    keysEvicted, err := meter.Int64ObservableGauge("signoz.cache.keys.evicted", metric.WithDescription("KeysEvicted is the total number of keys evicted"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    keysUpdated, err := meter.Int64ObservableGauge("signoz.cache.keys.updated", metric.WithDescription("KeysUpdated is the total number of Set calls where the value was updated"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    setsDropped, err := meter.Int64ObservableGauge("signoz.cache.sets.dropped", metric.WithDescription("SetsDropped is the number of Set calls that don't make it into internal buffers (due to contention or some other reason)"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    setsRejected, err := meter.Int64ObservableGauge("signoz.cache.sets.rejected", metric.WithDescription("SetsRejected is the number of Set calls rejected by the policy (TinyLFU)"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    getsDropped, err := meter.Int64ObservableGauge("signoz.cache.gets.dropped", metric.WithDescription("GetsDropped is the number of Get calls that don't make it into internal buffers (due to contention or some other reason)"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    getsKept, err := meter.Int64ObservableGauge("signoz.cache.gets.kept", metric.WithDescription("GetsKept is the number of Get calls that make it into internal buffers"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    totalCost, err := meter.Int64ObservableGauge("signoz.cache.total.cost", metric.WithDescription("TotalCost is the available cost configured for the cache"))
    if err != nil {
        errs = errors.Join(errs, err)
    }

    if errs != nil {
        return nil, errs
    }

    return &telemetry{
        cacheRatio:   cacheRatio,
        cacheHits:    cacheHits,
        cacheMisses:  cacheMisses,
        costAdded:    costAdded,
        costEvicted:  costEvicted,
        keysAdded:    keysAdded,
        keysEvicted:  keysEvicted,
        keysUpdated:  keysUpdated,
        setsDropped:  setsDropped,
        setsRejected: setsRejected,
        getsDropped:  getsDropped,
        getsKept:     getsKept,
        totalCost:    totalCost,
    }, nil
}

@@ -208,3 +208,18 @@ func WrapUnexpectedf(cause error, code Code, format string, args ...any) *base {
func NewUnexpectedf(code Code, format string, args ...any) *base {
    return Newf(TypeInvalidInput, code, format, args...)
}

// NewMethodNotAllowedf is a wrapper around Newf with TypeMethodNotAllowed.
func NewMethodNotAllowedf(code Code, format string, args ...any) *base {
    return Newf(TypeMethodNotAllowed, code, format, args...)
}

// WrapTimeoutf is a wrapper around Wrapf with TypeTimeout.
func WrapTimeoutf(cause error, code Code, format string, args ...any) *base {
    return Wrapf(cause, TypeTimeout, code, format, args...)
}

// NewTimeoutf is a wrapper around Newf with TypeTimeout.
func NewTimeoutf(code Code, format string, args ...any) *base {
    return Newf(TypeTimeout, code, format, args...)
}

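These additions round out the typed constructors in `pkg/errors`; the new promote handler later in this diff already uses `errors.NewMethodNotAllowedf(errors.CodeMethodNotAllowed, "method not allowed")` to reject unsupported verbs. A short sketch of the wrap-vs-new split, where `codeUpstreamTimeout` and `callUpstream` are hypothetical names for illustration only:

```go
// Sketch: coded errors in the style of this package. MustNewCode panics on an
// invalid code string, so codes are declared once at package scope.
var codeUpstreamTimeout = errors.MustNewCode("upstream_timeout")

func fetch(ctx context.Context) error {
	if err := callUpstream(ctx); err != nil { // callUpstream is hypothetical
		// WrapTimeoutf keeps the cause while attaching TypeTimeout + a code,
		// so callers can branch on the type instead of matching strings.
		return errors.WrapTimeoutf(err, codeUpstreamTimeout, "upstream call timed out")
	}
	return nil
}
```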
@@ -13,16 +13,18 @@ import (
)

type noopInstrumentation struct {
    logger         *slog.Logger
    meterProvider  sdkmetric.MeterProvider
    tracerProvider sdktrace.TracerProvider
    logger             *slog.Logger
    meterProvider      sdkmetric.MeterProvider
    tracerProvider     sdktrace.TracerProvider
    prometheusRegistry *prometheus.Registry
}

func New() instrumentation.Instrumentation {
    return &noopInstrumentation{
        logger:         slog.New(slog.DiscardHandler),
        meterProvider:  noopmetric.NewMeterProvider(),
        tracerProvider: nooptrace.NewTracerProvider(),
        logger:             slog.New(slog.DiscardHandler),
        meterProvider:      noopmetric.NewMeterProvider(),
        tracerProvider:     nooptrace.NewTracerProvider(),
        prometheusRegistry: prometheus.NewRegistry(),
    }
}
@@ -39,7 +41,7 @@ func (i *noopInstrumentation) TracerProvider() sdktrace.TracerProvider {
}

func (i *noopInstrumentation) PrometheusRegisterer() prometheus.Registerer {
    return prometheus.NewRegistry()
    return i.prometheusRegistry
}

func (i *noopInstrumentation) ToProviderSettings() factory.ProviderSettings {

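The fix here is subtle: the noop implementation previously returned a brand-new `prometheus.NewRegistry()` on every `PrometheusRegisterer()` call, so collectors registered through it were invisible to anything gathering from a different registry. Storing one registry on the struct makes register and gather observe the same state. A small sketch of why the shared instance matters, with a hypothetical counter for illustration:

```go
// Sketch: register and gather must go through the same registry.
reg := prometheus.NewRegistry()

c := prometheus.NewCounter(prometheus.CounterOpts{
	Name: "demo_requests_total", // hypothetical metric
	Help: "Requests seen by the demo handler.",
})
if err := reg.Register(c); err != nil {
	panic(err)
}
c.Inc()

// Gathering from the same registry sees the counter; calling
// prometheus.NewRegistry() here instead would return nothing.
families, _ := reg.Gather()
fmt.Println(len(families)) // 1
```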
141 pkg/modules/promote/implpromote/handler.go Normal file
@@ -0,0 +1,141 @@
package implpromote

import (
    "encoding/json"
    "net/http"
    "strings"

    schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/http/render"
    "github.com/SigNoz/signoz/pkg/modules/promote"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type handler struct {
    module promote.Module
}

func NewHandler(module promote.Module) promote.Handler {
    return &handler{module: module}
}

func (h *handler) HandlePromote(w http.ResponseWriter, r *http.Request) {
    _, err := authtypes.ClaimsFromContext(r.Context())
    if err != nil {
        render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
        return
    }

    switch r.Method {
    case http.MethodGet:
        h.GetPromotedAndIndexedPaths(w, r)
        return
    case http.MethodPost:
        h.PromotePaths(w, r)
        return
    case http.MethodDelete:
        h.DropIndex(w, r)
        return
    default:
        render.Error(w, errors.NewMethodNotAllowedf(errors.CodeMethodNotAllowed, "method not allowed"))
        return
    }
}

func (h *handler) DropIndex(w http.ResponseWriter, r *http.Request) {
    var req promotetypes.PromotePath
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "Invalid data"))
        return
    }

    err := h.module.DropIndex(r.Context(), req)
    if err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusOK, nil)
}

func (h *handler) PromotePaths(w http.ResponseWriter, r *http.Request) {
    var req []promotetypes.PromotePath
    if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
        render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "Invalid data"))
        return
    }

    // Delegate all processing to the reader
    err := h.module.PromoteAndIndexPaths(r.Context(), req...)
    if err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusOK, nil)
}

func (h *handler) GetPromotedAndIndexedPaths(w http.ResponseWriter, r *http.Request) {
    response, err := func() ([]promotetypes.PromotePath, error) {
        indexes, err := h.module.ListBodySkipIndexes(r.Context())
        if err != nil {
            return nil, err
        }
        aggr := map[string][]promotetypes.WrappedIndex{}
        for _, index := range indexes {
            path, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
            if err != nil {
                return nil, err
            }

            // clean backticks from the path
            path = strings.ReplaceAll(path, "`", "")

            aggr[path] = append(aggr[path], promotetypes.WrappedIndex{
                ColumnType: columnType,
                Type: index.Type,
                Granularity: index.Granularity,
            })
        }
        promotedPaths, err := h.module.ListPromotedPaths(r.Context())
        if err != nil {
            return nil, err
        }

        response := []promotetypes.PromotePath{}
        for _, path := range promotedPaths {
            fullPath := telemetrylogs.BodyPromotedColumnPrefix + path
            path = telemetrylogs.BodyJSONStringSearchPrefix + path
            item := promotetypes.PromotePath{
                Path: path,
                Promote: true,
            }
            indexes, ok := aggr[fullPath]
            if ok {
                item.Indexes = indexes
                delete(aggr, fullPath)
            }
            response = append(response, item)
        }

        // add the paths that are not promoted but have indexes
        for path, indexes := range aggr {
            path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
            path = telemetrylogs.BodyJSONStringSearchPrefix + path
            response = append(response, promotetypes.PromotePath{
                Path: path,
                Indexes: indexes,
            })
        }
        return response, nil
    }()
    if err != nil {
        render.Error(w, err)
        return
    }

    render.Success(w, http.StatusOK, response)
}

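`HandlePromote` multiplexes three verbs on one route: GET lists promoted and indexed paths, POST accepts a JSON array of `promotetypes.PromotePath`, and DELETE accepts a single object for `DropIndex`. The JSON struct tags of `promotetypes.PromotePath` are not shown in this diff, so the payload below is a hypothetical shape for illustration only, as is the endpoint URL:

```go
// Sketch only: the field names and the route are assumptions, not taken from
// this PR. Check promotetypes for the real struct tags and the router for the
// real path.
payload := []map[string]any{
	{
		"path":    "body.user.id", // JSON path inside the log body (hypothetical)
		"promote": true,           // also materialize into the promoted column
		"indexes": []map[string]any{
			{"type": "tokenbf_v1(10240, 3, 0)", "granularity": 1},
		},
	},
}
buf, _ := json.Marshal(payload)
promoteURL := baseURL + "/promote" // placeholder route
resp, err := http.Post(promoteURL, "application/json", bytes.NewReader(buf))
```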
238 pkg/modules/promote/implpromote/module.go Normal file
@@ -0,0 +1,238 @@
package implpromote

import (
    "context"
    "fmt"
    "maps"
    "slices"
    "strings"
    "time"

    schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/modules/promote"
    "github.com/SigNoz/signoz/pkg/telemetrylogs"
    "github.com/SigNoz/signoz/pkg/telemetrymetadata"
    "github.com/SigNoz/signoz/pkg/telemetrystore"
    "github.com/SigNoz/signoz/pkg/types/promotetypes"
    "github.com/huandu/go-sqlbuilder"
)

var (
    CodeFailedToPrepareBatch = errors.MustNewCode("failed_to_prepare_batch_promoted_paths")
    CodeFailedToSendBatch = errors.MustNewCode("failed_to_send_batch_promoted_paths")
    CodeFailedToAppendPath = errors.MustNewCode("failed_to_append_path_promoted_paths")
    CodeFailedToCreateIndex = errors.MustNewCode("failed_to_create_index_promoted_paths")
    CodeFailedToDropIndex = errors.MustNewCode("failed_to_drop_index_promoted_paths")
    CodeFailedToQueryPromotedPaths = errors.MustNewCode("failed_to_query_promoted_paths")
)

type module struct {
    store telemetrystore.TelemetryStore
}

func NewModule(store telemetrystore.TelemetryStore) promote.Module {
    return &module{store: store}
}

func (m *module) ListBodySkipIndexes(ctx context.Context) ([]schemamigrator.Index, error) {
    return telemetrymetadata.ListLogsJSONIndexes(ctx, m.store)
}

func (m *module) ListPromotedPaths(ctx context.Context) ([]string, error) {
    paths, err := telemetrymetadata.ListPromotedPaths(ctx, m.store.ClickhouseDB())
    if err != nil {
        return nil, err
    }
    return slices.Collect(maps.Keys(paths)), nil
}

// PromotePaths inserts provided JSON paths into the promoted paths table for logs queries.
func (m *module) PromotePaths(ctx context.Context, paths []string) error {
    if len(paths) == 0 {
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
    }

    batch, err := m.store.ClickhouseDB().PrepareBatch(ctx,
        fmt.Sprintf("INSERT INTO %s.%s (path, created_at) VALUES", telemetrymetadata.DBName,
            telemetrymetadata.PromotedPathsTableName))
    if err != nil {
        return errors.WrapInternalf(err, CodeFailedToPrepareBatch, "failed to prepare batch")
    }

    nowMs := uint64(time.Now().UnixMilli())
    for _, p := range paths {
        trimmed := strings.TrimSpace(p)
        if trimmed == "" {
            continue
        }
        if err := batch.Append(trimmed, nowMs); err != nil {
            _ = batch.Abort()
            return errors.WrapInternalf(err, CodeFailedToAppendPath, "failed to append path")
        }
    }

    if err := batch.Send(); err != nil {
        return errors.WrapInternalf(err, CodeFailedToSendBatch, "failed to send batch")
    }
    return nil
}

// createIndexes creates string ngram + token filter indexes on JSON path subcolumns for LIKE queries.
func (m *module) createIndexes(ctx context.Context, indexes []schemamigrator.Index) error {
    if len(indexes) == 0 {
        return nil
    }

    for _, index := range indexes {
        alterStmt := schemamigrator.AlterTableAddIndex{
            Database: telemetrylogs.DBName,
            Table: telemetrylogs.LogsV2LocalTableName,
            Index: index,
        }
        op := alterStmt.OnCluster(m.store.Cluster())
        if err := m.store.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
            return errors.WrapInternalf(err, CodeFailedToCreateIndex, "failed to create index")
        }
    }

    return nil
}

func (m *module) DropIndex(ctx context.Context, path promotetypes.PromotePath) error {
    // validate the paths
    if err := path.Validate(); err != nil {
        return err
    }

    promoted, err := telemetrymetadata.IsPathPromoted(ctx, m.store.ClickhouseDB(), path.Path)
    if err != nil {
        return err
    }
    parentColumn := telemetrylogs.LogsV2BodyJSONColumn
    if promoted {
        parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
    }

    for _, index := range path.Indexes {
        typeIndex := schemamigrator.IndexTypeTokenBF
        switch {
        case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
            typeIndex = schemamigrator.IndexTypeNGramBF
        case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
            typeIndex = schemamigrator.IndexTypeTokenBF
        case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
            typeIndex = schemamigrator.IndexTypeMinMax
        default:
            return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
        }

        alterStmt := schemamigrator.AlterTableDropIndex{
            Database: telemetrylogs.DBName,
            Table: telemetrylogs.LogsV2LocalTableName,
            Index: schemamigrator.Index{
                Name: schemamigrator.JSONSubColumnIndexName(parentColumn, path.Path, index.JSONDataType.StringValue(), typeIndex),
                Expression: schemamigrator.JSONSubColumnIndexExpr(parentColumn, path.Path, index.JSONDataType.StringValue()),
                Type: index.Type,
                Granularity: index.Granularity,
            },
        }
        op := alterStmt.OnCluster(m.store.Cluster())
        if err := m.store.ClickhouseDB().Exec(ctx, op.ToSQL()); err != nil {
            return errors.WrapInternalf(err, CodeFailedToDropIndex, "failed to drop index")
        }
    }

    return nil
}

// PromoteAndIndexPaths handles promoting paths and creating indexes in one call.
func (m *module) PromoteAndIndexPaths(
    ctx context.Context,
    paths ...promotetypes.PromotePath,
) error {
    if len(paths) == 0 {
        return errors.NewInvalidInputf(errors.CodeInvalidInput, "paths cannot be empty")
    }

    // validate the paths
    for _, path := range paths {
        if err := path.Validate(); err != nil {
            return err
        }
    }

    sb := sqlbuilder.NewSelectBuilder().From(fmt.Sprintf("%s.%s", telemetrymetadata.DBName, telemetrymetadata.PromotedPathsTableName)).Select("path")
    cond := []string{}
    for _, path := range paths {
        cond = append(cond, sb.Equal("path", path.Path))
    }
    sb.Where(sb.Or(cond...))
    query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
    rows, err := m.store.ClickhouseDB().Query(ctx, query, args...)
    if err != nil {
        return errors.WrapInternalf(err, CodeFailedToQueryPromotedPaths, "failed to query promoted paths")
    }
    defer rows.Close()

    // Load existing promoted paths once
    existingPromotedPaths := make(map[string]struct{})
    for rows.Next() {
        var p string
        if err := rows.Scan(&p); err == nil {
            existingPromotedPaths[p] = struct{}{}
        }
    }

    var toInsert []string
    indexes := []schemamigrator.Index{}
    for _, it := range paths {
        if it.Promote {
            if _, promoted := existingPromotedPaths[it.Path]; !promoted {
                toInsert = append(toInsert, it.Path)
            }
        }
        if len(it.Indexes) > 0 {
            parentColumn := telemetrylogs.LogsV2BodyJSONColumn
            // if the path is already promoted or is being promoted, add it to the promoted column
            if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
                parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
            }

            for _, index := range it.Indexes {
                typeIndex := schemamigrator.IndexTypeTokenBF
                switch {
                case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeNGramBF)):
                    typeIndex = schemamigrator.IndexTypeNGramBF
                case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeTokenBF)):
                    typeIndex = schemamigrator.IndexTypeTokenBF
                case strings.HasPrefix(index.Type, string(schemamigrator.IndexTypeMinMax)):
                    typeIndex = schemamigrator.IndexTypeMinMax
                default:
                    return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid index type: %s", index.Type)
                }
                indexes = append(indexes, schemamigrator.Index{
                    Name: schemamigrator.JSONSubColumnIndexName(parentColumn, it.Path, index.JSONDataType.StringValue(), typeIndex),
                    Expression: schemamigrator.JSONSubColumnIndexExpr(parentColumn, it.Path, index.JSONDataType.StringValue()),
                    Type: index.Type,
                    Granularity: index.Granularity,
                })
            }
        }
    }

    if len(toInsert) > 0 {
        err := m.PromotePaths(ctx, toInsert)
        if err != nil {
            return err
        }
    }

    if len(indexes) > 0 {
        if err := m.createIndexes(ctx, indexes); err != nil {
            return err
        }
    }

    return nil
}

20 pkg/modules/promote/promote.go Normal file
@@ -0,0 +1,20 @@
package promote

import (
    "context"
    "net/http"

    schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
    "github.com/SigNoz/signoz/pkg/types/promotetypes"
)

type Module interface {
    ListBodySkipIndexes(ctx context.Context) ([]schemamigrator.Index, error)
    ListPromotedPaths(ctx context.Context) ([]string, error)
    PromoteAndIndexPaths(ctx context.Context, paths ...promotetypes.PromotePath) error
    DropIndex(ctx context.Context, path promotetypes.PromotePath) error
}

type Handler interface {
    HandlePromote(w http.ResponseWriter, r *http.Request)
}

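The interface/implementation split mirrors the repo's other modules: consumers depend on `promote.Module` and `promote.Handler`, while `implpromote` supplies both. A minimal wiring sketch under that assumption; the telemetry store and the route path are placeholders, not the repo's actual router setup:

```go
// Sketch: composing the promote module. The store comes from the app's
// dependency wiring; the route registered here is a placeholder path.
var store telemetrystore.TelemetryStore // assumed provided elsewhere

module := implpromote.NewModule(store)
handler := implpromote.NewHandler(module)

mux := http.NewServeMux()
mux.HandleFunc("/api/v1/promote", handler.HandlePromote) // placeholder path
```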
@@ -339,8 +339,8 @@ func createBenchmarkBucketCache(tb testing.TB) BucketCache {
    config := cache.Config{
        Provider: "memory",
        Memory: cache.Memory{
            TTL: time.Hour * 168,
            CleanupInterval: 10 * time.Minute,
            NumCounters: 10 * 1000,
            MaxCost: 1 << 26,
        },
    }
    memCache, err := cachetest.New(config)
@@ -26,8 +26,8 @@ func createTestCache(t *testing.T) cache.Cache {
    config := cache.Config{
        Provider: "memory",
        Memory: cache.Memory{
            TTL: time.Hour * 168,
            CleanupInterval: 10 * time.Minute,
            NumCounters: 10 * 1000,
            MaxCost: 1 << 26,
        },
    }
    memCache, err := cachetest.New(config)

@@ -1,7 +1,6 @@
|
||||
package agentConf
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types/opamptypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
@@ -25,6 +24,6 @@ type AgentFeature interface {
|
||||
// TODO(Raj): maybe refactor agentConf further and clean this up
|
||||
serializedSettingsUsed string,
|
||||
|
||||
apiErr *model.ApiError,
|
||||
err error,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -7,16 +7,26 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/opamptypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/pkg/errors"
|
||||
"go.uber.org/zap"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
var (
|
||||
CodeConfigVersionNotFound = errors.MustNewCode("config_version_not_found")
|
||||
CodeElementTypeRequired = errors.MustNewCode("element_type_required")
|
||||
CodeConfigElementsRequired = errors.MustNewCode("config_elements_required")
|
||||
CodeConfigVersionInsertFailed = errors.MustNewCode("config_version_insert_failed")
|
||||
CodeConfigElementInsertFailed = errors.MustNewCode("config_element_insert_failed")
|
||||
CodeConfigDeployStatusUpdateFailed = errors.MustNewCode("config_deploy_status_update_failed")
|
||||
CodeConfigHistoryGetFailed = errors.MustNewCode("config_history_get_failed")
|
||||
)
|
||||
|
||||
// Repo handles DDL and DML ops on ingestion rules
|
||||
type Repo struct {
|
||||
store sqlstore.SQLStore
|
||||
@@ -24,7 +34,7 @@ type Repo struct {
|
||||
|
||||
func (r *Repo) GetConfigHistory(
|
||||
ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, limit int,
|
||||
) ([]opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) ([]opamptypes.AgentConfigVersion, error) {
|
||||
var c []opamptypes.AgentConfigVersion
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&c).
|
||||
@@ -39,7 +49,7 @@ func (r *Repo) GetConfigHistory(
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
return nil, errors.WrapInternalf(err, CodeConfigHistoryGetFailed, "failed to get config history")
|
||||
}
|
||||
|
||||
incompleteStatuses := []opamptypes.DeployStatus{opamptypes.DeployInitiated, opamptypes.Deploying}
|
||||
@@ -54,7 +64,7 @@ func (r *Repo) GetConfigHistory(
|
||||
|
||||
func (r *Repo) GetConfigVersion(
|
||||
ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, v int,
|
||||
) (*opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) (*opamptypes.AgentConfigVersion, error) {
|
||||
var c opamptypes.AgentConfigVersion
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&c).
|
||||
@@ -69,9 +79,9 @@ func (r *Repo) GetConfigVersion(
|
||||
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, model.NotFoundError(err)
|
||||
return nil, errors.WrapNotFoundf(err, CodeConfigVersionNotFound, "config version not found")
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get config version")
|
||||
}
|
||||
|
||||
return &c, nil
|
||||
@@ -79,7 +89,7 @@ func (r *Repo) GetConfigVersion(
|
||||
|
||||
func (r *Repo) GetLatestVersion(
|
||||
ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType,
|
||||
) (*opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) (*opamptypes.AgentConfigVersion, error) {
|
||||
var c opamptypes.AgentConfigVersion
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&c).
|
||||
@@ -93,9 +103,9 @@ func (r *Repo) GetLatestVersion(
|
||||
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, model.NotFoundError(err)
|
||||
return nil, errors.WrapNotFoundf(err, CodeConfigVersionNotFound, "config latest version not found")
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get latest config version")
|
||||
}
|
||||
|
||||
return &c, nil
|
||||
@@ -103,18 +113,16 @@ func (r *Repo) GetLatestVersion(
|
||||
|
||||
func (r *Repo) insertConfig(
|
||||
ctx context.Context, orgId valuer.UUID, userId valuer.UUID, c *opamptypes.AgentConfigVersion, elements []string,
|
||||
) (fnerr *model.ApiError) {
|
||||
) error {
|
||||
|
||||
if c.ElementType.StringValue() == "" {
|
||||
return model.BadRequest(fmt.Errorf(
|
||||
"element type is required for creating agent config version",
|
||||
))
|
||||
return errors.NewInvalidInputf(CodeElementTypeRequired, "element type is required for creating agent config version")
|
||||
}
|
||||
|
||||
// allowing empty elements for logs - use case is deleting all pipelines
|
||||
if len(elements) == 0 && c.ElementType != opamptypes.ElementTypeLogPipelines {
|
||||
zap.L().Error("insert config called with no elements ", zap.String("ElementType", c.ElementType.StringValue()))
|
||||
return model.BadRequest(fmt.Errorf("config must have atleast one element"))
|
||||
return errors.NewInvalidInputf(CodeConfigElementsRequired, "config must have atleast one element")
|
||||
}
|
||||
|
||||
if c.Version != 0 {
|
||||
@@ -122,15 +130,13 @@ func (r *Repo) insertConfig(
|
||||
// in a monotonically increasing order starting with 1. hence, we reject insert
|
||||
// requests with version anything other than 0. here, 0 indicates un-assigned
|
||||
zap.L().Error("invalid version assignment while inserting agent config", zap.Int("version", c.Version), zap.String("ElementType", c.ElementType.StringValue()))
|
||||
return model.BadRequest(fmt.Errorf(
|
||||
"user defined versions are not supported in the agent config",
|
||||
))
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "user defined versions are not supported in the agent config")
|
||||
}
|
||||
|
||||
configVersion, err := r.GetLatestVersion(ctx, orgId, c.ElementType)
|
||||
if err != nil && err.Type() != model.ErrorNotFound {
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
zap.L().Error("failed to fetch latest config version", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("failed to fetch latest config version"))
|
||||
return err
|
||||
}
|
||||
|
||||
if configVersion != nil {
|
||||
@@ -141,7 +147,7 @@ func (r *Repo) insertConfig(
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if fnerr != nil {
|
||||
if err != nil {
|
||||
// remove all the damage (invalid rows from db)
|
||||
r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigVersion)).Where("id = ?", c.ID).Where("org_id = ?", orgId).Exec(ctx)
|
||||
r.store.BunDB().NewDelete().Model(new(opamptypes.AgentConfigElement)).Where("version_id = ?", c.ID).Exec(ctx)
|
||||
@@ -153,10 +159,9 @@ func (r *Repo) insertConfig(
|
||||
NewInsert().
|
||||
Model(c).
|
||||
Exec(ctx)
|
||||
|
||||
if dbErr != nil {
|
||||
zap.L().Error("error in inserting config version: ", zap.Error(dbErr))
|
||||
return model.InternalError(errors.Wrap(dbErr, "failed to insert ingestion rule"))
|
||||
return errors.WrapInternalf(dbErr, CodeConfigVersionInsertFailed, "failed to insert config version")
|
||||
}
|
||||
|
||||
for _, e := range elements {
|
||||
@@ -172,7 +177,7 @@ func (r *Repo) insertConfig(
|
||||
}
|
||||
_, dbErr = r.store.BunDB().NewInsert().Model(agentConfigElement).Exec(ctx)
|
||||
if dbErr != nil {
|
||||
return model.InternalError(dbErr)
|
||||
return errors.WrapInternalf(dbErr, CodeConfigElementInsertFailed, "failed to insert config element")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -214,8 +219,7 @@ func (r *Repo) updateDeployStatus(ctx context.Context,
|
||||
|
||||
func (r *Repo) updateDeployStatusByHash(
|
||||
ctx context.Context, orgId valuer.UUID, confighash string, status string, result string,
|
||||
) *model.ApiError {
|
||||
|
||||
) error {
|
||||
_, err := r.store.BunDB().NewUpdate().
|
||||
Model(new(opamptypes.AgentConfigVersion)).
|
||||
Set("deploy_status = ?", status).
|
||||
@@ -225,7 +229,7 @@ func (r *Repo) updateDeployStatusByHash(
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to update deploy status", zap.Error(err))
|
||||
return model.InternalError(errors.Wrap(err, "failed to update deploy status"))
|
||||
return errors.WrapInternalf(err, CodeConfigDeployStatusUpdateFailed, "failed to update deploy status")
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
filterprocessor "github.com/SigNoz/signoz/pkg/query-service/app/opamp/otelconfig/filterprocessor"
|
||||
tsp "github.com/SigNoz/signoz/pkg/query-service/app/opamp/otelconfig/tailsampler"
|
||||
@@ -16,13 +17,16 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/opamptypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/google/uuid"
|
||||
"github.com/pkg/errors"
|
||||
"go.uber.org/zap"
|
||||
yaml "gopkg.in/yaml.v3"
|
||||
)
|
||||
|
||||
var m *Manager
|
||||
|
||||
var (
|
||||
CodeConfigVersionNoConfig = errors.MustNewCode("config_version_no_config")
|
||||
)
|
||||
|
||||
func init() {
|
||||
m = &Manager{}
|
||||
}
|
||||
@@ -103,16 +107,14 @@ func (m *Manager) RecommendAgentConfig(orgId valuer.UUID, currentConfYaml []byte
|
||||
|
||||
for _, feature := range m.agentFeatures {
|
||||
featureType := opamptypes.NewElementType(string(feature.AgentFeatureType()))
|
||||
latestConfig, apiErr := GetLatestVersion(context.Background(), orgId, featureType)
|
||||
if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
|
||||
return nil, "", errors.Wrap(apiErr.ToError(), "failed to get latest agent config version")
|
||||
latestConfig, err := GetLatestVersion(context.Background(), orgId, featureType)
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig(orgId, recommendation, latestConfig)
|
||||
if apiErr != nil {
|
||||
return nil, "", errors.Wrap(apiErr.ToError(), fmt.Sprintf(
|
||||
"failed to generate agent config recommendation for %s", featureType,
|
||||
))
|
||||
updatedConf, serializedSettingsUsed, err := feature.RecommendAgentConfig(orgId, recommendation, latestConfig)
|
||||
if err != nil {
|
||||
return nil, "", errors.WithAdditionalf(err, "agent config recommendation for %s failed", featureType)
|
||||
}
|
||||
recommendation = updatedConf
|
||||
|
||||
@@ -178,26 +180,26 @@ func (m *Manager) ReportConfigDeploymentStatus(
|
||||
|
||||
func GetLatestVersion(
|
||||
ctx context.Context, orgId valuer.UUID, elementType opamptypes.ElementType,
|
||||
) (*opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) (*opamptypes.AgentConfigVersion, error) {
|
||||
return m.GetLatestVersion(ctx, orgId, elementType)
|
||||
}
|
||||
|
||||
func GetConfigVersion(
|
||||
ctx context.Context, orgId valuer.UUID, elementType opamptypes.ElementType, version int,
|
||||
) (*opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) (*opamptypes.AgentConfigVersion, error) {
|
||||
return m.GetConfigVersion(ctx, orgId, elementType, version)
|
||||
}
|
||||
|
||||
func GetConfigHistory(
|
||||
ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, limit int,
|
||||
) ([]opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) ([]opamptypes.AgentConfigVersion, error) {
|
||||
return m.GetConfigHistory(ctx, orgId, typ, limit)
|
||||
}
|
||||
|
||||
// StartNewVersion launches a new config version for given set of elements
|
||||
func StartNewVersion(
|
||||
ctx context.Context, orgId valuer.UUID, userId valuer.UUID, eleType opamptypes.ElementType, elementIds []string,
|
||||
) (*opamptypes.AgentConfigVersion, *model.ApiError) {
|
||||
) (*opamptypes.AgentConfigVersion, error) {
|
||||
|
||||
// create a new version
|
||||
cfg := opamptypes.NewAgentConfigVersion(orgId, userId, eleType)
|
||||
@@ -217,17 +219,16 @@ func NotifyConfigUpdate(ctx context.Context) {
|
||||
m.notifyConfigUpdateSubscribers()
|
||||
}
|
||||
|
||||
func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, version int) *model.ApiError {
|
||||
|
||||
func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType, version int) error {
|
||||
configVersion, err := GetConfigVersion(ctx, orgId, typ, version)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to fetch config version during redeploy", zap.Error(err))
|
||||
return model.WrapApiError(err, "failed to fetch details of the config version")
|
||||
return err
|
||||
}
|
||||
|
||||
if configVersion == nil || (configVersion != nil && configVersion.Config == "") {
|
||||
zap.L().Debug("config version has no conf yaml", zap.Any("configVersion", configVersion))
|
||||
return model.BadRequest(fmt.Errorf("the config version can not be redeployed"))
|
||||
return errors.NewInvalidInputf(CodeConfigVersionNoConfig, "the config version can not be redeployed")
|
||||
}
|
||||
switch typ {
|
||||
case opamptypes.ElementTypeSamplingRules:
|
||||
@@ -246,7 +247,7 @@ func Redeploy(ctx context.Context, orgId valuer.UUID, typ opamptypes.ElementType
|
||||
configHash, err := opamp.UpsertControlProcessors(ctx, "traces", processorConf, m.OnConfigUpdate)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to call agent config update for trace processor", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("failed to deploy the config"))
|
||||
return errors.WithAdditionalf(err, "failed to deploy the config")
|
||||
}
|
||||
|
||||
m.updateDeployStatus(ctx, orgId, opamptypes.ElementTypeSamplingRules, version, opamptypes.DeployInitiated.StringValue(), "Deployment started", configHash, configVersion.Config)
|
||||
|
||||
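The hunks above all follow one migration: `*model.ApiError` returns give way to plain `error` values built from registered codes. A minimal sketch of the idiom, using only constructors that appear in this diff (`MustNewCode`, `NewInvalidInputf`, `WrapInternalf`, `Ast`); the surrounding names are illustrative, not from the codebase.

package agentconfsketch

import "github.com/SigNoz/signoz/pkg/errors"

// One registered code per failure mode replaces the old ApiError types.
var CodeVersionFetchFailed = errors.MustNewCode("version_fetch_failed")

func fetchVersion(get func() (string, error)) (string, error) {
	v, err := get()
	if err != nil {
		// errors.Ast replaces the old apiErr.Type() != model.ErrorNotFound
		// checks: a missing version is fine, anything else is wrapped.
		if errors.Ast(err, errors.TypeNotFound) {
			return "", nil
		}
		return "", errors.WrapInternalf(err, CodeVersionFetchFailed, "failed to get latest agent config version")
	}
	return v, nil
}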
@@ -43,6 +43,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/app/traces/tracedetail"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/constants"

chErrors "github.com/SigNoz/signoz/pkg/query-service/errors"
"github.com/SigNoz/signoz/pkg/query-service/metrics"
"github.com/SigNoz/signoz/pkg/query-service/model"
@@ -154,6 +155,7 @@ type ClickHouseReader struct {

fluxIntervalForTraceDetail time.Duration
cache cache.Cache
cacheForTraceDetail cache.Cache
metadataDB string
metadataTable string
}
@@ -165,21 +167,14 @@ func NewReader(
prometheus prometheus.Prometheus,
cluster string,
fluxIntervalForTraceDetail time.Duration,
cacheForTraceDetail cache.Cache,
cache cache.Cache,
) *ClickHouseReader {
options := NewOptions(primaryNamespace, archiveNamespace)
return NewReaderFromClickhouseConnection(options, sqlDB, telemetryStore, prometheus, cluster, fluxIntervalForTraceDetail, cache)
}

func NewReaderFromClickhouseConnection(
options *Options,
sqlDB sqlstore.SQLStore,
telemetryStore telemetrystore.TelemetryStore,
prometheus prometheus.Prometheus,
cluster string,
fluxIntervalForTraceDetail time.Duration,
cache cache.Cache,
) *ClickHouseReader {
if options == nil {
options = NewOptions(primaryNamespace, archiveNamespace)
}

logsTableName := options.primary.LogsTableV2
logsLocalTableName := options.primary.LogsLocalTableV2
traceTableName := options.primary.TraceIndexTableV3
@@ -221,6 +216,7 @@ func NewReaderFromClickhouseConnection(
traceSummaryTable: options.primary.TraceSummaryTable,
fluxIntervalForTraceDetail: fluxIntervalForTraceDetail,
cache: cache,
cacheForTraceDetail: cacheForTraceDetail,
metadataDB: options.primary.MetadataDB,
metadataTable: options.primary.MetadataTable,
}
@@ -859,7 +855,7 @@ func (r *ClickHouseReader) GetSpansForTrace(ctx context.Context, traceID string,

func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx context.Context, orgID valuer.UUID, traceID string) (*model.GetWaterfallSpansForTraceWithMetadataCache, error) {
cachedTraceData := new(model.GetWaterfallSpansForTraceWithMetadataCache)
err := r.cache.Get(ctx, orgID, strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-"), cachedTraceData)
err := r.cacheForTraceDetail.Get(ctx, orgID, strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-"), cachedTraceData)
if err != nil {
zap.L().Debug("error in retrieving getWaterfallSpansForTraceWithMetadata cache", zap.Error(err), zap.String("traceID", traceID))
return nil, err
@@ -1044,7 +1040,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
}

zap.L().Info("getWaterfallSpansForTraceWithMetadata: processing pre cache", zap.Duration("duration", time.Since(processingBeforeCache)), zap.String("traceID", traceID))
cacheErr := r.cache.Set(ctx, orgID, strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-"), &traceCache, time.Minute*5)
cacheErr := r.cacheForTraceDetail.Set(ctx, orgID, strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-"), &traceCache, time.Minute*5)
if cacheErr != nil {
zap.L().Debug("failed to store cache for getWaterfallSpansForTraceWithMetadata", zap.String("traceID", traceID), zap.Error(err))
}
@@ -1069,7 +1065,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con

func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context, orgID valuer.UUID, traceID string) (*model.GetFlamegraphSpansForTraceCache, error) {
cachedTraceData := new(model.GetFlamegraphSpansForTraceCache)
err := r.cache.Get(ctx, orgID, strings.Join([]string{"getFlamegraphSpansForTrace", traceID}, "-"), cachedTraceData)
err := r.cacheForTraceDetail.Get(ctx, orgID, strings.Join([]string{"getFlamegraphSpansForTrace", traceID}, "-"), cachedTraceData)
if err != nil {
zap.L().Debug("error in retrieving getFlamegraphSpansForTrace cache", zap.Error(err), zap.String("traceID", traceID))
return nil, err
@@ -1205,7 +1201,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID
}

zap.L().Info("getFlamegraphSpansForTrace: processing pre cache", zap.Duration("duration", time.Since(processingBeforeCache)), zap.String("traceID", traceID))
cacheErr := r.cache.Set(ctx, orgID, strings.Join([]string{"getFlamegraphSpansForTrace", traceID}, "-"), &traceCache, time.Minute*5)
cacheErr := r.cacheForTraceDetail.Set(ctx, orgID, strings.Join([]string{"getFlamegraphSpansForTrace", traceID}, "-"), &traceCache, time.Minute*5)
if cacheErr != nil {
zap.L().Debug("failed to store cache for getFlamegraphSpansForTrace", zap.String("traceID", traceID), zap.Error(err))
}
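These hunks move waterfall and flamegraph lookups off the shared `r.cache` onto a dedicated `r.cacheForTraceDetail`, so large trace payloads stop competing with query-range results for one cache budget. A sketch of the split; `cache.Cache`'s full interface is not shown in this diff, so the Get call below mirrors only the call shapes visible above.

type traceReader struct {
	cache               cache.Cache // shared query-result cache
	cacheForTraceDetail cache.Cache // reserved for waterfall/flamegraph payloads
}

func (r *traceReader) cachedWaterfall(ctx context.Context, orgID valuer.UUID, traceID string) (*model.GetWaterfallSpansForTraceWithMetadataCache, error) {
	cached := new(model.GetWaterfallSpansForTraceWithMetadataCache)
	key := strings.Join([]string{"getWaterfallSpansForTraceWithMetadata", traceID}, "-")
	// Trace-detail entries live in their own cache instance.
	if err := r.cacheForTraceDetail.Get(ctx, orgID, key, cached); err != nil {
		return nil, err
	}
	return cached, nil
}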
@@ -1293,7 +1289,12 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
coldStorageDuration = int(params.ToColdStorageDuration)
}

tableNameArray := []string{r.logsDB + "." + r.logsLocalTableV2, r.logsDB + "." + r.logsResourceLocalTableV2}
tableNameArray := []string{
r.logsDB + "." + r.logsLocalTableV2,
r.logsDB + "." + r.logsResourceLocalTableV2,
getLocalTableName(r.logsDB + "." + r.logsAttributeKeys),
getLocalTableName(r.logsDB + "." + r.logsResourceKeys),
}

// check if there is existing things to be done
for _, tableName := range tableNameArray {
@@ -1327,9 +1328,19 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
params.ToColdStorageDuration, params.ColdStorageVolume)
}

ttlLogsV2AttributeKeys := fmt.Sprintf(
"ALTER TABLE %v ON CLUSTER %s MODIFY TTL timestamp + "+
"INTERVAL %v SECOND DELETE", tableNameArray[2], r.cluster, params.DelDuration)

ttlLogsV2ResourceKeys := fmt.Sprintf(
"ALTER TABLE %v ON CLUSTER %s MODIFY TTL timestamp + "+
"INTERVAL %v SECOND DELETE", tableNameArray[3], r.cluster, params.DelDuration)

ttlPayload := map[string]string{
tableNameArray[0]: ttlLogsV2,
tableNameArray[1]: ttlLogsV2Resource,
tableNameArray[2]: ttlLogsV2AttributeKeys,
tableNameArray[3]: ttlLogsV2ResourceKeys,
}

// set the ttl if nothing is pending/ no errors
@@ -1435,6 +1446,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
r.TraceDB + "." + signozUsageExplorerTable,
r.TraceDB + "." + defaultDependencyGraphTable,
r.TraceDB + "." + r.traceSummaryTable,
r.TraceDB + "." + r.spanAttributesKeysTable,
}

coldStorageDuration := -1
@@ -1502,7 +1514,7 @@ func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, param
req = fmt.Sprintf(ttlV2Resource, tableName, r.cluster, params.DelDuration)
}

if len(params.ColdStorageVolume) > 0 {
if len(params.ColdStorageVolume) > 0 && !strings.HasSuffix(distributedTableName, r.spanAttributesKeysTable) {
if strings.HasSuffix(distributedTableName, r.traceResourceTableV3) {
req += fmt.Sprintf(ttlTracesV2ResourceColdStorage, params.ToColdStorageDuration, params.ColdStorageVolume)
} else {
@@ -1649,6 +1661,12 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
tableNames := []string{
r.logsDB + "." + r.logsLocalTableV2,
r.logsDB + "." + r.logsResourceLocalTableV2,
getLocalTableName(r.logsDB + "." + r.logsAttributeKeys),
getLocalTableName(r.logsDB + "." + r.logsResourceKeys),
}
distributedTableNames := []string{
r.logsDB + "." + r.logsTableV2,
r.logsDB + "." + r.logsResourceTableV2,
}

for _, tableName := range tableNames {
@@ -1669,11 +1687,17 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
queries := []string{
fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days UInt16 DEFAULT %s`,
tableNames[0], r.cluster, multiIfExpr),
// for distributed table
fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days UInt16 DEFAULT %s`,
distributedTableNames[0], r.cluster, multiIfExpr),
}

if len(params.ColdStorageVolume) > 0 && coldStorageDuration > 0 {
queries = append(queries, fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days_cold UInt16 DEFAULT %d`,
tableNames[0], r.cluster, coldStorageDuration))
// for distributed table
queries = append(queries, fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days_cold UInt16 DEFAULT %d`,
distributedTableNames[0], r.cluster, coldStorageDuration))

queries = append(queries, fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY TTL toDateTime(timestamp / 1000000000) + toIntervalDay(_retention_days) DELETE, toDateTime(timestamp / 1000000000) + toIntervalDay(_retention_days_cold) TO VOLUME '%s' SETTINGS materialize_ttl_after_modify=0`,
tableNames[0], r.cluster, params.ColdStorageVolume))
@@ -1684,18 +1708,40 @@ func (r *ClickHouseReader) SetTTLV2(ctx context.Context, orgID string, params *m
resourceQueries := []string{
fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days UInt16 DEFAULT %s`,
tableNames[1], r.cluster, resourceMultiIfExpr),
// for distributed table
fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days UInt16 DEFAULT %s`,
distributedTableNames[1], r.cluster, resourceMultiIfExpr),
}

if len(params.ColdStorageVolume) > 0 && coldStorageDuration > 0 {
resourceQueries = append(resourceQueries, fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days_cold UInt16 DEFAULT %d`,
tableNames[1], r.cluster, coldStorageDuration))

// for distributed table
resourceQueries = append(resourceQueries, fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY COLUMN _retention_days_cold UInt16 DEFAULT %d`,
distributedTableNames[1], r.cluster, coldStorageDuration))
resourceQueries = append(resourceQueries, fmt.Sprintf(`ALTER TABLE %s ON CLUSTER %s MODIFY TTL toDateTime(seen_at_ts_bucket_start) + toIntervalSecond(1800) + toIntervalDay(_retention_days) DELETE, toDateTime(seen_at_ts_bucket_start) + toIntervalSecond(1800) + toIntervalDay(_retention_days_cold) TO VOLUME '%s' SETTINGS materialize_ttl_after_modify=0`,
tableNames[1], r.cluster, params.ColdStorageVolume))
}

ttlPayload[tableNames[1]] = resourceQueries

// NOTE: Since logs support custom rule based retention, that makes it difficult to identify which attributes, resource keys
// we need to keep, hence choosing MAX for safe side and not to create any complex solution for this.
maxRetentionTTL := params.DefaultTTLDays
for _, rule := range params.TTLConditions {
maxRetentionTTL = max(maxRetentionTTL, rule.TTLDays)
}

ttlPayload[tableNames[2]] = []string{
fmt.Sprintf("ALTER TABLE %s ON CLUSTER %s MODIFY TTL timestamp + toIntervalDay(%d) DELETE SETTINGS materialize_ttl_after_modify=0",
tableNames[2], r.cluster, maxRetentionTTL),
}

ttlPayload[tableNames[3]] = []string{
fmt.Sprintf("ALTER TABLE %s ON CLUSTER %s MODIFY TTL timestamp + toIntervalDay(%d) DELETE SETTINGS materialize_ttl_after_modify=0",
tableNames[3], r.cluster, maxRetentionTTL),
}

ttlConditionsJSON, err := json.Marshal(params.TTLConditions)
if err != nil {
return nil, errorsV2.Wrapf(err, errorsV2.TypeInternal, errorsV2.CodeInternal, "error marshalling TTL condition")
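Retention rules are per-condition, but the attribute/resource key tables carry no such dimension, so the hunk above pins those tables to the longest retention any rule asks for. The computation in isolation (field names taken from the diff; the rule type is a stand-in):

// Key tables can't honor per-rule retention, so they are kept for the
// longest retention any rule demands. Sketch only; params types assumed.
type ttlRule struct{ TTLDays int }

func maxRetentionDays(defaultTTLDays int, conditions []ttlRule) int {
	maxTTL := defaultTTLDays
	for _, rule := range conditions {
		maxTTL = max(maxTTL, rule.TTLDays) // built-in max, Go 1.21+
	}
	return maxTTL
}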
@@ -5,9 +5,9 @@ import (
"context"
"database/sql"
"encoding/json"
"errors"
"fmt"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/thirdpartyapi"

"io"
@@ -549,6 +549,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v1/settings/ttl", am.ViewAccess(aH.getTTL)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/settings/ttl", am.AdminAccess(aH.setCustomRetentionTTL)).Methods(http.MethodPost)
router.HandleFunc("/api/v2/settings/ttl", am.ViewAccess(aH.getCustomRetentionTTL)).Methods(http.MethodGet)

router.HandleFunc("/api/v1/settings/apdex", am.AdminAccess(aH.Signoz.Handlers.Apdex.Set)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/settings/apdex", am.ViewAccess(aH.Signoz.Handlers.Apdex.Get)).Methods(http.MethodGet)

@@ -1791,7 +1792,7 @@ func (aH *APIHandler) GetWaterfallSpansForTraceWithMetadata(w http.ResponseWrite
}
traceID := mux.Vars(r)["traceId"]
if traceID == "" {
RespondError(w, model.BadRequest(errors.New("traceID is required")), nil)
render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "traceID is required"))
return
}

@@ -1825,7 +1826,7 @@ func (aH *APIHandler) GetFlamegraphSpansForTrace(w http.ResponseWriter, r *http.

traceID := mux.Vars(r)["traceId"]
if traceID == "" {
RespondError(w, model.BadRequest(errors.New("traceID is required")), nil)
render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "traceID is required"))
return
}

@@ -1926,9 +1927,9 @@ func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
}

ctx := r.Context()
claims, errv2 := authtypes.ClaimsFromContext(ctx)
if errv2 != nil {
RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(w, errors.NewInternalf(errors.CodeInternal, "failed to get org id from context"))
return
}

@@ -1995,17 +1996,15 @@ func (aH *APIHandler) getTTL(w http.ResponseWriter, r *http.Request) {
}

ctx := r.Context()
claims, errv2 := authtypes.ClaimsFromContext(ctx)
if errv2 != nil {
RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(w, err)
return
}

result, apiErr := aH.reader.GetTTL(r.Context(), claims.OrgID, ttlParams)
if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
return
}

aH.WriteJSON(w, r, result)
}

@@ -2070,7 +2069,7 @@ func (aH *APIHandler) getHealth(w http.ResponseWriter, r *http.Request) {

func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
if aH.SetupCompleted {
RespondError(w, &model.ApiError{Err: errors.New("self-registration is disabled"), Typ: model.ErrorBadData}, nil)
render.Error(w, errors.NewInvalidInputf(errors.CodeInvalidInput, "self-registration is disabled"))
return
}

@@ -3453,7 +3452,7 @@ func (aH *APIHandler) InstallIntegration(w http.ResponseWriter, r *http.Request)
}
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
RespondError(w, model.UnauthorizedError(errors.New("unauthorized")), nil)
render.Error(w, err)
return
}

@@ -4022,6 +4021,9 @@ func (aH *APIHandler) RegisterLogsRoutes(router *mux.Router, am *middleware.Auth
subRouter.HandleFunc("/pipelines/preview", am.ViewAccess(aH.PreviewLogsPipelinesHandler)).Methods(http.MethodPost)
subRouter.HandleFunc("/pipelines/{version}", am.ViewAccess(aH.ListLogsPipelinesHandler)).Methods(http.MethodGet)
subRouter.HandleFunc("/pipelines", am.EditAccess(aH.CreateLogsPipeline)).Methods(http.MethodPost)

// Promote and index JSON paths used in logs
subRouter.HandleFunc("/promote_paths", am.AdminAccess(aH.Signoz.Handlers.Promote.HandlePromote)).Methods(http.MethodGet, http.MethodPost, http.MethodDelete)
}

func (aH *APIHandler) logFields(w http.ResponseWriter, r *http.Request) {
@@ -4064,7 +4066,7 @@ func (aH *APIHandler) logAggregate(w http.ResponseWriter, r *http.Request) {
aH.WriteJSON(w, r, model.GetLogsAggregatesResponse{})
}

func parseAgentConfigVersion(r *http.Request) (int, *model.ApiError) {
func parseAgentConfigVersion(r *http.Request) (int, error) {
versionString := mux.Vars(r)["version"]

if versionString == "latest" {
@@ -4074,11 +4076,11 @@ func parseAgentConfigVersion(r *http.Request) (int, *model.ApiError) {
version64, err := strconv.ParseInt(versionString, 0, 8)

if err != nil {
return 0, model.BadRequestStr("invalid version number")
return 0, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid version number")
}

if version64 <= 0 {
return 0, model.BadRequestStr("invalid version number")
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid version number")
}

return int(version64), nil
@@ -4088,16 +4090,13 @@ func (aH *APIHandler) PreviewLogsPipelinesHandler(w http.ResponseWriter, r *http
req := logparsingpipeline.PipelinesPreviewRequest{}

if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, model.BadRequest(err), nil)
render.Error(w, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to decode request body"))
return
}

resultLogs, apiErr := aH.LogsParsingPipelineController.PreviewLogsPipelines(
r.Context(), &req,
)

if apiErr != nil {
RespondError(w, apiErr, nil)
resultLogs, err := aH.LogsParsingPipelineController.PreviewLogsPipelines(r.Context(), &req)
if err != nil {
render.Error(w, err)
return
}
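Condensing the handler migration above: decode, delegate, and hand any failure to `render.Error`, which is left to map the typed error onto an HTTP status. This sketch restates the hunk rather than adding behavior; how render.Error derives the status is not shown in this diff.

func (aH *APIHandler) previewHandler(w http.ResponseWriter, r *http.Request) {
	var req logparsingpipeline.PipelinesPreviewRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		render.Error(w, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to decode request body"))
		return
	}
	result, err := aH.LogsParsingPipelineController.PreviewLogsPipelines(r.Context(), &req)
	if err != nil {
		render.Error(w, err) // status assumed to come from the error's type/code
		return
	}
	aH.Respond(w, result)
}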
@@ -4105,9 +4104,9 @@ func (aH *APIHandler) PreviewLogsPipelinesHandler(w http.ResponseWriter, r *http
}

func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Request) {
claims, errv2 := authtypes.ClaimsFromContext(r.Context())
if errv2 != nil {
render.Error(w, errv2)
claims, err := authtypes.ClaimsFromContext(r.Context())
if err != nil {
render.Error(w, err)
return
}

@@ -4119,35 +4118,33 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re

version, err := parseAgentConfigVersion(r)
if err != nil {
RespondError(w, model.WrapApiError(err, "Failed to parse agent config version"), nil)
render.Error(w, err)
return
}

var payload *logparsingpipeline.PipelinesResponse
var apierr *model.ApiError

if version != -1 {
payload, apierr = aH.listLogsPipelinesByVersion(context.Background(), orgID, version)
payload, err = aH.listLogsPipelinesByVersion(r.Context(), orgID, version)
} else {
payload, apierr = aH.listLogsPipelines(context.Background(), orgID)
payload, err = aH.listLogsPipelines(r.Context(), orgID)
}

if apierr != nil {
RespondError(w, apierr, payload)
if err != nil {
render.Error(w, err)
return
}

aH.Respond(w, payload)
}

// listLogsPipelines lists logs piplines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
*logparsingpipeline.PipelinesResponse, *model.ApiError,
*logparsingpipeline.PipelinesResponse, error,
) {
// get lateset agent config
latestVersion := -1
lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
if err != nil && err.Type() != model.ErrorNotFound {
return nil, model.WrapApiError(err, "failed to get latest agent config version")
if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
return nil, err
}

if lastestConfig != nil {
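The old `err.Type() != model.ErrorNotFound` check becomes `errorsV2.Ast(err, errorsV2.TypeNotFound)` here: not-found is tolerated as "no config yet" while every other failure propagates. A hedged sketch of that shape, using only names visible in this diff:

func latestPipelineVersion(ctx context.Context, orgID valuer.UUID) (int, error) {
	latestVersion := -1
	cfg, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
	if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
		return 0, err // a real failure, not just "no config yet"
	}
	if cfg != nil {
		latestVersion = cfg.Version
	}
	return latestVersion, nil
}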
@@ -4156,14 +4153,14 @@ func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID)

payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, orgID, latestVersion)
if err != nil {
return nil, model.WrapApiError(err, "failed to get pipelines")
return nil, err
}

// todo(Nitya): make a new API for history pagination
limit := 10
history, err := agentConf.GetConfigHistory(ctx, orgID, opamptypes.ElementTypeLogPipelines, limit)
if err != nil {
return nil, model.WrapApiError(err, "failed to get config history")
return nil, err
}
payload.History = history
return payload, nil
@@ -4171,18 +4168,18 @@ func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID)

// listLogsPipelinesByVersion lists pipelines along with config version history
func (aH *APIHandler) listLogsPipelinesByVersion(ctx context.Context, orgID valuer.UUID, version int) (
*logparsingpipeline.PipelinesResponse, *model.ApiError,
*logparsingpipeline.PipelinesResponse, error,
) {
payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, orgID, version)
if err != nil {
return nil, model.WrapApiError(err, "failed to get pipelines by version")
return nil, err
}

// todo(Nitya): make a new API for history pagination
limit := 10
history, err := agentConf.GetConfigHistory(ctx, orgID, opamptypes.ElementTypeLogPipelines, limit)
if err != nil {
return nil, model.WrapApiError(err, "failed to retrieve agent config history")
return nil, err
}

payload.History = history
@@ -4218,14 +4215,14 @@ func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request)
createPipeline := func(
ctx context.Context,
postable []pipelinetypes.PostablePipeline,
) (*logparsingpipeline.PipelinesResponse, *model.ApiError) {
) (*logparsingpipeline.PipelinesResponse, error) {
if len(postable) == 0 {
zap.L().Warn("found no pipelines in the http request, this will delete all the pipelines")
}

validationErr := aH.LogsParsingPipelineController.ValidatePipelines(ctx, postable)
if validationErr != nil {
return nil, validationErr
err := aH.LogsParsingPipelineController.ValidatePipelines(ctx, postable)
if err != nil {
return nil, err
}

return aH.LogsParsingPipelineController.ApplyPipelines(ctx, orgID, userID, postable)
@@ -4233,7 +4230,7 @@ func (aH *APIHandler) CreateLogsPipeline(w http.ResponseWriter, r *http.Request)

res, err := createPipeline(r.Context(), req.Pipelines)
if err != nil {
RespondError(w, err, nil)
render.Error(w, err)
return
}

@@ -106,7 +106,7 @@ func (c *Controller) Uninstall(ctx context.Context, orgId string, req *Uninstall
return nil
}

func (c *Controller) GetPipelinesForInstalledIntegrations(ctx context.Context, orgId string) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
func (c *Controller) GetPipelinesForInstalledIntegrations(ctx context.Context, orgId string) ([]pipelinetypes.GettablePipeline, error) {
return c.mgr.GetPipelinesForInstalledIntegrations(ctx, orgId)
}

@@ -256,7 +256,7 @@ func (m *Manager) UninstallIntegration(
func (m *Manager) GetPipelinesForInstalledIntegrations(
ctx context.Context,
orgId string,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
) ([]pipelinetypes.GettablePipeline, error) {
installedIntegrations, apiErr := m.getInstalledIntegrations(ctx, orgId)
if apiErr != nil {
return nil, apiErr
@@ -8,15 +8,23 @@ import (

"gopkg.in/yaml.v3"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/constants"
coreModel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/pkg/errors"
"go.uber.org/zap"
)

var lockLogsPipelineSpec sync.RWMutex

var (
CodeCollectorConfigUnmarshalFailed = errors.MustNewCode("collector_config_unmarshal_failed")
CodeCollectorConfigMarshalFailed = errors.MustNewCode("collector_config_marshal_failed")
CodeCollectorConfigServiceNotFound = errors.MustNewCode("collector_config_service_not_found")
CodeCollectorConfigServiceMarshalFailed = errors.MustNewCode("collector_config_service_marshal_failed")
CodeCollectorConfigServiceUnmarshalFailed = errors.MustNewCode("collector_config_service_unmarshal_failed")
CodeCollectorConfigLogsPipelineNotFound = errors.MustNewCode("collector_config_logs_pipeline_not_found")
)

// check if the processors already exist
// if yes then update the processor.
// if something doesn't exists then remove it.
@@ -57,15 +65,15 @@ type otelPipeline struct {

func getOtelPipelineFromConfig(config map[string]interface{}) (*otelPipeline, error) {
if _, ok := config["service"]; !ok {
return nil, fmt.Errorf("service not found in OTEL config")
return nil, errors.NewInvalidInputf(CodeCollectorConfigServiceNotFound, "service not found in OTEL config")
}
b, err := json.Marshal(config["service"])
if err != nil {
return nil, err
return nil, errors.WrapInternalf(err, CodeCollectorConfigServiceMarshalFailed, "could not marshal OTEL config")
}
p := otelPipeline{}
if err := json.Unmarshal(b, &p); err != nil {
return nil, err
return nil, errors.WrapInternalf(err, CodeCollectorConfigServiceUnmarshalFailed, "could not unmarshal OTEL config")
}
return &p, nil
}
@@ -163,21 +171,16 @@ func checkDuplicateString(pipeline []string) bool {
return false
}

func GenerateCollectorConfigWithPipelines(
config []byte,
pipelines []pipelinetypes.GettablePipeline,
) ([]byte, *coreModel.ApiError) {
func GenerateCollectorConfigWithPipelines(config []byte, pipelines []pipelinetypes.GettablePipeline) ([]byte, error) {
var collectorConf map[string]interface{}
err := yaml.Unmarshal([]byte(config), &collectorConf)
if err != nil {
return nil, coreModel.BadRequest(err)
return nil, errors.WrapInvalidInputf(err, CodeCollectorConfigUnmarshalFailed, "could not unmarshal collector config")
}

signozPipelineProcessors, signozPipelineProcNames, err := PreparePipelineProcessor(pipelines)
if err != nil {
return nil, coreModel.BadRequest(errors.Wrap(
err, "could not prepare otel collector processors for log pipelines",
))
return nil, err
}

// Escape any `$`s as `$$$` in config generated for pipelines, to ensure any occurrences
@@ -186,9 +189,7 @@ func GenerateCollectorConfigWithPipelines(
procConf := signozPipelineProcessors[procName]
serializedProcConf, err := yaml.Marshal(procConf)
if err != nil {
return nil, coreModel.InternalError(fmt.Errorf(
"could not marshal processor config for %s: %w", procName, err,
))
return nil, errors.WrapInternalf(err, CodeCollectorConfigMarshalFailed, "could not marshal processor config for %s", procName)
}
escapedSerializedConf := strings.ReplaceAll(
string(serializedProcConf), "$", "$$",
@@ -197,9 +198,7 @@ func GenerateCollectorConfigWithPipelines(
var escapedConf map[string]interface{}
err = yaml.Unmarshal([]byte(escapedSerializedConf), &escapedConf)
if err != nil {
return nil, coreModel.InternalError(fmt.Errorf(
"could not unmarshal dollar escaped processor config for %s: %w", procName, err,
))
return nil, errors.WrapInternalf(err, CodeCollectorConfigUnmarshalFailed, "could not unmarshal dollar escaped processor config for %s", procName)
}

signozPipelineProcessors[procName] = escapedConf
@@ -211,12 +210,10 @@ func GenerateCollectorConfigWithPipelines(
// build the new processor list in service.pipelines.logs
p, err := getOtelPipelineFromConfig(collectorConf)
if err != nil {
return nil, coreModel.BadRequest(err)
return nil, err
}
if p.Pipelines.Logs == nil {
return nil, coreModel.InternalError(fmt.Errorf(
"logs pipeline doesn't exist",
))
return nil, errors.NewInternalf(CodeCollectorConfigLogsPipelineNotFound, "logs pipeline doesn't exist")
}

updatedProcessorList, _ := buildCollectorPipelineProcessorsList(p.Pipelines.Logs.Processors, signozPipelineProcNames)
@@ -227,7 +224,7 @@ func GenerateCollectorConfigWithPipelines(

updatedConf, err := yaml.Marshal(collectorConf)
if err != nil {
return nil, coreModel.BadRequest(err)
return nil, errors.WrapInternalf(err, CodeCollectorConfigMarshalFailed, "could not marshal collector config")
}

return updatedConf, nil
@@ -7,6 +7,7 @@ import (
"slices"
"strings"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
"github.com/SigNoz/signoz/pkg/query-service/constants"
"github.com/SigNoz/signoz/pkg/query-service/model"
@@ -17,20 +18,23 @@ import (
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"github.com/pkg/errors"
"go.uber.org/zap"
)

var (
CodeRawPipelinesMarshalFailed = errors.MustNewCode("raw_pipelines_marshal_failed")
)

// Controller takes care of deployment cycle of log parsing pipelines.
type LogParsingPipelineController struct {
Repo

GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError)
GetIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, error)
}

func NewLogParsingPipelinesController(
sqlStore sqlstore.SQLStore,
getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, *model.ApiError),
getIntegrationPipelines func(context.Context, string) ([]pipelinetypes.GettablePipeline, error),
) (*LogParsingPipelineController, error) {
repo := NewRepo(sqlStore)
return &LogParsingPipelineController{
@@ -53,7 +57,7 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
orgID valuer.UUID,
userID valuer.UUID,
postable []pipelinetypes.PostablePipeline,
) (*PipelinesResponse, *model.ApiError) {
) (*PipelinesResponse, error) {
var pipelines []pipelinetypes.GettablePipeline

// scan through postable pipelines, to select the existing pipelines or insert missing ones
@@ -68,9 +72,9 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
// the same pipeline id.
r.ID = uuid.NewString()
r.OrderID = idx + 1
pipeline, apiErr := ic.insertPipeline(ctx, orgID, &r)
if apiErr != nil {
return nil, model.WrapApiError(apiErr, "failed to insert pipeline")
pipeline, err := ic.insertPipeline(ctx, orgID, &r)
if err != nil {
return nil, err
}
pipelines = append(pipelines, *pipeline)

@@ -90,13 +94,12 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
return ic.GetPipelinesByVersion(ctx, orgID, cfg.Version)
}

func (ic *LogParsingPipelineController) ValidatePipelines(
ctx context.Context,
func (ic *LogParsingPipelineController) ValidatePipelines(ctx context.Context,
postedPipelines []pipelinetypes.PostablePipeline,
) *model.ApiError {
) error {
for _, p := range postedPipelines {
if err := p.IsValid(); err != nil {
return model.BadRequestStr(err.Error())
return errors.WithAdditionalf(err, "invalid pipeline: %s", p.Name)
}
}

@@ -121,39 +124,29 @@ func (ic *LogParsingPipelineController) ValidatePipelines(
}

sampleLogs := []model.SignozLog{{Body: ""}}
_, _, simulationErr := SimulatePipelinesProcessing(
ctx, gettablePipelines, sampleLogs,
)
if simulationErr != nil {
return model.BadRequest(fmt.Errorf(
"invalid pipelines config: %w", simulationErr.ToError(),
))
}

return nil
_, _, err := SimulatePipelinesProcessing(ctx, gettablePipelines, sampleLogs)
return err
}

// Returns effective list of pipelines including user created
// pipelines and pipelines for installed integrations
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
ctx context.Context, orgID valuer.UUID, version int,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
) ([]pipelinetypes.GettablePipeline, error) {

result := []pipelinetypes.GettablePipeline{}
if version >= 0 {
savedPipelines, errors := ic.getPipelinesByVersion(ctx, orgID.String(), version)
if errors != nil {
zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Errors("errors", errors))
return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version %v", errors))
savedPipelines, err := ic.getPipelinesByVersion(ctx, orgID.String(), version)
if err != nil {
zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(err))
return nil, err
}
result = savedPipelines
}

integrationPipelines, apiErr := ic.GetIntegrationPipelines(ctx, orgID.String())
if apiErr != nil {
return nil, model.WrapApiError(
apiErr, "could not get pipelines for installed integrations",
)
integrationPipelines, err := ic.GetIntegrationPipelines(ctx, orgID.String())
if err != nil {
return nil, err
}

// Filter out any integration pipelines included in pipelines saved by user
@@ -194,12 +187,11 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
// GetPipelinesByVersion responds with version info and associated pipelines
func (ic *LogParsingPipelineController) GetPipelinesByVersion(
ctx context.Context, orgId valuer.UUID, version int,
) (*PipelinesResponse, *model.ApiError) {

pipelines, errors := ic.getEffectivePipelinesByVersion(ctx, orgId, version)
if errors != nil {
zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(errors))
return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version %v", errors))
) (*PipelinesResponse, error) {
pipelines, err := ic.getEffectivePipelinesByVersion(ctx, orgId, version)
if err != nil {
zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(err))
return nil, err
}

var configVersion *opamptypes.AgentConfigVersion
@@ -207,7 +199,7 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion(
cv, err := agentConf.GetConfigVersion(ctx, orgId, opamptypes.ElementTypeLogPipelines, version)
if err != nil {
zap.L().Error("failed to get config for version", zap.Int("version", version), zap.Error(err))
return nil, model.WrapApiError(err, "failed to get config for given version")
return nil, err
}
configVersion = cv
}
@@ -231,11 +223,8 @@ type PipelinesPreviewResponse struct {
func (ic *LogParsingPipelineController) PreviewLogsPipelines(
ctx context.Context,
request *PipelinesPreviewRequest,
) (*PipelinesPreviewResponse, *model.ApiError) {
result, collectorLogs, err := SimulatePipelinesProcessing(
ctx, request.Pipelines, request.Logs,
)

) (*PipelinesPreviewResponse, error) {
result, collectorLogs, err := SimulatePipelinesProcessing(ctx, request.Pipelines, request.Logs)
if err != nil {
return nil, err
}
@@ -256,33 +245,27 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
orgId valuer.UUID,
currentConfYaml []byte,
configVersion *opamptypes.AgentConfigVersion,
) (
recommendedConfYaml []byte,
serializedSettingsUsed string,
apiErr *model.ApiError,
) {
) ([]byte, string, error) {
pipelinesVersion := -1
if configVersion != nil {
pipelinesVersion = configVersion.Version
}

pipelinesResp, apiErr := pc.GetPipelinesByVersion(
pipelinesResp, err := pc.GetPipelinesByVersion(
context.Background(), orgId, pipelinesVersion,
)
if apiErr != nil {
return nil, "", apiErr
if err != nil {
return nil, "", err
}

updatedConf, apiErr := GenerateCollectorConfigWithPipelines(
currentConfYaml, pipelinesResp.Pipelines,
)
if apiErr != nil {
return nil, "", model.WrapApiError(apiErr, "could not marshal yaml for updated conf")
updatedConf, err := GenerateCollectorConfigWithPipelines(currentConfYaml, pipelinesResp.Pipelines)
if err != nil {
return nil, "", err
}

rawPipelineData, err := json.Marshal(pipelinesResp.Pipelines)
if err != nil {
return nil, "", model.BadRequest(errors.Wrap(err, "could not serialize pipelines to JSON"))
return nil, "", errors.WrapInternalf(err, CodeRawPipelinesMarshalFailed, "could not serialize pipelines to JSON")
}

return updatedConf, string(rawPipelineData), nil
@@ -6,13 +6,13 @@ import (
"fmt"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/pkg/errors"
"go.uber.org/zap"
)

@@ -33,24 +33,18 @@ func NewRepo(sqlStore sqlstore.SQLStore) Repo {
// insertPipeline stores a given postable pipeline to database
func (r *Repo) insertPipeline(
ctx context.Context, orgID valuer.UUID, postable *pipelinetypes.PostablePipeline,
) (*pipelinetypes.GettablePipeline, *model.ApiError) {
) (*pipelinetypes.GettablePipeline, error) {
if err := postable.IsValid(); err != nil {
return nil, model.BadRequest(errors.Wrap(err,
"pipeline is not valid",
))
return nil, errors.WithAdditionalf(err, "pipeline is not valid")
}

rawConfig, err := json.Marshal(postable.Config)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err,
"failed to unmarshal postable pipeline config",
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to unmarshal postable pipeline config")
}
filter, err := json.Marshal(postable.Filter)
if err != nil {
return nil, model.BadRequest(errors.Wrap(err,
"failed to marshal postable pipeline filter",
))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to marshal postable pipeline filter")
}

claims, errv2 := authtypes.ClaimsFromContext(ctx)
@@ -85,10 +79,9 @@ func (r *Repo) insertPipeline(
_, err = r.sqlStore.BunDB().NewInsert().
Model(&insertRow.StoreablePipeline).
Exec(ctx)

if err != nil {
zap.L().Error("error in inserting pipeline data", zap.Error(err))
return nil, model.InternalError(errors.Wrap(err, "failed to insert pipeline"))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to insert pipeline")
}

return insertRow, nil
@@ -97,8 +90,7 @@ func (r *Repo) insertPipeline(
// getPipelinesByVersion returns pipelines associated with a given version
func (r *Repo) getPipelinesByVersion(
ctx context.Context, orgID string, version int,
) ([]pipelinetypes.GettablePipeline, []error) {
var errors []error
) ([]pipelinetypes.GettablePipeline, error) {
storablePipelines := []pipelinetypes.StoreablePipeline{}
err := r.sqlStore.BunDB().NewSelect().
Model(&storablePipelines).
@@ -110,7 +102,7 @@ func (r *Repo) getPipelinesByVersion(
Order("p.order_id ASC").
Scan(ctx)
if err != nil {
return nil, []error{fmt.Errorf("failed to get pipelines from db: %v", err)}
return nil, errors.WrapInternalf(err, CodePipelinesGetFailed, "failed to get pipelines from db")
}

gettablePipelines := make([]pipelinetypes.GettablePipeline, len(storablePipelines))
@@ -118,23 +110,24 @@ func (r *Repo) getPipelinesByVersion(
return gettablePipelines, nil
}

var errs []error
for i := range storablePipelines {
gettablePipelines[i].StoreablePipeline = storablePipelines[i]
if err := gettablePipelines[i].ParseRawConfig(); err != nil {
errors = append(errors, err)
errs = append(errs, err)
}
if err := gettablePipelines[i].ParseFilter(); err != nil {
errors = append(errors, err)
errs = append(errs, err)
}
}

return gettablePipelines, errors
return gettablePipelines, errors.Join(errs...)
}
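The repo stops returning `[]error` and aggregates per-pipeline parse failures into a single error. In the hunk, `errors` is the SigNoz errors package; the sketch below uses the standard library's `errors.Join` (Go 1.20+), assuming the SigNoz package's Join behaves the same way, which is what makes the old nil-vs-empty slice handling disappear.

import "errors"

// Join returns nil when no errors were collected, so callers keep a plain
// err != nil check instead of inspecting a slice.
func parseAll(items []string, parse func(string) error) error {
	var errs []error
	for _, item := range items {
		if err := parse(item); err != nil {
			errs = append(errs, err)
		}
	}
	return errors.Join(errs...)
}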
// GetPipelines returns pipeline and errors (if any)
func (r *Repo) GetPipeline(
ctx context.Context, orgID string, id string,
) (*pipelinetypes.GettablePipeline, *model.ApiError) {
) (*pipelinetypes.GettablePipeline, error) {
storablePipelines := []pipelinetypes.StoreablePipeline{}

err := r.sqlStore.BunDB().NewSelect().
@@ -144,12 +137,12 @@ func (r *Repo) GetPipeline(
Scan(ctx)
if err != nil {
zap.L().Error("failed to get ingestion pipeline from db", zap.Error(err))
return nil, model.InternalError(errors.Wrap(err, "failed to get ingestion pipeline from db"))
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to get ingestion pipeline from db")
}

if len(storablePipelines) == 0 {
zap.L().Warn("No row found for ingestion pipeline id", zap.String("id", id))
return nil, model.NotFoundError(fmt.Errorf("no row found for ingestion pipeline id %v", id))
return nil, errors.NewNotFoundf(errors.CodeNotFound, "no row found for ingestion pipeline id %v", id)
}

if len(storablePipelines) == 1 {
@@ -157,20 +150,16 @@ func (r *Repo) GetPipeline(
gettablePipeline.StoreablePipeline = storablePipelines[0]
if err := gettablePipeline.ParseRawConfig(); err != nil {
zap.L().Error("invalid pipeline config found", zap.String("id", id), zap.Error(err))
return nil, model.InternalError(
errors.Wrap(err, "found an invalid pipeline config"),
)
return nil, err
}
if err := gettablePipeline.ParseFilter(); err != nil {
zap.L().Error("invalid pipeline filter found", zap.String("id", id), zap.Error(err))
return nil, model.InternalError(
errors.Wrap(err, "found an invalid pipeline filter"),
)
return nil, err
}
return &gettablePipeline, nil
}

return nil, model.InternalError(fmt.Errorf("multiple pipelines with same id"))
return nil, errors.NewInternalf(errors.CodeInternal, "multiple pipelines with same id")
}

func (r *Repo) DeletePipeline(ctx context.Context, orgID string, id string) error {

@@ -3,6 +3,8 @@ package logparsingpipeline
import "github.com/SigNoz/signoz/pkg/errors"

var (
CodeInvalidOperatorType = errors.MustNewCode("operator_type_mismatch")
CodeFieldNilCheckType = errors.MustNewCode("operator_field_nil_check")
CodeInvalidOperatorType = errors.MustNewCode("operator_type_mismatch")
CodeFieldNilCheckType = errors.MustNewCode("operator_field_nil_check")
CodePipelinesGetFailed = errors.MustNewCode("pipelines_get_failed")
CodeProcessorFactoryMapFailed = errors.MustNewCode("processor_factory_map_failed")
)

@@ -9,22 +9,16 @@ import (
"github.com/SigNoz/signoz-otel-collector/pkg/collectorsimulator"
_ "github.com/SigNoz/signoz-otel-collector/pkg/parser/grok"
"github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
"github.com/pkg/errors"
"go.opentelemetry.io/collector/otelcol"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/plog"
)

func SimulatePipelinesProcessing(
ctx context.Context,
pipelines []pipelinetypes.GettablePipeline,
logs []model.SignozLog,
) (
output []model.SignozLog, collectorWarnAndErrorLogs []string, apiErr *model.ApiError,
) {

func SimulatePipelinesProcessing(ctx context.Context, pipelines []pipelinetypes.GettablePipeline, logs []model.SignozLog) (
[]model.SignozLog, []string, error) {
if len(pipelines) < 1 {
return logs, nil, nil
}
@@ -42,13 +36,9 @@ func SimulatePipelinesProcessing(
}
simulatorInputPLogs := SignozLogsToPLogs(logs)

processorFactories, err := otelcol.MakeFactoryMap(
signozlogspipelineprocessor.NewFactory(),
)
processorFactories, err := otelcol.MakeFactoryMap(signozlogspipelineprocessor.NewFactory())
if err != nil {
return nil, nil, model.InternalError(errors.Wrap(
err, "could not construct processor factory map",
))
return nil, nil, errors.WrapInternalf(err, CodeProcessorFactoryMapFailed, "could not construct processor factory map")
}

// Pipelines translate to logtransformprocessors in otel collector config.
@@ -60,9 +50,9 @@ func SimulatePipelinesProcessing(
timeout := time.Millisecond * time.Duration(len(pipelines)*100+100)

configGenerator := func(baseConf []byte) ([]byte, error) {
updatedConf, apiErr := GenerateCollectorConfigWithPipelines(baseConf, pipelines)
if apiErr != nil {
return nil, apiErr.ToError()
updatedConf, err := GenerateCollectorConfigWithPipelines(baseConf, pipelines)
if err != nil {
return nil, err
}
return updatedConf, nil
}
@@ -76,14 +66,9 @@ func SimulatePipelinesProcessing(
)
if simulationErr != nil {
if errors.Is(simulationErr, collectorsimulator.ErrInvalidConfig) {
apiErr = model.BadRequest(simulationErr)
} else {
apiErr = model.InternalError(simulationErr)
return nil, nil, errors.WrapInvalidInputf(simulationErr, errors.CodeInvalidInput, "invalid config")
}

return nil, collectorErrs, model.WrapApiError(apiErr,
"could not simulate log pipelines processing.\nCollector errors",
)
return nil, nil, errors.WrapInternalf(simulationErr, errors.CodeInternal, "could not simulate log pipelines processing")
}

outputSignozLogs := PLogsToSignozLogs(outputPLogs)
@@ -98,6 +83,7 @@ func SimulatePipelinesProcessing(
delete(sigLog.Attributes_int64, inputOrderAttribute)
}

collectorWarnAndErrorLogs := []string{}
for _, log := range collectorErrs {
// if log is empty or log comes from featuregate.go, then remove it
if log == "" || strings.Contains(log, "featuregate.go") {

@@ -404,7 +404,7 @@ func buildLogsQuery(panelType v3.PanelType, start, end, step int64, mq *v3.Build
// if noop create the query and return
if mq.AggregateOperator == v3.AggregateOperatorNoOp {
// with noop any filter or different order by other than ts will use new table
sqlSelect := constants.LogsSQLSelectV2
sqlSelect := constants.LogsSQLSelectV2()
queryTmpl := sqlSelect + "from signoz_logs.%s where %s%s order by %s"
query := fmt.Sprintf(queryTmpl, DISTRIBUTED_LOGS_V2, timeFilter, filterSubQuery, orderBy)
return query, nil
@@ -488,7 +488,7 @@ func buildLogsLiveTailQuery(mq *v3.BuilderQuery) (string, error) {
// the reader will add the timestamp and id filters
switch mq.AggregateOperator {
case v3.AggregateOperatorNoOp:
query := constants.LogsSQLSelectV2 + "from signoz_logs." + DISTRIBUTED_LOGS_V2 + " where "
query := constants.LogsSQLSelectV2() + "from signoz_logs." + DISTRIBUTED_LOGS_V2 + " where "
if len(filterSubQuery) > 0 {
query = query + filterSubQuery + " AND "
}
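Call sites switch from the package-level value `constants.LogsSQLSelectV2` to a function call here. The motive isn't shown in these hunks; one plausible reading (an assumption, not confirmed by this diff) is that the SELECT prefix now has to be computed at call time, for instance once promoted JSON paths can change what is selected. A sketch of the shape only; the column list is a placeholder, not the real constant:

package constants

import "strings"

// LogsSQLSelectV2 as a function can reflect runtime state; columns below are
// illustrative placeholders, not the actual SigNoz select list.
func LogsSQLSelectV2() string {
	cols := []string{"timestamp", "id", "trace_id", "span_id", "severity_text", "body"}
	return "SELECT " + strings.Join(cols, ", ") + " "
}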
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_getClickhouseKey(t *testing.T) {
|
||||
@@ -1210,9 +1211,8 @@ func TestPrepareLogsQuery(t *testing.T) {
|
||||
t.Errorf("PrepareLogsQuery() error = %v, wantErr %v", err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
if got != tt.want {
|
||||
t.Errorf("PrepareLogsQuery() = %v, want %v", got, tt.want)
|
||||
}
|
||||
|
||||
assert.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,25 +3,27 @@ package opamp
|
||||
import (
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
model "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp/otelconfig"
|
||||
coreModel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/knadh/koanf/parsers/yaml"
|
||||
"github.com/open-telemetry/opamp-go/protobufs"
|
||||
"go.opentelemetry.io/collector/confmap"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var (
|
||||
CodeNoAgentsAvailable = errors.MustNewCode("no_agents_available")
|
||||
CodeOpAmpServerDown = errors.MustNewCode("opamp_server_down")
|
||||
CodeMultipleAgentsNotSupported = errors.MustNewCode("multiple_agents_not_supported")
|
||||
)
|
||||
|
||||
// inserts or updates ingestion controller processors depending
|
||||
// on the signal (metrics or traces)
|
||||
func UpsertControlProcessors(
|
||||
ctx context.Context,
|
||||
signal string,
|
||||
processors map[string]interface{},
|
||||
callback model.OnChangeCallback,
|
||||
) (hash string, fnerr *coreModel.ApiError) {
|
||||
func UpsertControlProcessors(ctx context.Context, signal string,
|
||||
processors map[string]interface{}, callback model.OnChangeCallback,
|
||||
) (string, error) {
|
||||
// note: only processors enabled through tracesPipelinePlan will be added
|
||||
// to pipeline. To enable or disable processors from pipeline, call
|
||||
// AddToTracePipeline() or RemoveFromTracesPipeline() prior to calling
|
||||
@@ -31,33 +33,24 @@ func UpsertControlProcessors(

	if signal != string(Metrics) && signal != string(Traces) {
		zap.L().Error("received invalid signal int UpsertControlProcessors", zap.String("signal", signal))
		fnerr = coreModel.BadRequest(fmt.Errorf(
			"signal not supported in ingestion rules: %s", signal,
		))
		return
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "signal not supported in ingestion rules: %s", signal)
	}

	if opAmpServer == nil {
		fnerr = coreModel.UnavailableError(fmt.Errorf(
			"opamp server is down, unable to push config to agent at this moment",
		))
		return
		return "", errors.NewInternalf(CodeOpAmpServerDown, "opamp server is down, unable to push config to agent at this moment")
	}

	agents := opAmpServer.agents.GetAllAgents()
	if len(agents) == 0 {
		fnerr = coreModel.UnavailableError(fmt.Errorf("no agents available at the moment"))
		return
		return "", errors.NewInternalf(CodeNoAgentsAvailable, "no agents available at the moment")
	}

	if len(agents) > 1 && signal == string(Traces) {
		zap.L().Debug("found multiple agents. this feature is not supported for traces pipeline (sampling rules)")
		fnerr = coreModel.BadRequest(fmt.Errorf("multiple agents not supported in sampling rules"))
		return
		return "", errors.NewInvalidInputf(CodeMultipleAgentsNotSupported, "multiple agents not supported in sampling rules")
	}

	hash := ""
	for _, agent := range agents {

		agenthash, err := addIngestionControlToAgent(agent, signal, processors, false)
		if err != nil {
			zap.L().Error("failed to push ingestion rules config to agent", zap.String("agentID", agent.AgentID), zap.Error(err))
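The refactor above replaces the named out-parameters (hash string, fnerr *coreModel.ApiError) with an ordinary (string, error) return carrying a typed code. A minimal sketch of the resulting contract, assuming only the errors helpers already used in this hunk (the pushConfig wrapper and its flag are hypothetical):

	// pushConfig is a hypothetical caller illustrating the refactored style:
	// return a zero hash plus a coded error, instead of assigning to named
	// out-parameters and falling through to a bare return.
	func pushConfig(serverUp bool) (string, error) {
		if !serverUp {
			return "", errors.NewInternalf(CodeOpAmpServerDown, "opamp server is down, unable to push config to agent at this moment")
		}
		return "confighash", nil
	}

Callers can then branch on the error code rather than on an ApiError type, which is presumably why the coreModel dependency drops out of the signature.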
@@ -238,8 +238,8 @@ func TestFindMissingTimeRangesZeroFreshNess(t *testing.T) {
	}

	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)

@@ -458,8 +458,8 @@ func TestFindMissingTimeRangesWithFluxInterval(t *testing.T) {
	}

	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)

@@ -631,8 +631,8 @@ func TestQueryRange(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -748,8 +748,8 @@ func TestQueryRangeValueType(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -911,8 +911,8 @@ func TestQueryRangeTimeShiftWithCache(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -1017,8 +1017,8 @@ func TestQueryRangeTimeShiftWithLimitAndCache(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -1094,8 +1094,8 @@ func TestQueryRangeValueTypePromQL(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)
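Each of these hunks swaps the TTL/CleanupInterval knobs for ristretto-style sizing: NumCounters bounds the tracked keys at roughly 10,000 and MaxCost is 1 << 26 bytes, i.e. 64 MiB. A compact sketch of the repeated setup, assuming the cache and cachetest packages behave exactly as used above (the test name is hypothetical; imports as in these test files):

	func TestWithMemoryCache(t *testing.T) {
		// Sized like the hunks above: ~10k tracked keys, 64 MiB total cost.
		opts := cache.Memory{
			NumCounters: 10 * 1000, // approximate number of keys to track
			MaxCost:     1 << 26,   // 64 * 1024 * 1024 bytes = 64 MiB
		}
		c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
		require.NoError(t, err)
		_ = c // hand the cache to the querier under test
	}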
@@ -1402,14 +1402,15 @@ func Test_querier_Traces_runWindowBasedListQueryDesc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{

@@ -1626,14 +1627,15 @@ func Test_querier_Traces_runWindowBasedListQueryAsc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{

@@ -1925,14 +1927,15 @@ func Test_querier_Logs_runWindowBasedListQueryDesc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{

@@ -2151,14 +2154,15 @@ func Test_querier_Logs_runWindowBasedListQueryAsc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{
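Across these four tests the old NewReaderFromClickhouseConnection(options, ...) constructor gives way to NewReader. Reading this call together with the server wiring later in this diff, the positional arguments appear to be as annotated below; this annotation is an inference from the two call sites, not documented API:

	reader := clickhouseReader.NewReader(
		nil,            // SQL store (unused in these tests)
		telemetryStore, // ClickHouse telemetry store
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",                         // cluster name
		time.Duration(time.Second), // flux interval
		nil,                        // trace-detail cache
		nil,                        // shared cache
		options,                    // reader options
	)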
@@ -238,8 +238,8 @@ func TestV2FindMissingTimeRangesZeroFreshNess(t *testing.T) {
	}

	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)

@@ -458,8 +458,8 @@ func TestV2FindMissingTimeRangesWithFluxInterval(t *testing.T) {
	}

	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)

@@ -638,8 +638,8 @@ func TestV2QueryRangePanelGraph(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -793,8 +793,8 @@ func TestV2QueryRangeValueType(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -960,8 +960,8 @@ func TestV2QueryRangeTimeShiftWithCache(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -1068,8 +1068,8 @@ func TestV2QueryRangeTimeShiftWithLimitAndCache(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)

@@ -1147,8 +1147,8 @@ func TestV2QueryRangeValueTypePromQL(t *testing.T) {
		},
	}
	cacheOpts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: cacheOpts})
	require.NoError(t, err)
@@ -1454,14 +1454,15 @@ func Test_querier_Traces_runWindowBasedListQueryDesc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{

@@ -1678,14 +1679,15 @@ func Test_querier_Traces_runWindowBasedListQueryAsc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{

@@ -1976,14 +1978,15 @@ func Test_querier_Logs_runWindowBasedListQueryDesc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{

@@ -2202,14 +2205,15 @@ func Test_querier_Logs_runWindowBasedListQueryAsc(t *testing.T) {
	}

	// Create reader and querier
	reader := clickhouseReader.NewReaderFromClickhouseConnection(
		options,
	reader := clickhouseReader.NewReader(
		nil,
		telemetryStore,
		prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}),
		"",
		time.Duration(time.Second),
		nil,
		nil,
		options,
	)

	q := &querier{
@@ -9,6 +9,7 @@ import (
	_ "net/http/pprof" // http profiler
	"slices"

	"github.com/SigNoz/signoz/pkg/cache/memorycache"
	"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"

	"github.com/gorilla/handlers"

@@ -74,13 +75,26 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		return nil, err
	}

	cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
		Provider: "memory",
		Memory: cache.Memory{
			NumCounters: 10 * 10000,
			MaxCost:     1 << 27, // 128 MB
		},
	})
	if err != nil {
		return nil, err
	}

	reader := clickhouseReader.NewReader(
		signoz.SQLStore,
		signoz.TelemetryStore,
		signoz.Prometheus,
		signoz.TelemetryStore.Cluster(),
		config.Querier.FluxInterval,
		cacheForTraceDetail,
		signoz.Cache,
		nil,
	)

	rm, err := makeRulesManager(
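Note the design choice here: trace-detail lookups get a dedicated in-memory cache (10 * 10000 counters, MaxCost 1 << 27 = 134,217,728 bytes, about 128 MiB) instead of sharing signoz.Cache, presumably so large trace payloads cannot evict other cached query data. For reference, a sketch of the sizing arithmetic only (the constant names are hypothetical, not project code):

	const (
		traceDetailNumCounters = 10 * 10000 // ~100k tracked keys
		traceDetailMaxCost     = 1 << 27    // 134,217,728 bytes ≈ 128 MiB
	)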
@@ -9,6 +9,7 @@ import (

	"github.com/SigNoz/signoz/pkg/query-service/model"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/huandu/go-sqlbuilder"
)

const (

@@ -216,13 +217,6 @@ const (
		"CAST((attributes_bool_key, attributes_bool_value), 'Map(String, Bool)') as attributes_bool," +
		"CAST((resources_string_key, resources_string_value), 'Map(String, String)') as resources_string," +
		"CAST((scope_string_key, scope_string_value), 'Map(String, String)') as scope "
	LogsSQLSelectV2 = "SELECT " +
		"timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, " +
		"attributes_string, " +
		"attributes_number, " +
		"attributes_bool, " +
		"resources_string, " +
		"scope_string "
	TracesExplorerViewSQLSelectWithSubQuery = "(SELECT traceID, durationNano, " +
		"serviceName, name FROM %s.%s WHERE parentSpanID = '' AND %s ORDER BY durationNano DESC LIMIT 1 BY traceID"
	TracesExplorerViewSQLSelectBeforeSubQuery = "SELECT subQuery.serviceName as `subQuery.serviceName`, subQuery.name as `subQuery.name`, count() AS " +

@@ -692,6 +686,7 @@ var StaticFieldsTraces = map[string]v3.AttributeKey{}
var IsDotMetricsEnabled = false
var PreferSpanMetrics = false
var MaxJSONFlatteningDepth = 1
var BodyJSONQueryEnabled = GetOrDefaultEnv("BODY_JSON_QUERY_ENABLED", "false") == "true"

func init() {
	StaticFieldsTraces = maps.Clone(NewStaticFieldsTraces)

@@ -732,3 +727,15 @@ const InspectMetricsMaxTimeDiff = 1800000

const DotMetricsEnabled = "DOT_METRICS_ENABLED"
const maxJSONFlatteningDepth = "MAX_JSON_FLATTENING_DEPTH"

func LogsSQLSelectV2() string {
	sb := sqlbuilder.NewSelectBuilder()
	columns := []string{"timestamp", "id", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version", "body"}
	if BodyJSONQueryEnabled {
		columns = append(columns, "body_json", "body_json_promoted")
	}
	columns = append(columns, "attributes_string", "attributes_number", "attributes_bool", "resources_string", "scope_string")
	sb.Select(columns...)
	query, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return query + " " // add space to avoid concatenation issues
}
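In effect, the LogsSQLSelectV2 constant becomes a function so the column list can react to BODY_JSON_QUERY_ENABLED at startup. Assuming go-sqlbuilder emits a bare SELECT when no FROM clause is set, and that the function lives in the query-service constants package, a hypothetical caller would see roughly:

	// With BODY_JSON_QUERY_ENABLED unset, this prints approximately:
	//   SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text,
	//   severity_number, scope_name, scope_version, body, attributes_string,
	//   attributes_number, attributes_bool, resources_string, scope_string
	// With the flag set, body_json and body_json_promoted are spliced in
	// after body.
	fmt.Print(constants.LogsSQLSelectV2())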
@@ -12,9 +12,12 @@ import (
// AlertState denotes the state of an active alert.
type AlertState int

// The enum values are ordered by priority (lowest to highest).
// When determining overall rule state, higher numeric values take precedence.
const (
	StateInactive AlertState = iota
	StatePending
	StateRecovering
	StateFiring
	StateNoData
	StateDisabled

@@ -32,6 +35,8 @@ func (s AlertState) String() string {
		return "nodata"
	case StateDisabled:
		return "disabled"
	case StateRecovering:
		return "recovering"
	}
	panic(errors.Errorf("unknown alert state: %d", s))
}

@@ -58,6 +63,8 @@ func (s *AlertState) UnmarshalJSON(b []byte) error {
		*s = StateNoData
	case "disabled":
		*s = StateDisabled
	case "recovering":
		*s = StateRecovering
	default:
		*s = StateInactive
	}

@@ -83,6 +90,8 @@ func (s *AlertState) Scan(value interface{}) error {
		*s = StateNoData
	case "disabled":
		*s = StateDisabled
	case "recovering":
		*s = StateRecovering
	}
	return nil
}
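Because the enum is declared in priority order, with StateRecovering slotted between StatePending and StateFiring, the overall state of a rule reduces to a max-fold over its alerts. A sketch under that documented ordering (the helper name is hypothetical):

	// overallState returns the highest-priority state among the given
	// alerts, relying on the ordering documented above:
	// Inactive < Pending < Recovering < Firing < NoData < Disabled.
	func overallState(states []AlertState) AlertState {
		out := StateInactive
		for _, s := range states {
			if s > out {
				out = s
			}
		}
		return out
	}

Note that inserting StateRecovering mid-enum renumbers the later values, which is presumably why the string-based UnmarshalJSON and Scan paths above matter for any persisted state.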
@@ -5,11 +5,16 @@ import (
	"reflect"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	expr "github.com/antonmedv/expr"
	"go.uber.org/zap"
)

var (
	CodeExprCompilationFailed = errors.MustNewCode("expr_compilation_failed")
)

var logOperatorsToExpr = map[v3.FilterOperator]string{
	v3.FilterOperatorEqual:    "==",
	v3.FilterOperatorNotEqual: "!=",

@@ -50,7 +55,7 @@ func Parse(filters *v3.FilterSet) (string, error) {
	var res []string
	for _, v := range filters.Items {
		if _, ok := logOperatorsToExpr[v.Operator]; !ok {
			return "", fmt.Errorf("operator not supported")
			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "operator not supported: %s", v.Operator)
		}

		name := getName(v.Key)

@@ -108,7 +113,7 @@ func Parse(filters *v3.FilterSet) (string, error) {
	q := strings.Join(res, " "+strings.ToLower(filters.Operator)+" ")
	_, err := expr.Compile(q)
	if err != nil {
		return "", err
		return "", errors.WrapInternalf(err, CodeExprCompilationFailed, "failed to compile expression: %s", q)
	}

	return q, nil
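The compile step at the end of Parse is purely a validation pass over the joined expression: the program it produces is discarded. A self-contained sketch of that step, using the same antonmedv/expr package imported above (the example expression itself is made up):

	package main

	import (
		"fmt"

		expr "github.com/antonmedv/expr"
	)

	func main() {
		// Mirrors Parse's final step: per-item conditions joined with the
		// lowercased filter-set operator, then compiled to catch syntax
		// errors early.
		q := `status == "error" and duration != 0`
		if _, err := expr.Compile(q); err != nil {
			fmt.Println("failed to compile expression:", err)
			return
		}
		fmt.Println("compiled:", q)
	}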
@@ -3,7 +3,6 @@ package querycache_test

import (
	"context"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/cache"
	"github.com/SigNoz/signoz/pkg/cache/cachetest"

@@ -17,8 +16,8 @@ import (
func TestFindMissingTimeRanges(t *testing.T) {
	// Initialize the mock cache
	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)

@@ -243,8 +242,8 @@ func TestFindMissingTimeRanges(t *testing.T) {
func TestFindMissingTimeRangesV2(t *testing.T) {
	// Initialize the mock cache
	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)

@@ -590,8 +589,8 @@ func TestFindMissingTimeRangesV2(t *testing.T) {
func TestMergeWithCachedSeriesData(t *testing.T) {
	// Initialize the mock cache
	opts := cache.Memory{
		TTL:             5 * time.Minute,
		CleanupInterval: 10 * time.Minute,
		NumCounters:     10 * 1000,
		MaxCost:         1 << 26,
	}
	c, err := cachetest.New(cache.Config{Provider: "memory", Memory: opts})
	require.NoError(t, err)
@@ -191,6 +191,26 @@ func (r *BaseRule) currentAlerts() []*ruletypes.Alert {
	return alerts
}

// ActiveAlertsLabelFP returns a map of active alert labels fingerprint and
// the fingerprint is computed using the QueryResultLables.Hash() method.
// We use the QueryResultLables instead of labels as these labels are raw labels
// that we get from the sample.
// This is useful in cases where we want to check if an alert is still active
// based on the labels we have.
func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
	r.mtx.Lock()
	defer r.mtx.Unlock()

	activeAlerts := make(map[uint64]struct{}, len(r.Active))
	for _, alert := range r.Active {
		if alert == nil || alert.QueryResultLables == nil {
			continue
		}
		activeAlerts[alert.QueryResultLables.Hash()] = struct{}{}
	}
	return activeAlerts
}

func (r *BaseRule) EvalDelay() time.Duration {
	return r.evalDelay
}
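These fingerprints feed the new ruletypes.EvalData{ActiveAlerts: ...} argument that Threshold.Eval receives later in this diff, letting the threshold evaluation check whether a sample's raw labels belong to an already-active alert. A hedged usage sketch (the helper is hypothetical; QueryResultLables.Hash() is the method used above):

	// isActive reports whether a sample's raw label set matches an alert
	// the rule currently holds, using the fingerprint set built by
	// ActiveAlertsLabelFP.
	func isActive(active map[uint64]struct{}, sampleLabelsHash uint64) bool {
		_, ok := active[sampleLabelsHash]
		return ok
	}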
@@ -1,9 +1,10 @@
package rules

import (
	"github.com/stretchr/testify/require"
	"testing"

	"github.com/stretchr/testify/require"

	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

@@ -74,7 +75,7 @@ func TestBaseRule_RequireMinPoints(t *testing.T) {

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			_, err := test.rule.Threshold.ShouldAlert(*test.series, "")
			_, err := test.rule.Threshold.Eval(*test.series, "", ruletypes.EvalData{})
			require.NoError(t, err)
			require.Equal(t, len(test.series.Points) >= test.rule.ruleCondition.RequiredNumPoints, test.shouldAlert)
		})
@@ -4,13 +4,14 @@ import (
	"context"
	"encoding/json"
	"fmt"
	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
	"log/slog"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"

	"go.uber.org/zap"

	"github.com/go-openapi/strfmt"

@@ -159,7 +159,9 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
			continue
		}

		results, err := r.Threshold.ShouldAlert(toCommonSeries(series), r.Unit())
		results, err := r.Threshold.Eval(toCommonSeries(series), r.Unit(), ruletypes.EvalData{
			ActiveAlerts: r.ActiveAlertsLabelFP(),
		})
		if err != nil {
			return nil, err
		}

@@ -233,6 +235,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
				Value:        result.V,
				GeneratorURL: r.GeneratorURL(),
				Receivers:    ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
				IsRecovering: result.IsRecovering,
			}
		}
	}

@@ -245,6 +248,9 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
		if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
			alert.Value = a.Value
			alert.Annotations = a.Annotations
			// Update the recovering and missing state of existing alert
			alert.IsRecovering = a.IsRecovering
			alert.Missing = a.Missing
			if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
				alert.Receivers = ruleReceiverMap[v]
			}

@@ -304,6 +310,29 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
			})
		}

		// We need to change firing alert to recovering if the returned sample meets recovery threshold
		changeAlertingToRecovering := a.State == model.StateFiring && a.IsRecovering
		// We need to change recovering alerts to firing if the returned sample meets target threshold
		changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
		// in any of the above case we need to update the status of alert
		if changeAlertingToRecovering || changeRecoveringToFiring {
			state := model.StateRecovering
			if changeRecoveringToFiring {
				state = model.StateFiring
			}
			a.State = state
			r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID:       r.ID(),
				RuleName:     r.Name(),
				State:        state,
				StateChanged: true,
				UnixMilli:    ts.UnixMilli(),
				Labels:       model.LabelsString(labelsJSON),
				Fingerprint:  a.QueryResultLables.Hash(),
				Value:        a.Value,
			})
		}
	}
	r.health = ruletypes.HealthGood
	r.lastError = err
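The firing/recovering transition added here (and repeated verbatim in ThresholdRule.Eval further down) boils down to a small pure decision. A sketch with the same two conditions (nextState is a hypothetical name, not project code):

	// nextState decides whether an alert should flip between firing and
	// recovering, mirroring the two booleans in the hunk above.
	func nextState(current model.AlertState, isRecovering, missing bool) (model.AlertState, bool) {
		switch {
		case current == model.StateFiring && isRecovering:
			// sample met the recovery threshold: firing -> recovering
			return model.StateRecovering, true
		case current == model.StateRecovering && !isRecovering && !missing:
			// sample met the target threshold again: recovering -> firing
			return model.StateFiring, true
		default:
			return current, false // no change; no state-history row is recorded
		}
	}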
@@ -23,7 +23,7 @@ func getVectorValues(vectors []ruletypes.Sample) []float64 {
	return values
}

func TestPromRuleShouldAlert(t *testing.T) {
func TestPromRuleEval(t *testing.T) {
	postableRule := ruletypes.PostableRule{
		AlertName: "Test Rule",
		AlertType: ruletypes.AlertTypeMetric,

@@ -696,7 +696,7 @@ func TestPromRuleShouldAlert(t *testing.T) {
			assert.NoError(t, err)
		}

		resultVectors, err := rule.Threshold.ShouldAlert(toCommonSeries(c.values), rule.Unit())
		resultVectors, err := rule.Threshold.Eval(toCommonSeries(c.values), rule.Unit(), ruletypes.EvalData{})
		assert.NoError(t, err)

		// Compare full result vector with expected vector
@@ -24,6 +24,8 @@ type Rule interface {
	HoldDuration() time.Duration
	State() model.AlertState
	ActiveAlerts() []*ruletypes.Alert
	// ActiveAlertsLabelFP returns a map of active alert labels fingerprint
	ActiveAlertsLabelFP() map[uint64]struct{}

	PreferredChannels() []string

@@ -488,7 +488,9 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
				continue
			}
		}
		resultSeries, err := r.Threshold.ShouldAlert(*series, r.Unit())
		resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
			ActiveAlerts: r.ActiveAlertsLabelFP(),
		})
		if err != nil {
			return nil, err
		}

@@ -565,7 +567,9 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
				continue
			}
		}
		resultSeries, err := r.Threshold.ShouldAlert(*series, r.Unit())
		resultSeries, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
			ActiveAlerts: r.ActiveAlertsLabelFP(),
		})
		if err != nil {
			return nil, err
		}

@@ -666,13 +670,14 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
		// Links with timestamps should go in annotations since labels
		// is used alert grouping, and we want to group alerts with the same
		// label set, but different timestamps, together.
		if r.typ == ruletypes.AlertTypeTraces {
		switch r.typ {
		case ruletypes.AlertTypeTraces:
			link := r.prepareLinksToTraces(ctx, ts, smpl.Metric)
			if link != "" && r.hostFromSource() != "" {
				r.logger.InfoContext(ctx, "adding traces link to annotations", "link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link))
				annotations = append(annotations, labels.Label{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
			}
		} else if r.typ == ruletypes.AlertTypeLogs {
		case ruletypes.AlertTypeLogs:
			link := r.prepareLinksToLogs(ctx, ts, smpl.Metric)
			if link != "" && r.hostFromSource() != "" {
				r.logger.InfoContext(ctx, "adding logs link to annotations", "link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link))

@@ -698,6 +703,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
			GeneratorURL: r.GeneratorURL(),
			Receivers:    ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
			Missing:      smpl.IsMissing,
			IsRecovering: smpl.IsRecovering,
		}
	}

@@ -711,6 +717,9 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er

			alert.Value = a.Value
			alert.Annotations = a.Annotations
			// Update the recovering and missing state of existing alert
			alert.IsRecovering = a.IsRecovering
			alert.Missing = a.Missing
			if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
				alert.Receivers = ruleReceiverMap[v]
			}

@@ -735,6 +744,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
				delete(r.Active, fp)
			}
			if a.State != model.StateInactive {
				r.logger.DebugContext(ctx, "converting firing alert to inActive", "name", r.Name())
				a.State = model.StateInactive
				a.ResolvedAt = ts
				itemsToAdd = append(itemsToAdd, model.RuleStateHistory{

@@ -752,6 +762,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
		}

		if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
			r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
			a.State = model.StateFiring
			a.FiredAt = ts
			state := model.StateFiring

@@ -769,6 +780,30 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
				Value: a.Value,
			})
		}

		// We need to change firing alert to recovering if the returned sample meets recovery threshold
		changeAlertingToRecovering := a.State == model.StateFiring && a.IsRecovering
		// We need to change recovering alerts to firing if the returned sample meets target threshold
		changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
		// in any of the above case we need to update the status of alert
		if changeAlertingToRecovering || changeRecoveringToFiring {
			state := model.StateRecovering
			if changeRecoveringToFiring {
				state = model.StateFiring
			}
			a.State = state
			r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID:       r.ID(),
				RuleName:     r.Name(),
				State:        state,
				StateChanged: true,
				UnixMilli:    ts.UnixMilli(),
				Labels:       model.LabelsString(labelsJSON),
				Fingerprint:  a.QueryResultLables.Hash(),
				Value:        a.Value,
			})
		}
	}

	currentState := r.State()

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -198,7 +198,6 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
			FieldMapper:      v.fieldMapper,
			ConditionBuilder: v.conditionBuilder,
			FullTextColumn:   v.fullTextColumn,
			JsonBodyPrefix:   v.jsonBodyPrefix,
			JsonKeyToKey:     v.jsonKeyToKey,
		}, 0, 0,
	)
Some files were not shown because too many files have changed in this diff