Compare commits


2 Commits

Author    SHA1        Message                Date
YounixM   3476f3032d  format                 2024-06-22 14:45:47 +05:30
YounixM   4b757b382f  feat: resizable table  2024-06-22 14:04:55 +05:30
916 changed files with 8926 additions and 53398 deletions

.github/CODEOWNERS

@@ -5,6 +5,6 @@
/frontend/ @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @SigNoz/devops
/sample-apps/ @SigNoz/devops
.github @SigNoz/devops
/deploy/ @prashant-shahi
/sample-apps/ @prashant-shahi
.github @prashant-shahi


@@ -8,13 +8,6 @@ on:
- release/v*
jobs:
check-no-ee-references:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Run check
run: make check-no-ee-references
build-frontend:
runs-on: ubuntu-latest
steps:
@@ -43,6 +36,7 @@ jobs:
run: |
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
- name: Install dependencies
run: cd frontend && yarn install
- name: Run ESLint


@@ -9,6 +9,7 @@ on:
- v*
jobs:
image-build-and-push-query-service:
runs-on: ubuntu-latest
steps:
@@ -150,13 +151,13 @@ jobs:
run: |
echo 'INTERCOM_APP_ID="${{ secrets.INTERCOM_APP_ID }}"' > frontend/.env
echo 'SEGMENT_ID="${{ secrets.SEGMENT_ID }}"' >> frontend/.env
echo 'CLARITY_PROJECT_ID="${{ secrets.CLARITY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_AUTH_TOKEN="${{ secrets.SENTRY_AUTH_TOKEN }}"' >> frontend/.env
echo 'SENTRY_ORG="${{ secrets.SENTRY_ORG }}"' >> frontend/.env
echo 'SENTRY_PROJECT_ID="${{ secrets.SENTRY_PROJECT_ID }}"' >> frontend/.env
echo 'SENTRY_DSN="${{ secrets.SENTRY_DSN }}"' >> frontend/.env
echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env
echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
- name: Install dependencies
working-directory: frontend
run: yarn install


@@ -30,7 +30,6 @@ jobs:
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
GCP_ZONE: ${{ secrets.GCP_ZONE }}
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
CLOUDSDK_CORE_DISABLE_PROMPTS: 1
run: |
read -r -d '' COMMAND <<EOF || true
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
@@ -52,4 +51,4 @@ jobs:
make build-frontend-amd64
make run-testing
EOF
gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
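Both variants of this step build a multi-line remote script with a shell heredoc (`read -r -d '' COMMAND <<EOF || true`; `read` exits non-zero at EOF, hence the `|| true`) and hand it to `gcloud compute ssh --command`; the hunk only toggles between the `beta` and GA command groups. A minimal Go sketch of the same pattern via `os/exec`, assuming `gcloud` is on PATH and the `GCP_*`/`GITHUB_BRANCH` variables are set as they would be in CI:

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Multi-line remote script, analogous to the heredoc in the workflow.
	command := fmt.Sprintf(`echo "GITHUB_BRANCH: %s"
make build-frontend-amd64
make run-testing`, os.Getenv("GITHUB_BRANCH"))

	// Instance/zone/project come from the environment; in CI they are secrets.
	cmd := exec.Command("gcloud", "compute", "ssh", os.Getenv("GCP_INSTANCE"),
		"--zone", os.Getenv("GCP_ZONE"),
		"--project", os.Getenv("GCP_PROJECT"),
		"--ssh-key-expire-after=15m",
		"--tunnel-through-iap",
		"--command", command)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		fmt.Fprintln(os.Stderr, "remote command failed:", err)
		os.Exit(1)
	}
}
```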


@@ -30,7 +30,6 @@ jobs:
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
GCP_ZONE: ${{ secrets.GCP_ZONE }}
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
CLOUDSDK_CORE_DISABLE_PROMPTS: 1
run: |
read -r -d '' COMMAND <<EOF || true
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
@@ -53,4 +52,4 @@ jobs:
make build-frontend-amd64
make run-testing
EOF
gcloud beta compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"

.gitignore

@@ -67,6 +67,3 @@ e2e/.auth
# go
vendor/
**/main/**
# git-town
.git-branches.toml


@@ -0,0 +1,7 @@
#!/bin/sh
# Comments out the query-service and frontend sections of deploy/docker/clickhouse-setup/docker-compose.yaml.
# Update the line numbers when deploy/docker/clickhouse-setup/docker-compose.yaml changes.
# Docs ref: https://github.com/SigNoz/signoz/blob/main/CONTRIBUTING.md#contribute-to-frontend-with-docker-installation-of-signoz
sed -i 38,62's/.*/# &/' .././deploy/docker/clickhouse-setup/docker-compose.yaml
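The `sed` one-liner prefixes `# ` to every line in the hard-coded range 38–62, which is what disables the query-service and frontend services in the compose file. A rough Go equivalent of that line-range transform, for illustration only (the range numbers are an assumption carried over from the script):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// commentOutRange prefixes "# " to lines from..to (1-indexed, inclusive),
// mirroring `sed -i '38,62s/.*/# &/' file`.
func commentOutRange(path string, from, to int) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	lines := strings.Split(string(data), "\n")
	for i := from - 1; i < to && i < len(lines); i++ {
		lines[i] = "# " + lines[i]
	}
	return os.WriteFile(path, []byte(strings.Join(lines, "\n")), 0o644)
}

func main() {
	if err := commentOutRange("deploy/docker/clickhouse-setup/docker-compose.yaml", 38, 62); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```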


@@ -347,7 +347,7 @@ curl -sL https://github.com/SigNoz/signoz/raw/develop/sample-apps/hotrod/hotrod-
```bash
kubectl -n sample-application run strzal --image=djbingham/curl \
--restart='OnFailure' -i --tty --rm --command -- curl -X POST -F \
'user_count=6' -F 'spawn_rate=2' http://locust-master:8089/swarm
'locust_count=6' -F 'hatch_rate=2' http://locust-master:8089/swarm
```
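The hunk swaps the form-field names between the newer Locust API (`user_count`, `spawn_rate`) and the pre-1.0 names (`locust_count`, `hatch_rate`); Locust renamed these fields around its 1.0 release, so the names must match the Locust version bundled with the sample app. A minimal Go sketch of the same swarm request, assuming the in-cluster `locust-master:8089` address from the docs:

```go
package main

import (
	"fmt"
	"net/http"
	"net/url"
	"os"
)

func main() {
	// Field names for pre-1.0 Locust; newer versions expect
	// user_count / spawn_rate instead.
	form := url.Values{
		"locust_count": {"6"},
		"hatch_rate":   {"2"},
	}
	resp, err := http.PostForm("http://locust-master:8089/swarm", form)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer resp.Body.Close()
	fmt.Println("swarm started:", resp.Status)
}
```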
**5.1.3 To stop the load generation:**


@@ -178,15 +178,6 @@ clear-swarm-ch:
@docker run --rm -v "$(PWD)/$(SWARM_DIRECTORY)/data:/pwd" busybox \
sh -c "cd /pwd && rm -rf clickhouse*/* zookeeper-*/*"
check-no-ee-references:
@echo "Checking for 'ee' package references in 'pkg' directory..."
@if grep -R --include="*.go" '.*/ee/.*' pkg/; then \
echo "Error: Found references to 'ee' packages in 'pkg' directory"; \
exit 1; \
else \
echo "No references to 'ee' packages found in 'pkg' directory"; \
fi
test:
go test ./pkg/query-service/app/metrics/...
go test ./pkg/query-service/cache/...
@@ -197,4 +188,3 @@ test:
go test ./pkg/query-service/tests/integration/...
go test ./pkg/query-service/rules/...
go test ./pkg/query-service/collectorsimulator/...
go test ./pkg/query-service/postprocess/...
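The removed `check-no-ee-references` target fails the build if any Go file under `pkg/` references an `ee` (enterprise) package, keeping the open-source tree free of enterprise imports. A rough standalone sketch of the same check in Go:

```go
package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"regexp"
	"strings"
)

func main() {
	eeRef := regexp.MustCompile(`/ee/`) // same pattern the Makefile greps for
	found := false
	err := filepath.WalkDir("pkg", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() || !strings.HasSuffix(path, ".go") {
			return nil
		}
		data, err := os.ReadFile(path)
		if err != nil {
			return err
		}
		if eeRef.Match(data) {
			fmt.Println("error: found reference to 'ee' packages in", path)
			found = true
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if found {
		os.Exit(1)
	}
}
```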


@@ -198,14 +198,14 @@ Not sure how to get started? Just ping us on `#contributing` in our [slack commu
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Vikrant Gupta](https://github.com/vikrantgupta25)
- [Sagar Rajput](https://github.com/SagarRajput-7)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/grandwizard28)
- [Dhawal Sanghvi](https://github.com/dhawal1248)
<br /><br />


@@ -23,9 +23,6 @@
[1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
-->
<level>information</level>
<formatting>
<type>json</type>
</formatting>
<log>/var/log/clickhouse-server/clickhouse-server.log</log>
<errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
<!-- Rotation policy
@@ -652,12 +649,12 @@
See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables
-->
<!--
<macros>
<shard>01</shard>
<replica>example01-01-1</replica>
</macros>
-->
<!-- Reloading interval for embedded dictionaries, in seconds. Default: 3600. -->


@@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.49.1
image: signoz/query-service:0.46.0
command:
[
"-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:0.48.0
image: signoz/frontend:0.46.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.102.2
image: signoz/signoz-otel-collector:0.88.24
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -211,7 +211,6 @@ services:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /:/hostfs:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}}
- DOCKER_MULTI_NODE_CLUSTER=false
@@ -238,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.102.2
image: signoz/signoz-schema-migrator:0.88.24
deploy:
restart_policy:
condition: on-failure


@@ -36,7 +36,6 @@ receivers:
# endpoint: 0.0.0.0:6832
hostmetrics:
collection_interval: 30s
root_path: /hostfs
scrapers:
cpu: {}
load: {}
@@ -154,8 +153,6 @@ extensions:
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions: [health_check, zpages, pprof]


@@ -23,9 +23,6 @@
[1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
-->
<level>information</level>
<formatting>
<type>json</type>
</formatting>
<log>/var/log/clickhouse-server/clickhouse-server.log</log>
<errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
<!-- Rotation policy
@@ -652,12 +649,12 @@
See https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replication/#creating-replicated-tables
-->
<!--
<macros>
<shard>01</shard>
<replica>example01-01-1</replica>
</macros>
-->
<!-- Reloading interval for embedded dictionaries, in seconds. Default: 3600. -->


@@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.102.2
image: signoz/signoz-otel-collector:0.88.24
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -93,8 +93,6 @@ services:
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /:/hostfs:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
ports:


@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.49.1}
image: signoz/query-service:${DOCKER_TAG:-0.46.0}
container_name: signoz-query-service
command:
[
@@ -204,7 +204,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.49.1}
image: signoz/frontend:${DOCKER_TAG:-0.46.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -216,7 +216,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -230,7 +230,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
container_name: signoz-otel-collector
command:
[
@@ -244,7 +244,6 @@ services:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /:/hostfs:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
- DOCKER_MULTI_NODE_CLUSTER=false


@@ -164,7 +164,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.49.1}
image: signoz/query-service:${DOCKER_TAG:-0.46.0}
container_name: signoz-query-service
command:
[
@@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.49.1}
image: signoz/frontend:${DOCKER_TAG:-0.46.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
container_name: signoz-otel-collector
command:
[
@@ -243,7 +243,6 @@ services:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
- /:/hostfs:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
- DOCKER_MULTI_NODE_CLUSTER=false


@@ -36,7 +36,6 @@ receivers:
# endpoint: 0.0.0.0:6832
hostmetrics:
collection_interval: 30s
root_path: /hostfs
scrapers:
cpu: {}
load: {}
@@ -158,8 +157,6 @@ exporters:
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:


@@ -1,8 +1,3 @@
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
server {
listen 3301;
server_name _;
@@ -47,14 +42,6 @@ server {
proxy_read_timeout 600s;
}
location /ws {
proxy_pass http://query-service:8080/ws;
proxy_http_version 1.1;
proxy_set_header Upgrade "websocket";
proxy_set_header Connection "upgrade";
proxy_read_timeout 86400;
}
# redirect server error pages to the static page /50x.html
#
error_page 500 502 503 504 /50x.html;
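The removed nginx block is the standard WebSocket reverse-proxy setup: the `map` derives the `Connection` header from `Upgrade`, and `/ws` is forwarded to the query service over HTTP/1.1 with a long read timeout. For comparison, Go's `httputil.ReverseProxy` (Go 1.12+) forwards upgrade handshakes on its own; a minimal sketch assuming the same `query-service:8080` upstream and port 3301:

```go
package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

func main() {
	// Upstream query service, as in the nginx config.
	target, err := url.Parse("http://query-service:8080")
	if err != nil {
		log.Fatal(err)
	}
	// ReverseProxy passes Upgrade/Connection headers through,
	// so /ws WebSocket traffic is tunneled without extra config.
	proxy := httputil.NewSingleHostReverseProxy(target)
	http.Handle("/ws", proxy)
	log.Fatal(http.ListenAndServe(":3301", nil))
}
```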


@@ -389,7 +389,7 @@ trap bye EXIT
URL="https://api.segment.io/v1/track"
HEADER_1="Content-Type: application/json"
HEADER_2="Authorization: Basic OWtScko3b1BDR1BFSkxGNlFqTVBMdDVibGpGaFJRQnI="
HEADER_2="Authorization: Basic NEdtb2E0aXhKQVVIeDJCcEp4c2p3QTFiRWZud0VlUno6"
send_event() {
error=""


@@ -1,32 +0,0 @@
package anomaly
import (
"context"
)
type DailyProvider struct {
BaseSeasonalProvider
}
var _ BaseProvider = (*DailyProvider)(nil)
func (dp *DailyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &dp.BaseSeasonalProvider
}
// NewDailyProvider uses the same generic option type
func NewDailyProvider(opts ...GenericProviderOption[*DailyProvider]) *DailyProvider {
dp := &DailyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},
}
for _, opt := range opts {
opt(dp)
}
return dp
}
func (p *DailyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
return nil, nil
}
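The `var _ BaseProvider = (*DailyProvider)(nil)` line in these removed provider files is Go's compile-time interface assertion: it costs nothing at runtime but breaks the build if the type ever drifts out of the interface. A tiny self-contained illustration of the idiom (names hypothetical):

```go
package main

import "fmt"

type Greeter interface{ Greet() string }

type English struct{}

func (English) Greet() string { return "hello" }

// Compile-time assertion: the build fails here if English
// ever stops satisfying Greeter, same as in the provider files above.
var _ Greeter = English{}

func main() { fmt.Println(English{}.Greet()) }
```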


@@ -1,32 +0,0 @@
package anomaly
import (
"context"
)
type HourlyProvider struct {
BaseSeasonalProvider
}
var _ BaseProvider = (*HourlyProvider)(nil)
func (hp *HourlyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &hp.BaseSeasonalProvider
}
// NewHourlyProvider now uses the generic option type
func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyProvider {
hp := &HourlyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},
}
for _, opt := range opts {
opt(hp)
}
return hp
}
func (p *HourlyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
return nil, nil
}


@@ -1,188 +0,0 @@
package anomaly
import (
"math"
"time"
"go.signoz.io/signoz/pkg/query-service/common"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
)
type Seasonality string
const (
SeasonalityHourly Seasonality = "hourly"
SeasonalityDaily Seasonality = "daily"
SeasonalityWeekly Seasonality = "weekly"
)
func (s Seasonality) IsValid() bool {
switch s {
case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
return true
default:
return false
}
}
type GetAnomaliesRequest struct {
Params *v3.QueryRangeParamsV3
Seasonality Seasonality
}
type GetAnomaliesResponse struct {
Results []*v3.Result
}
// anomalyParams is the params for anomaly detection
// prediction = avg(past_period_query) + avg(current_season_query) - avg(past_season_query)
//                          ^                                   ^
//                          |                                   |
//        (rounded value for past period)              (seasonal growth)
//
// score = abs(value - prediction) / stddev(current_season_query)
type anomalyQueryParams struct {
// CurrentPeriodQuery is the query range params for period user is looking at or eval window
// Example: (now-5m, now), (now-30m, now), (now-1h, now)
// The results obtained from this query are used to compare with predicted values
// and to detect anomalies
CurrentPeriodQuery *v3.QueryRangeParamsV3
// PastPeriodQuery is the query range params for past seasonal period
// Example: For weekly seasonality, (now-1w-4h-5m, now-1w)
// : For daily seasonality, (now-1d-2h-5m, now-1d)
// : For hourly seasonality, (now-1h-30m-5m, now-1h)
PastPeriodQuery *v3.QueryRangeParamsV3
// CurrentSeasonQuery is the query range params for current period (seasonal)
// Example: For weekly seasonality, this is the query range params for the (now-1w-5m, now)
// : For daily seasonality, this is the query range params for the (now-1d-5m, now)
// : For hourly seasonality, this is the query range params for the (now-1h-5m, now)
CurrentSeasonQuery *v3.QueryRangeParamsV3
// PastSeasonQuery is the query range params for past seasonal period to the current season
// Example: For weekly seasonality, this is the query range params for the (now-2w-5m, now-1w)
// : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
// : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
PastSeasonQuery *v3.QueryRangeParamsV3
}
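A worked example of the two formulas from the comment above: the prediction adds the seasonal growth (current-season mean minus past-season mean) to the past-period mean, and the score normalizes the deviation by the current season's standard deviation (the provider code further below uses the signed difference rather than the absolute value). A self-contained sketch with made-up numbers:

```go
package main

import (
	"fmt"
	"math"
)

func avg(xs []float64) float64 {
	var s float64
	for _, x := range xs {
		s += x
	}
	return s / float64(len(xs))
}

func stdDev(xs []float64) float64 {
	m := avg(xs)
	var s float64
	for _, x := range xs {
		s += (x - m) * (x - m)
	}
	return math.Sqrt(s / float64(len(xs)))
}

func main() {
	pastPeriod := []float64{100, 110, 105}    // same window, one season ago
	currentSeason := []float64{120, 118, 122} // current season to date
	pastSeason := []float64{98, 102, 100}     // previous season

	// prediction = avg(past period) + avg(current season) - avg(past season)
	prediction := avg(pastPeriod) + avg(currentSeason) - avg(pastSeason)

	observed := 160.0
	// score = (value - prediction) / stddev(current season)
	score := (observed - prediction) / stdDev(currentSeason)
	fmt.Printf("prediction=%.1f score=%.2f\n", prediction, score)
}
```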
func copyCompositeQuery(req *v3.QueryRangeParamsV3) *v3.CompositeQuery {
deepCopyCompositeQuery := *req.CompositeQuery
deepCopyCompositeQuery.BuilderQueries = make(map[string]*v3.BuilderQuery)
for k, v := range req.CompositeQuery.BuilderQueries {
query := *v
deepCopyCompositeQuery.BuilderQueries[k] = &query
}
return &deepCopyCompositeQuery
}
func updateStepInterval(req *v3.QueryRangeParamsV3) {
start := req.Start
end := req.End
req.Step = int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60))
for _, q := range req.CompositeQuery.BuilderQueries {
// If the step interval is less than the minimum allowed step interval, set it to the minimum allowed step interval
if minStep := common.MinAllowedStepInterval(start, end); q.StepInterval < minStep {
q.StepInterval = minStep
}
}
}
func prepareAnomalyQueryParams(req *v3.QueryRangeParamsV3, seasonality Seasonality) *anomalyQueryParams {
start := req.Start
end := req.End
currentPeriodQuery := &v3.QueryRangeParamsV3{
Start: start,
End: end,
CompositeQuery: req.CompositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(currentPeriodQuery)
var pastPeriodStart, pastPeriodEnd int64
switch seasonality {
// for one week period, we fetch the data from the past week with 4 hours offset
case SeasonalityWeekly:
pastPeriodStart = start - 166*time.Hour.Milliseconds() - 4*time.Hour.Milliseconds()
pastPeriodEnd = end - 166*time.Hour.Milliseconds()
// for one day period, we fetch the data from the past day with 2 hours offset
case SeasonalityDaily:
pastPeriodStart = start - 23*time.Hour.Milliseconds() - 2*time.Hour.Milliseconds()
pastPeriodEnd = end - 23*time.Hour.Milliseconds()
// for one hour period, we fetch the data from the past hour with 30 minutes offset
case SeasonalityHourly:
pastPeriodStart = start - 1*time.Hour.Milliseconds() - 30*time.Minute.Milliseconds()
pastPeriodEnd = end - 1*time.Hour.Milliseconds()
}
pastPeriodQuery := &v3.QueryRangeParamsV3{
Start: pastPeriodStart,
End: pastPeriodEnd,
CompositeQuery: copyCompositeQuery(req),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(pastPeriodQuery)
// seasonality growth trend
var currentGrowthPeriodStart, currentGrowthPeriodEnd int64
switch seasonality {
case SeasonalityWeekly:
currentGrowthPeriodStart = start - 7*24*time.Hour.Milliseconds()
currentGrowthPeriodEnd = end
case SeasonalityDaily:
currentGrowthPeriodStart = start - 23*time.Hour.Milliseconds()
currentGrowthPeriodEnd = end
case SeasonalityHourly:
currentGrowthPeriodStart = start - 1*time.Hour.Milliseconds()
currentGrowthPeriodEnd = end
}
currentGrowthQuery := &v3.QueryRangeParamsV3{
Start: currentGrowthPeriodStart,
End: currentGrowthPeriodEnd,
CompositeQuery: copyCompositeQuery(req),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(currentGrowthQuery)
var pastGrowthPeriodStart, pastGrowthPeriodEnd int64
switch seasonality {
case SeasonalityWeekly:
pastGrowthPeriodStart = start - 14*24*time.Hour.Milliseconds()
pastGrowthPeriodEnd = start - 7*24*time.Hour.Milliseconds()
case SeasonalityDaily:
pastGrowthPeriodStart = start - 2*time.Hour.Milliseconds()
pastGrowthPeriodEnd = start - 1*time.Hour.Milliseconds()
case SeasonalityHourly:
pastGrowthPeriodStart = start - 2*time.Hour.Milliseconds()
pastGrowthPeriodEnd = start - 1*time.Hour.Milliseconds()
}
pastGrowthQuery := &v3.QueryRangeParamsV3{
Start: pastGrowthPeriodStart,
End: pastGrowthPeriodEnd,
CompositeQuery: copyCompositeQuery(req),
Variables: make(map[string]interface{}, 0),
NoCache: false,
}
updateStepInterval(pastGrowthQuery)
return &anomalyQueryParams{
CurrentPeriodQuery: currentPeriodQuery,
PastPeriodQuery: pastPeriodQuery,
CurrentSeasonQuery: currentGrowthQuery,
PastSeasonQuery: pastGrowthQuery,
}
}
type anomalyQueryResults struct {
CurrentPeriodResults []*v3.Result
PastPeriodResults []*v3.Result
CurrentSeasonResults []*v3.Result
PastSeasonResults []*v3.Result
}


@@ -1,9 +0,0 @@
package anomaly
import (
"context"
)
type Provider interface {
GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error)
}


@@ -1,229 +0,0 @@
package anomaly
import (
"context"
"math"
"go.signoz.io/signoz/pkg/query-service/cache"
"go.signoz.io/signoz/pkg/query-service/interfaces"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
"go.signoz.io/signoz/pkg/query-service/utils/labels"
"go.uber.org/zap"
)
// BaseProvider is an interface that includes common methods for all provider types
type BaseProvider interface {
GetBaseSeasonalProvider() *BaseSeasonalProvider
}
// GenericProviderOption is a generic type for provider options
type GenericProviderOption[T BaseProvider] func(T)
func WithCache[T BaseProvider](cache cache.Cache) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().cache = cache
}
}
func WithKeyGenerator[T BaseProvider](keyGenerator cache.KeyGenerator) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().keyGenerator = keyGenerator
}
}
func WithFeatureLookup[T BaseProvider](ff interfaces.FeatureLookup) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().ff = ff
}
}
func WithReader[T BaseProvider](reader interfaces.Reader) GenericProviderOption[T] {
return func(p T) {
p.GetBaseSeasonalProvider().reader = reader
}
}
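`GenericProviderOption[T BaseProvider]` is the functional-options pattern lifted over generics: one `WithCache`/`WithReader`-style option definition serves every provider type, because each provider hands back its embedded `BaseSeasonalProvider`. A stripped-down sketch of the same shape (all names hypothetical):

```go
package main

import "fmt"

type base struct{ cacheTTL int }

// baseHolder mirrors BaseProvider: anything exposing its embedded base.
type baseHolder interface{ getBase() *base }

type option[T baseHolder] func(T)

// withCacheTTL works for every provider type that embeds base.
func withCacheTTL[T baseHolder](ttl int) option[T] {
	return func(p T) { p.getBase().cacheTTL = ttl }
}

type weekly struct{ base }

func (w *weekly) getBase() *base { return &w.base }

func newWeekly(opts ...option[*weekly]) *weekly {
	w := &weekly{}
	for _, o := range opts {
		o(w)
	}
	return w
}

func main() {
	w := newWeekly(withCacheTTL[*weekly](300))
	fmt.Println(w.cacheTTL) // 300
}
```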
type BaseSeasonalProvider struct {
querierV2 interfaces.Querier
reader interfaces.Reader
cache cache.Cache
keyGenerator cache.KeyGenerator
ff interfaces.FeatureLookup
}
func (p *BaseSeasonalProvider) getQueryParams(req *GetAnomaliesRequest) *anomalyQueryParams {
if !req.Seasonality.IsValid() {
req.Seasonality = SeasonalityWeekly
}
return prepareAnomalyQueryParams(req.Params, req.Seasonality)
}
func (p *BaseSeasonalProvider) getResults(ctx context.Context, params *anomalyQueryParams) (*anomalyQueryResults, error) {
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentPeriodQuery, nil)
if err != nil {
return nil, err
}
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, params.PastPeriodQuery, nil)
if err != nil {
return nil, err
}
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.CurrentSeasonQuery, nil)
if err != nil {
return nil, err
}
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, params.PastSeasonQuery, nil)
if err != nil {
return nil, err
}
return &anomalyQueryResults{
CurrentPeriodResults: currentPeriodResults,
PastPeriodResults: pastPeriodResults,
CurrentSeasonResults: currentSeasonResults,
PastSeasonResults: pastSeasonResults,
}, nil
}
func (p *BaseSeasonalProvider) getMatchingSeries(queryResult *v3.Result, series *v3.Series) *v3.Series {
for _, curr := range queryResult.Series {
currLabels := labels.FromMap(curr.Labels)
seriesLabels := labels.FromMap(series.Labels)
if currLabels.Hash() == seriesLabels.Hash() {
return curr
}
}
return nil
}
func (p *BaseSeasonalProvider) getAvg(series *v3.Series) float64 {
var sum float64
for _, smpl := range series.Points {
sum += smpl.Value
}
return sum / float64(len(series.Points))
}
func (p *BaseSeasonalProvider) getStdDev(series *v3.Series) float64 {
avg := p.getAvg(series)
var sum float64
for _, smpl := range series.Points {
sum += math.Pow(smpl.Value-avg, 2)
}
return math.Sqrt(sum / float64(len(series.Points)))
}
func (p *BaseSeasonalProvider) getPredictedSeries(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
predictedSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}
for _, curr := range series.Points {
predictedValue := p.getAvg(prevSeries) + p.getAvg(currentSeasonSeries) - p.getAvg(pastSeasonSeries)
predictedSeries.Points = append(predictedSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: predictedValue,
})
}
return predictedSeries
}
func (p *BaseSeasonalProvider) getExpectedValue(_, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) float64 {
prevSeriesAvg := p.getAvg(prevSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
zap.L().Debug("getExpectedValue",
zap.Float64("prevSeriesAvg", prevSeriesAvg),
zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg),
zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg),
zap.Float64("expectedValue", prevSeriesAvg+currentSeasonSeriesAvg-pastSeasonSeriesAvg),
)
return prevSeriesAvg + currentSeasonSeriesAvg - pastSeasonSeriesAvg
}
func (p *BaseSeasonalProvider) getScore(series, prevSeries, weekSeries, weekPrevSeries *v3.Series, value float64) float64 {
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries)
return (value - expectedValue) / p.getStdDev(weekSeries)
}
func (p *BaseSeasonalProvider) getAnomalyScores(series, prevSeries, currentSeasonSeries, pastSeasonSeries *v3.Series) *v3.Series {
anomalyScoreSeries := &v3.Series{
Labels: series.Labels,
LabelsArray: series.LabelsArray,
Points: []v3.Point{},
}
for _, curr := range series.Points {
anomalyScore := p.getScore(series, prevSeries, currentSeasonSeries, pastSeasonSeries, curr.Value)
anomalyScoreSeries.Points = append(anomalyScoreSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
Value: anomalyScore,
})
}
return anomalyScoreSeries
}
func (p *BaseSeasonalProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
anomalyParams := p.getQueryParams(req)
anomalyQueryResults, err := p.getResults(ctx, anomalyParams)
if err != nil {
return nil, err
}
currentPeriodResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.CurrentPeriodResults {
currentPeriodResultsMap[result.QueryName] = result
}
pastPeriodResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.PastPeriodResults {
pastPeriodResultsMap[result.QueryName] = result
}
currentSeasonResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.CurrentSeasonResults {
currentSeasonResultsMap[result.QueryName] = result
}
pastSeasonResultsMap := make(map[string]*v3.Result)
for _, result := range anomalyQueryResults.PastSeasonResults {
pastSeasonResultsMap[result.QueryName] = result
}
for _, result := range currentPeriodResultsMap {
pastPeriodResult, ok := pastPeriodResultsMap[result.QueryName]
if !ok {
continue
}
currentSeasonResult, ok := currentSeasonResultsMap[result.QueryName]
if !ok {
continue
}
pastSeasonResult, ok := pastSeasonResultsMap[result.QueryName]
if !ok {
continue
}
for _, series := range result.Series {
pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
pastSeasonSeries := p.getMatchingSeries(pastSeasonResult, series)
predictedSeries := p.getPredictedSeries(series, pastPeriodSeries, currentSeasonSeries, pastSeasonSeries)
result.PredictedSeries = append(result.PredictedSeries, predictedSeries)
anomalyScoreSeries := p.getAnomalyScores(series, pastPeriodSeries, currentSeasonSeries, pastSeasonSeries)
result.AnomalyScores = append(result.AnomalyScores, anomalyScoreSeries)
}
}
return &GetAnomaliesResponse{
Results: anomalyQueryResults.CurrentPeriodResults,
}, nil
}


@@ -1,31 +0,0 @@
package anomaly
import (
"context"
)
type WeeklyProvider struct {
BaseSeasonalProvider
}
var _ BaseProvider = (*WeeklyProvider)(nil)
func (wp *WeeklyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &wp.BaseSeasonalProvider
}
func NewWeeklyProvider(opts ...GenericProviderOption[*WeeklyProvider]) *WeeklyProvider {
wp := &WeeklyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},
}
for _, opt := range opts {
opt(wp)
}
return wp
}
func (p *WeeklyProvider) GetAnomalies(ctx context.Context, req *GetAnomaliesRequest) (*GetAnomaliesResponse, error) {
return nil, nil
}


@@ -24,6 +24,7 @@ import (
type APIHandlerOptions struct {
DataConnector interfaces.DataConnector
SkipConfig *basemodel.SkipConfig
PreferDelta bool
PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
@@ -38,8 +39,7 @@ type APIHandlerOptions struct {
Cache cache.Cache
Gateway *httputil.ReverseProxy
// Querier Influx Interval
FluxInterval time.Duration
UseLogsNewSchema bool
FluxInterval time.Duration
}
type APIHandler struct {
@@ -53,6 +53,7 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
SkipConfig: opts.SkipConfig,
PerferDelta: opts.PreferDelta,
PreferSpanMetrics: opts.PreferSpanMetrics,
MaxIdleConns: opts.MaxIdleConns,
MaxOpenConns: opts.MaxOpenConns,
@@ -64,7 +65,6 @@ func NewAPIHandler(opts APIHandlerOptions) (*APIHandler, error) {
LogsParsingPipelineController: opts.LogsParsingPipelineController,
Cache: opts.Cache,
FluxInterval: opts.FluxInterval,
UseLogsNewSchema: opts.UseLogsNewSchema,
})
if err != nil {


@@ -1,9 +1,7 @@
package api
import (
"errors"
"net/http"
"strings"
"github.com/gorilla/mux"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
@@ -31,10 +29,6 @@ func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request
// Get the dashboard UUID from the request
uuid := mux.Vars(r)["uuid"]
if strings.HasPrefix(uuid,"integration") {
RespondError(w, &model.ApiError{Typ: model.ErrorForbidden, Err: errors.New("dashboards created by integrations cannot be unlocked")}, "You are not authorized to lock/unlock this dashboard")
return
}
dashboard, err := dashboards.GetDashboard(r.Context(), uuid)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, err.Error())


@@ -1,48 +1,17 @@
package api
import (
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"time"
"go.signoz.io/signoz/ee/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
featureSet, err := ah.FF().GetFeatureFlags()
if err != nil {
ah.HandleError(w, err, http.StatusInternalServerError)
return
}
if constants.FetchFeatures == "true" {
zap.L().Debug("fetching license")
license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
if err != nil {
zap.L().Error("failed to fetch license", zap.Error(err))
} else if license == nil {
zap.L().Debug("no active license found")
} else {
licenseKey := license.Key
zap.L().Debug("fetching zeus features")
zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
if err == nil {
zap.L().Debug("fetched zeus features", zap.Any("features", zeusFeatures))
// merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
featureSet = MergeFeatureSets(zeusFeatures, featureSet)
} else {
zap.L().Error("failed to fetch zeus features", zap.Error(err))
}
}
}
if ah.opts.PreferSpanMetrics {
for idx := range featureSet {
feature := &featureSet[idx]
@@ -51,96 +20,5 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
}
}
}
ah.Respond(w, featureSet)
}
// fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint
// and returns the FeatureSet.
func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) {
// Check if the URL is empty
if url == "" {
return nil, fmt.Errorf("url is empty")
}
// Check if the licenseKey is empty
if licenseKey == "" {
return nil, fmt.Errorf("licenseKey is empty")
}
// Creating an HTTP client with a timeout for better control
client := &http.Client{
Timeout: 10 * time.Second,
}
// Creating a new GET request
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
// Setting the custom header
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
// Making the GET request
resp, err := client.Do(req)
if err != nil {
return nil, fmt.Errorf("failed to make GET request: %w", err)
}
defer func() {
if resp != nil {
resp.Body.Close()
}
}()
// Check for non-OK status code
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("%w: %d %s", errors.New("received non-OK HTTP status code"), resp.StatusCode, http.StatusText(resp.StatusCode))
}
// Reading and decoding the response body
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, fmt.Errorf("failed to read response body: %w", err)
}
var zeusResponse ZeusFeaturesResponse
if err := json.Unmarshal(body, &zeusResponse); err != nil {
return nil, fmt.Errorf("%w: %v", errors.New("failed to decode response body"), err)
}
if zeusResponse.Status != "success" {
return nil, fmt.Errorf("%w: %s", errors.New("failed to fetch zeus features"), zeusResponse.Status)
}
return zeusResponse.Data, nil
}
type ZeusFeaturesResponse struct {
Status string `json:"status"`
Data basemodel.FeatureSet `json:"data"`
}
// MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures.
func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet {
// Create a map to store the merged features
featureMap := make(map[string]basemodel.Feature)
// Add all features from the otherFeatures set to the map
for _, feature := range internalFeatures {
featureMap[feature.Name] = feature
}
// Add all features from the zeusFeatures set to the map
// If a feature already exists (i.e., same name), the zeusFeature will overwrite it
for _, feature := range zeusFeatures {
featureMap[feature.Name] = feature
}
// Convert the map back to a FeatureSet slice
var mergedFeatures basemodel.FeatureSet
for _, feature := range featureMap {
mergedFeatures = append(mergedFeatures, feature)
}
return mergedFeatures
}


@@ -1,88 +0,0 @@
package api
import (
"testing"
"github.com/stretchr/testify/assert"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
func TestMergeFeatureSets(t *testing.T) {
tests := []struct {
name string
zeusFeatures basemodel.FeatureSet
internalFeatures basemodel.FeatureSet
expected basemodel.FeatureSet
}{
{
name: "empty zeusFeatures and internalFeatures",
zeusFeatures: basemodel.FeatureSet{},
internalFeatures: basemodel.FeatureSet{},
expected: basemodel.FeatureSet{},
},
{
name: "non-empty zeusFeatures and empty internalFeatures",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
internalFeatures: basemodel.FeatureSet{},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
},
{
name: "empty zeusFeatures and non-empty internalFeatures",
zeusFeatures: basemodel.FeatureSet{},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature3", Active: false},
},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature2", Active: true},
{Name: "Feature4", Active: false},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: true},
{Name: "Feature3", Active: false},
{Name: "Feature4", Active: false},
},
},
{
name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts",
zeusFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
},
internalFeatures: basemodel.FeatureSet{
{Name: "Feature1", Active: false},
{Name: "Feature3", Active: true},
},
expected: basemodel.FeatureSet{
{Name: "Feature1", Active: true},
{Name: "Feature2", Active: false},
{Name: "Feature3", Active: true},
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
actual := MergeFeatureSets(test.zeusFeatures, test.internalFeatures)
assert.ElementsMatch(t, test.expected, actual)
})
}
}


@@ -0,0 +1,401 @@
package db
import (
"context"
"crypto/md5"
"encoding/json"
"fmt"
"reflect"
"regexp"
"sort"
"strings"
"time"
"go.signoz.io/signoz/ee/query-service/model"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
// GetMetricResultEE runs the query and returns the list of time series
func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string) ([]*basemodel.Series, string, error) {
defer utils.Elapsed("GetMetricResult", nil)()
zap.L().Info("Executing metric result query: ", zap.String("query", query))
var hash string
// If getSubTreeSpans function is used in the clickhouse query
if strings.Contains(query, "getSubTreeSpans(") {
var err error
query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash)
if err == fmt.Errorf("no spans found for the given query") {
return nil, "", nil
}
if err != nil {
return nil, "", err
}
}
rows, err := r.conn.Query(ctx, query)
if err != nil {
zap.L().Error("Error in processing query", zap.Error(err))
return nil, "", fmt.Errorf("error in processing query")
}
var (
columnTypes = rows.ColumnTypes()
columnNames = rows.Columns()
vars = make([]interface{}, len(columnTypes))
)
for i := range columnTypes {
vars[i] = reflect.New(columnTypes[i].ScanType()).Interface()
}
// when group by is applied, each combination of cartesian product
// of attributes is separate series. each item in metricPointsMap
// represent a unique series.
metricPointsMap := make(map[string][]basemodel.MetricPoint)
// attribute key-value pairs for each group selection
attributesMap := make(map[string]map[string]string)
defer rows.Close()
for rows.Next() {
if err := rows.Scan(vars...); err != nil {
return nil, "", err
}
var groupBy []string
var metricPoint basemodel.MetricPoint
groupAttributes := make(map[string]string)
// Assuming that the end result row contains a timestamp, value and optional labels
// Label key and value are both strings.
for idx, v := range vars {
colName := columnNames[idx]
switch v := v.(type) {
case *string:
// special case for returning all labels
if colName == "fullLabels" {
var metric map[string]string
err := json.Unmarshal([]byte(*v), &metric)
if err != nil {
return nil, "", err
}
for key, val := range metric {
groupBy = append(groupBy, val)
groupAttributes[key] = val
}
} else {
groupBy = append(groupBy, *v)
groupAttributes[colName] = *v
}
case *time.Time:
metricPoint.Timestamp = v.UnixMilli()
case *float64:
metricPoint.Value = *v
case **float64:
// ch seems to return this type when column is derived from
// SELECT count(*)/ SELECT count(*)
floatVal := *v
if floatVal != nil {
metricPoint.Value = *floatVal
}
case *float32:
float32Val := float32(*v)
metricPoint.Value = float64(float32Val)
case *uint8, *uint64, *uint16, *uint32:
if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Uint())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Uint())
}
case *int8, *int16, *int32, *int64:
if _, ok := baseconst.ReservedColumnTargetAliases[colName]; ok {
metricPoint.Value = float64(reflect.ValueOf(v).Elem().Int())
} else {
groupBy = append(groupBy, fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int()))
groupAttributes[colName] = fmt.Sprintf("%v", reflect.ValueOf(v).Elem().Int())
}
default:
zap.L().Error("invalid var found in metric builder query result", zap.Any("var", v), zap.String("colName", colName))
}
}
sort.Strings(groupBy)
key := strings.Join(groupBy, "")
attributesMap[key] = groupAttributes
metricPointsMap[key] = append(metricPointsMap[key], metricPoint)
}
var seriesList []*basemodel.Series
for key := range metricPointsMap {
points := metricPointsMap[key]
// the first point in each series could be invalid since the
// aggregations are applied with a point from the previous series
if len(points) != 0 && len(points) > 1 {
points = points[1:]
}
attributes := attributesMap[key]
series := basemodel.Series{Labels: attributes, Points: points}
seriesList = append(seriesList, &series)
}
// err = r.conn.Exec(ctx, "DROP TEMPORARY TABLE IF EXISTS getSubTreeSpans"+hash)
// if err != nil {
// zap.L().Error("Error in dropping temporary table: ", err)
// return nil, err
// }
if hash == "" {
return seriesList, hash, nil
} else {
return seriesList, "getSubTreeSpans" + hash, nil
}
}
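The scan loop above copes with arbitrary SELECT shapes by allocating scan destinations from `ColumnTypes().ScanType()` via reflection, then keys each series on the sorted, concatenated label values so every group-by combination becomes its own series. A condensed, driver-agnostic sketch of both ideas over `database/sql` (the real reader uses the ClickHouse native client, whose column-type API is similar; the `ts`/value column names here are assumptions):

```go
package main

import (
	"database/sql"
	"fmt"
	"reflect"
	"sort"
	"strings"
)

type point struct {
	ts  int64
	val float64
}

// scanSeries groups rows into series keyed by their sorted label values,
// allocating scan targets dynamically from the column types.
func scanSeries(rows *sql.Rows) (map[string][]point, error) {
	colTypes, err := rows.ColumnTypes()
	if err != nil {
		return nil, err
	}
	cols, err := rows.Columns()
	if err != nil {
		return nil, err
	}
	vars := make([]interface{}, len(colTypes))
	for i := range colTypes {
		// Some drivers may report looser ScanTypes; this assumes concrete ones.
		vars[i] = reflect.New(colTypes[i].ScanType()).Interface()
	}
	series := make(map[string][]point)
	for rows.Next() {
		if err := rows.Scan(vars...); err != nil {
			return nil, err
		}
		var labels []string
		var p point
		for i, v := range vars {
			switch v := v.(type) { // simplified: only the types this sketch needs
			case *string:
				labels = append(labels, *v)
			case *int64:
				if cols[i] == "ts" {
					p.ts = *v
				}
			case *float64:
				p.val = *v
			}
		}
		sort.Strings(labels) // stable key regardless of column order
		key := strings.Join(labels, ",")
		series[key] = append(series[key], p)
	}
	return series, rows.Err()
}

func main() {
	// Needs a live *sql.DB to exercise; shown only so the sketch is complete.
	fmt.Println("pass scanSeries the *sql.Rows of any SELECT with label, ts, value columns")
}
```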
func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, query string, hash string) (string, string, error) {
zap.L().Debug("Executing getSubTreeSpans function")
// str1 := `select fromUnixTimestamp64Milli(intDiv( toUnixTimestamp64Milli ( timestamp ), 100) * 100) AS interval, toFloat64(count()) as count from (select timestamp, spanId, parentSpanId, durationNano from getSubTreeSpans(select * from signoz_traces.signoz_index_v2 where serviceName='frontend' and name='/driver.DriverService/FindNearest' and traceID='00000000000000004b0a863cb5ed7681') where name='FindDriverIDs' group by interval order by interval asc;`
// process the query to fetch subTree query
var subtreeInput string
query, subtreeInput, hash = processQuery(query, hash)
err := r.conn.Exec(ctx, "DROP TABLE IF EXISTS getSubTreeSpans"+hash)
if err != nil {
zap.L().Error("Error in dropping temporary table", zap.Error(err))
return query, hash, err
}
// Create temporary table to store the getSubTreeSpans() results
zap.L().Debug("Creating temporary table getSubTreeSpans", zap.String("hash", hash))
err = r.conn.Exec(ctx, "CREATE TABLE IF NOT EXISTS "+"getSubTreeSpans"+hash+" (timestamp DateTime64(9) CODEC(DoubleDelta, LZ4), traceID FixedString(32) CODEC(ZSTD(1)), spanID String CODEC(ZSTD(1)), parentSpanID String CODEC(ZSTD(1)), rootSpanID String CODEC(ZSTD(1)), serviceName LowCardinality(String) CODEC(ZSTD(1)), name LowCardinality(String) CODEC(ZSTD(1)), rootName LowCardinality(String) CODEC(ZSTD(1)), durationNano UInt64 CODEC(T64, ZSTD(1)), kind Int8 CODEC(T64, ZSTD(1)), tagMap Map(LowCardinality(String), String) CODEC(ZSTD(1)), events Array(String) CODEC(ZSTD(2))) ENGINE = MergeTree() ORDER BY (timestamp)")
if err != nil {
zap.L().Error("Error in creating temporary table", zap.Error(err))
return query, hash, err
}
var getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse
getSpansSubQuery := subtreeInput
// Execute the subTree query
zap.L().Debug("Executing subTree query", zap.String("query", getSpansSubQuery))
err = r.conn.Select(ctx, &getSpansSubQueryDBResponses, getSpansSubQuery)
// zap.L().Info(getSpansSubQuery)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("error in processing sql query")
}
var searchScanResponses []basemodel.SearchSpanDBResponseItem
// TODO : @ankit: I think the algorithm does not need to assume that subtrees are from the same TraceID. We can take this as an improvement later.
// Fetch all the spans of the same TraceID so that we can build the subtree
modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
if len(getSpansSubQueryDBResponses) == 0 {
return query, hash, fmt.Errorf("no spans found for the given query")
}
zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("error in processing sql query")
}
// Process model to fetch the spans
zap.L().Debug("Processing model to fetch the spans")
searchSpanResponses := []basemodel.SearchSpanResponseItem{}
for _, item := range searchScanResponses {
var jsonItem basemodel.SearchSpanResponseItem
json.Unmarshal([]byte(item.Model), &jsonItem)
jsonItem.TimeUnixNano = uint64(item.Timestamp.UnixNano())
if jsonItem.Events == nil {
jsonItem.Events = []string{}
}
searchSpanResponses = append(searchSpanResponses, jsonItem)
}
// Build the subtree and store all the subtree spans in temporary table getSubTreeSpans+hash
// Use map to store pointer to the spans to avoid duplicates and save memory
zap.L().Debug("Building the subtree to store all the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
treeSearchResponse, err := getSubTreeAlgorithm(searchSpanResponses, getSpansSubQueryDBResponses)
if err != nil {
zap.L().Error("Error in getSubTreeAlgorithm function", zap.Error(err))
return query, hash, err
}
zap.L().Debug("Preparing batch to store subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
statement, err := r.conn.PrepareBatch(context.Background(), fmt.Sprintf("INSERT INTO getSubTreeSpans"+hash))
if err != nil {
zap.L().Error("Error in preparing batch statement", zap.Error(err))
return query, hash, err
}
for _, span := range treeSearchResponse {
var parentID string
if len(span.References) > 0 && span.References[0].RefType == "CHILD_OF" {
parentID = span.References[0].SpanId
}
err = statement.Append(
time.Unix(0, int64(span.TimeUnixNano)),
span.TraceID,
span.SpanID,
parentID,
span.RootSpanID,
span.ServiceName,
span.Name,
span.RootName,
uint64(span.DurationNano),
int8(span.Kind),
span.TagMap,
span.Events,
)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, err
}
}
zap.L().Debug("Inserting the subtree spans in temporary table getSubTreeSpans", zap.String("hash", hash))
err = statement.Send()
if err != nil {
zap.L().Error("Error in sending statement", zap.Error(err))
return query, hash, err
}
return query, hash, nil
}
//lint:ignore SA4009 return hash is fed to the query
func processQuery(query string, hash string) (string, string, string) {
re3 := regexp.MustCompile(`getSubTreeSpans`)
submatchall3 := re3.FindAllStringIndex(query, -1)
getSubtreeSpansMatchIndex := submatchall3[0][1]
query2countParenthesis := query[getSubtreeSpansMatchIndex:]
sqlCompleteIndex := 0
countParenthesisImbalance := 0
for i, char := range query2countParenthesis {
if string(char) == "(" {
countParenthesisImbalance += 1
}
if string(char) == ")" {
countParenthesisImbalance -= 1
}
if countParenthesisImbalance == 0 {
sqlCompleteIndex = i
break
}
}
subtreeInput := query2countParenthesis[1:sqlCompleteIndex]
// hash the subtreeInput
hmd5 := md5.Sum([]byte(subtreeInput))
hash = fmt.Sprintf("%x", hmd5)
// Reformat the query to use the getSubTreeSpans function
query = query[:getSubtreeSpansMatchIndex] + hash + " " + query2countParenthesis[sqlCompleteIndex+1:]
return query, subtreeInput, hash
}
// getSubTreeAlgorithm is an algorithm to build the subtrees of the spans and return the list of spans
func getSubTreeAlgorithm(payload []basemodel.SearchSpanResponseItem, getSpansSubQueryDBResponses []model.GetSpansSubQueryDBResponse) (map[string]*basemodel.SearchSpanResponseItem, error) {
var spans []*model.SpanForTraceDetails
for _, spanItem := range payload {
var parentID string
if len(spanItem.References) > 0 && spanItem.References[0].RefType == "CHILD_OF" {
parentID = spanItem.References[0].SpanId
}
span := &model.SpanForTraceDetails{
TimeUnixNano: spanItem.TimeUnixNano,
SpanID: spanItem.SpanID,
TraceID: spanItem.TraceID,
ServiceName: spanItem.ServiceName,
Name: spanItem.Name,
Kind: spanItem.Kind,
DurationNano: spanItem.DurationNano,
TagMap: spanItem.TagMap,
ParentID: parentID,
Events: spanItem.Events,
HasError: spanItem.HasError,
}
spans = append(spans, span)
}
zap.L().Debug("Building Tree")
roots, err := buildSpanTrees(&spans)
if err != nil {
return nil, err
}
searchSpansResult := make(map[string]*basemodel.SearchSpanResponseItem)
// Every span which was fetched from getSubTree Input SQL query is considered root
// For each root, get the subtree spans
for _, getSpansSubQueryDBResponse := range getSpansSubQueryDBResponses {
targetSpan := &model.SpanForTraceDetails{}
// zap.L().Debug("Building tree for span id: " + getSpansSubQueryDBResponse.SpanID + " " + strconv.Itoa(i+1) + " of " + strconv.Itoa(len(getSpansSubQueryDBResponses)))
// Search target span object in the tree
for _, root := range roots {
targetSpan, err = breadthFirstSearch(root, getSpansSubQueryDBResponse.SpanID)
if targetSpan != nil {
break
}
if err != nil {
zap.L().Error("Error during BreadthFirstSearch()", zap.Error(err))
return nil, err
}
}
if targetSpan == nil {
return nil, nil
}
// Build subtree for the target span
// Mark the target span as root by setting parent ID as empty string
targetSpan.ParentID = ""
preParents := []*model.SpanForTraceDetails{targetSpan}
children := []*model.SpanForTraceDetails{}
// Get the subtree child spans
for i := 0; len(preParents) != 0; i++ {
parents := []*model.SpanForTraceDetails{}
for _, parent := range preParents {
children = append(children, parent.Children...)
parents = append(parents, parent.Children...)
}
preParents = parents
}
resultSpans := children
// Add the target span to the result spans
resultSpans = append(resultSpans, targetSpan)
for _, item := range resultSpans {
references := []basemodel.OtelSpanRef{
{
TraceId: item.TraceID,
SpanId: item.ParentID,
RefType: "CHILD_OF",
},
}
if item.Events == nil {
item.Events = []string{}
}
searchSpansResult[item.SpanID] = &basemodel.SearchSpanResponseItem{
TimeUnixNano: item.TimeUnixNano,
SpanID: item.SpanID,
TraceID: item.TraceID,
ServiceName: item.ServiceName,
Name: item.Name,
Kind: item.Kind,
References: references,
DurationNano: item.DurationNano,
TagMap: item.TagMap,
Events: item.Events,
HasError: item.HasError,
RootSpanID: getSpansSubQueryDBResponse.SpanID,
RootName: targetSpan.Name,
}
}
}
return searchSpansResult, nil
}
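The subtree collection above is an iterative level-order walk: starting from the target span, the loop repeatedly replaces the parent frontier with its children until the frontier empties, accumulating every descendant. The same traversal in isolation:

```go
package main

import "fmt"

type span struct {
	id       string
	children []*span
}

// descendants returns every span below root, level by level,
// mirroring the preParents/children loop in getSubTreeAlgorithm.
func descendants(root *span) []*span {
	var out []*span
	frontier := []*span{root}
	for len(frontier) > 0 {
		var next []*span
		for _, p := range frontier {
			out = append(out, p.children...)
			next = append(next, p.children...)
		}
		frontier = next
	}
	return out
}

func main() {
	leaf := &span{id: "c"}
	mid := &span{id: "b", children: []*span{leaf}}
	root := &span{id: "a", children: []*span{mid}}
	for _, s := range descendants(root) {
		fmt.Println(s.id) // prints b, then c
	}
}
```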


@@ -25,9 +25,8 @@ func NewDataConnector(
maxOpenConns int,
dialTimeout time.Duration,
cluster string,
useLogsNewSchema bool,
) *ClickhouseReader {
ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster, useLogsNewSchema)
ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster)
return &ClickhouseReader{
conn: ch.GetConn(),
appdb: localDB,


@@ -1,15 +1,14 @@
package app
import (
"bufio"
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net"
"net/http"
"net/http/httputil"
_ "net/http/pprof" // http profiler
"os"
"regexp"
@@ -28,10 +27,7 @@ import (
"go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/integrations/gateway"
"go.signoz.io/signoz/ee/query-service/interfaces"
"go.signoz.io/signoz/ee/query-service/rules"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/migrate"
"go.signoz.io/signoz/pkg/query-service/model"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
licensepkg "go.signoz.io/signoz/ee/query-service/license"
@@ -45,7 +41,6 @@ import (
"go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
"go.signoz.io/signoz/pkg/query-service/app/opamp"
opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
"go.signoz.io/signoz/pkg/query-service/app/preferences"
"go.signoz.io/signoz/pkg/query-service/cache"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/healthcheck"
@@ -53,7 +48,7 @@ import (
baseint "go.signoz.io/signoz/pkg/query-service/interfaces"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
pqle "go.signoz.io/signoz/pkg/query-service/pqlEngine"
baserules "go.signoz.io/signoz/pkg/query-service/rules"
rules "go.signoz.io/signoz/pkg/query-service/rules"
"go.signoz.io/signoz/pkg/query-service/telemetry"
"go.signoz.io/signoz/pkg/query-service/utils"
"go.uber.org/zap"
@@ -69,6 +64,7 @@ type ServerOptions struct {
// alert specific params
DisableRules bool
RuleRepoURL string
PreferDelta bool
PreferSpanMetrics bool
MaxIdleConns int
MaxOpenConns int
@@ -77,13 +73,12 @@ type ServerOptions struct {
FluxInterval string
Cluster string
GatewayUrl string
UseLogsNewSchema bool
}
// Server runs HTTP api service
type Server struct {
serverOptions *ServerOptions
ruleManager *baserules.Manager
ruleManager *rules.Manager
// public http router
httpConn net.Listener
@@ -116,10 +111,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
baseexplorer.InitWithDSN(baseconst.RELATIONAL_DATASOURCE_PATH)
if err := preferences.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil {
return nil, err
}
localDB, err := dashboards.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH)
if err != nil {
@@ -128,13 +119,33 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
localDB.SetMaxOpenConns(10)
gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
gatewayFeature := basemodel.Feature{
Name: "GATEWAY",
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
}
//Activate this feature if the url is not empty
var gatewayProxy *httputil.ReverseProxy
if serverOptions.GatewayUrl == "" {
gatewayFeature.Active = false
gatewayProxy, err = gateway.NewNoopProxy()
if err != nil {
return nil, err
}
} else {
zap.L().Info("Enabling gateway feature flag ...")
gatewayFeature.Active = true
gatewayProxy, err = gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
}
}
// initiate license manager
lm, err := licensepkg.StartManager("sqlite", localDB)
lm, err := licensepkg.StartManager("sqlite", localDB, gatewayFeature)
if err != nil {
return nil, err
}
@@ -155,7 +166,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
serverOptions.MaxOpenConns,
serverOptions.DialTimeout,
serverOptions.Cluster,
serverOptions.UseLogsNewSchema,
)
go qb.Start(readerReady)
reader = qb
@@ -178,21 +188,12 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
localDB,
reader,
serverOptions.DisableRules,
lm,
serverOptions.UseLogsNewSchema,
)
lm)
if err != nil {
return nil, err
}
go func() {
err = migrate.ClickHouseMigrate(reader.GetConn(), serverOptions.Cluster)
if err != nil {
zap.L().Error("error while running clickhouse migrations", zap.Error(err))
}
}()
// initiate opamp
_, err = opAmpModel.InitDB(localDB)
if err != nil {
@@ -255,6 +256,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
apiOpts := api.APIHandlerOptions{
DataConnector: reader,
SkipConfig: skipConfig,
PreferDelta: serverOptions.PreferDelta,
PreferSpanMetrics: serverOptions.PreferSpanMetrics,
MaxIdleConns: serverOptions.MaxIdleConns,
MaxOpenConns: serverOptions.MaxOpenConns,
@@ -269,7 +271,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
Cache: c,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
UseLogsNewSchema: serverOptions.UseLogsNewSchema,
}
apiHandler, err := api.NewAPIHandler(apiOpts)
@@ -324,7 +325,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
// ip here for alert manager
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY"},
})
handler := c.Handler(r)
@@ -341,17 +342,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
// add auth middleware
getUserFromRequest := func(r *http.Request) (*basemodel.UserPayload, error) {
user, err := auth.GetUserFromRequest(r, apiHandler)
if err != nil {
return nil, err
}
if user.User.OrgId == "" {
return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims"))
}
return user, nil
return auth.GetUserFromRequest(r, apiHandler)
}
am := baseapp.NewAuthMiddleware(getUserFromRequest)
@@ -365,13 +356,11 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
apiHandler.RegisterQueryRangeV4Routes(r, am)
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
c := cors.New(cors.Options{
AllowedOrigins: []string{"*"},
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"},
AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "cache-control"},
})
handler := c.Handler(r)
@@ -383,7 +372,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
}, nil
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// loggingMiddleware is used for logging public api calls
func loggingMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -395,7 +383,6 @@ func loggingMiddleware(next http.Handler) http.Handler {
})
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// loggingMiddlewarePrivate is used for logging private api calls
// from internal services like alert manager
func loggingMiddlewarePrivate(next http.Handler) http.Handler {
@@ -408,41 +395,27 @@ func loggingMiddlewarePrivate(next http.Handler) http.Handler {
})
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
type loggingResponseWriter struct {
http.ResponseWriter
statusCode int
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
func NewLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter {
// WriteHeader(int) is not called if our response implicitly returns 200 OK, so
// we default to that status code.
return &loggingResponseWriter{w, http.StatusOK}
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
func (lrw *loggingResponseWriter) WriteHeader(code int) {
lrw.statusCode = code
lrw.ResponseWriter.WriteHeader(code)
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// Flush implements the http.Flush interface.
func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}
// TODO(remove): Implemented at pkg/http/middleware/logging.go
// Support websockets
func (lrw *loggingResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
h, ok := lrw.ResponseWriter.(http.Hijacker)
if !ok {
return nil, nil, errors.New("hijack not supported")
}
return h.Hijack()
}
func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFromV3 := "/api/v3/query_range"
pathToExtractBodyFromV4 := "/api/v4/query_range"
@@ -579,7 +552,6 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
})
}
// TODO(remove): Implemented at pkg/http/middleware/timeout.go
func setTimeoutMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
@@ -733,8 +705,7 @@ func makeRulesManager(
db *sqlx.DB,
ch baseint.Reader,
disableRules bool,
fm baseint.FeatureLookup,
useLogsNewSchema bool) (*baserules.Manager, error) {
fm baseint.FeatureLookup) (*rules.Manager, error) {
// create engine
pqle, err := pqle.FromConfigPath(promConfigPath)
@@ -750,9 +721,12 @@ func makeRulesManager(
}
// create manager opts
managerOpts := &baserules.ManagerOptions{
managerOpts := &rules.ManagerOptions{
NotifierOpts: notifierOpts,
PqlEngine: pqle,
Queriers: &rules.Queriers{
PqlEngine: pqle,
Ch: ch.GetConn(),
},
RepoURL: ruleRepoURL,
DBConn: db,
Context: context.Background(),
@@ -760,14 +734,10 @@ func makeRulesManager(
DisableRules: disableRules,
FeatureFlags: fm,
Reader: ch,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,
UseLogsNewSchema: useLogsNewSchema,
}
// create Manager
manager, err := baserules.NewManager(managerOpts)
manager, err := rules.NewManager(managerOpts)
if err != nil {
return nil, fmt.Errorf("rule manager error: %v", err)
}

View File

@@ -11,8 +11,8 @@ const (
var LicenseSignozIo = "https://license.signoz.io/api/v1"
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
var SpanRenderLimitStr = GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500")
var MaxSpansInTraceStr = GetOrDefaultEnv("MAX_SPANS_IN_TRACE", "250000")
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)

View File

@@ -20,14 +20,11 @@ import (
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*basemodel.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, model.InternalErrorStr("failed to get domain from email")
}
if domain == nil {
zap.L().Error("email domain does not match any authenticated domain", zap.String("email", email))
return nil, model.InternalErrorStr("email domain does not match any authenticated domain")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {

View File

@@ -5,5 +5,5 @@ import (
)
func NewNoopProxy() (*httputil.ReverseProxy, error) {
return &httputil.ReverseProxy{}, nil
return nil, nil
}

View File

@@ -147,7 +147,7 @@ func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, a
for _, l := range licenses {
l.ParsePlan()
if lm.activeLicense != nil && l.Key == lm.activeLicense.Key {
if l.Key == lm.activeLicense.Key {
l.IsCurrent = true
}

View File

@@ -87,9 +87,9 @@ func main() {
var ruleRepoURL string
var cluster string
var useLogsNewSchema bool
var cacheConfigPath, fluxInterval string
var enableQueryServiceLogOTLPExport bool
var preferDelta bool
var preferSpanMetrics bool
var maxIdleConns int
@@ -97,17 +97,17 @@ func main() {
var dialTimeout time.Duration
var gatewayUrl string
flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs")
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)")
flag.BoolVar(&preferDelta, "prefer-delta", false, "(prefer delta over cumulative metrics)")
flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)")
flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)")
flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)")
flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection.)")
flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)")
flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)")
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
@@ -125,6 +125,7 @@ func main() {
HTTPHostPort: baseconst.HTTPHostPort,
PromConfigPath: promConfigPath,
SkipTopLvlOpsPath: skipTopLvlOpsPath,
PreferDelta: preferDelta,
PreferSpanMetrics: preferSpanMetrics,
PrivateHostPort: baseconst.PrivateHostPort,
DisableRules: disableRules,
@@ -136,7 +137,6 @@ func main() {
FluxInterval: fluxInterval,
Cluster: cluster,
GatewayUrl: gatewayUrl,
UseLogsNewSchema: useLogsNewSchema,
}
// Read the jwt secret key

View File

@@ -11,9 +11,6 @@ const Enterprise = "ENTERPRISE_PLAN"
const DisableUpsell = "DISABLE_UPSELL"
const Onboarding = "ONBOARDING"
const ChatSupport = "CHAT_SUPPORT"
const Gateway = "GATEWAY"
const PremiumSupport = "PREMIUM_SUPPORT"
const QueryBuilderSearchV2 = "QUERY_BUILDER_SEARCH_V2"
var BasicPlan = basemodel.FeatureSet{
basemodel.Feature{
@@ -114,27 +111,6 @@ var BasicPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: QueryBuilderSearchV2,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var ProPlan = basemodel.FeatureSet{
@@ -229,27 +205,6 @@ var ProPlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: QueryBuilderSearchV2,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}
var EnterprisePlan = basemodel.FeatureSet{
@@ -358,25 +313,4 @@ var EnterprisePlan = basemodel.FeatureSet{
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: Gateway,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: PremiumSupport,
Active: true,
Usage: 0,
UsageLimit: -1,
Route: "",
},
basemodel.Feature{
Name: QueryBuilderSearchV2,
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
},
}

View File

@@ -1,70 +0,0 @@
package rules
import (
"fmt"
"time"
baserules "go.signoz.io/signoz/pkg/query-service/rules"
)
func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
rules := make([]baserules.Rule, 0)
var task baserules.Task
ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
if opts.Rule.RuleType == baserules.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,
opts.Rule,
opts.FF,
opts.Reader,
opts.UseLogsNewSchema,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
)
if err != nil {
return task, err
}
rules = append(rules, tr)
// create ch rule task for evalution
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
} else if opts.Rule.RuleType == baserules.RuleTypeProm {
// create promql rule
pr, err := baserules.NewPromRule(
ruleId,
opts.Rule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.PqlEngine,
)
if err != nil {
return task, err
}
rules = append(rules, pr)
// create promql rule task for evalution
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
} else {
return nil, fmt.Errorf("unsupported rule type. Supported types: %s, %s", baserules.RuleTypeProm, baserules.RuleTypeThreshold)
}
return task, nil
}
// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
}
return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
}

View File

@@ -9,7 +9,6 @@ const config: Config.InitialOptions = {
modulePathIgnorePatterns: ['dist'],
moduleNameMapper: {
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
},
globals: {
extensionsToTreatAsEsm: ['.ts'],

View File

@@ -34,6 +34,8 @@
"@dnd-kit/core": "6.1.0",
"@dnd-kit/modifiers": "7.0.0",
"@dnd-kit/sortable": "8.0.0",
"@dnd-kit/utilities": "3.2.2",
"@faker-js/faker": "8.4.1",
"@grafana/data": "^9.5.2",
"@mdx-js/loader": "2.3.0",
"@mdx-js/react": "2.3.0",
@@ -43,6 +45,7 @@
"@sentry/react": "7.102.1",
"@sentry/webpack-plugin": "2.16.0",
"@signozhq/design-tokens": "0.0.8",
"@tanstack/react-table": "8.17.3",
"@uiw/react-md-editor": "3.23.5",
"@visx/group": "3.3.0",
"@visx/shape": "3.5.0",
@@ -51,7 +54,7 @@
"ansi-to-html": "0.7.2",
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"axios": "1.7.4",
"axios": "1.6.4",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -88,7 +91,6 @@
"lucide-react": "0.379.0",
"mini-css-extract-plugin": "2.4.5",
"papaparse": "5.4.1",
"posthog-js": "1.160.3",
"rc-tween-one": "3.0.6",
"react": "18.2.0",
"react-addons-update": "15.6.3",
@@ -110,8 +112,6 @@
"react-syntax-highlighter": "15.5.0",
"react-use": "^17.3.2",
"react-virtuoso": "4.0.3",
"overlayscrollbars-react": "^0.5.6",
"overlayscrollbars": "^2.8.1",
"redux": "^4.0.5",
"redux-thunk": "^2.3.0",
"rehype-raw": "7.0.0",

View File

@@ -1 +0,0 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_4344_1236)" stroke="#C0C1C3" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="M4.667 1.167H2.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V2.333c0-.644-.522-1.166-1.166-1.166zM8.167 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M11.667 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M5.833 10.5H2.917c-.992 0-1.75-.758-1.75-1.75v-.583"/><path d="M4.083 12.25l1.75-1.75-1.75-1.75M11.667 8.167H9.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V9.333c0-.644-.522-1.166-1.166-1.166z"/></g><defs><clipPath id="prefix__clip0_4344_1236"><path fill="#fff" d="M0 0h14v14H0z"/></clipPath></defs></svg>

View File

@@ -1 +0,0 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><g clip-path="url(#prefix__clip0_4062_7291)" stroke-width="1.167" stroke-linecap="round" stroke-linejoin="round"><path d="M7 12.833A5.833 5.833 0 107 1.167a5.833 5.833 0 000 11.666z" fill="#E5484D" stroke="#E5484D"/><path d="M8.75 5.25l-3.5 3.5M5.25 5.25l3.5 3.5" stroke="#121317"/></g><defs><clipPath id="prefix__clip0_4062_7291"><path fill="#fff" d="M0 0h14v14H0z"/></clipPath></defs></svg>

View File

@@ -1,30 +0,0 @@
{
"breadcrumb": "Messaging Queues",
"header": "Kafka / Overview",
"overview": {
"title": "Start sending data in as little as 20 minutes",
"subtitle": "Connect and Monitor Your Data Streams"
},
"configureConsumer": {
"title": "Configure Consumer",
"description": "Add consumer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"configureProducer": {
"title": "Configure Producer",
"description": "Add producer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"monitorKafka": {
"title": "Monitor kafka",
"description": "Add your Kafka source to gain insights and enhance activity tracking.",
"button": "Get Started"
},
"summarySection": {
"viewDetailsButton": "View Details"
},
"confirmModal": {
"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
"okText": "Proceed"
}
}

View File

@@ -1,8 +0,0 @@
{
"invite_user": "Invite your teammates",
"invite": "Invite",
"skip": "Skip",
"invite_user_helper_text": "Not the right person to get started? No worries! Invite someone who can.",
"select_use_case": "Select a use-case to get started",
"get_started": "Get Started"
}

View File

@@ -38,7 +38,5 @@
"LIST_LICENSES": "SigNoz | List of Licenses",
"WORKSPACE_LOCKED": "SigNoz | Workspace Locked",
"SUPPORT": "SigNoz | Support",
"DEFAULT": "Open source Observability Platform | SigNoz",
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview"
"DEFAULT": "Open source Observability Platform | SigNoz"
}

View File

@@ -1,22 +0,0 @@
{
"trialPlanExpired": "Trial Plan Expired",
"gotQuestions": "Got Questions?",
"contactUs": "Contact Us",
"upgradeToContinue": "Upgrade to Continue",
"upgradeNow": "Upgrade now to keep enjoying all the great features youve been using.",
"yourDataIsSafe": "Your data is safe with us until",
"actNow": "Act now to avoid any disruptions and continue where you left off.",
"contactAdmin": "Contact your admin to proceed with the upgrade.",
"continueMyJourney": "Continue My Journey",
"needMoreTime": "Need More Time?",
"extendTrial": "Extend Trial",
"extendTrialMsgPart1": "If you have a specific reason why you were not able to finish your PoC in the trial period, please write to us on",
"extendTrialMsgPart2": "with the reason. Sometimes we can extend trial by a few days on a case by case basis",
"whyChooseSignoz": "Why choose Signoz",
"enterpriseGradeObservability": "Enterprise-grade Observability",
"observabilityDescription": "Get access to observability at any scale with advanced security and compliance.",
"continueToUpgrade": "Continue to Upgrade",
"youAreInGoodCompany": "You are in good company",
"faqs": "FAQs",
"somethingWentWrong": "Something went wrong"
}

View File

@@ -6,6 +6,5 @@
"share": "Share",
"save": "Save",
"edit": "Edit",
"logged_in": "Logged In",
"pending_data_placeholder": "Just a bit of patience, just a little bits enough ⎯ were getting your {{dataSource}}!"
"logged_in": "Logged In"
}

View File

@@ -1,7 +1,6 @@
{
"create_dashboard": "Create Dashboard",
"import_json": "Import Dashboard JSON",
"view_template": "View templates",
"import_grafana_json": "Import Grafana JSON",
"copy_to_clipboard": "Copy To ClipBoard",
"download_json": "Download JSON",

View File

@@ -1,30 +0,0 @@
{
"breadcrumb": "Messaging Queues",
"header": "Kafka / Overview",
"overview": {
"title": "Start sending data in as little as 20 minutes",
"subtitle": "Connect and Monitor Your Data Streams"
},
"configureConsumer": {
"title": "Configure Consumer",
"description": "Add consumer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"configureProducer": {
"title": "Configure Producer",
"description": "Add producer data sources to gain insights and enhance monitoring.",
"button": "Get Started"
},
"monitorKafka": {
"title": "Monitor kafka",
"description": "Add your Kafka source to gain insights and enhance activity tracking.",
"button": "Get Started"
},
"summarySection": {
"viewDetailsButton": "View Details"
},
"confirmModal": {
"content": "Before navigating to the details page, please make sure you have configured all the required setup to ensure correct data monitoring.",
"okText": "Proceed"
}
}

View File

@@ -1,8 +0,0 @@
{
"invite_user": "Invite your teammates",
"invite": "Invite",
"skip": "Skip",
"invite_user_helper_text": "Not the right person to get started? No worries! Invite someone who can.",
"select_use_case": "Select a use-case to get started",
"get_started": "Get Started"
}

View File

@@ -49,8 +49,5 @@
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
"DEFAULT": "Open source Observability Platform | SigNoz",
"SHORTCUTS": "SigNoz | Shortcuts",
"INTEGRATIONS": "SigNoz | Integrations",
"ALERT_HISTORY": "SigNoz | Alert Rule History",
"ALERT_OVERVIEW": "SigNoz | Alert Rule Overview",
"MESSAGING_QUEUES": "SigNoz | Messaging Queues"
"INTEGRATIONS": "SigNoz | Integrations"
}

View File

@@ -1,22 +0,0 @@
{
"trialPlanExpired": "Trial Plan Expired",
"gotQuestions": "Got Questions?",
"contactUs": "Contact Us",
"upgradeToContinue": "Upgrade to Continue",
"upgradeNow": "Upgrade now to keep enjoying all the great features youve been using.",
"yourDataIsSafe": "Your data is safe with us until",
"actNow": "Act now to avoid any disruptions and continue where you left off.",
"contactAdmin": "Contact your admin to proceed with the upgrade.",
"continueMyJourney": "Continue My Journey",
"needMoreTime": "Need More Time?",
"extendTrial": "Extend Trial",
"extendTrialMsgPart1": "If you have a specific reason why you were not able to finish your PoC in the trial period, please write to us on",
"extendTrialMsgPart2": "with the reason. Sometimes we can extend trial by a few days on a case by case basis",
"whyChooseSignoz": "Why choose Signoz",
"enterpriseGradeObservability": "Enterprise-grade Observability",
"observabilityDescription": "Get access to observability at any scale with advanced security and compliance.",
"continueToUpgrade": "Continue to Upgrade",
"youAreInGoodCompany": "You are in good company",
"faqs": "FAQs",
"somethingWentWrong": "Something went wrong"
}

View File

@@ -76,8 +76,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
isUserFetching: false,
},
});
if (!isLoggedIn) {
history.push(ROUTES.LOGIN, { from: pathname });
history.push(ROUTES.LOGIN);
}
};

View File

@@ -1,7 +1,6 @@
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';
import NotFound from 'components/NotFound';
import Spinner from 'components/Spinner';
import { FeatureKeys } from 'constants/features';
@@ -18,8 +17,6 @@ import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import { identity, pick, pickBy } from 'lodash-es';
import posthog from 'posthog-js';
import AlertRuleProvider from 'providers/Alert';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useEffect, useState } from 'react';
@@ -41,7 +38,7 @@ import defaultRoutes, {
function App(): JSX.Element {
const themeConfig = useThemeConfig();
const { data: licenseData } = useLicense();
const { data } = useLicense();
const [routes, setRoutes] = useState<AppRoutes[]>(defaultRoutes);
const { role, isLoggedIn: isLoggedInState, user, org } = useSelector<
AppState,
@@ -50,7 +47,7 @@ function App(): JSX.Element {
const dispatch = useDispatch<Dispatch<AppActions>>();
const { trackPageView } = useAnalytics();
const { trackPageView, trackEvent } = useAnalytics();
const { hostname, pathname } = window.location;
@@ -67,14 +64,6 @@ function App(): JSX.Element {
allFlags.find((flag) => flag.name === FeatureKeys.CHAT_SUPPORT)?.active ||
false;
const isPremiumSupportEnabled =
allFlags.find((flag) => flag.name === FeatureKeys.PREMIUM_SUPPORT)?.active ||
false;
const showAddCreditCardModal =
!isPremiumSupportEnabled &&
!licenseData?.payload?.trialConvertedToSubscription;
dispatch({
type: UPDATE_FEATURE_FLAG_RESPONSE,
payload: {
@@ -91,7 +80,7 @@ function App(): JSX.Element {
setRoutes(newRoutes);
}
if (isLoggedInState && isChatSupportEnabled && !showAddCreditCardModal) {
if (isLoggedInState && isChatSupportEnabled) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.Intercom('boot', {
@@ -103,10 +92,10 @@ function App(): JSX.Element {
});
const isOnBasicPlan =
licenseData?.payload?.licenses?.some(
data?.payload?.licenses?.some(
(license) =>
license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN,
) || licenseData?.payload?.licenses === null;
) || data?.payload?.licenses === null;
const enableAnalytics = (user: User): void => {
const orgName =
@@ -123,7 +112,9 @@ function App(): JSX.Element {
};
const sanitizedIdentifyPayload = pickBy(identifyPayload, identity);
const domain = extractDomain(email);
const hostNameParts = hostname.split('.');
const groupTraits = {
@@ -136,29 +127,10 @@ function App(): JSX.Element {
};
window.analytics.identify(email, sanitizedIdentifyPayload);
window.analytics.group(domain, groupTraits);
posthog?.identify(email, {
email,
name,
orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
});
posthog?.group('company', domain, {
name: orgName,
tenant_id: hostNameParts[0],
data_region: hostNameParts[1],
tenant_url: hostname,
company_domain: domain,
source: 'signoz-ui',
isPaidUser: !!licenseData?.payload?.trialConvertedToSubscription,
});
window.clarity('identify', email, name);
};
useEffect(() => {
@@ -172,6 +144,10 @@ function App(): JSX.Element {
!isIdentifiedUser
) {
setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true');
if (isCloudUserVal) {
enableAnalytics(user);
}
}
if (
@@ -208,7 +184,7 @@ function App(): JSX.Element {
LOCALSTORAGE.THEME_ANALYTICS_V1,
);
if (!isThemeAnalyticsSent) {
logEvent('Theme Analytics', {
trackEvent('Theme Analytics', {
theme: isDarkMode ? THEME_MODE.DARK : THEME_MODE.LIGHT,
user: pick(user, ['email', 'userId', 'name']),
org,
@@ -219,11 +195,6 @@ function App(): JSX.Element {
console.error('Failed to parse local storage theme analytics event');
}
}
if (isCloudUserVal && user && user.email) {
enableAnalytics(user);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [user]);
@@ -236,24 +207,22 @@ function App(): JSX.Element {
<QueryBuilderProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AlertRuleProvider>
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<AppLayout>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</AlertRuleProvider>
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</AppLayout>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>

View File

@@ -92,14 +92,6 @@ export const CreateNewAlerts = Loadable(
() => import(/* webpackChunkName: "Create Alerts" */ 'pages/CreateAlert'),
);
export const AlertHistory = Loadable(
() => import(/* webpackChunkName: "Alert History" */ 'pages/AlertList'),
);
export const AlertOverview = Loadable(
() => import(/* webpackChunkName: "Alert Overview" */ 'pages/AlertList'),
);
export const CreateAlertChannelAlerts = Loadable(
() =>
import(/* webpackChunkName: "Create Channels" */ 'pages/AlertChannelCreate'),
@@ -212,15 +204,3 @@ export const InstalledIntegrations = Loadable(
/* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage'
),
);
export const MessagingQueues = Loadable(
() =>
import(/* webpackChunkName: "MessagingQueues" */ 'pages/MessagingQueues'),
);
export const MQDetailPage = Loadable(
() =>
import(
/* webpackChunkName: "MQDetailPage" */ 'pages/MessagingQueues/MQDetailPage'
),
);

View File

@@ -2,8 +2,6 @@ import ROUTES from 'constants/routes';
import { RouteProps } from 'react-router-dom';
import {
AlertHistory,
AlertOverview,
AllAlertChannels,
AllErrors,
APIKeys,
@@ -25,8 +23,6 @@ import {
LogsExplorer,
LogsIndexToFields,
LogsSaveViews,
MessagingQueues,
MQDetailPage,
MySettings,
NewDashboardPage,
OldLogsExplorer,
@@ -173,20 +169,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'ALERTS_NEW',
},
{
path: ROUTES.ALERT_HISTORY,
exact: true,
component: AlertHistory,
isPrivate: true,
key: 'ALERT_HISTORY',
},
{
path: ROUTES.ALERT_OVERVIEW,
exact: true,
component: AlertOverview,
isPrivate: true,
key: 'ALERT_OVERVIEW',
},
{
path: ROUTES.TRACE,
exact: true,
@@ -369,20 +351,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'INTEGRATIONS',
},
{
path: ROUTES.MESSAGING_QUEUES,
exact: true,
component: MessagingQueues,
key: 'MESSAGING_QUEUES',
isPrivate: true,
},
{
path: ROUTES.MESSAGING_QUEUES_DETAIL,
exact: true,
component: MQDetailPage,
key: 'MESSAGING_QUEUES_DETAIL',
isPrivate: true,
},
];
export const SUPPORT_ROUTE: AppRoutes = {

View File

@@ -9,9 +9,9 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
// making the error status code as standard Error Status Code
const statusCode = response.status as ErrorStatusCode;
const { data } = response as AxiosResponse;
if (statusCode >= 400 && statusCode < 500) {
const { data } = response as AxiosResponse;
if (statusCode === 404) {
return {
statusCode,
@@ -34,11 +34,12 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
body: JSON.stringify((response.data as any).data),
};
}
return {
statusCode,
payload: null,
error: 'Something went wrong',
message: data?.error,
message: null,
};
}
if (request) {

View File

@@ -1,20 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/create';
const create = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.post('/rules', {
...props.data,
});
try {
const response = await axios.post('/rules', {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default create;

View File

@@ -1,18 +1,24 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/delete';
const deleteAlerts = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.delete(`/rules/${props.id}`);
try {
const response = await axios.delete(`/rules/${props.id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data.rules,
};
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data.rules,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteAlerts;

View File

@@ -1,16 +1,24 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/get';
const get = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.get(`/rules/${props.id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
try {
const response = await axios.get(`/rules/${props.id}`);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default get;

View File

@@ -1,20 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/patch';
const patch = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.patch(`/rules/${props.id}`, {
...props.data,
});
try {
const response = await axios.patch(`/rules/${props.id}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default patch;

View File

@@ -1,20 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/save';
const put = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.put(`/rules/${props.id}`, {
...props.data,
});
try {
const response = await axios.put(`/rules/${props.id}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default put;

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleStatsPayload } from 'types/api/alerts/def';
import { RuleStatsProps } from 'types/api/alerts/ruleStats';
const ruleStats = async (
props: RuleStatsProps,
): Promise<SuccessResponse<AlertRuleStatsPayload> | ErrorResponse> => {
try {
const response = await axios.post(`/rules/${props.id}/history/stats`, {
start: props.start,
end: props.end,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default ruleStats;

View File

@@ -1,33 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleTimelineGraphResponsePayload } from 'types/api/alerts/def';
import { GetTimelineGraphRequestProps } from 'types/api/alerts/timelineGraph';
const timelineGraph = async (
props: GetTimelineGraphRequestProps,
): Promise<
SuccessResponse<AlertRuleTimelineGraphResponsePayload> | ErrorResponse
> => {
try {
const response = await axios.post(
`/rules/${props.id}/history/overall_status`,
{
start: props.start,
end: props.end,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default timelineGraph;

View File

@@ -1,36 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleTimelineTableResponsePayload } from 'types/api/alerts/def';
import { GetTimelineTableRequestProps } from 'types/api/alerts/timelineTable';
const timelineTable = async (
props: GetTimelineTableRequestProps,
): Promise<
SuccessResponse<AlertRuleTimelineTableResponsePayload> | ErrorResponse
> => {
try {
const response = await axios.post(`/rules/${props.id}/history/timeline`, {
start: props.start,
end: props.end,
offset: props.offset,
limit: props.limit,
order: props.order,
state: props.state,
// TODO(shaheer): implement filters
filters: props.filters,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default timelineTable;

View File

@@ -1,33 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AlertRuleTopContributorsPayload } from 'types/api/alerts/def';
import { TopContributorsProps } from 'types/api/alerts/topContributors';
const topContributors = async (
props: TopContributorsProps,
): Promise<
SuccessResponse<AlertRuleTopContributorsPayload> | ErrorResponse
> => {
try {
const response = await axios.post(
`/rules/${props.id}/history/top_contributors`,
{
start: props.start,
end: props.end,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default topContributors;

View File

@@ -3,7 +3,7 @@ const apiV1 = '/api/v1/';
export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const apiAlertManager = '/api/alertmanager/';
export const gatewayApiV1 = '/api/gateway/v1';
export const apiAlertManager = '/api/alertmanager';
export default apiV1;

View File

@@ -1,62 +0,0 @@
import getLocalStorageApi from 'api/browser/localstorage/get';
import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import { isEmpty } from 'lodash-es';
export interface WsDataEvent {
read_rows: number;
read_bytes: number;
elapsed_ms: number;
}
interface GetQueryStatsProps {
queryId: string;
setData: React.Dispatch<React.SetStateAction<WsDataEvent | undefined>>;
}
function getURL(baseURL: string, queryId: string): URL | string {
if (baseURL && !isEmpty(baseURL)) {
return `${baseURL}/ws/query_progress?q=${queryId}`;
}
const url = new URL(`/ws/query_progress?q=${queryId}`, window.location.href);
if (window.location.protocol === 'http:') {
url.protocol = 'ws';
} else {
url.protocol = 'wss';
}
return url;
}
export function getQueryStats(props: GetQueryStatsProps): void {
const { queryId, setData } = props;
const token = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN) || '';
// https://github.com/whatwg/websockets/issues/20 reason for not using the relative URLs
const url = getURL(ENVIRONMENT.wsURL, queryId);
const socket = new WebSocket(url, token);
socket.addEventListener('message', (event) => {
try {
const parsedData = JSON.parse(event?.data);
setData(parsedData);
} catch {
setData(event?.data);
}
});
socket.addEventListener('error', (event) => {
console.error(event);
});
socket.addEventListener('close', (event) => {
// 1000 is a normal closure status code
if (event.code !== 1000) {
console.error('WebSocket closed with error:', event);
} else {
console.error('WebSocket closed normally.');
}
});
}

View File

@@ -1,4 +1,4 @@
import { ApiBaseInstance as axios } from 'api';
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -21,7 +21,6 @@ const logEvent = async (
payload: response.data.data,
};
} catch (error) {
console.error(error);
return ErrorResponseHandler(error as AxiosError);
}
};

View File

@@ -1,8 +1,6 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import store from 'store';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
Props,
@@ -13,26 +11,7 @@ const dashboardVariablesQuery = async (
props: Props,
): Promise<SuccessResponse<VariableResponseProps> | ErrorResponse> => {
try {
const { globalTime } = store.getState();
const { start, end } = getStartEndRangeTime({
type: 'GLOBAL_TIME',
interval: globalTime.selectedTime,
});
const timeVariables: Record<string, number> = {
start_timestamp_ms: parseInt(start, 10) * 1e3,
end_timestamp_ms: parseInt(end, 10) * 1e3,
start_timestamp_nano: parseInt(start, 10) * 1e9,
end_timestamp_nano: parseInt(end, 10) * 1e9,
start_timestamp: parseInt(start, 10),
end_timestamp: parseInt(end, 10),
};
const payload = { ...props };
payload.variables = { ...payload.variables, ...timeVariables };
const response = await axios.post(`/variables/query`, payload);
const response = await axios.post(`/variables/query`, props);
return {
statusCode: 200,

View File

@@ -96,10 +96,6 @@ const interceptorRejected = async (
}
};
const interceptorRejectedBase = async (
value: AxiosResponse<any>,
): Promise<AxiosResponse<any>> => Promise.reject(value);
const instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});
@@ -144,18 +140,6 @@ ApiV4Instance.interceptors.response.use(
ApiV4Instance.interceptors.request.use(interceptorsRequestResponse);
//
// axios Base
export const ApiBaseInstance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});
ApiBaseInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejectedBase,
);
ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,

View File

@@ -12,13 +12,10 @@ export const getMetricsQueryRange = async (
props: QueryRangePayload,
version: string,
signal: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<MetricRangePayloadV3> | ErrorResponse> => {
try {
if (version && version === ENTITY_VERSION_V4) {
const response = await ApiV4Instance.post('/query_range', props, {
signal,
});
const response = await ApiV4Instance.post('/query_range', props, { signal });
return {
statusCode: 200,
@@ -29,10 +26,7 @@ export const getMetricsQueryRange = async (
};
}
const response = await ApiV3Instance.post('/query_range', props, {
signal,
headers,
});
const response = await ApiV3Instance.post('/query_range', props, { signal });
return {
statusCode: 200,

View File

@@ -1,20 +1,7 @@
import axios from 'api';
import { isNil } from 'lodash-es';
interface GetTopLevelOperationsProps {
service?: string;
start?: number;
end?: number;
}
const getTopLevelOperations = async (
props: GetTopLevelOperationsProps,
): Promise<ServiceDataProps> => {
const response = await axios.post(`/service/top_level_operations`, {
start: !isNil(props.start) ? `${props.start}` : undefined,
end: !isNil(props.end) ? `${props.end}` : undefined,
service: props.service,
});
const getTopLevelOperations = async (): Promise<ServiceDataProps> => {
const response = await axios.post(`/service/top_level_operations`);
return response.data;
};

View File

@@ -1,7 +1,6 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { Dayjs } from 'dayjs';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { Recurrence } from './getAllDowntimeSchedules';
@@ -12,8 +11,8 @@ export interface DowntimeSchedulePayload {
alertIds: string[];
schedule: {
timezone?: string;
startTime?: string | Dayjs;
endTime?: string | Dayjs;
startTime?: string;
endTime?: string;
recurrence?: Recurrence;
};
}

View File

@@ -1,6 +1,6 @@
import axios from 'api';
import { AxiosError, AxiosResponse } from 'axios';
import { Option } from 'container/PlannedDowntime/PlannedDowntimeutils';
import { Option } from 'container/PlannedDowntime/DropdownWithSubMenu/DropdownWithSubMenu';
import { useQuery, UseQueryResult } from 'react-query';
export type Recurrence = {
@@ -28,7 +28,6 @@ export interface DowntimeSchedules {
createdBy: string | null;
updatedAt: string | null;
updatedBy: string | null;
kind: string | null;
}
export type PayloadProps = { data: DowntimeSchedules[] };

View File

@@ -1,63 +0,0 @@
import { ApiV3Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError, AxiosResponse } from 'axios';
import { baseAutoCompleteIdKeysOrder } from 'constants/queryBuilder';
import { encode } from 'js-base64';
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
import createQueryParams from 'lib/createQueryParams';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IGetAttributeSuggestionsPayload,
IGetAttributeSuggestionsSuccessResponse,
} from 'types/api/queryBuilder/getAttributeSuggestions';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
export const getAttributeSuggestions = async ({
searchText,
dataSource,
filters,
}: IGetAttributeSuggestionsPayload): Promise<
SuccessResponse<IGetAttributeSuggestionsSuccessResponse> | ErrorResponse
> => {
try {
let base64EncodedFiltersString;
try {
// the replace function is to remove the padding at the end of base64 encoded string which is auto added to make it a multiple of 4
// why ? because the current working of qs doesn't work well with padding
base64EncodedFiltersString = encode(JSON.stringify(filters)).replace(
/=+$/,
'',
);
} catch {
// default base64 encoded string for empty filters object
base64EncodedFiltersString = 'eyJpdGVtcyI6W10sIm9wIjoiQU5EIn0';
}
const response: AxiosResponse<{
data: IGetAttributeSuggestionsSuccessResponse;
}> = await ApiV3Instance.get(
`/filter_suggestions?${createQueryParams({
searchText,
dataSource,
existingFilter: base64EncodedFiltersString,
})}`,
);
const payload: BaseAutocompleteData[] =
response.data.data.attributes?.map(({ id: _, ...item }) => ({
...item,
id: createIdFromObjectFields(item, baseAutoCompleteIdKeysOrder),
})) || [];
return {
statusCode: 200,
error: null,
message: response.statusText,
payload: {
attributes: payload,
example_queries: response.data.data.example_queries,
},
};
} catch (e) {
return ErrorResponseHandler(e as AxiosError);
}
};

View File

@@ -1,41 +0,0 @@
interface ConfigureIconProps {
width?: number;
height?: number;
fill?: string;
}
function ConfigureIcon({
width,
height,
fill,
}: ConfigureIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={width}
height={height}
fill={fill}
>
<path
stroke="#C0C1C3"
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="1.333"
d="M9.71 4.745a.576.576 0 000 .806l.922.922a.576.576 0 00.806 0l2.171-2.171a3.455 3.455 0 01-4.572 4.572l-3.98 3.98a1.222 1.222 0 11-1.727-1.728l3.98-3.98a3.455 3.455 0 014.572-4.572L9.717 4.739l-.006.006z"
/>
<path
stroke="#C0C1C3"
strokeLinecap="round"
strokeWidth="1.333"
d="M4 7L2.527 5.566a1.333 1.333 0 01-.013-1.898l.81-.81a1.333 1.333 0 011.991.119L5.333 3m5.417 7.988l1.179 1.178m0 0l-.138.138a.833.833 0 00.387 1.397v0a.833.833 0 00.792-.219l.446-.446a.833.833 0 00.176-.917v0a.833.833 0 00-1.355-.261l-.308.308z"
/>
</svg>
);
}
ConfigureIcon.defaultProps = {
width: 16,
height: 16,
fill: 'none',
};
export default ConfigureIcon;

View File

@@ -1,65 +0,0 @@
interface LogsIconProps {
width?: number;
height?: number;
fill?: string;
strokeColor?: string;
strokeWidth?: number;
}
function LogsIcon({
width,
height,
fill,
strokeColor,
strokeWidth,
}: LogsIconProps): JSX.Element {
return (
<svg
xmlns="http://www.w3.org/2000/svg"
width={width}
height={height}
fill={fill}
>
<path
stroke={strokeColor}
strokeWidth={strokeWidth}
d="M2.917 3.208v7.875"
/>
<ellipse
cx="6.417"
cy="3.208"
stroke={strokeColor}
strokeWidth={strokeWidth}
rx="3.5"
ry="1.458"
/>
<ellipse cx="6.417" cy="3.165" fill={strokeColor} rx="0.875" ry="0.365" />
<path
stroke={strokeColor}
strokeWidth={strokeWidth}
d="M9.917 11.083c0 .645-1.567 1.167-3.5 1.167s-3.5-.522-3.5-1.167"
/>
<path
stroke={strokeColor}
strokeLinecap="round"
strokeWidth={strokeWidth}
d="M5.25 6.417v1.117c0 .028.02.053.049.057l1.652.276A.058.058 0 017 7.924v1.993"
/>
<path
stroke={strokeColor}
strokeWidth={strokeWidth}
d="M9.917 3.208v3.103c0 .046.05.074.089.05L12.182 5a.058.058 0 01.088.035l.264 1.059a.058.058 0 01-.013.053l-2.59 2.877a.058.058 0 00-.014.04v2.018"
/>
</svg>
);
}
LogsIcon.defaultProps = {
width: 14,
height: 14,
fill: 'none',
strokeColor: '#C0C1C3',
strokeWidth: 1.167,
};
export default LogsIcon;

View File

@@ -1,39 +0,0 @@
interface SeverityCriticalIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
}
function SeverityCriticalIcon({
width,
height,
fill,
stroke,
}: SeverityCriticalIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M.99707.666056.99707 2.99939M.99707 5.33337H.991237M3.00293.666056 3.00293 2.99939M3.00293 5.33337H2.9971M5.00879.666056V2.99939M5.00879 5.33337H5.00296"
stroke={stroke}
strokeWidth="1.16667"
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityCriticalIcon.defaultProps = {
width: 6,
height: 6,
fill: 'none',
stroke: '#F56C87',
};
export default SeverityCriticalIcon;

View File

@@ -1,42 +0,0 @@
interface SeverityErrorIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
strokeWidth?: string;
}
function SeverityErrorIcon({
width,
height,
fill,
stroke,
strokeWidth,
}: SeverityErrorIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M1.00781.957845 1.00781 2.99951M1.00781 5.04175H1.00228"
stroke={stroke}
strokeWidth={strokeWidth}
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityErrorIcon.defaultProps = {
width: 2,
height: 6,
fill: 'none',
stroke: '#F56C87',
strokeWidth: '1.02083',
};
export default SeverityErrorIcon;

View File

@@ -1,46 +0,0 @@
interface SeverityInfoIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
}
function SeverityInfoIcon({
width,
height,
fill,
stroke,
}: SeverityInfoIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<rect
width={width}
height={height}
rx="3.5"
fill={stroke}
fillOpacity=".2"
/>
<path
d="M7 9.33346V7.00012M7 4.66675H7.00583"
stroke={stroke}
strokeWidth="1.16667"
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityInfoIcon.defaultProps = {
width: 14,
height: 14,
fill: 'none',
stroke: '#7190F9',
};
export default SeverityInfoIcon;

View File

@@ -1,42 +0,0 @@
interface SeverityWarningIconProps {
width?: number;
height?: number;
fill?: string;
stroke?: string;
strokeWidth?: string;
}
function SeverityWarningIcon({
width,
height,
fill,
stroke,
strokeWidth,
}: SeverityWarningIconProps): JSX.Element {
return (
<svg
width={width}
height={height}
fill={fill}
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M1.00732.957845 1.00732 2.99951M1.00732 5.04175H1.00179"
stroke={stroke}
strokeWidth={strokeWidth}
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
}
SeverityWarningIcon.defaultProps = {
width: 2,
height: 6,
fill: 'none',
stroke: '#FFD778',
strokeWidth: '0.978299',
};
export default SeverityWarningIcon;

View File

@@ -1,27 +0,0 @@
import { Color } from '@signozhq/design-tokens';
import { useIsDarkMode } from 'hooks/useDarkMode';
function GroupByIcon(): JSX.Element {
const isDarkMode = useIsDarkMode();
return (
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg">
<g
clipPath="url(#prefix__clip0_4344_1236)"
stroke={isDarkMode ? Color.BG_VANILLA_100 : Color.BG_INK_500}
strokeWidth="1.167"
strokeLinecap="round"
strokeLinejoin="round"
>
<path d="M4.667 1.167H2.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V2.333c0-.644-.522-1.166-1.166-1.166zM8.167 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M11.667 1.167a1.17 1.17 0 011.166 1.166v2.334a1.17 1.17 0 01-1.166 1.166M5.833 10.5H2.917c-.992 0-1.75-.758-1.75-1.75v-.583" />
<path d="M4.083 12.25l1.75-1.75-1.75-1.75M11.667 8.167H9.333c-.644 0-1.166.522-1.166 1.166v2.334c0 .644.522 1.166 1.166 1.166h2.334c.644 0 1.166-.522 1.166-1.166V9.333c0-.644-.522-1.166-1.166-1.166z" />
</g>
<defs>
<clipPath id="prefix__clip0_4344_1236">
<path fill="#fff" d="M0 0h14v14H0z" />
</clipPath>
</defs>
</svg>
);
}
export default GroupByIcon;

View File

@@ -1,14 +0,0 @@
.reset-button {
display: flex;
justify-content: space-between;
align-items: center;
background: var(--bg-ink-300);
border: 1px solid var(--bg-slate-400);
}
.lightMode {
.reset-button {
background: var(--bg-vanilla-100);
border-color: var(--bg-vanilla-300);
}
}

View File

@@ -1,11 +0,0 @@
import './Filters.styles.scss';
import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
export function Filters(): JSX.Element {
return (
<div className="filters">
<DateTimeSelector showAutoRefresh={false} hideShareModal showResetButton />
</div>
);
}

View File

@@ -1,137 +0,0 @@
import { Button, Modal, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import logEvent from 'api/common/logEvent';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import { CreditCard, X } from 'lucide-react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router-dom';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
import { License } from 'types/api/licenses/def';
export default function ChatSupportGateway(): JSX.Element {
const { notifications } = useNotifications();
const [activeLicense, setActiveLicense] = useState<License | null>(null);
const [isAddCreditCardModalOpen, setIsAddCreditCardModalOpen] = useState(
false,
);
const { data: licenseData, isFetching } = useLicense();
useEffect(() => {
const activeValidLicense =
licenseData?.payload?.licenses?.find(
(license) => license.isCurrent === true,
) || null;
setActiveLicense(activeValidLicense);
}, [licenseData, isFetching]);
const handleBillingOnSuccess = (
data: ErrorResponse | SuccessResponse<CheckoutSuccessPayloadProps, unknown>,
): void => {
if (data?.payload?.redirectURL) {
const newTab = document.createElement('a');
newTab.href = data.payload.redirectURL;
newTab.target = '_blank';
newTab.rel = 'noopener noreferrer';
newTab.click();
}
};
const handleBillingOnError = (): void => {
notifications.error({
message: SOMETHING_WENT_WRONG,
});
};
const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation(
updateCreditCardApi,
{
onSuccess: (data) => {
handleBillingOnSuccess(data);
},
onError: handleBillingOnError,
},
);
const { pathname } = useLocation();
const handleAddCreditCard = (): void => {
logEvent('Add Credit card modal: Clicked', {
source: `intercom icon`,
page: pathname,
});
updateCreditCard({
licenseKey: activeLicense?.key || '',
successURL: window.location.href,
cancelURL: window.location.href,
});
};
return (
<>
<div className="chat-support-gateway">
<Button
className="chat-support-gateway-btn"
onClick={(): void => {
logEvent('Disabled Chat Support: Clicked', {
source: `intercom icon`,
page: pathname,
});
setIsAddCreditCardModalOpen(true);
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 28 32"
className="chat-support-gateway-btn-icon"
>
<path d="M28 32s-4.714-1.855-8.527-3.34H3.437C1.54 28.66 0 27.026 0 25.013V3.644C0 1.633 1.54 0 3.437 0h21.125c1.898 0 3.437 1.632 3.437 3.645v18.404H28V32zm-4.139-11.982a.88.88 0 00-1.292-.105c-.03.026-3.015 2.681-8.57 2.681-5.486 0-8.517-2.636-8.571-2.684a.88.88 0 00-1.29.107 1.01 1.01 0 00-.219.708.992.992 0 00.318.664c.142.128 3.537 3.15 9.762 3.15 6.226 0 9.621-3.022 9.763-3.15a.992.992 0 00.317-.664 1.01 1.01 0 00-.218-.707z" />
</svg>
</Button>
</div>
{/* Add Credit Card Modal */}
<Modal
className="add-credit-card-modal"
title={<span className="title">Add Credit Card for Chat Support</span>}
open={isAddCreditCardModalOpen}
closable
onCancel={(): void => setIsAddCreditCardModalOpen(false)}
destroyOnClose
footer={[
<Button
key="cancel"
onClick={(): void => setIsAddCreditCardModalOpen(false)}
className="cancel-btn"
icon={<X size={16} />}
>
Cancel
</Button>,
<Button
key="submit"
type="primary"
icon={<CreditCard size={16} />}
size="middle"
loading={isLoadingBilling}
disabled={isLoadingBilling}
onClick={handleAddCreditCard}
className="add-credit-card-btn"
>
Add Credit Card
</Button>,
]}
>
<Typography.Text className="add-credit-card-text">
You&apos;re currently on <span className="highlight-text">Trial plan</span>
. Add a credit card to access SigNoz chat support to your workspace.
</Typography.Text>
</Modal>
</>
);
}

View File

@@ -5,13 +5,7 @@ import { Button, Dropdown, MenuProps } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useState } from 'react';
function DropDown({
element,
onDropDownItemClick,
}: {
element: JSX.Element[];
onDropDownItemClick?: MenuProps['onClick'];
}): JSX.Element {
function DropDown({ element }: { element: JSX.Element[] }): JSX.Element {
const isDarkMode = useIsDarkMode();
const items: MenuProps['items'] = element.map(
@@ -29,7 +23,6 @@ function DropDown({
items,
onMouseEnter: (): void => setDdOpen(true),
onMouseLeave: (): void => setDdOpen(false),
onClick: (item): void => onDropDownItemClick?.(item),
}}
open={isDdOpen}
>
@@ -47,8 +40,4 @@ function DropDown({
);
}
DropDown.defaultProps = {
onDropDownItemClick: (): void => {},
};
export default DropDown;

View File

@@ -139,7 +139,6 @@ export const getGraphOptions = (
},
scales: {
x: {
stacked: isStacked,
grid: {
display: true,
color: getGridColor(),
@@ -166,7 +165,6 @@ export const getGraphOptions = (
ticks: { color: getAxisLabelColor(currentTheme) },
},
y: {
stacked: isStacked,
display: true,
grid: {
display: true,
@@ -180,6 +178,9 @@ export const getGraphOptions = (
},
},
},
stacked: {
display: isStacked === undefined ? false : 'auto',
},
},
elements: {
line: {

View File

@@ -1,191 +0,0 @@
import './LaunchChatSupport.styles.scss';
import { Button, Modal, Tooltip, Typography } from 'antd';
import updateCreditCardApi from 'api/billing/checkout';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { FeatureKeys } from 'constants/features';
import useFeatureFlags from 'hooks/useFeatureFlag';
import useLicense from 'hooks/useLicense';
import { useNotifications } from 'hooks/useNotifications';
import { defaultTo } from 'lodash-es';
import { CreditCard, HelpCircle, X } from 'lucide-react';
import { useEffect, useState } from 'react';
import { useMutation } from 'react-query';
import { useLocation } from 'react-router-dom';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout';
import { License } from 'types/api/licenses/def';
import { isCloudUser } from 'utils/app';
export interface LaunchChatSupportProps {
eventName: string;
attributes: Record<string, unknown>;
message?: string;
buttonText?: string;
className?: string;
onHoverText?: string;
intercomMessageDisabled?: boolean;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
function LaunchChatSupport({
attributes,
eventName,
message = '',
buttonText = '',
className = '',
onHoverText = '',
intercomMessageDisabled = false,
}: LaunchChatSupportProps): JSX.Element | null {
const isChatSupportEnabled = useFeatureFlags(FeatureKeys.CHAT_SUPPORT)?.active;
const isCloudUserVal = isCloudUser();
const { notifications } = useNotifications();
const { data: licenseData, isFetching } = useLicense();
const [activeLicense, setActiveLicense] = useState<License | null>(null);
const [isAddCreditCardModalOpen, setIsAddCreditCardModalOpen] = useState(
false,
);
const { pathname } = useLocation();
const isPremiumChatSupportEnabled =
useFeatureFlags(FeatureKeys.PREMIUM_SUPPORT)?.active || false;
const showAddCreditCardModal =
!isPremiumChatSupportEnabled &&
!licenseData?.payload?.trialConvertedToSubscription;
useEffect(() => {
const activeValidLicense =
licenseData?.payload?.licenses?.find(
(license) => license.isCurrent === true,
) || null;
setActiveLicense(activeValidLicense);
}, [licenseData, isFetching]);
const handleFacingIssuesClick = (): void => {
if (showAddCreditCardModal) {
logEvent('Disabled Chat Support: Clicked', {
source: `facing issues button`,
page: pathname,
...attributes,
});
setIsAddCreditCardModalOpen(true);
} else {
logEvent(eventName, attributes);
if (window.Intercom && !intercomMessageDisabled) {
window.Intercom('showNewMessage', defaultTo(message, ''));
}
}
};
const handleBillingOnSuccess = (
data: ErrorResponse | SuccessResponse<CheckoutSuccessPayloadProps, unknown>,
): void => {
if (data?.payload?.redirectURL) {
const newTab = document.createElement('a');
newTab.href = data.payload.redirectURL;
newTab.target = '_blank';
newTab.rel = 'noopener noreferrer';
newTab.click();
}
};
const handleBillingOnError = (): void => {
notifications.error({
message: SOMETHING_WENT_WRONG,
});
};
const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation(
updateCreditCardApi,
{
onSuccess: (data) => {
handleBillingOnSuccess(data);
},
onError: handleBillingOnError,
},
);
const handleAddCreditCard = (): void => {
logEvent('Add Credit card modal: Clicked', {
source: `facing issues button`,
page: pathname,
...attributes,
});
updateCreditCard({
licenseKey: activeLicense?.key || '',
successURL: window.location.href,
cancelURL: window.location.href,
});
};
return isCloudUserVal && isChatSupportEnabled ? ( // Note: we would need to move this condition to license based in future
<div className="facing-issue-button">
<Tooltip
title={onHoverText}
autoAdjustOverflow
style={{ padding: 8 }}
overlayClassName="tooltip-overlay"
>
<Button
className={cx('periscope-btn', 'facing-issue-button', className)}
onClick={handleFacingIssuesClick}
icon={<HelpCircle size={14} />}
>
{buttonText || 'Facing issues?'}
</Button>
</Tooltip>
{/* Add Credit Card Modal */}
<Modal
className="add-credit-card-modal"
title={<span className="title">Add Credit Card for Chat Support</span>}
open={isAddCreditCardModalOpen}
closable
onCancel={(): void => setIsAddCreditCardModalOpen(false)}
destroyOnClose
footer={[
<Button
key="cancel"
onClick={(): void => setIsAddCreditCardModalOpen(false)}
className="cancel-btn"
icon={<X size={16} />}
>
Cancel
</Button>,
<Button
key="submit"
type="primary"
icon={<CreditCard size={16} />}
size="middle"
loading={isLoadingBilling}
disabled={isLoadingBilling}
onClick={handleAddCreditCard}
className="add-credit-card-btn"
>
Add Credit Card
</Button>,
]}
>
<Typography.Text className="add-credit-card-text">
You&apos;re currently on <span className="highlight-text">Trial plan</span>
. Add a credit card to access SigNoz chat support to your workspace.
</Typography.Text>
</Modal>
</div>
) : null;
}
LaunchChatSupport.defaultProps = {
message: '',
buttonText: '',
className: '',
onHoverText: '',
intercomMessageDisabled: false,
};
export default LaunchChatSupport;
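
A minimal, hypothetical call-site sketch for the LaunchChatSupport component shown above (removed on one side of this comparison); the event name, attributes, and import path are illustrative only:

import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';

// Hypothetical call site: on cloud workspaces with chat support enabled,
// this renders a "Facing issues?" button that opens Intercom with a
// pre-filled message (or the add-credit-card modal on trial plans).
function AlertHelpButton(): JSX.Element {
  return (
    <LaunchChatSupport
      eventName="Alert: facing issues button clicked"
      attributes={{ page: 'alert-details' }}
      message="Hi team, I need help configuring this alert."
      buttonText="Facing issues with alerts?"
      onHoverText="Click here to get help with this alert"
    />
  );
}

export default AlertHelpButton;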

View File

@@ -1,22 +1,14 @@
import { DrawerProps } from 'antd';
import { AddToQueryHOCProps } from 'components/Logs/AddToQueryHOC';
import { ActionItemProps } from 'container/LogDetailedView/ActionItem';
import { IField } from 'types/api/logs/fields';
import { ILog } from 'types/api/logs/log';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { VIEWS } from './constants';
export type LogDetailProps = {
log: ILog | null;
selectedTab: VIEWS;
onGroupByAttribute?: (
fieldKey: string,
isJSON?: boolean,
dataType?: DataTypes,
) => Promise<void>;
isListViewPanel?: boolean;
listViewPanelSelectedFields?: IField[] | null;
} & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
Pick<DrawerProps, 'onClose'>;
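
The LogDetailProps type above is assembled by intersecting picked slices of other prop types; a standalone sketch of that Pick/Partial pattern, using hypothetical stand-in types rather than the real DrawerProps/ActionItemProps:

// Hypothetical stand-ins for the picked-from types.
type DrawerLike = { onClose?: (e?: unknown) => void; open?: boolean };
type ActionLike = {
  onClickActionItem: (fieldKey: string, fieldValue: string) => void;
};

// Own fields, plus Pick<> for borrowed props and Partial<Pick<>> for
// props that should become optional in the composed type.
type ExampleProps = {
  title: string;
} & Pick<DrawerLike, 'onClose'> &
  Partial<Pick<ActionLike, 'onClickActionItem'>>;

const example: ExampleProps = { title: 'demo', onClose: (): void => undefined };

export default example;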

View File

@@ -52,7 +52,7 @@
.log-body {
font-family: 'SF Mono';
font-family: 'Geist Mono';
font-family: 'Space Mono', monospace;
font-size: var(--font-size-sm);
font-weight: var(--font-weight-normal);

View File

@@ -2,23 +2,14 @@
import './LogDetails.styles.scss';
import { Color, Spacing } from '@signozhq/design-tokens';
import Convert from 'ansi-to-html';
import { Button, Divider, Drawer, Radio, Tooltip, Typography } from 'antd';
import { RadioChangeEvent } from 'antd/lib';
import cx from 'classnames';
import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator';
import { LOCALSTORAGE } from 'constants/localStorage';
import ContextView from 'container/LogDetailedView/ContextView/ContextView';
import JSONView from 'container/LogDetailedView/JsonView';
import Overview from 'container/LogDetailedView/Overview';
import {
aggregateAttributesResourcesToString,
removeEscapeCharacters,
unescapeString,
} from 'container/LogDetailedView/utils';
import { useOptionsMenu } from 'container/OptionsMenu';
import dompurify from 'dompurify';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { aggregateAttributesResourcesToString } from 'container/LogDetailedView/utils';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useNotifications } from 'hooks/useNotifications';
import {
@@ -30,27 +21,21 @@ import {
TextSelect,
X,
} from 'lucide-react';
import { useMemo, useState } from 'react';
import { useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import { FORBID_DOM_PURIFY_TAGS } from 'utils/app';
import { VIEW_TYPES, VIEWS } from './constants';
import { LogDetailProps } from './LogDetail.interfaces';
import QueryBuilderSearchWrapper from './QueryBuilderSearchWrapper';
const convert = new Convert();
function LogDetail({
log,
onClose,
onAddToQuery,
onGroupByAttribute,
onClickActionItem,
selectedTab,
isListViewPanel = false,
listViewPanelSelectedFields,
}: LogDetailProps): JSX.Element {
const [, copyToClipboard] = useCopyToClipboard();
const [selectedView, setSelectedView] = useState<VIEWS>(selectedTab);
@@ -60,19 +45,6 @@ function LogDetail({
const [contextQuery, setContextQuery] = useState<Query | undefined>();
const [filters, setFilters] = useState<TagFilter | null>(null);
const [isEdit, setIsEdit] = useState<boolean>(false);
const { initialDataSource, stagedQuery } = useQueryBuilder();
const listQuery = useMemo(() => {
if (!stagedQuery || stagedQuery.builder.queryData.length < 1) return null;
return stagedQuery.builder.queryData.find((item) => !item.disabled) || null;
}, [stagedQuery]);
const { options } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});
const isDarkMode = useIsDarkMode();
@@ -99,17 +71,6 @@ function LogDetail({
}
};
const htmlBody = useMemo(
() => ({
__html: convert.toHtml(
dompurify.sanitize(unescapeString(log?.body || ''), {
FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
}),
),
}),
[log?.body],
);
const handleJSONCopy = (): void => {
copyToClipboard(LogJsonData);
notifications.success({
@@ -147,8 +108,8 @@ function LogDetail({
>
<div className="log-detail-drawer__log">
<Divider type="vertical" className={cx('log-type-indicator', logType)} />
<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
<Tooltip title={log?.body} placement="left">
<Typography.Text className="log-body">{log?.body}</Typography.Text>
</Tooltip>
<div className="log-overflow-shadow">&nbsp;</div>
@@ -230,10 +191,7 @@ function LogDetail({
logData={log}
onAddToQuery={onAddToQuery}
onClickActionItem={onClickActionItem}
onGroupByAttribute={onGroupByAttribute}
isListViewPanel={isListViewPanel}
selectedOptions={options}
listViewPanelSelectedFields={listViewPanelSelectedFields}
/>
)}
{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
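
The htmlBody memo removed above chains two third-party libraries; a small standalone sketch of that pipeline outside React, assuming only ansi-to-html and dompurify, with an illustrative FORBID_TAGS list in place of the codebase's FORBID_DOM_PURIFY_TAGS constant:

import Convert from 'ansi-to-html';
import dompurify from 'dompurify';

const convert = new Convert();

// Mirrors the removed htmlBody memo: sanitise the raw log body first,
// then turn ANSI colour codes into HTML spans for dangerouslySetInnerHTML.
function logBodyToSafeHtml(body: string): string {
  const sanitized = dompurify.sanitize(body, { FORBID_TAGS: ['img', 'form'] });
  return convert.toHtml(sanitized);
}

export default logBodyToSafeHtml;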

View File

@@ -1,16 +1,3 @@
.addToQueryContainer {
cursor: pointer;
display: flex;
align-items: center;
&.small {
line-height: 16px;
}
&.medium {
line-height: 20px;
}
&.large {
line-height: 24px;
}
}

View File

@@ -1,21 +1,18 @@
import './AddToQueryHOC.styles.scss';
import { Popover } from 'antd';
import cx from 'classnames';
import { OPERATORS } from 'constants/queryBuilder';
import { FontSize } from 'container/OptionsMenu/types';
import { memo, MouseEvent, ReactNode, useMemo } from 'react';
function AddToQueryHOC({
fieldKey,
fieldValue,
onAddToQuery,
fontSize,
children,
}: AddToQueryHOCProps): JSX.Element {
const handleQueryAdd = (event: MouseEvent<HTMLDivElement>): void => {
event.stopPropagation();
onAddToQuery(fieldKey, fieldValue, OPERATORS['=']);
onAddToQuery(fieldKey, fieldValue, OPERATORS.IN);
};
const popOverContent = useMemo(() => <span>Add to query: {fieldKey}</span>, [
@@ -24,7 +21,7 @@ function AddToQueryHOC({
return (
// eslint-disable-next-line jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions
<div className={cx('addToQueryContainer', fontSize)} onClick={handleQueryAdd}>
<div className="addToQueryContainer" onClick={handleQueryAdd}>
<Popover placement="top" content={popOverContent}>
{children}
</Popover>
@@ -36,7 +33,6 @@ export interface AddToQueryHOCProps {
fieldKey: string;
fieldValue: string;
onAddToQuery: (fieldKey: string, fieldValue: string, operator: string) => void;
fontSize: FontSize;
children: ReactNode;
}
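
For reference, a minimal sketch of a handler that satisfies the onAddToQuery prop shown above; the SimpleFilter shape and the sample key/value are hypothetical, not this codebase's query-builder types:

// Collect (key, value, operator) triples into a flat filter list.
type SimpleFilter = { key: string; value: string; op: string };

const filters: SimpleFilter[] = [];

function onAddToQuery(
  fieldKey: string,
  fieldValue: string,
  operator: string,
): void {
  // '=' matches the value exactly; 'IN' treats it as one member of a list.
  filters.push({ key: fieldKey, value: fieldValue, op: operator });
}

onAddToQuery('service.name', 'frontend', '=');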

View File

@@ -4,7 +4,6 @@ import { ReactNode, useCallback, useEffect } from 'react';
import { useCopyToClipboard } from 'react-use';
function CopyClipboardHOC({
entityKey,
textToCopy,
children,
}: CopyClipboardHOCProps): JSX.Element {
@@ -12,15 +11,11 @@ function CopyClipboardHOC({
const { notifications } = useNotifications();
useEffect(() => {
if (value.value) {
const key = entityKey || '';
const notificationMessage = `${key} copied to clipboard`;
notifications.success({
message: notificationMessage,
message: 'Copied to clipboard',
});
}
}, [value, notifications, entityKey]);
}, [value, notifications]);
const onClick = useCallback((): void => {
setCopy(textToCopy);
@@ -39,7 +34,6 @@ function CopyClipboardHOC({
}
interface CopyClipboardHOCProps {
entityKey: string | undefined;
textToCopy: string;
children: ReactNode;
}
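
A minimal sketch of the react-use hook the wrapper above builds on; the CopyButton component itself is hypothetical:

import { useCopyToClipboard } from 'react-use';

// copyToClipboard writes the text; state.value reflects the last copied value.
function CopyButton({ textToCopy }: { textToCopy: string }): JSX.Element {
  const [state, copyToClipboard] = useCopyToClipboard();

  return (
    <button type="button" onClick={(): void => copyToClipboard(textToCopy)}>
      {state.value ? 'Copied!' : 'Copy'}
    </button>
  );
}

export default CopyButton;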

View File

@@ -6,21 +6,6 @@
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
&.small {
font-size: 11px;
line-height: 16px;
}
&.medium {
font-size: 13px;
line-height: 20px;
}
&.large {
font-size: 14px;
line-height: 24px;
}
}
.log-value {
color: var(--text-vanilla-400, #c0c1c3);
@@ -29,21 +14,6 @@
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
&.small {
font-size: 11px;
line-height: 16px;
}
&.medium {
font-size: 13px;
line-height: 20px;
}
&.large {
font-size: 14px;
line-height: 24px;
}
}
.log-line {
display: flex;
@@ -70,20 +40,6 @@
font-weight: 400;
line-height: 18px; /* 128.571% */
letter-spacing: -0.07px;
&.small {
font-size: 11px;
line-height: 16px;
}
&.medium {
font-size: 13px;
line-height: 20px;
}
&.large {
font-size: 14px;
line-height: 24px;
}
}
.selected-log-value {
@@ -96,37 +52,12 @@
line-height: 18px;
letter-spacing: -0.07px;
font-size: 14px;
&.small {
font-size: 11px;
line-height: 16px;
}
&.medium {
font-size: 13px;
line-height: 20px;
}
&.large {
font-size: 14px;
line-height: 24px;
}
}
.selected-log-kv {
min-height: 24px;
display: flex;
align-items: center;
&.small {
min-height: 16px;
}
&.medium {
min-height: 20px;
}
&.large {
min-height: 24px;
}
}
}

View File

@@ -3,11 +3,8 @@ import './ListLogView.styles.scss';
import { blue } from '@ant-design/colors';
import Convert from 'ansi-to-html';
import { Typography } from 'antd';
import cx from 'classnames';
import LogDetail from 'components/LogDetail';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import { unescapeString } from 'container/LogDetailedView/utils';
import { FontSize } from 'container/OptionsMenu/types';
import dayjs from 'dayjs';
import dompurify from 'dompurify';
import { useActiveLog } from 'hooks/logs/useActiveLog';
@@ -42,7 +39,6 @@ interface LogFieldProps {
fieldKey: string;
fieldValue: string;
linesPerRow?: number;
fontSize: FontSize;
}
type LogSelectedFieldProps = Omit<LogFieldProps, 'linesPerRow'> &
@@ -52,12 +48,11 @@ function LogGeneralField({
fieldKey,
fieldValue,
linesPerRow = 1,
fontSize,
}: LogFieldProps): JSX.Element {
const html = useMemo(
() => ({
__html: convert.toHtml(
dompurify.sanitize(unescapeString(fieldValue), {
dompurify.sanitize(fieldValue, {
FORBID_TAGS: [...FORBID_DOM_PURIFY_TAGS],
}),
),
@@ -67,12 +62,12 @@ function LogGeneralField({
return (
<TextContainer>
<Text ellipsis type="secondary" className={cx('log-field-key', fontSize)}>
<Text ellipsis type="secondary" className="log-field-key">
{`${fieldKey} : `}
</Text>
<LogText
dangerouslySetInnerHTML={html}
className={cx('log-value', fontSize)}
className="log-value"
linesPerRow={linesPerRow > 1 ? linesPerRow : undefined}
/>
</TextContainer>
@@ -83,7 +78,6 @@ function LogSelectedField({
fieldKey = '',
fieldValue = '',
onAddToQuery,
fontSize,
}: LogSelectedFieldProps): JSX.Element {
return (
<div className="log-selected-fields">
@@ -91,22 +85,16 @@ function LogSelectedField({
fieldKey={fieldKey}
fieldValue={fieldValue}
onAddToQuery={onAddToQuery}
fontSize={fontSize}
>
<Typography.Text>
<span
style={{ color: blue[4] }}
className={cx('selected-log-field-key', fontSize)}
>
<span style={{ color: blue[4] }} className="selected-log-field-key">
{fieldKey}
</span>
</Typography.Text>
</AddToQueryHOC>
<Typography.Text ellipsis className={cx('selected-log-kv', fontSize)}>
<span className={cx('selected-log-field-key', fontSize)}>{': '}</span>
<span className={cx('selected-log-value', fontSize)}>
{fieldValue || "''"}
</span>
<Typography.Text ellipsis className="selected-log-kv">
<span className="selected-log-field-key">{': '}</span>
<span className="selected-log-value">{fieldValue || "''"}</span>
</Typography.Text>
</div>
);
@@ -119,7 +107,6 @@ type ListLogViewProps = {
onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
activeLog?: ILog | null;
linesPerRow: number;
fontSize: FontSize;
};
function ListLogView({
@@ -129,7 +116,6 @@ function ListLogView({
onAddToQuery,
activeLog,
linesPerRow,
fontSize,
}: ListLogViewProps): JSX.Element {
const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);
@@ -142,7 +128,6 @@ function ListLogView({
onAddToQuery: handleAddToQuery,
onSetActiveLog: handleSetActiveContextLog,
onClearActiveLog: handleClearActiveContextLog,
onGroupByAttribute,
} = useActiveLog();
const isDarkMode = useIsDarkMode();
@@ -200,7 +185,6 @@ function ListLogView({
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
onClick={handleDetailedView}
fontSize={fontSize}
>
<div className="log-line">
<LogStateIndicator
@@ -208,28 +192,18 @@ function ListLogView({
isActive={
activeLog?.id === logData.id || activeContextLog?.id === logData.id
}
fontSize={fontSize}
/>
<div>
<LogContainer fontSize={fontSize}>
<LogContainer>
<LogGeneralField
fieldKey="Log"
fieldValue={flattenLogData.body}
linesPerRow={linesPerRow}
fontSize={fontSize}
/>
{flattenLogData.stream && (
<LogGeneralField
fieldKey="Stream"
fieldValue={flattenLogData.stream}
fontSize={fontSize}
/>
<LogGeneralField fieldKey="Stream" fieldValue={flattenLogData.stream} />
)}
<LogGeneralField
fieldKey="Timestamp"
fieldValue={timestampValue}
fontSize={fontSize}
/>
<LogGeneralField fieldKey="Timestamp" fieldValue={timestampValue} />
{updatedSelecedFields.map((field) =>
isValidLogField(flattenLogData[field.name] as never) ? (
@@ -238,7 +212,6 @@ function ListLogView({
fieldKey={field.name}
fieldValue={flattenLogData[field.name] as never}
onAddToQuery={onAddToQuery}
fontSize={fontSize}
/>
) : null,
)}
@@ -259,7 +232,6 @@ function ListLogView({
onAddToQuery={handleAddToQuery}
selectedTab={VIEW_TYPES.CONTEXT}
onClose={handlerClearActiveContextLog}
onGroupByAttribute={onGroupByAttribute}
/>
)}
</>
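
The fontSize prop threaded through ListLogView above pairs with the .small/.medium/.large modifiers in the SCSS diffs; a minimal sketch of that classnames pattern, assuming FontSize is (or maps to) a string union matching those class names:

import cx from 'classnames';

// Assumed union standing in for container/OptionsMenu's FontSize type.
type FontSize = 'small' | 'medium' | 'large';

function FieldKey({
  name,
  fontSize,
}: {
  name: string;
  fontSize: FontSize;
}): JSX.Element {
  // The second cx argument becomes the size modifier class on the element.
  return <span className={cx('log-field-key', fontSize)}>{`${name} : `}</span>;
}

export default FieldKey;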

Some files were not shown because too many files have changed in this diff.