Compare commits

..

2 Commits

Author             SHA1         Message                                                          Date
nityanandagohain   781732f25a   Merge remote-tracking branch 'origin/develop' into issue_4777   2024-03-30 19:22:20 +05:30
nityanandagohain   77e55a0ec9   feat: allow query restrictions for log queries                   2024-03-30 18:41:53 +05:30
920 changed files with 10561 additions and 74424 deletions

View File

@@ -1,31 +0,0 @@
name: Jest Coverage - changed files
on:
pull_request:
branches: develop
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: "refs/heads/develop"
token: ${{ secrets.GITHUB_TOKEN }} # Provide the GitHub token for authentication
- name: Fetch branch
run: git fetch origin ${{ github.event.pull_request.head.ref }}
- run: |
git checkout ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Install dependencies
run: cd frontend && npm install -g yarn && yarn
- name: npm run test:changedsince
run: cd frontend && npm run i18n:generate-hash && npm run test:changedsince

View File

@@ -9,46 +9,34 @@ jobs:
name: Deploy latest develop branch to staging
runs-on: ubuntu-latest
environment: staging
permissions:
contents: 'read'
id-token: 'write'
steps:
- id: 'auth'
uses: 'google-github-actions/auth@v2'
with:
workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
- name: 'sdk'
uses: 'google-github-actions/setup-gcloud@v2'
- name: 'ssh'
shell: bash
- name: Executing remote ssh commands using ssh key
uses: appleboy/ssh-action@v1.0.3
env:
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
GITHUB_BRANCH: develop
GITHUB_SHA: ${{ github.sha }}
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
GCP_ZONE: ${{ secrets.GCP_ZONE }}
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
run: |
read -r -d '' COMMAND <<EOF || true
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export OTELCOL_TAG="main"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
docker pull signoz/signoz-otel-collector:main
docker pull signoz/signoz-schema-migrator:main
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-testing
EOF
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
with:
host: ${{ secrets.HOST_DNS }}
username: ${{ secrets.USERNAME }}
key: ${{ secrets.SSH_KEY }}
envs: GITHUB_BRANCH,GITHUB_SHA
command_timeout: 60m
script: |
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export OTELCOL_TAG="main"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
docker pull signoz/signoz-otel-collector:main
docker pull signoz/signoz-schema-migrator:main
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout ${GITHUB_BRANCH}
git pull
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz

View File

@@ -9,47 +9,35 @@ jobs:
runs-on: ubuntu-latest
environment: testing
if: ${{ github.event.label.name == 'testing-deploy' }}
permissions:
contents: 'read'
id-token: 'write'
steps:
- id: 'auth'
uses: 'google-github-actions/auth@v2'
with:
workload_identity_provider: ${{ secrets.GCP_WORKLOAD_IDENTITY_PROVIDER }}
service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }}
- name: 'sdk'
uses: 'google-github-actions/setup-gcloud@v2'
- name: 'ssh'
shell: bash
- name: Executing remote ssh commands using ssh key
uses: appleboy/ssh-action@v1.0.3
env:
GITHUB_BRANCH: ${{ github.head_ref || github.ref_name }}
GITHUB_SHA: ${{ github.sha }}
GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
GCP_ZONE: ${{ secrets.GCP_ZONE }}
GCP_INSTANCE: ${{ secrets.GCP_INSTANCE }}
run: |
read -r -d '' COMMAND <<EOF || true
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export DEV_BUILD="1"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout develop
git pull
# This is added to handle the scenario when a new commit in the PR is force-pushed
git branch -D ${GITHUB_BRANCH}
git checkout --track origin/${GITHUB_BRANCH}
make build-ee-query-service-amd64
make build-frontend-amd64
make run-testing
EOF
gcloud compute ssh ${GCP_INSTANCE} --zone ${GCP_ZONE} --ssh-key-expire-after=15m --tunnel-through-iap --project ${GCP_PROJECT} --command "${COMMAND}"
with:
host: ${{ secrets.HOST_DNS }}
username: ${{ secrets.USERNAME }}
key: ${{ secrets.SSH_KEY }}
envs: GITHUB_BRANCH,GITHUB_SHA
command_timeout: 60m
script: |
echo "GITHUB_BRANCH: ${GITHUB_BRANCH}"
echo "GITHUB_SHA: ${GITHUB_SHA}"
export DOCKER_TAG="${GITHUB_SHA:0:7}" # needed for child process to access it
export DEV_BUILD="1"
export PATH="/usr/local/go/bin/:$PATH" # needed for Golang to work
docker system prune --force
cd ~/signoz
git status
git add .
git stash push -m "stashed on $(date --iso-8601=seconds)"
git fetch origin
git checkout develop
git pull
# This is added to handle the scenario when a new commit in the PR is force-pushed
git branch -D ${GITHUB_BRANCH}
git checkout --track origin/${GITHUB_BRANCH}
make build-ee-query-service-amd64
make build-frontend-amd64
make run-signoz

.gitignore
View File

@@ -47,7 +47,6 @@ ee/query-service/signoz.db
ee/query-service/tests/test-deploy/data/
# local data
*.backup
*.db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
@@ -62,8 +61,4 @@ e2e/test-results/
e2e/playwright-report/
e2e/blob-report/
e2e/playwright/.cache/
e2e/.auth
# go
vendor/
**/main/**
e2e/.auth

View File

@@ -156,9 +156,6 @@ pull-signoz:
run-signoz:
@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml up --build -d
run-testing:
@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.testing.yaml up --build -d
down-signoz:
@docker-compose -f $(STANDALONE_DIRECTORY)/docker-compose.yaml down -v

View File

@@ -22,7 +22,7 @@ x-clickhouse-defaults: &clickhouse-defaults
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
@@ -146,7 +146,7 @@ services:
condition: on-failure
query-service:
image: signoz/query-service:0.46.0
image: signoz/query-service:0.39.0
command:
[
"-config=/root/config/prometheus.yml",
@@ -186,7 +186,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:0.46.0
image: signoz/frontend:0.39.0
deploy:
restart_policy:
condition: on-failure
@@ -199,7 +199,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector:
image: signoz/signoz-otel-collector:0.88.24
image: signoz/signoz-otel-collector:0.88.12
command:
[
"--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
- query-service
otel-collector-migrator:
image: signoz/signoz-schema-migrator:0.88.24
image: signoz/signoz-schema-migrator:0.88.12
deploy:
restart_policy:
condition: on-failure

View File

@@ -77,16 +77,7 @@ processors:
# This is added to ensure the uniqueness of the timeseries
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
- name: 'signoz.collector.id'
# memory_limiter:
# # 80% of maximum memory up to 2G
# limit_mib: 1500
@@ -117,30 +108,21 @@ processors:
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
datasource: tcp://clickhouse:9000/?database=signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
dsn: tcp://clickhouse:9000/
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
timeout: 10s
extensions:

View File

@@ -22,4 +22,4 @@ rule_files:
scrape_configs: []
remote_read:
- url: tcp://clickhouse:9000/signoz_metrics
- url: tcp://clickhouse:9000/?database=signoz_metrics

View File

@@ -46,7 +46,7 @@ services:
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
@@ -66,7 +66,7 @@ services:
- --storage.path=/data
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
otel-collector:
container_name: signoz-otel-collector
image: signoz/signoz-otel-collector:0.88.24
image: signoz/signoz-otel-collector:0.88.12
command:
[
"--config=/etc/otel-collector-config.yaml",

View File

@@ -1,307 +0,0 @@
version: "2.4"
x-clickhouse-defaults: &clickhouse-defaults
restart: on-failure
# adding non LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
depends_on:
- zookeeper-1
# - zookeeper-2
# - zookeeper-3
logging:
options:
max-size: 50m
max-file: "3"
healthcheck:
# "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
test:
[
"CMD",
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
]
interval: 30s
timeout: 5s
retries: 3
ulimits:
nproc: 65535
nofile:
soft: 262144
hard: 262144
x-db-depend: &db-depend
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator:
condition: service_completed_successfully
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
services:
zookeeper-1:
image: bitnami/zookeeper:3.7.1
container_name: signoz-zookeeper-1
hostname: zookeeper-1
user: root
ports:
- "2181:2181"
- "2888:2888"
- "3888:3888"
volumes:
- ./data/zookeeper-1:/bitnami/zookeeper
environment:
- ZOO_SERVER_ID=1
# - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
- ALLOW_ANONYMOUS_LOGIN=yes
- ZOO_AUTOPURGE_INTERVAL=1
# zookeeper-2:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-2
# hostname: zookeeper-2
# user: root
# ports:
# - "2182:2181"
# - "2889:2888"
# - "3889:3888"
# volumes:
# - ./data/zookeeper-2:/bitnami/zookeeper
# environment:
# - ZOO_SERVER_ID=2
# - ZOO_SERVERS=zookeeper-1:2888:3888,0.0.0.0:2888:3888,zookeeper-3:2888:3888
# - ALLOW_ANONYMOUS_LOGIN=yes
# - ZOO_AUTOPURGE_INTERVAL=1
# zookeeper-3:
# image: bitnami/zookeeper:3.7.0
# container_name: signoz-zookeeper-3
# hostname: zookeeper-3
# user: root
# ports:
# - "2183:2181"
# - "2890:2888"
# - "3890:3888"
# volumes:
# - ./data/zookeeper-3:/bitnami/zookeeper
# environment:
# - ZOO_SERVER_ID=3
# - ZOO_SERVERS=zookeeper-1:2888:3888,zookeeper-2:2888:3888,0.0.0.0:2888:3888
# - ALLOW_ANONYMOUS_LOGIN=yes
# - ZOO_AUTOPURGE_INTERVAL=1
clickhouse:
<<: *clickhouse-defaults
container_name: signoz-clickhouse
hostname: clickhouse
ports:
- "9000:9000"
- "8123:8123"
- "9181:9181"
volumes:
- ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
- ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
- ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
- ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
- ./data/clickhouse/:/var/lib/clickhouse/
- ./user_scripts:/var/lib/clickhouse/user_scripts/
# clickhouse-2:
# <<: *clickhouse-defaults
# container_name: signoz-clickhouse-2
# hostname: clickhouse-2
# ports:
# - "9001:9000"
# - "8124:8123"
# - "9182:9181"
# volumes:
# - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
# - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
# - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-2/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
# clickhouse-3:
# <<: *clickhouse-defaults
# container_name: signoz-clickhouse-3
# hostname: clickhouse-3
# ports:
# - "9002:9000"
# - "8125:8123"
# - "9183:9181"
# volumes:
# - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
# - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
# - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
# - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
# # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
# - ./data/clickhouse-3/:/var/lib/clickhouse/
# - ./user_scripts:/var/lib/clickhouse/user_scripts/
alertmanager:
image: signoz/alertmanager:${ALERTMANAGER_TAG:-0.23.5}
container_name: signoz-alertmanager
volumes:
- ./data/alertmanager:/data
depends_on:
query-service:
condition: service_healthy
restart: on-failure
command:
- --queryService.url=http://query-service:8085
- --storage.path=/data
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.46.0}
container_name: signoz-query-service
command:
[
"-config=/root/config/prometheus.yml",
"-gateway-url=https://api.staging.signoz.cloud"
# "--prefer-delta=true"
]
# ports:
# - "6060:6060" # pprof port
# - "8080:8080" # query-service port
volumes:
- ./prometheus.yml:/root/config/prometheus.yml
- ../dashboards:/root/config/dashboards
- ./data/signoz/:/var/lib/signoz/
environment:
- ClickHouseUrl=tcp://clickhouse:9000
- ALERTMANAGER_API_PREFIX=http://alertmanager:9093/api/
- SIGNOZ_LOCAL_DB_PATH=/var/lib/signoz/signoz.db
- DASHBOARDS_PATH=/root/config/dashboards
- STORAGE=clickhouse
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-standalone-amd
restart: on-failure
healthcheck:
test:
[
"CMD",
"wget",
"--spider",
"-q",
"localhost:8080/api/v1/health"
]
interval: 30s
timeout: 5s
retries: 3
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.46.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
- alertmanager
- query-service
ports:
- "3301:3301"
volumes:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
depends_on:
clickhouse:
condition: service_healthy
# clickhouse-2:
# condition: service_healthy
# clickhouse-3:
# condition: service_healthy
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
container_name: signoz-otel-collector
command:
[
"--config=/etc/otel-collector-config.yaml",
"--manager-config=/etc/manager-config.yaml",
"--copy-path=/var/tmp/collector-config.yaml",
"--feature-gates=-pkg.translator.prometheus.NormalizeName"
]
user: root # required for reading docker container logs
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
- /var/lib/docker/containers:/var/lib/docker/containers:ro
environment:
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
- DOCKER_MULTI_NODE_CLUSTER=false
- LOW_CARDINAL_EXCEPTION_GROUPING=false
ports:
# - "1777:1777" # pprof extension
- "4317:4317" # OTLP gRPC receiver
- "4318:4318" # OTLP HTTP receiver
# - "8888:8888" # OtelCollector internal metrics
# - "8889:8889" # signoz spanmetrics exposed by the agent
# - "9411:9411" # Zipkin port
# - "13133:13133" # health check extension
# - "14250:14250" # Jaeger gRPC
# - "14268:14268" # Jaeger thrift HTTP
# - "55678:55678" # OpenCensus receiver
# - "55679:55679" # zPages extension
restart: on-failure
depends_on:
clickhouse:
condition: service_healthy
otel-collector-migrator:
condition: service_completed_successfully
query-service:
condition: service_healthy
logspout:
image: "gliderlabs/logspout:v3.2.14"
container_name: signoz-logspout
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-collector:2255
depends_on:
- otel-collector
restart: on-failure
hotrod:
image: jaegertracing/example-hotrod:1.30
container_name: hotrod
logging:
options:
max-size: 50m
max-file: "3"
command: [ "all" ]
environment:
- JAEGER_ENDPOINT=http://otel-collector:14268/api/traces
load-hotrod:
image: "signoz/locust:1.2.3"
container_name: load-hotrod
hostname: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../common/locust-scripts:/locust

View File

@@ -21,7 +21,7 @@ x-clickhouse-defaults: &clickhouse-defaults
"wget",
"--spider",
"-q",
"0.0.0.0:8123/ping"
"localhost:8123/ping"
]
interval: 30s
timeout: 5s
@@ -164,11 +164,11 @@ services:
# Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
query-service:
image: signoz/query-service:${DOCKER_TAG:-0.46.0}
image: signoz/query-service:${DOCKER_TAG:-0.39.0}
container_name: signoz-query-service
command:
[
"-config=/root/config/prometheus.yml"
"-config=/root/config/prometheus.yml",
# "--prefer-delta=true"
]
# ports:
@@ -203,7 +203,7 @@ services:
<<: *db-depend
frontend:
image: signoz/frontend:${DOCKER_TAG:-0.46.0}
image: signoz/frontend:${DOCKER_TAG:-0.39.0}
container_name: signoz-frontend
restart: on-failure
depends_on:
@@ -215,7 +215,7 @@ services:
- ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
otel-collector-migrator:
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.24}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.88.12}
container_name: otel-migrator
command:
- "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:
otel-collector:
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.24}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.88.12}
container_name: signoz-otel-collector
command:
[

View File

@@ -1,64 +0,0 @@
<clickhouse>
<logger>
<!-- Possible levels [1]:
- none (turns off logging)
- fatal
- critical
- error
- warning
- notice
- information
- debug
- trace
[1]: https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/Logger.h#L105-L114
-->
<level>information</level>
<log>/var/log/clickhouse-keeper/clickhouse-keeper.log</log>
<errorlog>/var/log/clickhouse-keeper/clickhouse-keeper.err.log</errorlog>
<!-- Rotation policy
See https://github.com/pocoproject/poco/blob/poco-1.9.4-release/Foundation/include/Poco/FileChannel.h#L54-L85
-->
<size>1000M</size>
<count>10</count>
<!-- <console>1</console> --> <!-- Default behavior is autodetection (log to console if not daemon mode and is tty) -->
</logger>
<listen_host>0.0.0.0</listen_host>
<max_connections>4096</max_connections>
<keeper_server>
<tcp_port>9181</tcp_port>
<!-- Must be unique among all keeper servers -->
<server_id>1</server_id>
<log_storage_path>/var/lib/clickhouse/coordination/logs</log_storage_path>
<snapshot_storage_path>/var/lib/clickhouse/coordination/snapshots</snapshot_storage_path>
<coordination_settings>
<operation_timeout_ms>10000</operation_timeout_ms>
<min_session_timeout_ms>10000</min_session_timeout_ms>
<session_timeout_ms>100000</session_timeout_ms>
<raft_logs_level>information</raft_logs_level>
<compress_logs>false</compress_logs>
<!-- All settings listed in https://github.com/ClickHouse/ClickHouse/blob/master/src/Coordination/CoordinationSettings.h -->
</coordination_settings>
<!-- enable sanity hostname checks for cluster configuration (e.g. if localhost is used with remote endpoints) -->
<hostname_checks_enabled>true</hostname_checks_enabled>
<raft_configuration>
<server>
<id>1</id>
<!-- Internal port and hostname -->
<hostname>clickhouses-keeper-1</hostname>
<port>9234</port>
</server>
<!-- Add more servers here -->
</raft_configuration>
</keeper_server>
</clickhouse>

View File

@@ -75,16 +75,7 @@ processors:
# This is added to ensure the uniqueness of the timeseries
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
- name: 'signoz.collector.id'
# memory_limiter:
# # 80% of maximum memory up to 2G
# limit_mib: 1500
@@ -120,15 +111,6 @@ processors:
# Otherwise, identical timeseries produced by multiple replicas of
# collectors result in incorrect APM metrics
- name: signoz.collector.id
- name: service.version
- name: browser.platform
- name: browser.mobile
- name: k8s.cluster.name
- name: k8s.node.name
- name: k8s.namespace.name
- name: host.name
- name: host.type
- name: container.name
extensions:
health_check:
@@ -140,20 +122,21 @@ extensions:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
datasource: tcp://clickhouse:9000/?database=signoz_traces
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
low_cardinal_exception_grouping: ${LOW_CARDINAL_EXCEPTION_GROUPING}
clickhousemetricswrite:
endpoint: tcp://clickhouse:9000/signoz_metrics
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
resource_to_telemetry_conversion:
enabled: true
clickhousemetricswrite/prometheus:
endpoint: tcp://clickhouse:9000/signoz_metrics
endpoint: tcp://clickhouse:9000/?database=signoz_metrics
# logging: {}
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
dsn: tcp://clickhouse:9000/
docker_multi_node_cluster: ${DOCKER_MULTI_NODE_CLUSTER}
timeout: 10s
# logging: {}
service:
telemetry:

View File

@@ -22,4 +22,4 @@ rule_files:
scrape_configs: []
remote_read:
- url: tcp://clickhouse:9000/signoz_metrics
- url: tcp://clickhouse:9000/?database=signoz_metrics

View File

@@ -2,12 +2,10 @@ package api
import (
"net/http"
"net/http/httputil"
"time"
"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/integrations/gateway"
"go.signoz.io/signoz/ee/query-service/interfaces"
"go.signoz.io/signoz/ee/query-service/license"
"go.signoz.io/signoz/ee/query-service/usage"
@@ -37,7 +35,6 @@ type APIHandlerOptions struct {
IntegrationsController *integrations.Controller
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
Cache cache.Cache
Gateway *httputil.ReverseProxy
// Querier Influx Interval
FluxInterval time.Duration
}
@@ -98,10 +95,6 @@ func (ah *APIHandler) AppDao() dao.ModelDao {
return ah.opts.AppDao
}
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
return ah.opts.Gateway
}
func (ah *APIHandler) CheckFeature(f string) bool {
err := ah.FF().CheckFeature(f)
return err == nil
@@ -159,6 +152,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
router.HandleFunc("/api/v1/register", am.OpenAccess(ah.registerUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost)
router.HandleFunc("/api/v1/traces/{traceId}", am.ViewAccess(ah.searchTraces)).Methods(http.MethodGet)
router.HandleFunc("/api/v2/metrics/query_range", am.ViewAccess(ah.queryRangeMetricsV2)).Methods(http.MethodPost)
// PAT APIs
router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.createPAT)).Methods(http.MethodPost)
@@ -177,9 +171,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew
am.ViewAccess(ah.listLicensesV2)).
Methods(http.MethodGet)
// Gateway
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.AdminAccess(ah.ServeGatewayHTTP))
ah.APIHandler.RegisterRoutes(router, am)
}

View File

@@ -14,6 +14,7 @@ import (
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
"go.signoz.io/signoz/pkg/query-service/auth"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
@@ -35,14 +36,14 @@ func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
req := basemodel.LoginRequest{}
err := parseRequest(r, &req)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
ctx := context.Background()
if req.Email != "" && ah.CheckFeature(model.SSO) {
var apierr *basemodel.ApiError
var apierr basemodel.BaseApiError
_, apierr = ah.AppDao().CanUsePassword(ctx, req.Email)
if apierr != nil && !apierr.IsNil() {
RespondError(w, apierr, nil)
@@ -50,7 +51,7 @@ func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) {
}
// if all looks good, call auth
resp, err := baseauth.Login(ctx, &req)
resp, err := auth.Login(ctx, &req)
if ah.HandleError(w, err, http.StatusUnauthorized) {
return
}
@@ -74,7 +75,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
requestBody, err := io.ReadAll(r.Body)
if err != nil {
zap.L().Error("received no input in api", zap.Error(err))
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -82,7 +83,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
if err != nil {
zap.L().Error("received invalid user registration request", zap.Error(err))
RespondError(w, basemodel.BadRequest(fmt.Errorf("failed to register user")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("failed to register user")), nil)
return
}
@@ -90,13 +91,13 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
invite, err := baseauth.ValidateInvite(ctx, req)
if err != nil {
zap.L().Error("failed to validate invite token", zap.Error(err))
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
if invite == nil {
zap.L().Error("failed to validate invite token: it is either empty or invalid", zap.Error(err))
RespondError(w, basemodel.BadRequest(basemodel.ErrSignupFailed{}), nil)
RespondError(w, model.BadRequest(basemodel.ErrSignupFailed{}), nil)
return
}
@@ -104,7 +105,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
domain, apierr := ah.AppDao().GetDomainByEmail(ctx, invite.Email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
RespondError(w, basemodel.InternalError(basemodel.ErrSignupFailed{}), nil)
RespondError(w, model.InternalError(basemodel.ErrSignupFailed{}), nil)
}
precheckResp := &basemodel.PrecheckResponse{
@@ -120,7 +121,7 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
return
}
var precheckError *basemodel.ApiError
var precheckError basemodel.BaseApiError
precheckResp, precheckError = ah.AppDao().PrecheckLogin(ctx, user.Email, req.SourceUrl)
if precheckError != nil {
@@ -129,8 +130,8 @@ func (ah *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
} else {
// no-sso, validate password
if err := baseauth.ValidatePassword(req.Password); err != nil {
RespondError(w, basemodel.InternalError(fmt.Errorf("password is not in a valid format")), nil)
if err := auth.ValidatePassword(req.Password); err != nil {
RespondError(w, model.InternalError(fmt.Errorf("password is not in a valid format")), nil)
return
}
@@ -155,7 +156,7 @@ func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) {
inviteObject, err := baseauth.GetInvite(context.Background(), token)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -240,11 +241,6 @@ func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request)
// prepare google callback handler using parsedState -
// which contains redirect URL (front-end endpoint)
callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState)
if err != nil {
zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err))
handleSsoError(w, r, redirectUri)
return
}
identity, err := callbackHandler.HandleCallback(r)
if err != nil {

View File

@@ -1,13 +1,12 @@
package api
import (
"net/http"
"github.com/gorilla/mux"
"go.signoz.io/signoz/pkg/query-service/app/dashboards"
"go.signoz.io/signoz/pkg/query-service/auth"
"go.signoz.io/signoz/pkg/query-service/common"
"go.signoz.io/signoz/pkg/query-service/model"
"net/http"
)
func (ah *APIHandler) lockDashboard(w http.ResponseWriter, r *http.Request) {

View File

@@ -9,7 +9,6 @@ import (
"github.com/google/uuid"
"github.com/gorilla/mux"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) {
@@ -28,12 +27,12 @@ func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) {
req := model.OrgDomain{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
if err := req.ValidNew(); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -51,18 +50,18 @@ func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) {
domainIdStr := mux.Vars(r)["id"]
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
req := model.OrgDomain{Id: domainId}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
req.Id = domainId
if err := req.Valid(nil); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
}
if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil {
@@ -78,7 +77,7 @@ func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) {
domainId, err := uuid.Parse(domainIdStr)
if err != nil {
RespondError(w, basemodel.BadRequest(fmt.Errorf("invalid domain id")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil)
return
}

View File

@@ -1,34 +0,0 @@
package api
import (
"net/http"
"strings"
"go.signoz.io/signoz/ee/query-service/integrations/gateway"
)
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
if !strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+gateway.AllowedPrefix) {
rw.WriteHeader(http.StatusNotFound)
return
}
license, err := ah.LM().GetRepo().GetActiveLicense(ctx)
if err != nil {
RespondError(rw, err, nil)
return
}
//Create headers
var licenseKey string
if license != nil {
licenseKey = license.Key
}
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
req.Header.Set("X-Consumer-Groups", "ns:default")
ah.Gateway().ServeHTTP(rw, req)
}

View File

@@ -9,7 +9,6 @@ import (
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
)
@@ -72,12 +71,12 @@ func (ah *APIHandler) applyLicense(w http.ResponseWriter, r *http.Request) {
var l model.License
if err := json.NewDecoder(r.Body).Decode(&l); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
if l.Key == "" {
RespondError(w, basemodel.BadRequest(fmt.Errorf("license key is required")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
license, apiError := ah.LM().Activate(r.Context(), l.Key)
@@ -101,20 +100,20 @@ func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) {
hClient := &http.Client{}
req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/checkout", r.Body)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
licenseResp, err := hClient.Do(req)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
// decode response body
var resp checkoutResponse
if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
@@ -125,7 +124,7 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
licenseKey := r.URL.Query().Get("licenseKey")
if licenseKey == "" {
RespondError(w, basemodel.BadRequest(fmt.Errorf("license key is required")), nil)
RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil)
return
}
@@ -134,20 +133,20 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
hClient := &http.Client{}
req, err := http.NewRequest("GET", billingURL, nil)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
billingResp, err := hClient.Do(req)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
// decode response body
var billingResponse billingDetails
if err := json.NewDecoder(billingResp.Body).Decode(&billingResponse); err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
@@ -252,20 +251,20 @@ func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) {
hClient := &http.Client{}
req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/portal", r.Body)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
licenseResp, err := hClient.Do(req)
if err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}
// decode response body
var resp checkoutResponse
if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil {
RespondError(w, basemodel.InternalError(err), nil)
RespondError(w, model.InternalError(err), nil)
return
}

View File

@@ -0,0 +1,236 @@
package api
import (
"bytes"
"fmt"
"net/http"
"sync"
"text/template"
"time"
"go.signoz.io/signoz/pkg/query-service/app/metrics"
"go.signoz.io/signoz/pkg/query-service/app/parser"
"go.signoz.io/signoz/pkg/query-service/constants"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
querytemplate "go.signoz.io/signoz/pkg/query-service/utils/queryTemplate"
"go.uber.org/zap"
)
func (ah *APIHandler) queryRangeMetricsV2(w http.ResponseWriter, r *http.Request) {
if !ah.CheckFeature(basemodel.CustomMetricsFunction) {
zap.L().Info("CustomMetricsFunction feature is not enabled in this plan")
ah.APIHandler.QueryRangeMetricsV2(w, r)
return
}
metricsQueryRangeParams, apiErrorObj := parser.ParseMetricQueryRangeParams(r)
if apiErrorObj != nil {
zap.L().Error("Error in parsing metric query params", zap.Error(apiErrorObj.Err))
RespondError(w, apiErrorObj, nil)
return
}
// prometheus instant query needs same timestamp
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.PROM {
metricsQueryRangeParams.Start = metricsQueryRangeParams.End
}
// round up the end to nearest multiple
if metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER {
end := (metricsQueryRangeParams.End) / 1000
step := metricsQueryRangeParams.Step
metricsQueryRangeParams.End = (end / step * step) * 1000
}
type channelResult struct {
Series []*basemodel.Series
TableName string
Err error
Name string
Query string
}
execClickHouseQueries := func(queries map[string]string) ([]*basemodel.Series, []string, error, map[string]string) {
var seriesList []*basemodel.Series
var tableName []string
ch := make(chan channelResult, len(queries))
var wg sync.WaitGroup
for name, query := range queries {
wg.Add(1)
go func(name, query string) {
defer wg.Done()
seriesList, tableName, err := ah.opts.DataConnector.GetMetricResultEE(r.Context(), query)
for _, series := range seriesList {
series.QueryName = name
}
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query}
return
}
ch <- channelResult{Series: seriesList, TableName: tableName}
}(name, query)
}
wg.Wait()
close(ch)
var errs []error
errQuriesByName := make(map[string]string)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Query
continue
}
seriesList = append(seriesList, r.Series...)
tableName = append(tableName, r.TableName)
}
if len(errs) != 0 {
return nil, nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
}
return seriesList, tableName, nil, nil
}
execPromQueries := func(metricsQueryRangeParams *basemodel.QueryRangeParamsV2) ([]*basemodel.Series, error, map[string]string) {
var seriesList []*basemodel.Series
ch := make(chan channelResult, len(metricsQueryRangeParams.CompositeMetricQuery.PromQueries))
var wg sync.WaitGroup
for name, query := range metricsQueryRangeParams.CompositeMetricQuery.PromQueries {
if query.Disabled {
continue
}
wg.Add(1)
go func(name string, query *basemodel.PromQuery) {
var seriesList []*basemodel.Series
defer wg.Done()
tmpl := template.New("promql-query")
tmpl, tmplErr := tmpl.Parse(query.Query)
if tmplErr != nil {
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
return
}
var queryBuf bytes.Buffer
tmplErr = tmpl.Execute(&queryBuf, metricsQueryRangeParams.Variables)
if tmplErr != nil {
ch <- channelResult{Err: fmt.Errorf("error in parsing query-%s: %v", name, tmplErr), Name: name, Query: query.Query}
return
}
query.Query = queryBuf.String()
queryModel := basemodel.QueryRangeParams{
Start: time.UnixMilli(metricsQueryRangeParams.Start),
End: time.UnixMilli(metricsQueryRangeParams.End),
Step: time.Duration(metricsQueryRangeParams.Step * int64(time.Second)),
Query: query.Query,
}
promResult, _, err := ah.opts.DataConnector.GetQueryRangeResult(r.Context(), &queryModel)
if err != nil {
ch <- channelResult{Err: fmt.Errorf("error in query-%s: %v", name, err), Name: name, Query: query.Query}
return
}
matrix, _ := promResult.Matrix()
for _, v := range matrix {
var s basemodel.Series
s.QueryName = name
s.Labels = v.Metric.Copy().Map()
for _, p := range v.Floats {
s.Points = append(s.Points, basemodel.MetricPoint{Timestamp: p.T, Value: p.F})
}
seriesList = append(seriesList, &s)
}
ch <- channelResult{Series: seriesList}
}(name, query)
}
wg.Wait()
close(ch)
var errs []error
errQuriesByName := make(map[string]string)
// read values from the channel
for r := range ch {
if r.Err != nil {
errs = append(errs, r.Err)
errQuriesByName[r.Name] = r.Query
continue
}
seriesList = append(seriesList, r.Series...)
}
if len(errs) != 0 {
return nil, fmt.Errorf("encountered multiple errors: %s", metrics.FormatErrs(errs, "\n")), errQuriesByName
}
return seriesList, nil, nil
}
var seriesList []*basemodel.Series
var tableName []string
var err error
var errQuriesByName map[string]string
switch metricsQueryRangeParams.CompositeMetricQuery.QueryType {
case basemodel.QUERY_BUILDER:
runQueries := metrics.PrepareBuilderMetricQueries(metricsQueryRangeParams, constants.SIGNOZ_TIMESERIES_TABLENAME)
if runQueries.Err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: runQueries.Err}, nil)
return
}
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(runQueries.Queries)
case basemodel.CLICKHOUSE:
queries := make(map[string]string)
for name, chQuery := range metricsQueryRangeParams.CompositeMetricQuery.ClickHouseQueries {
if chQuery.Disabled {
continue
}
tmpl := template.New("clickhouse-query")
tmpl, err := tmpl.Parse(chQuery.Query)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
return
}
var query bytes.Buffer
// replace go template variables
querytemplate.AssignReservedVars(metricsQueryRangeParams)
err = tmpl.Execute(&query, metricsQueryRangeParams.Variables)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, nil)
return
}
queries[name] = query.String()
}
seriesList, tableName, err, errQuriesByName = execClickHouseQueries(queries)
case basemodel.PROM:
seriesList, err, errQuriesByName = execPromQueries(metricsQueryRangeParams)
default:
err = fmt.Errorf("invalid query type")
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, errQuriesByName)
return
}
if err != nil {
apiErrObj := &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}
RespondError(w, apiErrObj, errQuriesByName)
return
}
if metricsQueryRangeParams.CompositeMetricQuery.PanelType == basemodel.QUERY_VALUE &&
len(seriesList) > 1 &&
(metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.QUERY_BUILDER ||
metricsQueryRangeParams.CompositeMetricQuery.QueryType == basemodel.CLICKHOUSE) {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: fmt.Errorf("invalid: query resulted in more than one series for value type")}, nil)
return
}
type ResponseFormat struct {
ResultType string `json:"resultType"`
Result []*basemodel.Series `json:"result"`
TableName []string `json:"tableName"`
}
resp := ResponseFormat{ResultType: "matrix", Result: seriesList, TableName: tableName}
ah.Respond(w, resp)
}

View File

@@ -31,13 +31,13 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
req := model.CreatePATRequestBody{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
@@ -49,7 +49,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}
err = validatePATRequest(pat)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -66,7 +66,7 @@ func (ah *APIHandler) createPAT(w http.ResponseWriter, r *http.Request) {
}
zap.L().Info("Got Create PAT request", zap.Any("pat", pat))
var apierr *basemodel.ApiError
var apierr basemodel.BaseApiError
if pat, apierr = ah.AppDao().CreatePAT(ctx, pat); apierr != nil {
RespondError(w, apierr, nil)
return
@@ -93,14 +93,14 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
req := model.PAT{}
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
@@ -108,7 +108,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
err = validatePATRequest(req)
if err != nil {
RespondError(w, basemodel.BadRequest(err), nil)
RespondError(w, model.BadRequest(err), nil)
return
}
@@ -116,7 +116,7 @@ func (ah *APIHandler) updatePAT(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
req.UpdatedAt = time.Now().Unix()
zap.L().Info("Got Update PAT request", zap.Any("pat", req))
var apierr *basemodel.ApiError
var apierr basemodel.BaseApiError
if apierr = ah.AppDao().UpdatePAT(ctx, req, id); apierr != nil {
RespondError(w, apierr, nil)
return
@@ -129,8 +129,8 @@ func (ah *APIHandler) getPATs(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return
@@ -149,8 +149,8 @@ func (ah *APIHandler) revokePAT(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
user, err := auth.GetUserFromRequest(r)
if err != nil {
RespondError(w, &basemodel.ApiError{
Typ: basemodel.ErrorUnauthorized,
RespondError(w, &model.ApiError{
Typ: model.ErrorUnauthorized,
Err: err,
}, nil)
return

View File

@@ -7,6 +7,6 @@ import (
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
func RespondError(w http.ResponseWriter, apiErr *basemodel.ApiError, data interface{}) {
baseapp.RespondError(w, apiErr)
func RespondError(w http.ResponseWriter, apiErr basemodel.BaseApiError, data interface{}) {
baseapp.RespondError(w, apiErr, data)
}

View File

@@ -2,8 +2,11 @@ package api
import (
"net/http"
"strconv"
"go.signoz.io/signoz/ee/query-service/app/db"
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
baseapp "go.signoz.io/signoz/pkg/query-service/app"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
"go.uber.org/zap"
@@ -16,13 +19,17 @@ func (ah *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
ah.APIHandler.SearchTraces(w, r)
return
}
searchTracesParams, err := baseapp.ParseSearchTracesParams(r)
traceId, spanId, levelUpInt, levelDownInt, err := baseapp.ParseSearchTracesParams(r)
if err != nil {
RespondError(w, &basemodel.ApiError{Typ: basemodel.ErrorBadData, Err: err}, "Error reading params")
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading params")
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), searchTracesParams, db.SmartTraceAlgorithm)
spanLimit, err := strconv.Atoi(constants.SpanLimitStr)
if err != nil {
zap.L().Error("Error during strconv.Atoi() on SPAN_LIMIT env variable", zap.Error(err))
return
}
result, err := ah.opts.DataConnector.SearchTraces(r.Context(), traceId, spanId, levelUpInt, levelDownInt, spanLimit, db.SmartTraceAlgorithm)
if ah.HandleError(w, err, http.StatusBadRequest) {
return
}

View File

@@ -26,10 +26,10 @@ func (r *ClickhouseReader) GetMetricResultEE(ctx context.Context, query string)
var hash string
// If getSubTreeSpans function is used in the clickhouse query
if strings.Contains(query, "getSubTreeSpans(") {
if strings.Index(query, "getSubTreeSpans(") != -1 {
var err error
query, hash, err = r.getSubTreeSpansCustomFunction(ctx, query, hash)
if err == fmt.Errorf("no spans found for the given query") {
if err == fmt.Errorf("No spans found for the given query") {
return nil, "", nil
}
if err != nil {
@@ -183,7 +183,7 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("error in processing sql query")
return query, hash, fmt.Errorf("Error in processing sql query")
}
var searchScanResponses []basemodel.SearchSpanDBResponseItem
@@ -193,14 +193,14 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
modelQuery := fmt.Sprintf("SELECT timestamp, traceID, model FROM %s.%s WHERE traceID=$1", r.TraceDB, r.SpansTable)
if len(getSpansSubQueryDBResponses) == 0 {
return query, hash, fmt.Errorf("no spans found for the given query")
return query, hash, fmt.Errorf("No spans found for the given query")
}
zap.L().Debug("Executing query to fetch all the spans from the same TraceID: ", zap.String("modelQuery", modelQuery))
err = r.conn.Select(ctx, &searchScanResponses, modelQuery, getSpansSubQueryDBResponses[0].TraceID)
if err != nil {
zap.L().Error("Error in processing sql query", zap.Error(err))
return query, hash, fmt.Errorf("error in processing sql query")
return query, hash, fmt.Errorf("Error in processing sql query")
}
// Process model to fetch the spans
@@ -263,7 +263,6 @@ func (r *ClickhouseReader) getSubTreeSpansCustomFunction(ctx context.Context, qu
return query, hash, nil
}
//lint:ignore SA4009 return hash is fed to the query
func processQuery(query string, hash string) (string, string, string) {
re3 := regexp.MustCompile(`getSubTreeSpans`)

View File

@@ -13,11 +13,6 @@ import (
func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanId string, levelUp int, levelDown int, spanLimit int) ([]basemodel.SearchSpansResult, error) {
var spans []*model.SpanForTraceDetails
// if targetSpanId is null or not present then randomly select a span as targetSpanId
if (targetSpanId == "" || targetSpanId == "null") && len(payload) > 0 {
targetSpanId = payload[0].SpanID
}
// Build a slice of spans from the payload
for _, spanItem := range payload {
var parentID string
@@ -61,7 +56,7 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
// If the target span is not found, return span not found error
if targetSpan == nil {
return nil, errors.New("span not found")
return nil, errors.New("Span not found")
}
// Build the final result
@@ -118,9 +113,8 @@ func SmartTraceAlgorithm(payload []basemodel.SearchSpanResponseItem, targetSpanI
}
searchSpansResult := []basemodel.SearchSpansResult{{
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
Events: make([][]interface{}, len(resultSpansSet)),
IsSubTree: true,
Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References", "Events", "HasError"},
Events: make([][]interface{}, len(resultSpansSet)),
},
}
@@ -219,7 +213,7 @@ func breadthFirstSearch(spansPtr *model.SpanForTraceDetails, targetId string) (*
}
for _, child := range current.Children {
if ok := visited[child.SpanID]; !ok {
if ok, _ := visited[child.SpanID]; !ok {
queue = append(queue, child)
}
}

View File

@@ -8,10 +8,8 @@ import (
"io"
"net"
"net/http"
"net/http/httputil"
_ "net/http/pprof" // http profiler
"os"
"regexp"
"time"
"github.com/gorilla/handlers"
@@ -25,9 +23,9 @@ import (
"go.signoz.io/signoz/ee/query-service/auth"
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/dao"
"go.signoz.io/signoz/ee/query-service/integrations/gateway"
"go.signoz.io/signoz/ee/query-service/interfaces"
baseauth "go.signoz.io/signoz/pkg/query-service/auth"
baseInterface "go.signoz.io/signoz/pkg/query-service/interfaces"
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
licensepkg "go.signoz.io/signoz/ee/query-service/license"
@@ -72,13 +70,14 @@ type ServerOptions struct {
CacheConfigPath string
FluxInterval string
Cluster string
GatewayUrl string
}
// Server runs HTTP api service
type Server struct {
serverOptions *ServerOptions
conn net.Listener
ruleManager *rules.Manager
separatePorts bool
// public http router
httpConn net.Listener
@@ -88,6 +87,9 @@ type Server struct {
privateConn net.Listener
privateHTTP *http.Server
// feature flags
featureLookup baseint.FeatureLookup
// Usage manager
usageManager *usage.Manager
@@ -119,33 +121,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
localDB.SetMaxOpenConns(10)
gatewayFeature := basemodel.Feature{
Name: "GATEWAY",
Active: false,
Usage: 0,
UsageLimit: -1,
Route: "",
}
//Activate this feature if the url is not empty
var gatewayProxy *httputil.ReverseProxy
if serverOptions.GatewayUrl == "" {
gatewayFeature.Active = false
gatewayProxy, err = gateway.NewNoopProxy()
if err != nil {
return nil, err
}
} else {
zap.L().Info("Enabling gateway feature flag ...")
gatewayFeature.Active = true
gatewayProxy, err = gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix)
if err != nil {
return nil, err
}
}
// initiate license manager
lm, err := licensepkg.StartManager("sqlite", localDB, gatewayFeature)
lm, err := licensepkg.StartManager("sqlite", localDB)
if err != nil {
return nil, err
}
@@ -270,7 +247,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
LogsParsingPipelineController: logParsingPipelineController,
Cache: c,
FluxInterval: fluxInterval,
Gateway: gatewayProxy,
}
apiHandler, err := api.NewAPIHandler(apiOpts)
@@ -311,7 +287,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
r := mux.NewRouter()
r.Use(baseapp.LogCommentEnricher)
r.Use(setTimeoutMiddleware)
@@ -338,7 +314,7 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server,
func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, error) {
r := baseapp.NewRouter()
r := mux.NewRouter()
// add auth middleware
getUserFromRequest := func(r *http.Request) (*basemodel.UserPayload, error) {
@@ -352,6 +328,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e
r.Use(loggingMiddleware)
apiHandler.RegisterRoutes(r, am)
apiHandler.RegisterMetricsRoutes(r, am)
apiHandler.RegisterLogsRoutes(r, am)
apiHandler.RegisterIntegrationRoutes(r, am)
apiHandler.RegisterQueryRangeV3Routes(r, am)
@@ -379,7 +356,7 @@ func loggingMiddleware(next http.Handler) http.Handler {
path, _ := route.GetPathTemplate()
startTime := time.Now()
next.ServeHTTP(w, r)
zap.L().Info(path, zap.Duration("timeTaken", time.Since(startTime)), zap.String("path", path))
zap.L().Info(path+"\ttimeTaken:"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path))
})
}
@@ -391,7 +368,7 @@ func loggingMiddlewarePrivate(next http.Handler) http.Handler {
path, _ := route.GetPathTemplate()
startTime := time.Now()
next.ServeHTTP(w, r)
zap.L().Info(path, zap.Duration("timeTaken", time.Since(startTime)), zap.String("path", path), zap.Bool("tprivatePort", true))
zap.L().Info(path+"\tprivatePort: true \ttimeTaken"+time.Now().Sub(startTime).String(), zap.Duration("timeTaken", time.Now().Sub(startTime)), zap.String("path", path), zap.Bool("tprivatePort", true))
})
}
@@ -416,14 +393,13 @@ func (lrw *loggingResponseWriter) Flush() {
lrw.ResponseWriter.(http.Flusher).Flush()
}
func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFromV3 := "/api/v3/query_range"
pathToExtractBodyFromV4 := "/api/v4/query_range"
func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface{}, bool) {
pathToExtractBodyFrom := "/api/v3/query_range"
data := map[string]interface{}{}
var postData *v3.QueryRangeParamsV3
if (r.Method == "POST") && ((path == pathToExtractBodyFromV3) || (path == pathToExtractBodyFromV4)) {
if path == pathToExtractBodyFrom && (r.Method == "POST") {
if r.Body != nil {
bodyBytes, err := io.ReadAll(r.Body)
if err != nil {
@@ -441,25 +417,6 @@ func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}
return nil, false
}
referrer := r.Header.Get("Referer")
dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the referrer", zap.Error(err))
}
alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the alert: ", zap.Error(err))
}
logsExplorerMatched, err := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the logs explorer: ", zap.Error(err))
}
traceExplorerMatched, err := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer)
if err != nil {
zap.L().Error("error while matching the trace explorer: ", zap.Error(err))
}
signozMetricsUsed := false
signozLogsUsed := false
signozTracesUsed := false
@@ -488,20 +445,6 @@ func extractQueryRangeData(path string, r *http.Request) (map[string]interface{}
data["tracesUsed"] = signozTracesUsed
userEmail, err := baseauth.GetEmailFromJwt(r.Context())
if err == nil {
// switch case to set data["screen"] based on the referrer
switch {
case dashboardMatched:
data["screen"] = "panel"
case alertMatched:
data["screen"] = "alert"
case logsExplorerMatched:
data["screen"] = "logs-explorer"
case traceExplorerMatched:
data["screen"] = "traces-explorer"
default:
data["screen"] = "unknown"
return data, true
}
telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_API, data, userEmail, true, false)
}
}
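
A small, self-contained sketch of the referrer-based screen classification shown above, using regexp.MatchString-style matching to map a Referer header to a screen label (the sample referrer below is made up):

package main

import (
	"fmt"
	"regexp"
)

// classifyScreen mirrors the patterns above: it maps a Referer URL to a
// coarse screen label, falling back to "unknown".
func classifyScreen(referrer string) string {
	switch {
	case regexp.MustCompile(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`).MatchString(referrer):
		return "panel"
	case regexp.MustCompile(`/alerts/(new|edit)(?:\?.*)?$`).MatchString(referrer):
		return "alert"
	case regexp.MustCompile(`/logs/logs-explorer(?:\?.*)?$`).MatchString(referrer):
		return "logs-explorer"
	case regexp.MustCompile(`/traces-explorer(?:\?.*)?$`).MatchString(referrer):
		return "traces-explorer"
	default:
		return "unknown"
	}
}

func main() {
	fmt.Println(classifyScreen("https://example.com/alerts/new?ruleId=1")) // alert
}
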
@@ -529,7 +472,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
route := mux.CurrentRoute(r)
path, _ := route.GetPathTemplate()
queryRangeData, metadataExists := extractQueryRangeData(path, r)
queryRangeV3data, metadataExists := extractQueryRangeV3Data(path, r)
getActiveLogs(path, r)
lrw := NewLoggingResponseWriter(w)
@@ -537,7 +480,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler {
data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode}
if metadataExists {
for key, value := range queryRangeData {
for key, value := range queryRangeV3data {
data[key] = value
}
}
@@ -705,7 +648,7 @@ func makeRulesManager(
db *sqlx.DB,
ch baseint.Reader,
disableRules bool,
fm baseint.FeatureLookup) (*rules.Manager, error) {
fm baseInterface.FeatureLookup) (*rules.Manager, error) {
// create engine
pqle, err := pqle.FromConfigPath(promConfigPath)
@@ -733,7 +676,6 @@ func makeRulesManager(
Logger: nil,
DisableRules: disableRules,
FeatureFlags: fm,
Reader: ch,
}
// create Manager

View File

@@ -11,8 +11,7 @@ const (
var LicenseSignozIo = "https://license.signoz.io/api/v1"
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var SpanRenderLimitStr = GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500")
var MaxSpansInTraceStr = GetOrDefaultEnv("MAX_SPANS_IN_TRACE", "250000")
var SpanLimitStr = GetOrDefaultEnv("SPAN_LIMIT", "5000")
func GetOrDefaultEnv(key string, fallback string) string {
v := os.Getenv(key)

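The helper is truncated by the diff context above; a minimal sketch of how such a lookup-with-fallback completes (an assumption about the body, not a verbatim copy of the upstream function):

package main

import (
	"fmt"
	"os"
)

// GetOrDefaultEnv returns the value of the environment variable key, or the
// supplied fallback when the variable is unset or empty.
func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)
	if len(v) == 0 {
		return fallback
	}
	return v
}

func main() {
	fmt.Println(GetOrDefaultEnv("SPAN_RENDER_LIMIT", "2500")) // "2500" unless the variable is set
}
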
View File

@@ -21,24 +21,24 @@ type ModelDao interface {
DB() *sqlx.DB
// auth methods
CanUsePassword(ctx context.Context, email string) (bool, *basemodel.ApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr *basemodel.ApiError)
CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError)
PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError)
GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*model.OrgDomain, error)
// org domain (auth domains) CRUD ops
ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, *basemodel.ApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, *basemodel.ApiError)
CreateDomain(ctx context.Context, d *model.OrgDomain) *basemodel.ApiError
UpdateDomain(ctx context.Context, domain *model.OrgDomain) *basemodel.ApiError
DeleteDomain(ctx context.Context, id uuid.UUID) *basemodel.ApiError
GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, *basemodel.ApiError)
ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError)
GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError)
CreateDomain(ctx context.Context, d *model.OrgDomain) basemodel.BaseApiError
UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError
DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError
GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError)
CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *basemodel.ApiError)
UpdatePAT(ctx context.Context, p model.PAT, id string) *basemodel.ApiError
GetPAT(ctx context.Context, pat string) (*model.PAT, *basemodel.ApiError)
UpdatePATLastUsed(ctx context.Context, pat string, lastUsed int64) *basemodel.ApiError
GetPATByID(ctx context.Context, id string) (*model.PAT, *basemodel.ApiError)
GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, *basemodel.ApiError)
ListPATs(ctx context.Context) ([]model.PAT, *basemodel.ApiError)
RevokePAT(ctx context.Context, id string, userID string) *basemodel.ApiError
CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError)
UpdatePAT(ctx context.Context, p model.PAT, id string) (basemodel.BaseApiError)
GetPAT(ctx context.Context, pat string) (*model.PAT, basemodel.BaseApiError)
UpdatePATLastUsed(ctx context.Context, pat string, lastUsed int64) basemodel.BaseApiError
GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError)
GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError)
ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError)
RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError
}

View File

@@ -17,19 +17,19 @@ import (
"go.uber.org/zap"
)
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*basemodel.User, *basemodel.ApiError) {
func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (*basemodel.User, basemodel.BaseApiError) {
// get auth domain from email domain
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
zap.L().Error("failed to get domain from email", zap.Error(apierr))
return nil, basemodel.InternalError(fmt.Errorf("failed to get domain from email"))
return nil, model.InternalErrorStr("failed to get domain from email")
}
hash, err := baseauth.PasswordHash(utils.GeneratePassowrd())
if err != nil {
zap.L().Error("failed to generate password hash when registering a user via SSO redirect", zap.Error(err))
return nil, basemodel.InternalError(fmt.Errorf("failed to generate password hash"))
return nil, model.InternalErrorStr("failed to generate password hash")
}
group, apiErr := m.GetGroupByName(ctx, baseconst.ViewerGroup)
@@ -61,12 +61,12 @@ func (m *modelDao) createUserForSAMLRequest(ctx context.Context, email string) (
// PrepareSsoRedirect prepares redirect page link after SSO response
// is successfully parsed (i.e. valid email is available)
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr *basemodel.ApiError) {
func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email string) (redirectURL string, apierr basemodel.BaseApiError) {
userPayload, apierr := m.GetUserByEmail(ctx, email)
if !apierr.IsNil() {
zap.L().Error("failed to get user with email received from auth provider", zap.String("error", apierr.Error()))
return "", basemodel.BadRequest(fmt.Errorf("invalid user email received from the auth provider"))
return "", model.BadRequestStr("invalid user email received from the auth provider")
}
user := &basemodel.User{}
@@ -85,7 +85,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
tokenStore, err := baseauth.GenerateJWTForUser(user)
if err != nil {
zap.L().Error("failed to generate token for SSO login user", zap.Error(err))
return "", basemodel.InternalError(fmt.Errorf("failed to generate token for the user"))
return "", model.InternalErrorStr("failed to generate token for the user")
}
return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s",
@@ -95,7 +95,7 @@ func (m *modelDao) PrepareSsoRedirect(ctx context.Context, redirectUri, email st
tokenStore.RefreshJwt), nil
}
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, *basemodel.ApiError) {
func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, basemodel.BaseApiError) {
domain, apierr := m.GetDomainByEmail(ctx, email)
if apierr != nil {
return false, apierr
@@ -110,7 +110,7 @@ func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, *bas
}
if userPayload.Role != baseconst.AdminGroup {
return false, basemodel.BadRequest(fmt.Errorf("auth method not supported"))
return false, model.BadRequest(fmt.Errorf("auth method not supported"))
}
}
@@ -120,7 +120,7 @@ func (m *modelDao) CanUsePassword(ctx context.Context, email string) (bool, *bas
// PrecheckLogin is called when the login or signup page is loaded
// to check sso login is to be prompted
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, *basemodel.ApiError) {
func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (*basemodel.PrecheckResponse, basemodel.BaseApiError) {
// assume user is valid unless proven otherwise
resp := &basemodel.PrecheckResponse{IsUser: true, CanSelfRegister: false}
@@ -144,7 +144,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
ssoAvailable = false
default:
zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err))
return resp, &basemodel.ApiError{Err: err, Typ: basemodel.ErrorBadData}
return resp, model.BadRequestStr(err.Error())
}
}
@@ -177,7 +177,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
siteUrl, err := url.Parse(escapedUrl)
if err != nil {
zap.L().Error("failed to parse referer", zap.Error(err))
return resp, basemodel.InternalError(fmt.Errorf("failed to generate login request"))
return resp, model.InternalError(fmt.Errorf("failed to generate login request"))
}
// build Idp URL that will authenticat the user
@@ -186,7 +186,7 @@ func (m *modelDao) PrecheckLogin(ctx context.Context, email, sourceUrl string) (
if err != nil {
zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err))
return resp, basemodel.InternalError(err)
return resp, model.InternalError(err)
}
// set SSO to true, as the url is generated correctly

View File

@@ -76,47 +76,47 @@ func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url
}
// GetDomainByName returns org domain for a given domain name
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*model.OrgDomain, basemodel.BaseApiError) {
stored := StoredDomain{}
err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE name=$1 LIMIT 1`, name)
if err != nil {
if err == sql.ErrNoRows {
return nil, basemodel.BadRequest(fmt.Errorf("invalid domain name"))
return nil, model.BadRequest(fmt.Errorf("invalid domain name"))
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
return domain, nil
}
// GetDomain returns org domain for a given domain id
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*model.OrgDomain, basemodel.BaseApiError) {
stored := StoredDomain{}
err := m.DB().Get(&stored, `SELECT * FROM org_domains WHERE id=$1 LIMIT 1`, id)
if err != nil {
if err == sql.ErrNoRows {
return nil, basemodel.BadRequest(fmt.Errorf("invalid domain id"))
return nil, model.BadRequest(fmt.Errorf("invalid domain id"))
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
return domain, nil
}
// ListDomains gets the list of auth domains by org id
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDomain, basemodel.BaseApiError) {
domains := []model.OrgDomain{}
stored := []StoredDomain{}
@@ -126,7 +126,7 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
if err == sql.ErrNoRows {
return []model.OrgDomain{}, nil
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
for _, s := range stored {
@@ -141,20 +141,20 @@ func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]model.OrgDo
}
// CreateDomain creates a new auth domain
func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) *basemodel.ApiError {
func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
if domain.Id == uuid.Nil {
domain.Id = uuid.New()
}
if domain.OrgId == "" || domain.Name == "" {
return basemodel.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgId, Name "))
return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgId, Name "))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("failed to unmarshal domain config", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain creation failed"))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
_, err = m.DB().ExecContext(ctx,
@@ -168,24 +168,24 @@ func (m *modelDao) CreateDomain(ctx context.Context, domain *model.OrgDomain) *b
if err != nil {
zap.L().Error("failed to insert domain in db", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain creation failed"))
return model.InternalError(fmt.Errorf("domain creation failed"))
}
return nil
}
// UpdateDomain updates stored config params for a domain
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) *basemodel.ApiError {
func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) basemodel.BaseApiError {
if domain.Id == uuid.Nil {
zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return basemodel.InternalError(fmt.Errorf("domain update failed"))
return model.InternalError(fmt.Errorf("domain update failed"))
}
configJson, err := json.Marshal(domain)
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain update failed"))
return model.InternalError(fmt.Errorf("domain update failed"))
}
_, err = m.DB().ExecContext(ctx,
@@ -196,18 +196,18 @@ func (m *modelDao) UpdateDomain(ctx context.Context, domain *model.OrgDomain) *b
if err != nil {
zap.L().Error("domain update failed", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain update failed"))
return model.InternalError(fmt.Errorf("domain update failed"))
}
return nil
}
// DeleteDomain deletes an org domain
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) *basemodel.ApiError {
func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError {
if id == uuid.Nil {
zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null")))
return basemodel.InternalError(fmt.Errorf("domain delete failed"))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
_, err := m.DB().ExecContext(ctx,
@@ -216,21 +216,21 @@ func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) *basemodel.Ap
if err != nil {
zap.L().Error("domain delete failed", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("domain delete failed"))
return model.InternalError(fmt.Errorf("domain delete failed"))
}
return nil
}
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, *basemodel.ApiError) {
func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.OrgDomain, basemodel.BaseApiError) {
if email == "" {
return nil, basemodel.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email "))
}
components := strings.Split(email, "@")
if len(components) < 2 {
return nil, basemodel.BadRequest(fmt.Errorf("invalid email address"))
return nil, model.BadRequest(fmt.Errorf("invalid email address"))
}
parsedDomain := components[1]
@@ -242,12 +242,12 @@ func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*model.O
if err == sql.ErrNoRows {
return nil, nil
}
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
domain := &model.OrgDomain{Id: stored.Id, Name: stored.Name, OrgId: stored.OrgId}
if err := domain.LoadConfig(stored.Data); err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
return domain, nil
}

View File

@@ -11,7 +11,7 @@ import (
"go.uber.org/zap"
)
func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *basemodel.ApiError) {
func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, basemodel.BaseApiError) {
result, err := m.DB().ExecContext(ctx,
"INSERT INTO personal_access_tokens (user_id, token, role, name, created_at, expires_at, updated_at, updated_by_user_id, last_used, revoked) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)",
p.UserID,
@@ -27,12 +27,12 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *base
)
if err != nil {
zap.L().Error("Failed to insert PAT in db, err: %v", zap.Error(err))
return model.PAT{}, basemodel.InternalError(fmt.Errorf("PAT insertion failed"))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
id, err := result.LastInsertId()
if err != nil {
zap.L().Error("Failed to get last inserted id, err: %v", zap.Error(err))
return model.PAT{}, basemodel.InternalError(fmt.Errorf("PAT insertion failed"))
return model.PAT{}, model.InternalError(fmt.Errorf("PAT insertion failed"))
}
p.Id = strconv.Itoa(int(id))
createdByUser, _ := m.GetUser(ctx, p.UserID)
@@ -53,7 +53,7 @@ func (m *modelDao) CreatePAT(ctx context.Context, p model.PAT) (model.PAT, *base
return p, nil
}
func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) *basemodel.ApiError {
func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET role=$1, name=$2, updated_at=$3, updated_by_user_id=$4 WHERE id=$5 and revoked=false;",
p.Role,
@@ -63,29 +63,29 @@ func (m *modelDao) UpdatePAT(ctx context.Context, p model.PAT, id string) *basem
id)
if err != nil {
zap.L().Error("Failed to update PAT in db, err: %v", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("PAT update failed"))
return model.InternalError(fmt.Errorf("PAT update failed"))
}
return nil
}
func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed int64) *basemodel.ApiError {
func (m *modelDao) UpdatePATLastUsed(ctx context.Context, token string, lastUsed int64) basemodel.BaseApiError {
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET last_used=$1 WHERE token=$2 and revoked=false;",
lastUsed,
token)
if err != nil {
zap.L().Error("Failed to update PAT last used in db, err: %v", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("PAT last used update failed"))
return model.InternalError(fmt.Errorf("PAT last used update failed"))
}
return nil
}
func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, *basemodel.ApiError) {
func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, "SELECT * FROM personal_access_tokens WHERE revoked=false ORDER by updated_at DESC;"); err != nil {
zap.L().Error("Failed to fetch PATs err: %v", zap.Error(err))
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch PATs"))
return nil, model.InternalError(fmt.Errorf("failed to fetch PATs"))
}
for i := range pats {
createdByUser, _ := m.GetUser(ctx, pats[i].UserID)
@@ -123,28 +123,28 @@ func (m *modelDao) ListPATs(ctx context.Context) ([]model.PAT, *basemodel.ApiErr
return pats, nil
}
func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) *basemodel.ApiError {
func (m *modelDao) RevokePAT(ctx context.Context, id string, userID string) basemodel.BaseApiError {
updatedAt := time.Now().Unix()
_, err := m.DB().ExecContext(ctx,
"UPDATE personal_access_tokens SET revoked=true, updated_by_user_id = $1, updated_at=$2 WHERE id=$3",
userID, updatedAt, id)
if err != nil {
zap.L().Error("Failed to revoke PAT in db, err: %v", zap.Error(err))
return basemodel.InternalError(fmt.Errorf("PAT revoke failed"))
return model.InternalError(fmt.Errorf("PAT revoke failed"))
}
return nil
}
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, *basemodel.ApiError) {
func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE token=? and revoked=false;`, token); err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch PAT"))
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &basemodel.ApiError{
Typ: basemodel.ErrorInternal,
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token, %s", token),
}
}
@@ -152,16 +152,16 @@ func (m *modelDao) GetPAT(ctx context.Context, token string) (*model.PAT, *basem
return &pats[0], nil
}
func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, *basemodel.ApiError) {
func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, basemodel.BaseApiError) {
pats := []model.PAT{}
if err := m.DB().Select(&pats, `SELECT * FROM personal_access_tokens WHERE id=? and revoked=false;`, id); err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch PAT"))
return nil, model.InternalError(fmt.Errorf("failed to fetch PAT"))
}
if len(pats) != 1 {
return nil, &basemodel.ApiError{
Typ: basemodel.ErrorInternal,
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple PATs with same token"),
}
}
@@ -170,7 +170,7 @@ func (m *modelDao) GetPATByID(ctx context.Context, id string) (*model.PAT, *base
}
// deprecated
func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, *basemodel.ApiError) {
func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.UserPayload, basemodel.BaseApiError) {
users := []basemodel.UserPayload{}
query := `SELECT
@@ -186,12 +186,12 @@ func (m *modelDao) GetUserByPAT(ctx context.Context, token string) (*basemodel.U
WHERE u.id = p.user_id and p.token=? and p.expires_at >= strftime('%s', 'now');`
if err := m.DB().Select(&users, query, token); err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
return nil, model.InternalError(fmt.Errorf("failed to fetch user from PAT, err: %v", err))
}
if len(users) != 1 {
return nil, &basemodel.ApiError{
Typ: basemodel.ErrorInternal,
return nil, &model.ApiError{
Typ: model.ErrorInternal,
Err: fmt.Errorf("found zero or multiple users with same PAT token"),
}
}

View File

@@ -1,9 +0,0 @@
package gateway
import (
"net/http/httputil"
)
func NewNoopProxy() (*httputil.ReverseProxy, error) {
return nil, nil
}

View File

@@ -1,66 +0,0 @@
package gateway
import (
"net/http"
"net/http/httputil"
"net/url"
"path"
"strings"
)
const (
RoutePrefix string = "/api/gateway"
AllowedPrefix string = "/v1/workspaces/me"
)
type proxy struct {
url *url.URL
stripPath string
}
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
url, err := url.Parse(u)
if err != nil {
return nil, err
}
proxy := &proxy{url: url, stripPath: stripPath}
return &httputil.ReverseProxy{
Rewrite: proxy.rewrite,
ModifyResponse: proxy.modifyResponse,
ErrorHandler: proxy.errorHandler,
}, nil
}
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
pr.SetURL(p.url)
pr.SetXForwarded()
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
}
func (p *proxy) modifyResponse(res *http.Response) error {
return nil
}
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
rw.WriteHeader(http.StatusBadGateway)
}
func cleanPath(p string) string {
if p == "" {
return "/"
}
if p[0] != '/' {
p = "/" + p
}
np := path.Clean(p)
if p[len(p)-1] == '/' && np != "/" {
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
np = p
} else {
np += "/"
}
}
return np
}
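
For context, a self-contained sketch of the technique this removed file implements: an httputil.ReverseProxy whose Rewrite hook retargets the request and strips a route prefix. The backend address below is a placeholder; the prefix mirrors the RoutePrefix constant above.

package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
	"strings"
)

func main() {
	// Placeholder upstream address, for illustration only.
	backend, err := url.Parse("http://127.0.0.1:9000")
	if err != nil {
		log.Fatal(err)
	}

	stripPath := "/api/gateway"
	proxy := &httputil.ReverseProxy{
		Rewrite: func(pr *httputil.ProxyRequest) {
			pr.SetURL(backend) // point the outbound request at the upstream
			pr.SetXForwarded() // set X-Forwarded-* headers
			// Drop the public prefix so the upstream sees its own paths;
			// the removed code additionally ran cleanPath to guarantee a leading slash.
			pr.Out.URL.Path = strings.TrimPrefix(pr.Out.URL.Path, stripPath)
		},
	}

	http.Handle(stripPath+"/", proxy)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
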

View File

@@ -1,61 +0,0 @@
package gateway
import (
"context"
"net/http"
"net/http/httputil"
"net/url"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProxyRewrite(t *testing.T) {
testCases := []struct {
name string
url *url.URL
stripPath string
in *url.URL
expected *url.URL
}{
{
name: "SamePathAdded",
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
stripPath: "/strip",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
},
{
name: "NoStripPathInput",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
{
name: "NoStripPathPresentInReq",
url: &url.URL{Scheme: "http", Host: "backend"},
stripPath: "/not-found",
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
},
}
for _, tc := range testCases {
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
require.NoError(t, err)
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
require.NoError(t, err)
proxyReq := &httputil.ProxyRequest{
In: inReq,
Out: inReq.Clone(context.Background()),
}
proxy.Rewrite(proxyReq)
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
}
}

View File

@@ -2,6 +2,11 @@ package signozio
type status string
const (
statusSuccess status = "success"
statusError status = "error"
)
type ActivationResult struct {
Status status `json:"status"`
Data *ActivationResponse `json:"data,omitempty"`

View File

@@ -13,7 +13,6 @@ import (
"go.signoz.io/signoz/ee/query-service/constants"
"go.signoz.io/signoz/ee/query-service/model"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
var C *Client
@@ -38,7 +37,7 @@ func init() {
}
// ActivateLicense sends key to license.signoz.io and gets activation data
func ActivateLicense(key, siteId string) (*ActivationResponse, *basemodel.ApiError) {
func ActivateLicense(key, siteId string) (*ActivationResponse, *model.ApiError) {
licenseReq := map[string]string{
"key": key,
"siteId": siteId,
@@ -49,13 +48,13 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *basemodel.ApiErr
if err != nil {
zap.L().Error("failed to connect to license.signoz.io", zap.Error(err))
return nil, basemodel.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
return nil, model.BadRequest(fmt.Errorf("unable to connect with license.signoz.io, please check your network connection"))
}
httpBody, err := io.ReadAll(httpResponse.Body)
if err != nil {
zap.L().Error("failed to read activation response from license.signoz.io", zap.Error(err))
return nil, basemodel.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
return nil, model.BadRequest(fmt.Errorf("failed to read activation response from license.signoz.io"))
}
defer httpResponse.Body.Close()
@@ -65,22 +64,22 @@ func ActivateLicense(key, siteId string) (*ActivationResponse, *basemodel.ApiErr
err = json.Unmarshal(httpBody, &result)
if err != nil {
zap.L().Error("failed to marshal activation response from license.signoz.io", zap.Error(err))
return nil, basemodel.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
return nil, model.InternalError(errors.Wrap(err, "failed to marshal license activation response"))
}
switch httpResponse.StatusCode {
case 200, 201:
return result.Data, nil
case 400, 401:
return nil, basemodel.BadRequest(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
return nil, model.BadRequest(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
default:
return nil, basemodel.InternalError(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
return nil, model.InternalError(fmt.Errorf(fmt.Sprintf("failed to activate: %s", result.Error)))
}
}
// ValidateLicense validates the license key
func ValidateLicense(activationId string) (*ActivationResponse, *basemodel.ApiError) {
func ValidateLicense(activationId string) (*ActivationResponse, *model.ApiError) {
validReq := map[string]string{
"activationId": activationId,
}
@@ -89,12 +88,12 @@ func ValidateLicense(activationId string) (*ActivationResponse, *basemodel.ApiEr
response, err := http.Post(C.Prefix+"/licenses/validate", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
return nil, basemodel.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
return nil, model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(response.Body)
if err != nil {
return nil, basemodel.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
return nil, model.BadRequest(errors.Wrap(err, "failed to read validation response from license.signoz.io"))
}
defer response.Body.Close()
@@ -104,14 +103,14 @@ func ValidateLicense(activationId string) (*ActivationResponse, *basemodel.ApiEr
a := ActivationResult{}
err = json.Unmarshal(body, &a)
if err != nil {
return nil, basemodel.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
return nil, model.BadRequest(errors.Wrap(err, "failed to marshal license validation response"))
}
return a.Data, nil
case 400, 401:
return nil, basemodel.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
return nil, model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
"bad request error received from license.signoz.io"))
default:
return nil, basemodel.InternalError(errors.Wrap(fmt.Errorf(string(body)),
return nil, model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
"internal error received from license.signoz.io"))
}
@@ -128,21 +127,21 @@ func NewPostRequestWithCtx(ctx context.Context, url string, contentType string,
}
// SendUsage reports the usage of signoz to license server
func SendUsage(ctx context.Context, usage model.UsagePayload) *basemodel.ApiError {
func SendUsage(ctx context.Context, usage model.UsagePayload) *model.ApiError {
reqString, _ := json.Marshal(usage)
req, err := NewPostRequestWithCtx(ctx, C.Prefix+"/usage", APPLICATION_JSON, bytes.NewBuffer(reqString))
if err != nil {
return basemodel.BadRequest(errors.Wrap(err, "unable to create http request"))
return model.BadRequest(errors.Wrap(err, "unable to create http request"))
}
res, err := http.DefaultClient.Do(req)
if err != nil {
return basemodel.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
return model.BadRequest(errors.Wrap(err, "unable to connect with license.signoz.io, please check your network connection"))
}
body, err := io.ReadAll(res.Body)
if err != nil {
return basemodel.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
return model.BadRequest(errors.Wrap(err, "failed to read usage response from license.signoz.io"))
}
defer res.Body.Close()
@@ -151,10 +150,10 @@ func SendUsage(ctx context.Context, usage model.UsagePayload) *basemodel.ApiErro
case 200, 201:
return nil
case 400, 401:
return basemodel.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
return model.BadRequest(errors.Wrap(fmt.Errorf(string(body)),
"bad request error received from license.signoz.io"))
default:
return basemodel.InternalError(errors.Wrap(fmt.Errorf(string(body)),
return model.InternalError(errors.Wrap(fmt.Errorf(string(body)),
"internal error received from license.signoz.io"))
}
}

View File

@@ -48,9 +48,8 @@ func (r *Repo) GetLicenses(ctx context.Context) ([]model.License, error) {
return licenses, nil
}
// GetActiveLicense fetches the latest active license from DB.
// If the license is not present, expect a nil license and a nil error in the output.
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) {
// GetActiveLicense fetches the latest active license from DB
func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, error) {
var err error
licenses := []model.License{}
@@ -58,7 +57,7 @@ func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel
err = r.db.Select(&licenses, query)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err))
return nil, fmt.Errorf("failed to get active licenses from db: %v", err)
}
var active *model.License
@@ -111,7 +110,7 @@ func (r *Repo) UpdatePlanDetails(ctx context.Context,
planDetails string) error {
if key == "" {
return fmt.Errorf("update plan details failed: license key is required")
return fmt.Errorf("Update Plan Details failed: license key is required")
}
query := `UPDATE licenses

View File

@@ -49,7 +49,8 @@ type Manager struct {
activeFeatures basemodel.FeatureSet
}
func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*Manager, error) {
func StartManager(dbType string, db *sqlx.DB) (*Manager, error) {
if LM != nil {
return LM, nil
}
@@ -65,7 +66,7 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
repo: &repo,
}
if err := m.start(features...); err != nil {
if err := m.start(); err != nil {
return m, err
}
LM = m
@@ -73,8 +74,8 @@ func StartManager(dbType string, db *sqlx.DB, features ...basemodel.Feature) (*M
}
// start loads active license in memory and initiates validator
func (lm *Manager) start(features ...basemodel.Feature) error {
err := lm.LoadActiveLicense(features...)
func (lm *Manager) start() error {
err := lm.LoadActiveLicense()
return err
}
@@ -84,7 +85,7 @@ func (lm *Manager) Stop() {
<-lm.terminated
}
func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) {
func (lm *Manager) SetActive(l *model.License) {
lm.mutex.Lock()
defer lm.mutex.Unlock()
@@ -93,7 +94,7 @@ func (lm *Manager) SetActive(l *model.License, features ...basemodel.Feature) {
}
lm.activeLicense = l
lm.activeFeatures = append(l.FeatureSet, features...)
lm.activeFeatures = l.FeatureSet
// set default features
setDefaultFeatures(lm)
@@ -115,13 +116,14 @@ func setDefaultFeatures(lm *Manager) {
}
// LoadActiveLicense loads the most recent active license
func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error {
func (lm *Manager) LoadActiveLicense() error {
var err error
active, err := lm.repo.GetActiveLicense(context.Background())
if err != nil {
return err
}
if active != nil {
lm.SetActive(active, features...)
lm.SetActive(active)
} else {
zap.L().Info("No active license found, defaulting to basic plan")
// if no active license is found, we default to basic(free) plan with all default features
@@ -137,11 +139,11 @@ func (lm *Manager) LoadActiveLicense(features ...basemodel.Feature) error {
return nil
}
func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *basemodel.ApiError) {
func (lm *Manager) GetLicenses(ctx context.Context) (response []model.License, apiError *model.ApiError) {
licenses, err := lm.repo.GetLicenses(ctx)
if err != nil {
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
for _, l := range licenses {
@@ -212,8 +214,8 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
response, apiError := validate.ValidateLicense(lm.activeLicense.ActivationId)
if apiError != nil {
zap.L().Error("failed to validate license", zap.Any("apiError", apiError))
return apiError
zap.L().Error("failed to validate license", zap.Error(apiError.Err))
return apiError.Err
}
if response.PlanDetails == lm.activeLicense.PlanDetails {
@@ -255,7 +257,7 @@ func (lm *Manager) Validate(ctx context.Context) (reterr error) {
}
// Activate activates a license key with signoz server
func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *basemodel.ApiError) {
func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *model.License, errResponse *model.ApiError) {
defer func() {
if errResponse != nil {
userEmail, err := auth.GetEmailFromJwt(ctx)
@@ -268,7 +270,7 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
response, apiError := validate.ActivateLicense(key, "")
if apiError != nil {
zap.L().Error("failed to activate license", zap.Any("apiError", apiError))
zap.L().Error("failed to activate license", zap.Error(apiError.Err))
return nil, apiError
}
@@ -283,14 +285,14 @@ func (lm *Manager) Activate(ctx context.Context, key string) (licenseResponse *m
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
// store the license before activating it
err = lm.repo.InsertLicense(ctx, l)
if err != nil {
zap.L().Error("failed to activate license", zap.Error(err))
return nil, basemodel.InternalError(err)
return nil, model.InternalError(err)
}
// license is valid, activate it

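A tiny standalone sketch (toy types standing in for the basemodel ones) of the variadic-features pattern in StartManager/SetActive above: extras supplied by the caller are appended onto the license's own feature set to form the active set.

package main

import "fmt"

// Feature is a simplified stand-in for basemodel.Feature, for illustration only.
type Feature struct {
	Name   string
	Active bool
}

// setActive mirrors the pattern above: the license's features plus any
// caller-supplied extras become the active feature set.
func setActive(licenseFeatures []Feature, extras ...Feature) []Feature {
	return append(licenseFeatures, extras...)
}

func main() {
	base := []Feature{{Name: "SSO", Active: true}}
	active := setActive(base, Feature{Name: "GATEWAY", Active: true})
	fmt.Println(len(active)) // 2
}
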
View File

@@ -32,7 +32,7 @@ func InitDB(db *sqlx.DB) error {
_, err = db.Exec(table_schema)
if err != nil {
return fmt.Errorf("error in creating licenses table: %s", err.Error())
return fmt.Errorf("Error in creating licenses table: %s", err.Error())
}
table_schema = `CREATE TABLE IF NOT EXISTS feature_status (
@@ -45,7 +45,7 @@ func InitDB(db *sqlx.DB) error {
_, err = db.Exec(table_schema)
if err != nil {
return fmt.Errorf("error in creating feature_status table: %s", err.Error())
return fmt.Errorf("Error in creating feature_status table: %s", err.Error())
}
return nil

View File

@@ -15,7 +15,6 @@ import (
"go.signoz.io/signoz/ee/query-service/app"
"go.signoz.io/signoz/pkg/query-service/auth"
baseconst "go.signoz.io/signoz/pkg/query-service/constants"
"go.signoz.io/signoz/pkg/query-service/migrate"
"go.signoz.io/signoz/pkg/query-service/version"
"google.golang.org/grpc"
"google.golang.org/grpc/credentials/insecure"
@@ -51,8 +50,7 @@ func initZapLog(enableQueryServiceLogOTLPExport bool) *zap.Logger {
)
if enableQueryServiceLogOTLPExport {
ctx, cancel := context.WithTimeout(ctx, time.Second*30)
defer cancel()
ctx, _ := context.WithTimeout(ctx, time.Second*30)
conn, err := grpc.DialContext(ctx, baseconst.OTLPTarget, grpc.WithBlock(), grpc.WithTransportCredentials(insecure.NewCredentials()))
if err != nil {
log.Fatalf("failed to establish connection: %v", err)
@@ -95,7 +93,6 @@ func main() {
var maxIdleConns int
var maxOpenConns int
var dialTimeout time.Duration
var gatewayUrl string
flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)")
flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)")
@@ -110,7 +107,6 @@ func main() {
flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")
flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)")
flag.Parse()
@@ -136,7 +132,6 @@ func main() {
CacheConfigPath: cacheConfigPath,
FluxInterval: fluxInterval,
Cluster: cluster,
GatewayUrl: gatewayUrl,
}
// Read the jwt secret key
@@ -148,12 +143,6 @@ func main() {
zap.L().Info("JWT secret key set successfully.")
}
if err := migrate.Migrate(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil {
zap.L().Error("Failed to migrate", zap.Error(err))
} else {
zap.L().Info("Migration successful")
}
server, err := app.NewServer(serverOptions)
if err != nil {
zap.L().Fatal("Failed to create server", zap.Error(err))

View File

@@ -104,7 +104,7 @@ func (od *OrgDomain) GetSAMLCert() string {
// requesting OAuth and also used in processing response from google
func (od *OrgDomain) PrepareGoogleOAuthProvider(siteUrl *url.URL) (sso.OAuthCallbackProvider, error) {
if od.GoogleAuthConfig == nil {
return nil, fmt.Errorf("GOOGLE OAUTH is not setup correctly for this domain")
return nil, fmt.Errorf("Google auth is not setup correctly for this domain")
}
return od.GoogleAuthConfig.GetProvider(od.Name, siteUrl)

View File

@@ -1,5 +1,107 @@
package model
import (
"fmt"
basemodel "go.signoz.io/signoz/pkg/query-service/model"
)
type ApiError struct {
Typ basemodel.ErrorType
Err error
}
func (a *ApiError) Type() basemodel.ErrorType {
return a.Typ
}
func (a *ApiError) ToError() error {
if a != nil {
return a.Err
}
return a.Err
}
func (a *ApiError) Error() string {
return a.Err.Error()
}
func (a *ApiError) IsNil() bool {
return a == nil || a.Err == nil
}
// NewApiError returns a ApiError object of given type
func NewApiError(typ basemodel.ErrorType, err error) *ApiError {
return &ApiError{
Typ: typ,
Err: err,
}
}
// BadRequest returns a ApiError object of bad request
func BadRequest(err error) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: err,
}
}
// BadRequestStr returns a ApiError object of bad request for string input
func BadRequestStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorBadData,
Err: fmt.Errorf(s),
}
}
// InternalError returns a ApiError object of internal type
func InternalError(err error) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: err,
}
}
// InternalErrorStr returns a ApiError object of internal type for string input
func InternalErrorStr(s string) *ApiError {
return &ApiError{
Typ: basemodel.ErrorInternal,
Err: fmt.Errorf(s),
}
}
var (
ErrorNone basemodel.ErrorType = ""
ErrorTimeout basemodel.ErrorType = "timeout"
ErrorCanceled basemodel.ErrorType = "canceled"
ErrorExec basemodel.ErrorType = "execution"
ErrorBadData basemodel.ErrorType = "bad_data"
ErrorInternal basemodel.ErrorType = "internal"
ErrorUnavailable basemodel.ErrorType = "unavailable"
ErrorNotFound basemodel.ErrorType = "not_found"
ErrorNotImplemented basemodel.ErrorType = "not_implemented"
ErrorUnauthorized basemodel.ErrorType = "unauthorized"
ErrorForbidden basemodel.ErrorType = "forbidden"
ErrorConflict basemodel.ErrorType = "conflict"
ErrorStreamingNotSupported basemodel.ErrorType = "streaming is not supported"
)
func init() {
ErrorNone = basemodel.ErrorNone
ErrorTimeout = basemodel.ErrorTimeout
ErrorCanceled = basemodel.ErrorCanceled
ErrorExec = basemodel.ErrorExec
ErrorBadData = basemodel.ErrorBadData
ErrorInternal = basemodel.ErrorInternal
ErrorUnavailable = basemodel.ErrorUnavailable
ErrorNotFound = basemodel.ErrorNotFound
ErrorNotImplemented = basemodel.ErrorNotImplemented
ErrorUnauthorized = basemodel.ErrorUnauthorized
ErrorForbidden = basemodel.ErrorForbidden
ErrorConflict = basemodel.ErrorConflict
ErrorStreamingNotSupported = basemodel.ErrorStreamingNotSupported
}
type ErrUnsupportedAuth struct{}
func (errUnsupportedAuth ErrUnsupportedAuth) Error() string {

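A compact, self-contained sketch (toy types, not the real basemodel ones) of how these constructor helpers are typically consumed: a DAO-style call returns a typed API error and the caller branches on its type.

package main

import (
	"errors"
	"fmt"
)

// ErrorType and ApiError are simplified stand-ins for the types defined above.
type ErrorType string

const (
	ErrorBadData  ErrorType = "bad_data"
	ErrorInternal ErrorType = "internal"
)

type ApiError struct {
	Typ ErrorType
	Err error
}

func (a *ApiError) Error() string { return a.Err.Error() }

func BadRequest(err error) *ApiError    { return &ApiError{Typ: ErrorBadData, Err: err} }
func InternalError(err error) *ApiError { return &ApiError{Typ: ErrorInternal, Err: err} }

// getDomain is a hypothetical DAO-style lookup used only for this example.
func getDomain(name string) (string, *ApiError) {
	if name == "" {
		return "", BadRequest(errors.New("invalid domain name"))
	}
	return name, nil
}

func main() {
	if _, apiErr := getDomain(""); apiErr != nil {
		// A handler would map apiErr.Typ to an HTTP status code.
		fmt.Println(apiErr.Typ, apiErr.Error())
	}
}
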
View File

@@ -52,14 +52,14 @@ var BasicPlan = basemodel.FeatureSet{
Name: basemodel.QueryBuilderPanels,
Active: true,
Usage: 0,
UsageLimit: -1,
UsageLimit: 20,
Route: "",
},
basemodel.Feature{
Name: basemodel.QueryBuilderAlerts,
Active: true,
Usage: 0,
UsageLimit: -1,
UsageLimit: 10,
Route: "",
},
basemodel.Feature{

View File

@@ -53,7 +53,7 @@ func New(dbType string, modelDao dao.ModelDao, licenseRepo *license.Repo, clickh
tenantID := ""
if len(hostNameRegexMatches) == 2 {
tenantID = hostNameRegexMatches[1]
tenantID = strings.TrimSuffix(tenantID, "-clickhouse")
tenantID = strings.TrimRight(tenantID, "-clickhouse")
}
m := &Manager{
@@ -190,7 +190,7 @@ func (lm *Manager) UploadUsageWithExponentalBackOff(ctx context.Context, payload
} else if apiErr != nil {
// sleeping for exponential backoff
sleepDuration := RetryInterval * time.Duration(i)
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Any("apiErr", apiErr))
zap.L().Error("failed to upload snapshot retrying after %v secs : %v", zap.Duration("sleepDuration", sleepDuration), zap.Error(apiErr.Err))
time.Sleep(sleepDuration)
} else {
break

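The TrimSuffix/TrimRight change above is subtle, so a quick standalone sketch of the difference (the hostname is made up): strings.TrimRight strips any trailing run of the listed characters, while strings.TrimSuffix removes the exact suffix once.

package main

import (
	"fmt"
	"strings"
)

func main() {
	host := "signoz-cell-clickhouse" // hypothetical tenant hostname

	// TrimRight treats "-clickhouse" as a set of characters and keeps trimming
	// while the last byte is one of them, eating into the tenant name.
	fmt.Println(strings.TrimRight(host, "-clickhouse")) // "signoz"

	// TrimSuffix removes the literal "-clickhouse" suffix exactly once.
	fmt.Println(strings.TrimSuffix(host, "-clickhouse")) // "signoz-cell"
}
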
3  frontend/.gitignore vendored
View File

@@ -1,3 +0,0 @@
# Sentry Config File
.env.sentry-build-plugin

View File

@@ -1,4 +1,4 @@
FROM nginx:1.26-alpine
FROM nginx:1.25.2-alpine
# Add Maintainer Info
LABEL maintainer="signoz"

View File

@@ -4,7 +4,6 @@ const config: Config.InitialOptions = {
clearMocks: true,
coverageDirectory: 'coverage',
coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
collectCoverageFrom: ['src/**/*.{ts,tsx}'],
moduleFileExtensions: ['ts', 'tsx', 'js', 'json'],
modulePathIgnorePatterns: ['dist'],
moduleNameMapper: {
@@ -21,6 +20,8 @@ const config: Config.InitialOptions = {
transform: {
'^.+\\.(ts|tsx)?$': 'ts-jest',
'^.+\\.(js|jsx)$': 'babel-jest',
'^.+\\.(css|scss|sass|less)$': 'jest-preview/transforms/css',
'^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)': 'jest-preview/transforms/file',
},
transformIgnorePatterns: [
'node_modules/(?!(lodash-es|react-dnd|core-dnd|@react-dnd|dnd-core|react-dnd-html5-backend|axios|@signozhq/design-tokens|d3-interpolate|d3-color)/)',
@@ -34,14 +35,6 @@ const config: Config.InitialOptions = {
browsers: ['chromium', 'firefox', 'webkit'],
},
},
coverageThreshold: {
global: {
statements: 80,
branches: 65,
functions: 80,
lines: 80,
},
},
};
export default config;

View File

@@ -13,15 +13,15 @@
"jest": "jest",
"jest:coverage": "jest --coverage",
"jest:watch": "jest --watch",
"jest-preview": "jest-preview",
"test:debug": "npm-run-all -p test jest-preview",
"postinstall": "is-ci || yarn husky:configure",
"playwright": "npm run i18n:generate-hash && NODE_ENV=testing playwright test --config=./playwright.config.ts",
"playwright:local:debug": "PWDEBUG=console yarn playwright --headed --browser=chromium",
"playwright:codegen:local": "playwright codegen http://localhost:3301",
"playwright:codegen:local:auth": "yarn playwright:codegen:local --load-storage=tests/auth.json",
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
"commitlint": "commitlint --edit $1",
"test": "jest --coverage",
"test:changedsince": "jest --changedSince=develop --coverage --silent"
"commitlint": "commitlint --edit $1"
},
"engines": {
"node": ">=16.15.0"
@@ -41,17 +41,14 @@
"@radix-ui/react-tabs": "1.0.4",
"@radix-ui/react-tooltip": "1.0.7",
"@sentry/react": "7.102.1",
"@sentry/webpack-plugin": "2.16.0",
"@sentry/webpack-plugin": "2.14.2",
"@signozhq/design-tokens": "0.0.8",
"@uiw/react-md-editor": "3.23.5",
"@visx/group": "3.3.0",
"@visx/shape": "3.5.0",
"@visx/tooltip": "3.3.0",
"@xstate/react": "^3.0.0",
"ansi-to-html": "0.7.2",
"antd": "5.11.0",
"antd-table-saveas-excel": "2.2.1",
"axios": "1.6.4",
"axios": "1.6.2",
"babel-eslint": "^10.1.0",
"babel-jest": "^29.6.4",
"babel-loader": "9.1.3",
@@ -85,10 +82,9 @@
"less": "^4.1.2",
"less-loader": "^10.2.0",
"lodash-es": "^4.17.21",
"lucide-react": "0.379.0",
"lucide-react": "0.321.0",
"mini-css-extract-plugin": "2.4.5",
"papaparse": "5.4.1",
"rc-tween-one": "3.0.6",
"react": "18.2.0",
"react-addons-update": "15.6.3",
"react-beautiful-dnd": "13.1.1",
@@ -125,7 +121,6 @@
"web-vitals": "^0.2.4",
"webpack": "5.88.2",
"webpack-dev-server": "^4.15.1",
"webpack-retry-chunk-load-plugin": "3.1.1",
"xstate": "^4.31.0"
},
"browserslist": {
@@ -182,7 +177,7 @@
"@types/webpack-dev-server": "^4.7.2",
"@typescript-eslint/eslint-plugin": "^4.33.0",
"@typescript-eslint/parser": "^4.33.0",
"autoprefixer": "10.4.19",
"autoprefixer": "^9.0.0",
"babel-plugin-styled-components": "^1.12.0",
"compression-webpack-plugin": "9.0.0",
"copy-webpack-plugin": "^8.1.0",
@@ -205,12 +200,12 @@
"husky": "^7.0.4",
"is-ci": "^3.0.1",
"jest-playwright-preset": "^1.7.2",
"jest-preview": "0.3.1",
"jest-styled-components": "^7.0.8",
"lint-staged": "^12.5.0",
"msw": "1.3.2",
"npm-run-all": "latest",
"portfinder-sync": "^0.0.2",
"postcss": "8.4.38",
"prettier": "2.2.1",
"raw-loader": "4.0.2",
"react-hooks-testing-library": "0.6.0",
@@ -236,7 +231,6 @@
"@types/react-dom": "18.0.10",
"debug": "4.3.4",
"semver": "7.5.4",
"xml2js": "0.5.0",
"phin": "^3.7.1"
"xml2js": "0.5.0"
}
}

View File

@@ -1,10 +0,0 @@
<svg width="33" height="32" viewBox="0 0 33 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.99715 27.2944C4.70156 27.2944 4.74156 27.6477 4.74156 28.3143C4.74156 28.981 4.70156 29.3543 5.05493 29.3543C5.40831 29.3543 27.7778 29.3143 28.0134 29.2965C28.2489 29.2765 28.1889 28.4143 28.1889 28.081C28.1889 27.6699 28.2467 27.3166 27.9156 27.2966C27.5822 27.2766 5.11494 27.2944 4.99715 27.2944Z" fill="#ED6D30"/>
<path d="M5.07275 21.8602L5.09498 27.3132L27.7956 27.291L27.8467 21.7135L27.3466 21.1536L5.255 21.1158L5.07275 21.8602Z" fill="#F78A51"/>
<path d="M5.53728 21.4707L5.07278 21.8596L5.07056 22.724C5.07056 22.724 5.22169 22.8306 5.37282 22.7551C5.52395 22.6795 5.73508 22.5329 5.92177 22.5173C6.21959 22.4951 6.19514 22.7795 6.48184 22.7795C6.76855 22.7795 7.02858 22.4929 7.27083 22.4929C7.51308 22.4929 7.62421 22.7995 7.88202 22.784C8.13983 22.7684 8.28429 22.5084 8.60655 22.5173C8.86436 22.524 8.90881 22.784 9.22663 22.784C9.54445 22.784 9.70669 22.4818 9.97784 22.4818C10.249 22.4818 10.3379 22.8018 10.6401 22.8018C10.9424 22.8018 11.0246 22.4818 11.3713 22.4818C11.7181 22.4818 11.6892 22.784 11.9759 22.7529C12.2626 22.7218 12.2915 22.4729 12.6382 22.4573C12.9849 22.4418 13.0204 22.784 13.3227 22.784C13.625 22.784 13.6161 22.5373 13.8739 22.5373C14.1317 22.5373 18.9145 22.5262 19.0968 22.5262C19.279 22.5262 19.559 22.8462 19.8613 22.8462C20.1636 22.8462 20.0791 22.504 20.4103 22.4951C20.6081 22.4907 20.9925 22.824 21.2192 22.824C21.4459 22.824 21.5282 22.4818 21.7838 22.4662C22.0393 22.4507 22.4194 22.844 22.7217 22.8129C23.0239 22.7818 22.8728 22.4796 23.0995 22.4507C23.3262 22.4196 23.7796 22.784 24.0818 22.7973C24.3841 22.8129 24.1885 22.404 24.5041 22.404C24.8197 22.404 25.0642 22.7507 25.3953 22.7662C25.7265 22.7818 25.502 22.4196 25.8332 22.3884C26.1643 22.3573 26.4066 22.8418 26.7244 22.8106C27.0422 22.7795 26.9066 22.4329 27.1778 22.4173C27.4489 22.4018 27.8267 22.644 27.8267 22.644L27.8401 21.7063L14.7807 17.582L5.53728 21.4707Z" fill="#ED6D30"/>
<path d="M13.8049 29.3267C13.8049 29.3267 13.8605 22.7804 13.8516 22.6204C13.8405 22.4271 14.0116 22.3804 14.1494 22.3804C14.2871 22.3804 18.8558 22.3804 18.9935 22.3804C19.1313 22.3804 19.2113 22.4827 19.2224 22.6093C19.2335 22.736 19.2002 29.3156 19.2002 29.3156L13.8049 29.3267Z" fill="#51362F"/>
<path d="M4.15465 18.7244C4.15465 18.7244 3.23898 20.7487 3.24787 20.902C3.25676 21.0553 3.51234 21.9864 3.92128 22.0109C4.48135 22.0442 4.58359 21.5531 4.67693 21.5531C4.77028 21.5531 4.89474 22.0331 5.21478 22.0797C5.58816 22.1331 5.85708 21.5331 6.00154 21.5331C6.14601 21.5331 6.21713 22.0553 6.55495 22.0553C6.89277 22.0553 7.25281 21.4909 7.38616 21.502C7.51951 21.5131 7.64842 22.102 7.92401 22.102C8.20182 22.102 8.47296 21.5998 8.71299 21.5753C8.83745 21.5642 8.95525 22.1375 9.18194 22.1464C9.40864 22.1575 9.79535 21.5531 9.99093 21.5531C10.1865 21.5531 10.3399 22.1775 10.6377 22.1486C10.9355 22.1197 11.3378 21.5642 11.48 21.5642C11.6222 21.5642 11.7778 22.1264 12.0112 22.1375C12.2223 22.1464 12.5713 21.6087 12.7135 21.5998C12.8557 21.5909 13.0269 22.1486 13.2625 22.1486C13.498 22.1486 13.7536 21.5442 13.9492 21.5331C14.1448 21.522 14.227 22.102 14.4626 22.102C14.6982 22.102 15.0471 21.5175 15.2627 21.5087C15.4783 21.4975 15.5961 22.0686 15.8117 22.0686C16.0272 22.0686 16.2673 21.4887 16.4206 21.482C16.6584 21.4731 16.8096 22.0464 17.1385 22.0575C17.4674 22.0686 17.6008 21.5042 17.8564 21.5042C18.1119 21.5042 18.1853 22.0375 18.472 22.0486C18.7587 22.0597 18.9943 21.4953 19.2099 21.5042C19.4254 21.5153 19.5677 22.0264 19.8055 22.0264C20.0433 22.0264 20.2767 21.5042 20.4522 21.5131C20.6256 21.5242 20.8634 22.0464 21.099 22.0464C21.3346 22.0464 21.5302 21.5064 21.6435 21.502C21.8613 21.4953 22.0836 22.0664 22.3102 22.0464C22.5369 22.0264 22.7992 21.4642 22.9948 21.4731C23.1904 21.4842 23.4904 22.1108 23.726 22.0909C23.9616 22.0709 24.1616 21.4753 24.3772 21.4842C24.5928 21.4931 24.7661 22.0331 25.0395 22.0331C25.2906 22.0331 25.4306 21.5175 25.6573 21.5064C25.884 21.4953 26.0952 21.9997 26.3308 21.9753C26.5663 21.9509 26.6619 21.482 26.8686 21.4731C27.0731 21.462 27.3753 22.0042 27.6731 21.9931C27.971 21.982 28.1243 21.562 28.2888 21.5531C28.4532 21.5442 28.5955 22.0109 28.9955 22.0042C29.3556 21.9997 29.8267 21.3264 29.7334 20.8554C29.6401 20.3843 28.3599 18.5066 28.3599 18.5066L4.15465 18.7244Z" fill="#6C4D43"/>
<path d="M6.09496 13.357C6.09496 13.357 4.90148 15.0328 4.1925 16.5641C3.48352 18.0954 3.21016 19.0022 3.16571 19.8956C3.12126 20.7691 3.24794 20.9024 3.24794 20.9024L4.54366 19.4867C4.54366 19.4867 4.55699 20.8247 4.65256 20.838C4.74813 20.8513 5.74603 19.4578 5.8127 19.4445C5.8816 19.4311 5.8816 20.8513 5.97717 20.8513C6.07274 20.8513 7.09731 19.4178 7.16621 19.4178C7.2351 19.4178 7.26177 20.838 7.34401 20.838C7.42624 20.838 8.35524 19.3911 8.42414 19.4045C8.49304 19.4178 8.73751 20.9202 8.81975 20.9202C8.90198 20.9202 9.76209 19.3911 9.85765 19.3911C9.95322 19.3911 10.0621 20.9758 10.171 20.9758C10.2799 20.9758 11.1267 19.4467 11.1956 19.4467C11.2645 19.4467 11.5379 20.9625 11.6468 20.9491C11.7557 20.9358 12.5069 19.4467 12.5758 19.4734C12.6447 19.5 12.8225 20.9358 12.9447 20.9358C13.0669 20.9358 13.7226 19.4334 13.8315 19.4334C13.9404 19.4334 14.216 20.8913 14.2982 20.8913C14.3804 20.8913 15.0627 19.4289 15.145 19.4156C15.2272 19.4023 15.665 21.0269 15.8006 21.0269C15.9362 21.0269 16.3474 19.5245 16.4429 19.5378C16.5385 19.5512 17.1808 20.9713 17.2341 20.9713C17.2875 20.9713 17.7675 19.4823 17.8209 19.4823C17.8742 19.4823 18.5165 20.8335 18.6121 20.8491C18.7076 20.8624 19.0632 19.4978 19.1321 19.5245C19.201 19.5512 19.8567 20.958 19.9389 20.9713C20.0211 20.9847 20.3078 19.4956 20.3901 19.4956C20.4723 19.4956 21.3724 21.1336 21.4413 21.1202C21.5102 21.1069 21.5925 19.4667 21.6725 19.4534C21.7547 19.44 22.8326 21.0647 22.9148 21.0513C22.9971 21.038 22.9548 19.3978 23.0104 19.3978C23.066 19.3978 23.9527 20.9269 24.075 20.9136C24.1972 20.9002 24.3061 19.48 24.3884 19.48C24.4706 19.48 25.4529 21.1469 25.5774 21.1336C25.7019 21.1202 25.6041 19.5756 25.6596 19.5623C25.7152 19.5489 26.8198 20.9558 26.8753 20.9424C26.9309 20.9291 26.9153 19.4267 27.0109 19.4134C27.1065 19.4 28.131 20.8758 28.2266 20.8469C28.3222 20.8202 28.3355 19.3445 28.3911 19.3311C28.4466 19.3178 29.7268 20.8535 29.7268 20.8535C29.7268 20.8535 29.9757 19.5178 29.5357 18.2377C29.0956 16.9575 28.0266 15.1595 27.5087 14.395C26.9931 13.6304 26.6909 13.277 26.6909 13.277L14.0648 11.6591L6.09496 13.357Z" fill="#A37F69"/>
<path d="M10.4736 8.22084C10.4736 8.22084 8.78668 9.88105 7.98214 10.8412C7.17759 11.8013 6.09301 13.3548 6.09301 13.3548C6.09301 13.3548 5.69963 15.1728 5.8152 15.1862C5.93299 15.1995 7.08647 13.4615 7.19093 13.4726C7.29539 13.4859 7.02202 15.2239 7.12648 15.2506C7.23093 15.2773 8.51554 13.4482 8.57999 13.4348C8.64444 13.4215 8.3733 15.2373 8.4622 15.2639C8.5511 15.2906 9.85126 13.4482 9.92905 13.4482C10.0068 13.4482 10.1113 15.1484 10.2135 15.1484C10.3158 15.1484 11.1736 13.4237 11.2514 13.4348C11.3292 13.4482 11.5115 15.2128 11.6404 15.2373C11.7693 15.2639 12.3671 13.4082 12.4716 13.3948C12.576 13.3815 12.8339 15.3417 12.9516 15.3417C13.0694 15.3417 13.6917 13.4215 13.7695 13.4215C13.8473 13.4215 14.0429 15.3417 14.1718 15.3417C14.3007 15.3417 14.8852 13.3837 14.963 13.3837C15.0408 13.3837 15.5986 15.2639 15.6898 15.2395C15.7809 15.2128 16.2743 13.3593 16.3654 13.3704C16.4565 13.3837 16.8833 15.1862 17.041 15.2128C17.1966 15.2395 17.6122 13.4615 17.7411 13.4615C17.87 13.4615 18.2079 15.4329 18.3634 15.4329C18.519 15.4329 18.8702 13.4615 18.948 13.4615C19.0257 13.4615 19.7392 15.4084 19.857 15.4195C19.9747 15.4329 20.1037 13.5637 20.2459 13.5504C20.3881 13.5371 21.1549 15.4195 21.2327 15.4062C21.3105 15.3929 21.3749 13.5637 21.4527 13.5504C21.5305 13.5371 22.3995 15.2639 22.5417 15.2639C22.684 15.2639 22.5929 13.4726 22.724 13.4859C22.8529 13.4993 24.1508 15.3662 24.2686 15.3662C24.3864 15.3662 23.9308 13.4193 24.0353 13.3948C24.1397 13.3682 25.5021 15.4706 25.6443 15.4306C25.7866 15.3906 25.2821 13.5237 25.371 13.4971C25.4621 13.4704 26.8756 15.3262 27.0067 15.2751C27.1356 15.2239 26.7 13.277 26.7 13.277C26.7 13.277 25.3976 11.5768 24.7242 10.7478C24.0486 9.91661 22.9862 8.81425 22.9862 8.81425L17.7478 6.19836L10.4736 8.22084Z" fill="#BD9177"/>
<path d="M10.4734 8.2202C10.4734 8.2202 9.83556 9.42236 9.96447 9.49791C10.0934 9.57346 11.6736 8.05576 11.8269 8.09354C11.9803 8.13131 11.3157 9.70012 11.5336 9.75123C11.7514 9.80234 12.7959 8.0291 12.9248 8.05354C13.0515 8.07798 12.6559 9.77567 12.8604 9.84011C13.0649 9.90455 13.945 7.9891 14.085 8.01576C14.225 8.04021 14.1872 9.929 14.3139 9.94233C14.4406 9.95566 15.0918 8.10465 15.1807 8.10465C15.2696 8.10465 15.5252 10.0579 15.6785 10.069C15.8319 10.0823 16.2897 8.03576 16.3919 8.03576C16.4942 8.03576 17.0053 9.96677 17.172 9.96677C17.3387 9.96677 17.4387 8.01799 17.5276 7.98021C17.6165 7.94244 18.3633 9.85122 18.5767 9.85122C18.7611 9.85122 18.4478 7.95132 18.5633 7.92466C18.6789 7.90021 19.7368 9.889 19.9546 9.87789C20.1724 9.86456 19.7946 8.02243 19.8968 8.02243C19.9991 8.02243 21.1681 9.86456 21.3592 9.86456C21.5504 9.86456 20.9592 7.99132 21.0747 7.96466C21.1903 7.94021 22.9305 9.60679 23.0328 9.58013C23.135 9.55568 22.9817 8.81128 22.9817 8.81128C22.9817 8.81128 18.7833 4.49595 16.4342 4.48484C14.0339 4.47151 10.4734 8.2202 10.4734 8.2202Z" fill="#D2A590"/>
</svg>

Before | Size: 9.1 KiB

View File

@@ -1 +0,0 @@
<svg width="32" height="32" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M13.72 12.839l-9.054.92s.05.649.236.798c.178.142 5.617.066 11.048.088 5.433.023 10.82.125 10.944.072.249-.107.249-.992.249-.992l-13.424-.886zM16.55 7.787l-12.623-.32s.275.61.637.813c.523.29 3.71.889 11.518.918 7.808.028 10.635-.4 11.317-.678.58-.238 1.215-1.576 1.215-1.576l-12.064.843z" fill="#8A1E0C"/><path d="M21.95 8.658v1.335l2.176-.087V8.542l-2.176.116z" fill="#8A1E0C"/><path d="M21.948 9.566h2.177v16.797l-2.206.294.029-17.09z" fill="#EB2901"/><path d="M21.355 26.19c-.111.193-.111 2.297-.007 2.444.105.147 3.242.104 3.326 0 .085-.104.063-2.38 0-2.464-.062-.085-3.235-.125-3.32.02z" fill="#474C4F"/><path d="M8.462 9.85V8.488l2.042.125v1.22l-2.042.017z" fill="#8A1E0C"/><path d="M8.462 9.55l-.038 17.051 2.08-.207V9.566l-2.042-.015z" fill="#EB2901"/><path d="M7.804 25.919c-.073.073-.147 2.36-.02 2.464.125.104 3.14.129 3.244.024.105-.104.085-2.304.023-2.43-.063-.127-3.142-.163-3.247-.058z" fill="#474C4F"/><path d="M14.788 8.107v4.876l2.393-.33V8.108h-2.393z" fill="#EB2901"/><path d="M27.067 11.978c-.115-.16-.482-.138-.482-.138l-1.137-.013c.002-.398-.01-.913-.078-.996-.116-.137-4.542-.09-4.702.047-.091.078-.11.527-.107.898-2.738-.027-5.99-.058-8.83-.076 0-.384-.012-.849-.078-.915-.116-.116-4.22-.185-4.38-.07-.113.083-.136.647-.138.97-1.384.002-2.275.013-2.34.04-.322.137-.137 2.042-.137 2.042l22.476.16c.002.002.049-1.787-.067-1.95z" fill="#EB2901"/><path d="M3.93 6.942s-.646-.34-1.377-1.573c-.509-.858-.595-1.658-.387-1.778.21-.12 2.154 1.08 5.745 1.616a60.81 60.81 0 008.173.644c2.884.027 5.717-.135 8.397-.644 3.62-.689 4.906-1.436 5.264-1.316.36.12-.109 1.227-.369 1.78-.178.376-.944 1.77-1.515 1.87-.411.072-19.953-.09-19.953-.09l-3.977-.509z" fill="#474C4F"/><path d="M3.31 5.724c-.108.137-.057.457.212 1.06.107.237.415.782.529.917 0 0 2.982.756 11.977.7 8.995-.055 12.108-.62 12.108-.62s.911-1.277.745-1.32c-.096-.024-4.847.98-12.909.898C7.911 7.277 3.311 5.724 3.311 5.724z" fill="#EB2901"/></svg>

Before | Size: 2.0 KiB

File diff suppressed because one or more lines are too long

Before | Size: 5.2 KiB

File diff suppressed because one or more lines are too long

Before | Size: 6.1 KiB

View File

@@ -0,0 +1 @@
<svg width="24" height="24" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M12 2c1 2.538 2.5 2.962 3.5 3.808.942.78 1.481 1.845 1.5 2.961 0 1.122-.527 2.198-1.464 2.992C14.598 12.554 13.326 13 12 13s-2.598-.446-3.536-1.24C7.527 10.968 7 9.892 7 8.77c0-.255 0-.508.1-.762.085.25.236.48.443.673.207.193.463.342.75.437a2.334 2.334 0 001.767-.128c.263-.135.485-.32.65-.539.166-.22.269-.468.301-.727a1.452 1.452 0 00-.11-.765 1.699 1.699 0 00-.501-.644C8 4.115 11 2 12 2zM17 16l-5 6-5-6h10z" stroke="#fff" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/></svg>

After | Size: 581 B

View File

@@ -1 +0,0 @@
<svg width="14" height="14" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M1.305 13.063c.74.739 1.637.482 2.156-.109.53-.604.813-.956.813-.956.66-.973 3.392-4.227 5.724-6.568a2.638 2.638 0 002.74-.434 2.648 2.648 0 00.922-2.041.155.155 0 00-.23-.132l-1.607.927a1.64 1.64 0 01-1.076-1.864l1.6-.923a.153.153 0 00.077-.134.153.153 0 00-.077-.133 2.65 2.65 0 00-3.66 3.563C6.11 6.826 2.966 9.604 2.15 10.223c0 0-.492.356-.962.84-.464.476-.636 1.245.117 1.999zm.542-1.137a.592.592 0 111.184 0 .592.592 0 01-1.184 0z" fill="#82AEC0"/><path d="M8.334 4.61l.353-.35a2.63 2.63 0 01-.212-2.039c.073-.12.189-.249.262-.171-.03.946.245 1.931.902 2.611.327.338.752.582 1.207.696.224.057.458.082.69.069.137-.008.519-.149.596-.044v.004a2.656 2.656 0 01-2.135.043 38.176 38.176 0 00-1.903 2.05c.262-.495 1.034-1.408 1.241-1.757a.412.412 0 00-.036-.464c-.207-.255-.633-.493-.965-.649z" fill="#2F7889"/><path d="M5.186 8.529c.06-.062.004-.167-.08-.148-.158.035-.386.125-.657.345-.531.43-1.934 1.595-2.107 1.825-.173.23.522-.003.767-.047.2-.036 1.602-1.48 2.077-1.975zM10.048 1.104c-.296.212-.563.465-.84.701-.072.061-.177.122-.25.065-.08-.064-.03-.191.03-.274C9.512.874 10.493.358 11.442.563c-.5.161-.95.223-1.395.541z" fill="#B9E4EA"/><path d="M12.408 3.583a2.1 2.1 0 01-.371.19c-.112.031-.43-.092-.522-.166l1.183-.772c.043-.028.087-.056.137-.072a.546.546 0 01.185-.014c.087.004.51-.01.56.064.05.075-.126.149-.183.183-.33.197-.66.391-.99.587zM7.867 7.687L6.624 6.254c-.45.423-.895.835-1.321 1.225l.362-.078a.482.482 0 01.439.13l.58.65c.122.122.142.334.096.5l-.065.308c.367-.423.755-.862 1.152-1.302z" fill="#2F7889"/><g><path d="M13.378 12.86l-.744.643a.686.686 0 01-.968-.072L2.84 2.779l1.135-.853 9.459 9.976a.668.668 0 01-.057.957z" fill="#A06841"/><path d="M3.648 3.752l2.1 2.535c.328-.493.494-1.084.629-1.83l-2.028-2.14a1.838 1.838 0 00-.414.48 2.17 2.17 0 00-.287.955z" fill="#7D5133"/><path d="M7.81.438C5.885.416 5.17.588 4.098 1.515l-.966.835c-.35.302-.815.566-.742 1.089.027.19.086.384.05.573-.034.179-.242.268-.39.166-.139-.096-.292-.214-.463-.234a.588.588 0 00-.45.14l-.747.664s-.107.434.729 1.38c.835.946 1.373.878 1.373.878l.702-.618a.53.53 0 00.176-.412c-.003-.184-.11-.326-.174-.49-.013-.031-.083-.143.04-.244.109-.094.333-.062.46-.027.129.034.25.088.38.122.25.065.369-.051.543-.201L6.013 3.93c.619-.536-.325-1.474-.325-1.474C5.244 1.953 7.941.687 7.941.687c.198-.069.138-.246-.13-.249z" fill="#82AEC0"/><path d="M4.076 5.338a.504.504 0 00.14.016v-.02c-.011-.12-.077-.23-.144-.33A7.18 7.18 0 002.545 3.33a1.683 1.683 0 00-.154-.111.726.726 0 00-.002.22c.027.19.086.384.05.573-.038.196-.242.25-.399.177a3.27 3.27 0 011.011 1.027c.035.056.07.115.11.168a.2.2 0 01.075-.14c.109-.095.333-.063.46-.029.13.034.25.088.38.123zM1.778 5.573c.585.613.914 1.247.734 1.42-.179.17-.799-.186-1.384-.797C.542 5.584.21 4.92.388 4.748c.18-.171.804.213 1.39.825z" fill="#2F7889"/><path d="M4.057 2.41c.465-.198.88-.623 1.422-1.09A2.53 2.53 0 016.03.964c.076-.035.048-.149-.036-.148-.278.005-.527.09-.772.196-.342.149-.644.374-.935.608-.2.16-.67.555-.965.805-.055.047-.012.12.06.12.208.002.325.014.674-.135zM1.124 4.352c-.196.221.055.281.496.646.311.257.642.018.645-.223.003-.216-.052-.333-.366-.53-.315-.199-.597-.093-.775.107z" fill="#B9E4EA"/></g></svg>

Before | Size: 3.2 KiB

File diff suppressed because one or more lines are too long

Before | Size: 204 KiB

File diff suppressed because one or more lines are too long

Before | Size: 1.7 MiB

View File

@@ -108,7 +108,7 @@
"user_tooltip_more_help": "More details on how to create alerts",
"choose_alert_type": "Choose a type for the alert",
"metric_based_alert": "Metric based Alert",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data.",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
"log_based_alert": "Log-based Alert",
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
"traces_based_alert": "Trace-based Alert",

View File

@@ -15,7 +15,6 @@
"button_test_channel": "Test",
"button_return": "Back",
"field_channel_name": "Name",
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"field_slack_recipient": "Recipient",

View File

@@ -1,6 +1,6 @@
{
"create_dashboard": "Create Dashboard",
"import_json": "Import Dashboard JSON",
"import_json": "Import JSON",
"import_grafana_json": "Import Grafana JSON",
"copy_to_clipboard": "Copy To ClipBoard",
"download_json": "Download JSON",
@@ -9,14 +9,13 @@
"upload_json_file": "Upload JSON file",
"paste_json_below": "Paste JSON below",
"error_upload_json": "Invalid JSON",
"import_and_next": "Import and Next",
"load_json": "Load JSON",
"import_dashboard_by_pasting": "Import dashboard by pasting JSON or importing JSON file",
"error_loading_json": "Error loading JSON file",
"empty_json_not_allowed": "Empty JSON is not allowed",
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"variable_updated_successfully": "Variable updated successfully",
"error_while_updating_variable": "Error while updating variable",

View File

@@ -1,3 +0,0 @@
{
"delete_confirm_message": "Are you sure you want to delete {{keyName}}? Deleting an ingestion key is irreversible and cannot be undone."
}

View File

@@ -108,7 +108,7 @@
"user_tooltip_more_help": "More details on how to create alerts",
"choose_alert_type": "Choose a type for the alert",
"metric_based_alert": "Metric based Alert",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data.",
"metric_based_alert_desc": "Send a notification when a condition occurs in the metric data",
"log_based_alert": "Log-based Alert",
"log_based_alert_desc": "Send a notification when a condition occurs in the logs data.",
"traces_based_alert": "Trace-based Alert",

View File

@@ -15,7 +15,6 @@
"button_test_channel": "Test",
"button_return": "Back",
"field_channel_name": "Name",
"field_send_resolved": "Send resolved alerts",
"field_channel_type": "Type",
"field_webhook_url": "Webhook URL",
"field_slack_recipient": "Recipient",

View File

@@ -1,6 +1,6 @@
{
"create_dashboard": "Create Dashboard",
"import_json": "Import Dashboard JSON",
"import_json": "Import JSON",
"import_grafana_json": "Import Grafana JSON",
"copy_to_clipboard": "Copy To ClipBoard",
"download_json": "Download JSON",
@@ -9,14 +9,13 @@
"upload_json_file": "Upload JSON file",
"paste_json_below": "Paste JSON below",
"error_upload_json": "Invalid JSON",
"import_and_next": "Import and Next",
"load_json": "Load JSON",
"import_dashboard_by_pasting": "Import dashboard by pasting JSON or importing JSON file",
"error_loading_json": "Error loading JSON file",
"empty_json_not_allowed": "Empty JSON is not allowed",
"new_dashboard_title": "Sample Title",
"layout_saved_successfully": "Layout saved successfully",
"add_panel": "Add Panel",
"add_row": "Add Row",
"save_layout": "Save Layout",
"full_view": "Full Screen View",
"variable_updated_successfully": "Variable updated successfully",

View File

@@ -1,4 +0,0 @@
{
"delete_confirm_message": "Are you sure you want to delete {{keyName}}? Deleting an ingestion key is irreversible and cannot be undone.",
"delete_limit_confirm_message": "Are you sure you want to delete {{limit_name}} limit for ingestion key {{keyName}}?"
}

View File

@@ -48,5 +48,5 @@
"TRACES_SAVE_VIEWS": "SigNoz | Traces Saved Views",
"DEFAULT": "Open source Observability Platform | SigNoz",
"SHORTCUTS": "SigNoz | Shortcuts",
"INTEGRATIONS": "SigNoz | Integrations"
"INTEGRATIONS_INSTALLED": "SigNoz | Integrations"
}

View File

@@ -9,14 +9,13 @@ import ROUTES from 'constants/routes';
import AppLayout from 'container/AppLayout';
import useAnalytics from 'hooks/analytics/useAnalytics';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { THEME_MODE } from 'hooks/useDarkMode/constant';
import { useThemeConfig } from 'hooks/useDarkMode';
import useGetFeatureFlag from 'hooks/useGetFeatureFlag';
import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
import history from 'lib/history';
import { identity, pick, pickBy } from 'lodash-es';
import { identity, pickBy } from 'lodash-es';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useEffect, useState } from 'react';
@@ -47,14 +46,12 @@ function App(): JSX.Element {
const dispatch = useDispatch<Dispatch<AppActions>>();
const { trackPageView, trackEvent } = useAnalytics();
const { trackPageView } = useAnalytics();
const { hostname, pathname } = window.location;
const isCloudUserVal = isCloudUser();
const isDarkMode = useIsDarkMode();
const featureResponse = useGetFeatureFlag((allFlags) => {
const isOnboardingEnabled =
allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active ||
@@ -150,11 +147,7 @@ function App(): JSX.Element {
}
}
if (
isOnBasicPlan ||
(isLoggedInState && role && role !== 'ADMIN') ||
!(isCloudUserVal || isEECloudUser())
) {
if (isOnBasicPlan || (isLoggedInState && role && role !== 'ADMIN')) {
const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING);
setRoutes(newRoutes);
}
@@ -177,27 +170,6 @@ function App(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [pathname]);
useEffect(() => {
if (user && user?.email && user?.userId && user?.name) {
try {
const isThemeAnalyticsSent = getLocalStorageApi(
LOCALSTORAGE.THEME_ANALYTICS_V1,
);
if (!isThemeAnalyticsSent) {
trackEvent('Theme Analytics', {
theme: isDarkMode ? THEME_MODE.DARK : THEME_MODE.LIGHT,
user: pick(user, ['email', 'userId', 'name']),
org,
});
setLocalStorageApi(LOCALSTORAGE.THEME_ANALYTICS_V1, 'true');
}
} catch {
console.error('Failed to parse local storage theme analytics event');
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [user]);
return (
<ConfigProvider theme={themeConfig}>
<Router history={history}>
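
The block removed in this hunk fired a one-time theme analytics event guarded by a localStorage flag. For reference, a minimal sketch of that guard pattern, using the plain Web Storage API instead of the project's getLocalStorageApi/setLocalStorageApi helpers; the key name and the TrackEvent signature are illustrative assumptions, not the repo's identifiers.

// Sketch: send an analytics event at most once per browser, guarded by a
// localStorage flag. THEME_ANALYTICS_KEY and TrackEvent are stand-ins.
const THEME_ANALYTICS_KEY = 'theme_analytics_sent';

type TrackEvent = (name: string, attributes: Record<string, unknown>) => void;

export function sendThemeAnalyticsOnce(
  trackEvent: TrackEvent,
  isDarkMode: boolean,
): void {
  try {
    if (localStorage.getItem(THEME_ANALYTICS_KEY)) {
      return; // already reported for this browser
    }
    trackEvent('Theme Analytics', { theme: isDarkMode ? 'dark' : 'light' });
    localStorage.setItem(THEME_ANALYTICS_KEY, 'true');
  } catch {
    // localStorage may be unavailable (e.g. storage blocked); fail silently
  }
}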

View File

@@ -7,14 +7,7 @@ export const ServicesTablePage = Loadable(
export const ServiceMetricsPage = Loadable(
() =>
import(
/* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricsApplication/MetricsApplication'
),
);
export const ServiceTopLevelOperationsPage = Loadable(
() =>
import(
/* webpackChunkName: "ServiceMetricsPage" */ 'pages/ServiceTopLevelOperations'
/* webpackChunkName: "ServiceMetricsPage" */ 'pages/MetricsApplication'
),
);
@@ -204,3 +197,11 @@ export const InstalledIntegrations = Loadable(
/* webpackChunkName: "InstalledIntegrations" */ 'pages/IntegrationsModulePage'
),
);
export const IntegrationsMarketPlace = Loadable(
// eslint-disable-next-line sonarjs/no-identical-functions
() =>
import(
/* webpackChunkName: "IntegrationsMarketPlace" */ 'pages/IntegrationsModulePage'
),
);
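
The Loadable helper used throughout this file is not part of the hunk. A minimal sketch of what such a helper typically is, assuming it simply wraps React.lazy so each page becomes its own webpack chunk; the repo's actual helper may add a Suspense fallback or error handling.

import { ComponentType, lazy, LazyExoticComponent } from 'react';

// Sketch of a Loadable-style helper: defer loading the page module until the
// route actually renders. Call sites then look like the entries above, with
// the webpackChunkName comment naming the emitted chunk.
type PageImporter = () => Promise<{ default: ComponentType }>;

export default function Loadable(
  importer: PageImporter,
): LazyExoticComponent<ComponentType> {
  return lazy(importer);
}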

View File

@@ -15,6 +15,7 @@ import {
ErrorDetails,
IngestionSettings,
InstalledIntegrations,
IntegrationsMarketPlace,
LicensePage,
ListAllALertsPage,
LiveLogs,
@@ -33,7 +34,6 @@ import {
ServiceMapPage,
ServiceMetricsPage,
ServicesTablePage,
ServiceTopLevelOperationsPage,
SettingsPage,
ShortcutsPage,
SignupPage,
@@ -85,13 +85,6 @@ const routes: AppRoutes[] = [
isPrivate: true,
key: 'SERVICE_METRICS',
},
{
path: ROUTES.SERVICE_TOP_LEVEL_OPERATIONS,
exact: true,
component: ServiceTopLevelOperationsPage,
isPrivate: true,
key: 'SERVICE_TOP_LEVEL_OPERATIONS',
},
{
path: ROUTES.SERVICE_MAP,
component: ServiceMapPage,
@@ -345,11 +338,18 @@ const routes: AppRoutes[] = [
key: 'SHORTCUTS',
},
{
path: ROUTES.INTEGRATIONS,
path: ROUTES.INTEGRATIONS_INSTALLED,
exact: true,
component: InstalledIntegrations,
isPrivate: true,
key: 'INTEGRATIONS',
key: 'INTEGRATIONS_INSTALLED',
},
{
path: ROUTES.INTEGRATIONS_MARKETPLACE,
exact: true,
component: IntegrationsMarketPlace,
isPrivate: true,
key: 'INTEGRATIONS_MARKETPLACE',
},
];
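
Each entry in this table pairs a ROUTES path constant with a lazily loaded component, a privacy flag, and a key. A minimal sketch of that shape and of the kind of filtering App.tsx applies to it; the AppRoutes interface below is reconstructed from the fields visible in the diff, not copied from the repo.

import { ComponentType } from 'react';

// Shape reconstructed from the fields visible in the route entries above.
export interface AppRoutes {
  path: string;
  component: ComponentType;
  isPrivate: boolean;
  key: string;
  exact?: boolean;
}

// Example: drop a route by path before handing the table to the router,
// mirroring how the App.tsx hunk removes the billing route for some users.
export function withoutRoute(routes: AppRoutes[], path: string): AppRoutes[] {
  return routes.filter((route) => route?.path !== path);
}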

View File

@@ -16,7 +16,7 @@ export function ErrorResponseHandler(error: AxiosError): ErrorResponse {
return {
statusCode,
payload: null,
error: data.errorType || data.type,
error: data.errorType,
message: null,
};
}
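
ErrorResponseHandler and the wrappers that call it produce a SuccessResponse | ErrorResponse union. A minimal sketch of how a caller typically branches on that union; the field names follow the shapes visible in this diff, and notify is an illustrative callback rather than a project API.

// Shapes reconstructed from the diff; the real types live in types/api.
interface ErrorResponse {
  statusCode: number;
  error: string | null;
  message: string | null;
  payload: null;
}

interface SuccessResponse<T> {
  statusCode: number;
  error: null;
  message: string;
  payload: T;
}

function unwrap<T>(
  response: SuccessResponse<T> | ErrorResponse,
  notify: (msg: string) => void,
): T | null {
  if (response.payload !== null) {
    return response.payload; // success path
  }
  notify(response.error ?? 'Something went wrong');
  return null;
}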

View File

@@ -1,29 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
CreateIngestionKeyProps,
IngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const createIngestionKey = async (
props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createIngestionKey;

View File

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/keys/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteIngestionKey;

View File

@@ -1,21 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
AllIngestionKeyProps,
GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
export const getAllIngestionKeys = (
props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
// eslint-disable-next-line @typescript-eslint/naming-convention
const { search, per_page, page } = props;
const BASE_URL = '/workspaces/me/keys';
const URL_QUERY_PARAMS =
search && search.length > 0
? `/search?name=${search}&page=1&per_page=100`
: `?page=${page}&per_page=${per_page}`;
return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

View File

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
AddLimitProps,
LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const createLimitForIngestionKey = async (
props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.post(
`/workspaces/me/keys/${props.keyID}/limits`,
{
...props,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default createLimitForIngestionKey;

View File

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';
const deleteLimitsForIngestionKey = async (
id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.delete(
`/workspaces/me/limits/${id}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default deleteLimitsForIngestionKey;

View File

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
LimitSuccessProps,
UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';
interface SuccessResponse<T> {
statusCode: number;
error: null;
message: string;
payload: T;
}
interface ErrorResponse {
statusCode: number;
error: string;
message: string;
payload: null;
}
const updateLimitForIngestionKey = async (
props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/limits/${props.limitID}`,
{
config: props.config,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
if (axios.isAxiosError(error)) {
// Axios error
const errResponse: ErrorResponse = {
statusCode: error.response?.status || 500,
error: error.response?.data?.error,
message: error.response?.data?.status || 'An error occurred',
payload: null,
};
throw errResponse;
} else {
// Non-Axios error
const errResponse: ErrorResponse = {
statusCode: 500,
error: 'Unknown error',
message: 'An unknown error occurred',
payload: null,
};
throw errResponse;
}
}
};
export default updateLimitForIngestionKey;

View File

@@ -1,32 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
IngestionKeysPayloadProps,
UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';
const updateIngestionKey = async (
props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
try {
const response = await GatewayApiV1Instance.patch(
`/workspaces/me/keys/${props.id}`,
{
...props.data,
},
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateIngestionKey;

View File

@@ -3,7 +3,6 @@ const apiV1 = '/api/v1/';
export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const gatewayApiV1 = '/api/gateway/v1';
export const apiAlertManager = '/api/alertmanager';
export default apiV1;

View File

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
email_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
to: props.to,
html: props.html,
headers: props.headers,

View File

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
msteams_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
webhook_url: props.webhook_url,
title: props.title,
text: props.text,

View File

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
pagerduty_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
routing_key: props.routing_key,
client: props.client,
client_url: props.client_url,

View File

@@ -12,7 +12,7 @@ const create = async (
name: props.name,
slack_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
api_url: props.api_url,
channel: props.channel,
title: props.title,

View File

@@ -30,7 +30,7 @@ const create = async (
name: props.name,
webhook_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
url: props.api_url,
http_config: httpConfig,
},

View File

@@ -12,7 +12,7 @@ const editEmail = async (
name: props.name,
email_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
to: props.to,
html: props.html,
headers: props.headers,

View File

@@ -12,7 +12,7 @@ const editMsTeams = async (
name: props.name,
msteams_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
webhook_url: props.webhook_url,
title: props.title,
text: props.text,

View File

@@ -12,7 +12,7 @@ const editOpsgenie = async (
name: props.name,
opsgenie_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
api_key: props.api_key,
description: props.description,
priority: props.priority,

View File

@@ -12,7 +12,7 @@ const editPager = async (
name: props.name,
pagerduty_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
routing_key: props.routing_key,
client: props.client,
client_url: props.client_url,

View File

@@ -12,7 +12,7 @@ const editSlack = async (
name: props.name,
slack_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
api_url: props.api_url,
channel: props.channel,
title: props.title,

View File

@@ -29,7 +29,7 @@ const editWebhook = async (
name: props.name,
webhook_configs: [
{
send_resolved: props.send_resolved,
send_resolved: true,
url: props.api_url,
http_config: httpConfig,
},
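
Across the channel hunks above, the only change is whether send_resolved is forwarded from the form or hard-coded to true. A minimal sketch of the payload shape involved, using the Slack fields visible in the diff; SlackChannelProps and the builder function are reconstructed for illustration, not taken from the repo.

// Sketch of the slack_configs payload built by the channel helpers above.
interface SlackChannelProps {
  name: string;
  send_resolved: boolean;
  api_url: string;
  channel: string;
  title: string;
}

export function buildSlackPayload(
  props: SlackChannelProps,
  forceSendResolved: boolean,
): Record<string, unknown> {
  return {
    name: props.name,
    slack_configs: [
      {
        // One side of the diff forwards the form value; the other always
        // sends resolved notifications.
        send_resolved: forceSendResolved ? true : props.send_resolved,
        api_url: props.api_url,
        channel: props.channel,
        title: props.title,
      },
    ],
  };
}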

View File

@@ -1,28 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { EventSuccessPayloadProps } from 'types/api/events/types';
const logEvent = async (
eventName: string,
attributes: Record<string, unknown>,
): Promise<SuccessResponse<EventSuccessPayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/event', {
eventName,
attributes,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default logEvent;

View File

@@ -1,20 +1,26 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/dashboard/update';
const updateDashboard = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
const response = await axios.put(`/dashboards/${props.uuid}`, {
...props.data,
});
try {
const response = await axios.put(`/dashboards/${props.uuid}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateDashboard;
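
This hunk moves updateDashboard onto the wrapper pattern used by the other API callers in this diff: await the request, map a success into a SuccessResponse, and funnel failures through a single error handler. A minimal, generic sketch of that pattern; toErrorResponse below is a simplified stand-in for the project's ErrorResponseHandler.

import axios, { AxiosError, AxiosResponse } from 'axios';

// Response shapes mirror the fields visible in this diff.
interface ErrorResponse {
  statusCode: number;
  error: string | null;
  message: string | null;
  payload: null;
}

interface SuccessResponse<T> {
  statusCode: number;
  error: null;
  message: string;
  payload: T;
}

// Simplified stand-in for ErrorResponseHandler.
function toErrorResponse(error: AxiosError): ErrorResponse {
  return {
    statusCode: error.response?.status ?? 500,
    error: error.message,
    message: null,
    payload: null,
  };
}

export async function putJson<T>(
  url: string,
  body: unknown,
): Promise<SuccessResponse<T> | ErrorResponse> {
  try {
    const response: AxiosResponse = await axios.put(url, body);
    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return toErrorResponse(error as AxiosError);
  }
}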

View File

@@ -9,13 +9,7 @@ import { ENVIRONMENT } from 'constants/env';
import { LOCALSTORAGE } from 'constants/localStorage';
import store from 'store';
import apiV1, {
apiAlertManager,
apiV2,
apiV3,
apiV4,
gatewayApiV1,
} from './apiV1';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4 } from './apiV1';
import { Logout } from './utils';
const interceptorsResponse = (
@@ -140,19 +134,6 @@ ApiV4Instance.interceptors.response.use(
ApiV4Instance.interceptors.request.use(interceptorsRequestResponse);
//
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});
GatewayApiV1Instance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
);
GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//
AxiosAlertManagerInstance.interceptors.response.use(
interceptorsResponse,
interceptorRejected,
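
The hunk above removes the gateway axios instance. For context, a minimal sketch of the instance-plus-interceptors pattern these files use: one axios instance per API prefix, with shared request and response interceptors attached. The base URL and the interceptor bodies here are illustrative, not the project's real configuration.

import axios from 'axios';

// Sketch: a dedicated axios instance for one API prefix.
const instance = axios.create({
  baseURL: 'https://signoz.example.com/api/gateway/v1', // illustrative URL
});

// Response interceptor: pass successes through, reject failures so callers
// can handle them (the project routes these through shared handlers).
instance.interceptors.response.use(
  (response) => response,
  (error) => Promise.reject(error),
);

// Request interceptor: attach headers, log, or mutate config before each call.
instance.interceptors.request.use((config) => config);

export default instance;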

View File

@@ -1,4 +1,4 @@
import { ApiV4Instance } from 'api';
import axios from 'api';
import { AxiosResponse } from 'axios';
import { MetricMetaProps } from 'types/api/metrics/getApDex';
@@ -6,6 +6,4 @@ export const getMetricMeta = (
metricName: string,
servicename: string,
): Promise<AxiosResponse<MetricMetaProps>> =>
ApiV4Instance.get(
`/metric/metric_metadata?metricName=${metricName}&serviceName=${servicename}`,
);
axios.get(`/metric_meta?metricName=${metricName}&serviceName=${servicename}`);

View File

@@ -0,0 +1,27 @@
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
MetricNameProps,
MetricNamesPayloadProps,
} from 'types/api/metrics/getMetricName';
export const getMetricName = async (
props: MetricNameProps,
): Promise<SuccessResponse<MetricNamesPayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/metrics/autocomplete/list?match=${props || ''}`,
);
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

View File

@@ -1,7 +1,6 @@
import { ApiV3Instance as axios } from 'api';
import { ApiV2Instance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import createQueryParams from 'lib/createQueryParams';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
TagKeyProps,
@@ -9,19 +8,15 @@ import {
TagValueProps,
TagValuesPayloadProps,
} from 'types/api/metrics/getResourceAttributes';
import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';
export const getResourceAttributesTagKeys = async (
props: TagKeyProps,
): Promise<SuccessResponse<TagKeysPayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/autocomplete/attribute_keys?${createQueryParams({
aggregateOperator: MetricAggregateOperator.RATE,
searchText: props.match,
dataSource: DataSource.METRICS,
aggregateAttribute: props.metricName,
})}`,
`/metrics/autocomplete/tagKey?metricName=${props.metricName}${
props.match ? `&match=${props.match}` : ''
}`,
);
return {
@@ -40,13 +35,7 @@ export const getResourceAttributesTagValues = async (
): Promise<SuccessResponse<TagValuesPayloadProps> | ErrorResponse> => {
try {
const response = await axios.get(
`/autocomplete/attribute_values?${createQueryParams({
aggregateOperator: MetricAggregateOperator.RATE,
dataSource: DataSource.METRICS,
aggregateAttribute: props.metricName,
attributeKey: props.tagKey,
searchText: '',
})}`,
`/metrics/autocomplete/tagValue?metricName=${props.metricName}&tagKey=${props.tagKey}`,
);
return {
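
The removed call sites above build the /autocomplete/attribute_keys and /autocomplete/attribute_values URLs with a createQueryParams helper. A minimal, generic sketch of what such a helper does, implemented with URLSearchParams; this is a stand-in for the repo's helper, and the values in the usage example are illustrative.

// Sketch: turn a flat params object into an encoded query string.
export function createQueryParams(
  params: Record<string, string | number | boolean>,
): string {
  const search = new URLSearchParams();
  Object.entries(params).forEach(([key, value]) => {
    search.append(key, String(value));
  });
  return search.toString();
}

// Usage mirroring the removed call site (values are illustrative):
const query = createQueryParams({
  aggregateOperator: 'rate',
  searchText: 'host',
  dataSource: 'metrics',
  aggregateAttribute: 'system_cpu_load_average_1m',
});
const url = `/autocomplete/attribute_keys?${query}`;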

View File

@@ -1,44 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { Recurrence } from './getAllDowntimeSchedules';
export interface DowntimeSchedulePayload {
name: string;
description?: string;
alertIds: string[];
schedule: {
timezone?: string;
startTime?: string;
endTime?: string;
recurrence?: Recurrence;
};
}
export interface PayloadProps {
status: string;
data: string;
}
const createDowntimeSchedule = async (
props: DowntimeSchedulePayload,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.post('/downtime_schedules', {
...props,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default createDowntimeSchedule;

View File

@@ -1,19 +0,0 @@
import axios from 'api';
import { useMutation, UseMutationResult } from 'react-query';
export interface DeleteDowntimeScheduleProps {
id?: number;
}
export interface DeleteSchedulePayloadProps {
status: string;
data: string;
}
export const useDeleteDowntimeSchedule = (
props: DeleteDowntimeScheduleProps,
): UseMutationResult<DeleteSchedulePayloadProps, Error, number> =>
useMutation({
mutationKey: [props.id],
mutationFn: () => axios.delete(`/downtime_schedules/${props.id}`),
});

View File

@@ -1,50 +0,0 @@
import axios from 'api';
import { AxiosError, AxiosResponse } from 'axios';
import { Option } from 'container/PlannedDowntime/DropdownWithSubMenu/DropdownWithSubMenu';
import { useQuery, UseQueryResult } from 'react-query';
export type Recurrence = {
startTime?: string | null;
endTime?: string | null;
duration?: number | string | null;
repeatType?: string | Option | null;
repeatOn?: string[] | null;
};
type Schedule = {
timezone: string | null;
startTime: string | null;
endTime: string | null;
recurrence: Recurrence | null;
};
export interface DowntimeSchedules {
id: number;
name: string | null;
description: string | null;
schedule: Schedule | null;
alertIds: string[] | null;
createdAt: string | null;
createdBy: string | null;
updatedAt: string | null;
updatedBy: string | null;
}
export type PayloadProps = { data: DowntimeSchedules[] };
export const getAllDowntimeSchedules = async (
props?: GetAllDowntimeSchedulesPayloadProps,
): Promise<AxiosResponse<PayloadProps>> =>
axios.get('/downtime_schedules', { params: props });
export interface GetAllDowntimeSchedulesPayloadProps {
active?: boolean;
recurrence?: boolean;
}
export const useGetAllDowntimeSchedules = (
props?: GetAllDowntimeSchedulesPayloadProps,
): UseQueryResult<AxiosResponse<PayloadProps>, AxiosError> =>
useQuery<AxiosResponse<PayloadProps>, AxiosError>({
queryKey: ['getAllDowntimeSchedules', props],
queryFn: () => getAllDowntimeSchedules(props),
});
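
useGetAllDowntimeSchedules wraps the GET call in a react-query useQuery. A minimal sketch of consuming it from another hook; the import path is assumed from this file's location, and the response is unwrapped according to the AxiosResponse<PayloadProps> shape shown above.

import { useGetAllDowntimeSchedules } from 'api/plannedDowntime/getAllDowntimeSchedules';

// Sketch: derive the number of active downtime schedules from the query.
export function useActiveDowntimeCount(): number | undefined {
  const { data, isLoading, isError } = useGetAllDowntimeSchedules({
    active: true,
  });

  if (isLoading || isError || !data) {
    return undefined;
  }

  // data is the AxiosResponse; data.data is { data: DowntimeSchedules[] }.
  return data.data.data.length;
}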

View File

@@ -1,37 +0,0 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { DowntimeSchedulePayload } from './createDowntimeSchedule';
export interface DowntimeScheduleUpdatePayload {
data: DowntimeSchedulePayload;
id?: number;
}
export interface PayloadProps {
status: string;
data: string;
}
const updateDowntimeSchedule = async (
props: DowntimeScheduleUpdatePayload,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
try {
const response = await axios.put(`/downtime_schedules/${props.id}`, {
...props.data,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
export default updateDowntimeSchedule;

View File

@@ -1,176 +0,0 @@
export default function ApacheIcon(): JSX.Element {
return (
<svg
width="14"
height="14"
viewBox="0 0 14 14"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M8.7112 1.05739C8.3796 1.30831 8.08524 1.60498 7.83691 1.93853L8.17977 2.58567C8.40218 2.26279 8.64677 1.95576 8.91177 1.66681C8.93063 1.64581 8.94091 1.63596 8.94091 1.63596L8.91177 1.66681C8.66003 1.95965 8.43081 2.27111 8.22606 2.59853C8.67305 2.56672 9.11808 2.51165 9.55934 2.43353C9.60936 2.25525 9.62374 2.06886 9.60168 1.88502C9.57962 1.70118 9.52154 1.52349 9.43077 1.3621C9.43077 1.3621 9.09991 0.828957 8.7112 1.05739Z"
fill="url(#paint0_linear_2061_4195)"
/>
<path
d="M7.55932 6.49365L7.42432 6.51722L7.49332 6.50651C7.51432 6.50265 7.53703 6.49837 7.55932 6.49365Z"
fill="#BE202E"
/>
<path
opacity="0.35"
d="M7.55932 6.49365L7.42432 6.51722L7.49332 6.50651C7.51432 6.50265 7.53703 6.49837 7.55932 6.49365Z"
fill="#BE202E"
/>
<path
d="M7.67407 5.92858L7.6955 5.92558C7.72464 5.9213 7.75336 5.91616 7.78122 5.91016L7.67493 5.92816L7.67407 5.92858Z"
fill="#BE202E"
/>
<path
opacity="0.35"
d="M7.67407 5.92858L7.6955 5.92558C7.72464 5.9213 7.75336 5.91616 7.78122 5.91016L7.67493 5.92816L7.67407 5.92858Z"
fill="#BE202E"
/>
<path
d="M7.16872 4.25736C7.273 4.0625 7.37858 3.87221 7.48543 3.6865C7.59629 3.49393 7.70829 3.3075 7.82143 3.12721L7.84115 3.09507C7.95315 2.91793 8.066 2.74779 8.17972 2.58464L7.83686 1.9375L7.75886 2.03393C7.65986 2.15736 7.55743 2.29107 7.452 2.43036C7.33329 2.58893 7.21115 2.75779 7.08729 2.93564C6.97286 3.09979 6.85672 3.27164 6.74058 3.44993C6.64158 3.60121 6.54258 3.75721 6.444 3.91707L6.43286 3.93507L6.879 4.8145C6.97443 4.62707 7.071 4.44136 7.16872 4.25736Z"
fill="url(#paint1_linear_2061_4195)"
/>
<path
d="M5.13606 9.22519C5.07692 9.38748 5.01763 9.55305 4.95821 9.72191L4.95563 9.72919L4.93035 9.80076C4.89049 9.91476 4.85535 10.015 4.77563 10.2503C4.92444 10.3467 5.0416 10.4847 5.11249 10.6472C5.10283 10.4581 5.01907 10.2804 4.87935 10.1526C5.16043 10.1981 5.44862 10.1654 5.71236 10.0581C5.97609 9.95074 6.20521 9.77291 6.37463 9.54405C6.40102 9.50088 6.42464 9.45607 6.44535 9.40991C6.37459 9.49741 6.28141 9.56408 6.17575 9.6028C6.07008 9.64152 5.95589 9.65084 5.84535 9.62976C6.20937 9.49539 6.51802 9.24316 6.72221 8.91319C6.76978 8.83691 6.81563 8.75376 6.86278 8.66162C6.6974 8.84328 6.48756 8.97872 6.25393 9.05463C6.02029 9.13053 5.77091 9.14426 5.53035 9.09448L5.16949 9.13391C5.15706 9.16434 5.14721 9.19476 5.13606 9.22519Z"
fill="url(#paint2_linear_2061_4195)"
/>
<path
d="M5.30448 8.417C5.38248 8.21528 5.46276 8.01157 5.54533 7.80586C5.62419 7.60871 5.70519 7.41057 5.78833 7.21143C5.87148 7.01228 5.95633 6.81228 6.0429 6.61143C6.1309 6.40828 6.22076 6.20557 6.31248 6.00328C6.40419 5.801 6.49633 5.60257 6.5889 5.408C6.62262 5.33714 6.65662 5.26643 6.6909 5.19586C6.75005 5.07414 6.80962 4.95343 6.86962 4.83371C6.87262 4.82728 6.87605 4.82086 6.87948 4.81443L6.4329 3.93457L6.41105 3.97014C6.30733 4.14157 6.20362 4.313 6.10205 4.49128C6.00048 4.66957 5.89848 4.853 5.80205 5.03814C5.7189 5.19443 5.63776 5.35157 5.55862 5.50957L5.51148 5.606C5.41419 5.80614 5.32633 5.99943 5.24705 6.18543C5.15705 6.396 5.07762 6.59686 5.00876 6.788C4.9629 6.91357 4.92305 7.03486 4.88362 7.151C4.85233 7.25043 4.82276 7.34986 4.79448 7.451C4.72762 7.68471 4.67048 7.91771 4.62305 8.15L5.07133 9.035C5.13076 8.87671 5.19162 8.71614 5.2539 8.55328L5.30448 8.417Z"
fill="url(#paint3_linear_2061_4195)"
/>
<path
d="M4.615 8.18082C4.55868 8.46014 4.51975 8.74268 4.49843 9.02682C4.49843 9.03668 4.49843 9.04653 4.49629 9.05639C4.3547 8.8778 4.1801 8.72808 3.982 8.61539C4.24526 8.95089 4.41819 9.34823 4.48429 9.76953C4.28982 9.78674 4.0942 9.75338 3.91643 9.67268C4.05057 9.80984 4.21714 9.91096 4.40072 9.96668C4.14974 10.0146 3.9168 10.1307 3.72743 10.3022C3.97375 10.178 4.25059 10.1271 4.525 10.1557C4.21858 11.024 3.91129 11.9827 3.604 13.0001C3.64664 12.988 3.6856 12.9655 3.71739 12.9346C3.74918 12.9037 3.7728 12.8654 3.78615 12.8231C3.841 12.6388 4.20443 11.4298 4.77443 9.84068L4.82372 9.70439L4.83743 9.66625C4.89772 9.49968 4.96015 9.32968 5.02472 9.15625L5.06758 9.03753V9.03539L4.62143 8.15039C4.61929 8.16025 4.61672 8.17053 4.615 8.18082Z"
fill="url(#paint4_linear_2061_4195)"
/>
<path
d="M6.94843 4.89059L6.90986 4.96987C6.87129 5.04959 6.83214 5.13102 6.79243 5.21416C6.74957 5.30416 6.70671 5.3963 6.66386 5.49059C6.64157 5.53816 6.621 5.58573 6.59743 5.63416C6.53057 5.7793 6.46286 5.92916 6.39429 6.08373C6.30857 6.27402 6.22286 6.47173 6.13714 6.67687C6.054 6.87259 5.96971 7.07516 5.88429 7.28459C5.80314 7.48459 5.721 7.68973 5.63786 7.90002C5.56386 8.08888 5.48914 8.28273 5.41371 8.48159C5.40986 8.49145 5.40643 8.50087 5.403 8.51073C5.32814 8.70873 5.25257 8.91187 5.17629 9.12016L5.17114 9.1343L5.532 9.09487L5.51057 9.09102C6.039 8.98351 6.52026 8.7126 6.88629 8.31659C7.0686 8.1182 7.22683 7.89896 7.35771 7.66345C7.47147 7.46033 7.57252 7.25036 7.66029 7.03473C7.74386 6.83245 7.824 6.61387 7.90114 6.37645C7.79448 6.43086 7.68084 6.47037 7.56343 6.49388C7.54157 6.49859 7.52057 6.50287 7.49657 6.50673C7.47257 6.51059 7.45071 6.51445 7.42757 6.51745C7.80325 6.36305 8.10458 6.06924 8.26843 5.69759C8.12155 5.79773 7.95733 5.86966 7.78414 5.90973C7.75586 5.91616 7.72757 5.92087 7.69843 5.92516L7.677 5.92816C7.80484 5.87656 7.92565 5.80903 8.03657 5.72716C8.05843 5.71087 8.07943 5.69373 8.1 5.67573C8.13129 5.64873 8.16086 5.62045 8.18914 5.59002C8.20714 5.57116 8.22471 5.55145 8.24186 5.53087C8.28293 5.48179 8.32059 5.42996 8.35457 5.37573C8.36529 5.35859 8.376 5.34145 8.38629 5.32345C8.39957 5.29773 8.41243 5.27245 8.42486 5.24716C8.481 5.13402 8.526 5.03288 8.56157 4.94459C8.57957 4.90173 8.595 4.85888 8.60829 4.82116C8.61386 4.80616 8.619 4.79116 8.62371 4.7783C8.63786 4.73545 8.64943 4.69816 8.65843 4.66473C8.66941 4.62595 8.67828 4.58661 8.685 4.54688C8.67015 4.5586 8.65454 4.56934 8.63829 4.57902C8.4822 4.66169 8.31419 4.71953 8.14029 4.75045H8.13257L8.08157 4.75859L8.09057 4.75473L6.957 4.87902L6.94843 4.89059Z"
fill="url(#paint5_linear_2061_4195)"
/>
<path
d="M8.22475 2.59871C8.12404 2.75343 8.01389 2.92914 7.89561 3.12843L7.87675 3.16014C7.77446 3.33157 7.66604 3.52114 7.55147 3.72886C7.45261 3.90771 7.34989 4.10014 7.24332 4.30614C7.15018 4.48586 7.05418 4.67671 6.95532 4.87871L8.08889 4.75443C8.33914 4.65567 8.55501 4.48583 8.70989 4.26586C8.74804 4.211 8.78618 4.15357 8.82432 4.09443C8.94089 3.91271 9.05489 3.71257 9.15689 3.51371C9.25018 3.33322 9.33429 3.14813 9.40889 2.95914C9.44758 2.86102 9.48092 2.76087 9.50875 2.65914C9.52932 2.58028 9.54561 2.50571 9.55804 2.43457C9.11675 2.51236 8.67172 2.56715 8.22475 2.59871Z"
fill="url(#paint6_linear_2061_4195)"
/>
<path
d="M7.49234 6.50635C7.46963 6.5102 7.44648 6.51406 7.42334 6.51706C7.44648 6.51406 7.47134 6.5102 7.49234 6.50635Z"
fill="#BE202E"
/>
<path
opacity="0.35"
d="M7.49234 6.50635C7.46963 6.5102 7.44648 6.51406 7.42334 6.51706C7.44648 6.51406 7.47134 6.5102 7.49234 6.50635Z"
fill="#BE202E"
/>
<path
d="M7.49234 6.50635C7.46963 6.5102 7.44648 6.51406 7.42334 6.51706C7.44648 6.51406 7.47134 6.5102 7.49234 6.50635Z"
fill="url(#paint7_linear_2061_4195)"
/>
<defs>
<linearGradient
id="paint0_linear_2061_4195"
x1="7.26579"
y1="1.16584"
x2="9.7782"
y2="0.46843"
gradientUnits="userSpaceOnUse"
>
<stop stopColor="#F69923" />
<stop offset="0.312" stopColor="#F79A23" />
<stop offset="0.838" stopColor="#E97826" />
</linearGradient>
<linearGradient
id="paint1_linear_2061_4195"
x1="1.76109"
y1="12.4382"
x2="6.87606"
y2="1.48267"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.323" stopColor="#9E2064" />
<stop offset="0.63" stopColor="#C92037" />
<stop offset="0.751" stopColor="#CD2335" />
<stop offset="1" stopColor="#E97826" />
</linearGradient>
<linearGradient
id="paint2_linear_2061_4195"
x1="3.47784"
y1="11.6287"
x2="6.5258"
y2="5.10046"
gradientUnits="userSpaceOnUse"
>
<stop stopColor="#282662" />
<stop offset="0.095" stopColor="#662E8D" />
<stop offset="0.788" stopColor="#9F2064" />
<stop offset="0.949" stopColor="#CD2032" />
</linearGradient>
<linearGradient
id="paint3_linear_2061_4195"
x1="1.94596"
y1="11.7748"
x2="7.06094"
y2="0.819304"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.323" stopColor="#9E2064" />
<stop offset="0.63" stopColor="#C92037" />
<stop offset="0.751" stopColor="#CD2335" />
<stop offset="1" stopColor="#E97826" />
</linearGradient>
<linearGradient
id="paint4_linear_2061_4195"
x1="2.46768"
y1="11.0455"
x2="5.15578"
y2="5.28798"
gradientUnits="userSpaceOnUse"
>
<stop stopColor="#282662" />
<stop offset="0.095" stopColor="#662E8D" />
<stop offset="0.788" stopColor="#9F2064" />
<stop offset="0.949" stopColor="#CD2032" />
</linearGradient>
<linearGradient
id="paint5_linear_2061_4195"
x1="3.08048"
y1="12.3044"
x2="8.19546"
y2="1.3489"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.323" stopColor="#9E2064" />
<stop offset="0.63" stopColor="#C92037" />
<stop offset="0.751" stopColor="#CD2335" />
<stop offset="1" stopColor="#E97826" />
</linearGradient>
<linearGradient
id="paint6_linear_2061_4195"
x1="2.70679"
y1="12.9581"
x2="7.82195"
y2="2.00223"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.323" stopColor="#9E2064" />
<stop offset="0.63" stopColor="#C92037" />
<stop offset="0.751" stopColor="#CD2335" />
<stop offset="1" stopColor="#E97826" />
</linearGradient>
<linearGradient
id="paint7_linear_2061_4195"
x1="3.41759"
y1="12.4619"
x2="8.53277"
y2="1.50629"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.323" stopColor="#9E2064" />
<stop offset="0.63" stopColor="#C92037" />
<stop offset="0.751" stopColor="#CD2335" />
<stop offset="1" stopColor="#E97826" />
</linearGradient>
</defs>
</svg>
);
}

Some files were not shown because too many files have changed in this diff.