Compare commits: v0.51.0-cl ... v0.51.0

16 commits

| SHA1 |
|---|
| 4192fd573d |
| ca13d80205 |
| 59121bd932 |
| aef935a817 |
| f300518d61 |
| 18b608a1d8 |
| 738d62c9cf |
| 38e694cd36 |
| 1281330c52 |
| 7b7cca7db7 |
| 3134e8c1cf |
| d00024b64a |
| 4360cd0397 |
| a688b6c60e |
| 8d84ce8f06 |
| 09ea7b9eb5 |
@@ -146,7 +146,7 @@ services:
        condition: on-failure

  query-service:
-   image: signoz/query-service:0.49.1
+   image: signoz/query-service:0.51.0
    command:
      [
        "-config=/root/config/prometheus.yml",
@@ -199,7 +199,7 @@ services:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector:
-   image: signoz/signoz-otel-collector:0.102.2
+   image: signoz/signoz-otel-collector:0.102.3
    command:
      [
        "--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
      - query-service

  otel-collector-migrator:
-   image: signoz/signoz-schema-migrator:0.102.2
+   image: signoz/signoz-schema-migrator:0.102.3
    deploy:
      restart_policy:
        condition: on-failure

@@ -66,7 +66,7 @@ services:
      - --storage.path=/data

  otel-collector-migrator:
-   image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+   image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
  otel-collector:
    container_name: signoz-otel-collector
-   image: signoz/signoz-otel-collector:0.102.2
+   image: signoz/signoz-otel-collector:0.102.3
    command:
      [
        "--config=/etc/otel-collector-config.yaml",

@@ -164,7 +164,7 @@ services:
  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

  query-service:
-   image: signoz/query-service:${DOCKER_TAG:-0.49.1}
+   image: signoz/query-service:${DOCKER_TAG:-0.51.0}
    container_name: signoz-query-service
    command:
      [
@@ -204,7 +204,7 @@ services:
    <<: *db-depend

  frontend:
-   image: signoz/frontend:${DOCKER_TAG:-0.49.1}
+   image: signoz/frontend:${DOCKER_TAG:-0.51.0}
    container_name: signoz-frontend
    restart: on-failure
    depends_on:
@@ -216,7 +216,7 @@ services:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector-migrator:
-   image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+   image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
@@ -230,7 +230,7 @@ services:

  otel-collector:
-   image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
+   image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.3}
    container_name: signoz-otel-collector
    command:
      [

@@ -164,7 +164,7 @@ services:
  # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

  query-service:
-   image: signoz/query-service:${DOCKER_TAG:-0.49.1}
+   image: signoz/query-service:${DOCKER_TAG:-0.51.0}
    container_name: signoz-query-service
    command:
      [
@@ -203,7 +203,7 @@ services:
    <<: *db-depend

  frontend:
-   image: signoz/frontend:${DOCKER_TAG:-0.49.1}
+   image: signoz/frontend:${DOCKER_TAG:-0.51.0}
    container_name: signoz-frontend
    restart: on-failure
    depends_on:
@@ -215,7 +215,7 @@ services:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

  otel-collector-migrator:
-   image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
+   image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:

  otel-collector:
-   image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.2}
+   image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.102.3}
    container_name: signoz-otel-collector
    command:
      [
@@ -1,7 +1,9 @@
package api

import (
+  "errors"
  "net/http"
+  "strings"

  "github.com/gorilla/mux"
  "go.signoz.io/signoz/pkg/query-service/app/dashboards"
@@ -29,6 +31,10 @@ func (ah *APIHandler) lockUnlockDashboard(w http.ResponseWriter, r *http.Request

  // Get the dashboard UUID from the request
  uuid := mux.Vars(r)["uuid"]
+  if strings.HasPrefix(uuid, "integration") {
+    RespondError(w, &model.ApiError{Typ: model.ErrorForbidden, Err: errors.New("dashboards created by integrations cannot be unlocked")}, "You are not authorized to lock/unlock this dashboard")
+    return
+  }
  dashboard, err := dashboards.GetDashboard(r.Context(), uuid)
  if err != nil {
    RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, err.Error())
@@ -4,6 +4,7 @@ import (
  "bytes"
  "context"
  "encoding/json"
+  "errors"
  "fmt"
  "io"
  "net"
@@ -27,6 +28,7 @@ import (
  "go.signoz.io/signoz/ee/query-service/integrations/gateway"
  "go.signoz.io/signoz/ee/query-service/interfaces"
  baseauth "go.signoz.io/signoz/pkg/query-service/auth"
+  "go.signoz.io/signoz/pkg/query-service/model"
  v3 "go.signoz.io/signoz/pkg/query-service/model/v3"

  licensepkg "go.signoz.io/signoz/ee/query-service/license"
@@ -40,6 +42,7 @@ import (
  "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
  "go.signoz.io/signoz/pkg/query-service/app/opamp"
  opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
+  "go.signoz.io/signoz/pkg/query-service/app/preferences"
  "go.signoz.io/signoz/pkg/query-service/cache"
  baseconst "go.signoz.io/signoz/pkg/query-service/constants"
  "go.signoz.io/signoz/pkg/query-service/healthcheck"
@@ -109,6 +112,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {

  baseexplorer.InitWithDSN(baseconst.RELATIONAL_DATASOURCE_PATH)

+  if err := preferences.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH); err != nil {
+    return nil, err
+  }
+
  localDB, err := dashboards.InitDB(baseconst.RELATIONAL_DATASOURCE_PATH)

  if err != nil {
@@ -319,7 +326,17 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler) (*http.Server, e

  // add auth middleware
  getUserFromRequest := func(r *http.Request) (*basemodel.UserPayload, error) {
-    return auth.GetUserFromRequest(r, apiHandler)
+    user, err := auth.GetUserFromRequest(r, apiHandler)
+
+    if err != nil {
+      return nil, err
+    }
+
+    if user.User.OrgId == "" {
+      return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims"))
+    }
+
+    return user, nil
  }
  am := baseapp.NewAuthMiddleware(getUserFromRequest)
@@ -1,4 +1,4 @@
-import axios from 'api';
+import { ApiBaseInstance as axios } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -21,6 +21,7 @@ const logEvent = async (
      payload: response.data.data,
    };
  } catch (error) {
+    console.error(error);
    return ErrorResponseHandler(error as AxiosError);
  }
};
@@ -96,6 +96,10 @@ const interceptorRejected = async (
  }
};

+const interceptorRejectedBase = async (
+  value: AxiosResponse<any>,
+): Promise<AxiosResponse<any>> => Promise.reject(value);
+
const instance = axios.create({
  baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
});
@@ -140,6 +144,18 @@ ApiV4Instance.interceptors.response.use(
ApiV4Instance.interceptors.request.use(interceptorsRequestResponse);
//

+// axios Base
+export const ApiBaseInstance = axios.create({
+  baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
+});
+
+ApiBaseInstance.interceptors.response.use(
+  interceptorsResponse,
+  interceptorRejectedBase,
+);
+ApiBaseInstance.interceptors.request.use(interceptorsRequestResponse);
+//
+
// gateway Api V1
export const GatewayApiV1Instance = axios.create({
  baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
@@ -34,6 +34,7 @@ import dayjs, { Dayjs } from 'dayjs';
|
||||
import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys';
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { isNil } from 'lodash-es';
|
||||
import {
|
||||
ArrowUpRight,
|
||||
CalendarClock,
|
||||
@@ -605,243 +606,250 @@ function MultiIngestionSettings(): JSX.Element {
|
||||
|
||||
<div className="limits-data">
|
||||
<div className="signals">
|
||||
{SIGNALS.map((signal) => (
|
||||
<div className="signal" key={signal}>
|
||||
<div className="header">
|
||||
<div className="signal-name">{signal}</div>
|
||||
<div className="actions">
|
||||
{hasLimits(signal) ? (
|
||||
<>
|
||||
{SIGNALS.map((signal) => {
|
||||
const hasValidDayLimit = !isNil(limits[signal]?.config?.day?.size);
|
||||
const hasValidSecondLimit = !isNil(
|
||||
limits[signal]?.config?.second?.size,
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="signal" key={signal}>
|
||||
<div className="header">
|
||||
<div className="signal-name">{signal}</div>
|
||||
<div className="actions">
|
||||
{hasLimits(signal) ? (
|
||||
<>
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
icon={<PenLine size={14} />}
|
||||
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
enableEditLimitMode(APIKey, limits[signal]);
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
icon={<Trash2 color={Color.BG_CHERRY_500} size={14} />}
|
||||
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
showDeleteLimitModal(APIKey, limits[signal]);
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
icon={<PenLine size={14} />}
|
||||
className="periscope-btn"
|
||||
size="small"
|
||||
shape="round"
|
||||
icon={<PlusIcon size={14} />}
|
||||
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
enableEditLimitMode(APIKey, limits[signal]);
|
||||
}}
|
||||
/>
|
||||
|
||||
<Button
|
||||
className="periscope-btn ghost"
|
||||
icon={<Trash2 color={Color.BG_CHERRY_500} size={14} />}
|
||||
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
showDeleteLimitModal(APIKey, limits[signal]);
|
||||
enableEditLimitMode(APIKey, {
|
||||
id: signal,
|
||||
signal,
|
||||
config: {},
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<Button
|
||||
className="periscope-btn"
|
||||
size="small"
|
||||
shape="round"
|
||||
icon={<PlusIcon size={14} />}
|
||||
disabled={!!(activeAPIKey?.id === APIKey.id && activeSignal)}
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
>
|
||||
Limits
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
enableEditLimitMode(APIKey, {
|
||||
id: signal,
|
||||
signal,
|
||||
config: {},
|
||||
});
|
||||
<div className="signal-limit-values">
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal?.signal === signal &&
|
||||
isEditAddLimitOpen ? (
|
||||
<Form
|
||||
name="edit-ingestion-key-limit-form"
|
||||
key="addEditLimitForm"
|
||||
form={addEditLimitForm}
|
||||
autoComplete="off"
|
||||
initialValues={{
|
||||
dailyLimit: bytesToGb(limits[signal]?.config?.day?.size),
|
||||
secondsLimit: bytesToGb(limits[signal]?.config?.second?.size),
|
||||
}}
|
||||
className="edit-ingestion-key-limit-form"
|
||||
>
|
||||
Limits
|
||||
</Button>
|
||||
<div className="signal-limit-edit-mode">
|
||||
<div className="daily-limit">
|
||||
<div className="heading">
|
||||
<div className="title"> Daily limit </div>
|
||||
<div className="subtitle">
|
||||
Add a limit for data ingested daily{' '}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="size">
|
||||
<Form.Item name="dailyLimit">
|
||||
<InputNumber
|
||||
addonAfter={
|
||||
<Select defaultValue="GiB" disabled>
|
||||
<Option value="TiB"> TiB</Option>
|
||||
<Option value="GiB"> GiB</Option>
|
||||
<Option value="MiB"> MiB </Option>
|
||||
<Option value="KiB"> KiB </Option>
|
||||
</Select>
|
||||
}
|
||||
/>
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="second-limit">
|
||||
<div className="heading">
|
||||
<div className="title"> Per Second limit </div>
|
||||
<div className="subtitle">
|
||||
{' '}
|
||||
Add a limit for data ingested every second{' '}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="size">
|
||||
<Form.Item name="secondsLimit">
|
||||
<InputNumber
|
||||
addonAfter={
|
||||
<Select defaultValue="GiB" disabled>
|
||||
<Option value="TiB"> TiB</Option>
|
||||
<Option value="GiB"> GiB</Option>
|
||||
<Option value="MiB"> MiB </Option>
|
||||
<Option value="KiB"> KiB </Option>
|
||||
</Select>
|
||||
}
|
||||
/>
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal.signal === signal &&
|
||||
!isLoadingLimitForKey &&
|
||||
hasCreateLimitForIngestionKeyError &&
|
||||
createLimitForIngestionKeyError &&
|
||||
createLimitForIngestionKeyError?.error && (
|
||||
<div className="error">
|
||||
{createLimitForIngestionKeyError?.error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal.signal === signal &&
|
||||
!isLoadingLimitForKey &&
|
||||
hasUpdateLimitForIngestionKeyError &&
|
||||
updateLimitForIngestionKeyError && (
|
||||
<div className="error">
|
||||
{updateLimitForIngestionKeyError?.error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal.signal === signal &&
|
||||
isEditAddLimitOpen && (
|
||||
<div className="signal-limit-save-discard">
|
||||
<Button
|
||||
type="primary"
|
||||
className="periscope-btn primary"
|
||||
size="small"
|
||||
disabled={
|
||||
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
|
||||
}
|
||||
loading={
|
||||
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
|
||||
}
|
||||
onClick={(): void => {
|
||||
if (!hasLimits(signal)) {
|
||||
handleAddLimit(APIKey, signal);
|
||||
} else {
|
||||
handleUpdateLimit(APIKey, limits[signal]);
|
||||
}
|
||||
}}
|
||||
>
|
||||
Save
|
||||
</Button>
|
||||
<Button
|
||||
type="default"
|
||||
className="periscope-btn"
|
||||
size="small"
|
||||
disabled={
|
||||
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
|
||||
}
|
||||
onClick={handleDiscardSaveLimit}
|
||||
>
|
||||
Discard
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</Form>
|
||||
) : (
|
||||
<div className="signal-limit-view-mode">
|
||||
<div className="signal-limit-value">
|
||||
<div className="limit-type">
|
||||
Daily <Minus size={16} />{' '}
|
||||
</div>
|
||||
|
||||
<div className="limit-value">
|
||||
{hasValidDayLimit ? (
|
||||
<>
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.metric?.day?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}{' '}
|
||||
/{' '}
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.config?.day?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Infinity size={16} /> NO LIMIT
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="signal-limit-value">
|
||||
<div className="limit-type">
|
||||
Seconds <Minus size={16} />
|
||||
</div>
|
||||
|
||||
<div className="limit-value">
|
||||
{hasValidSecondLimit ? (
|
||||
<>
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.metric?.second?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}{' '}
|
||||
/{' '}
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.config?.second?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Infinity size={16} /> NO LIMIT
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="signal-limit-values">
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal?.signal === signal &&
|
||||
isEditAddLimitOpen ? (
|
||||
<Form
|
||||
name="edit-ingestion-key-limit-form"
|
||||
key="addEditLimitForm"
|
||||
form={addEditLimitForm}
|
||||
autoComplete="off"
|
||||
initialValues={{
|
||||
dailyLimit: bytesToGb(limits[signal]?.config?.day?.size),
|
||||
secondsLimit: bytesToGb(limits[signal]?.config?.second?.size),
|
||||
}}
|
||||
className="edit-ingestion-key-limit-form"
|
||||
>
|
||||
<div className="signal-limit-edit-mode">
|
||||
<div className="daily-limit">
|
||||
<div className="heading">
|
||||
<div className="title"> Daily limit </div>
|
||||
<div className="subtitle">
|
||||
Add a limit for data ingested daily{' '}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="size">
|
||||
<Form.Item name="dailyLimit">
|
||||
<InputNumber
|
||||
addonAfter={
|
||||
<Select defaultValue="GiB" disabled>
|
||||
<Option value="TiB"> TiB</Option>
|
||||
<Option value="GiB"> GiB</Option>
|
||||
<Option value="MiB"> MiB </Option>
|
||||
<Option value="KiB"> KiB </Option>
|
||||
</Select>
|
||||
}
|
||||
/>
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="second-limit">
|
||||
<div className="heading">
|
||||
<div className="title"> Per Second limit </div>
|
||||
<div className="subtitle">
|
||||
{' '}
|
||||
Add a limit for data ingested every second{' '}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="size">
|
||||
<Form.Item name="secondsLimit">
|
||||
<InputNumber
|
||||
addonAfter={
|
||||
<Select defaultValue="GiB" disabled>
|
||||
<Option value="TiB"> TiB</Option>
|
||||
<Option value="GiB"> GiB</Option>
|
||||
<Option value="MiB"> MiB </Option>
|
||||
<Option value="KiB"> KiB </Option>
|
||||
</Select>
|
||||
}
|
||||
/>
|
||||
</Form.Item>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal.signal === signal &&
|
||||
!isLoadingLimitForKey &&
|
||||
hasCreateLimitForIngestionKeyError &&
|
||||
createLimitForIngestionKeyError &&
|
||||
createLimitForIngestionKeyError?.error && (
|
||||
<div className="error">
|
||||
{createLimitForIngestionKeyError?.error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal.signal === signal &&
|
||||
!isLoadingLimitForKey &&
|
||||
hasUpdateLimitForIngestionKeyError &&
|
||||
updateLimitForIngestionKeyError && (
|
||||
<div className="error">
|
||||
{updateLimitForIngestionKeyError?.error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{activeAPIKey?.id === APIKey.id &&
|
||||
activeSignal.signal === signal &&
|
||||
isEditAddLimitOpen && (
|
||||
<div className="signal-limit-save-discard">
|
||||
<Button
|
||||
type="primary"
|
||||
className="periscope-btn primary"
|
||||
size="small"
|
||||
disabled={
|
||||
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
|
||||
}
|
||||
loading={
|
||||
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
|
||||
}
|
||||
onClick={(): void => {
|
||||
if (!hasLimits(signal)) {
|
||||
handleAddLimit(APIKey, signal);
|
||||
} else {
|
||||
handleUpdateLimit(APIKey, limits[signal]);
|
||||
}
|
||||
}}
|
||||
>
|
||||
Save
|
||||
</Button>
|
||||
<Button
|
||||
type="default"
|
||||
className="periscope-btn"
|
||||
size="small"
|
||||
disabled={
|
||||
isLoadingLimitForKey || isLoadingUpdatedLimitForKey
|
||||
}
|
||||
onClick={handleDiscardSaveLimit}
|
||||
>
|
||||
Discard
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</Form>
|
||||
) : (
|
||||
<div className="signal-limit-view-mode">
|
||||
<div className="signal-limit-value">
|
||||
<div className="limit-type">
|
||||
Daily <Minus size={16} />{' '}
|
||||
</div>
|
||||
|
||||
<div className="limit-value">
|
||||
{limits[signal]?.config?.day?.size ? (
|
||||
<>
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.metric?.day?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}{' '}
|
||||
/{' '}
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.config?.day?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Infinity size={16} /> NO LIMIT
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="signal-limit-value">
|
||||
<div className="limit-type">
|
||||
Seconds <Minus size={16} />
|
||||
</div>
|
||||
|
||||
<div className="limit-value">
|
||||
{limits[signal]?.config?.second?.size ? (
|
||||
<>
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.metric?.second?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}{' '}
|
||||
/{' '}
|
||||
{getYAxisFormattedValue(
|
||||
(limits[signal]?.config?.second?.size || 0).toString(),
|
||||
'bytes',
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Infinity size={16} /> NO LIMIT
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,100 @@
import { getNonIntegrationDashboardById } from 'mocks-server/__mockdata__/dashboards';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { MemoryRouter, useLocation } from 'react-router-dom';
import { fireEvent, render, screen, waitFor } from 'tests/test-utils';

import DashboardDescription from '..';

jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useLocation: jest.fn(),
  useRouteMatch: jest.fn().mockReturnValue({
    params: {
      dashboardId: 4,
    },
  }),
}));

jest.mock(
  'container/TopNav/DateTimeSelectionV2/index.tsx',
  () =>
    function MockDateTimeSelection(): JSX.Element {
      return <div>MockDateTimeSelection</div>;
    },
);

describe('Dashboard landing page actions header tests', () => {
  it('unlock dashboard should be disabled for integrations created dashboards', async () => {
    const mockLocation = {
      pathname: `${process.env.FRONTEND_API_ENDPOINT}/dashboard/4`,
      search: '',
    };
    (useLocation as jest.Mock).mockReturnValue(mockLocation);
    const { getByTestId } = render(
      <MemoryRouter initialEntries={['/dashboard/4']}>
        <DashboardProvider>
          <DashboardDescription
            handle={{
              active: false,
              enter: (): Promise<void> => Promise.resolve(),
              exit: (): Promise<void> => Promise.resolve(),
              node: { current: null },
            }}
          />
        </DashboardProvider>
      </MemoryRouter>,
    );

    await waitFor(() =>
      expect(getByTestId('dashboard-title')).toHaveTextContent('thor'),
    );

    const dashboardSettingsTrigger = getByTestId('options');

    await fireEvent.click(dashboardSettingsTrigger);

    const lockUnlockButton = screen.getByTestId('lock-unlock-dashboard');

    await waitFor(() => expect(lockUnlockButton).toBeDisabled());
  });
  it('unlock dashboard should not be disabled for non integration created dashboards', async () => {
    const mockLocation = {
      pathname: `${process.env.FRONTEND_API_ENDPOINT}/dashboard/4`,
      search: '',
    };
    (useLocation as jest.Mock).mockReturnValue(mockLocation);
    server.use(
      rest.get('http://localhost/api/v1/dashboards/4', (_, res, ctx) =>
        res(ctx.status(200), ctx.json(getNonIntegrationDashboardById)),
      ),
    );
    const { getByTestId } = render(
      <MemoryRouter initialEntries={['/dashboard/4']}>
        <DashboardProvider>
          <DashboardDescription
            handle={{
              active: false,
              enter: (): Promise<void> => Promise.resolve(),
              exit: (): Promise<void> => Promise.resolve(),
              node: { current: null },
            }}
          />
        </DashboardProvider>
      </MemoryRouter>,
    );

    await waitFor(() =>
      expect(getByTestId('dashboard-title')).toHaveTextContent('thor'),
    );

    const dashboardSettingsTrigger = getByTestId('options');

    await fireEvent.click(dashboardSettingsTrigger);

    const lockUnlockButton = screen.getByTestId('lock-unlock-dashboard');

    await waitFor(() => expect(lockUnlockButton).not.toBeDisabled());
  });
});
@@ -1,7 +1,16 @@
import './Description.styles.scss';

import { PlusOutlined } from '@ant-design/icons';
-import { Button, Card, Input, Modal, Popover, Tag, Typography } from 'antd';
+import {
+  Button,
+  Card,
+  Input,
+  Modal,
+  Popover,
+  Tag,
+  Tooltip,
+  Typography,
+} from 'antd';
import logEvent from 'api/common/logEvent';
import FacingIssueBtn from 'components/facingIssueBtn/FacingIssueBtn';
import { dashboardHelpMessage } from 'components/facingIssueBtn/util';
@@ -308,7 +317,9 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
          alt="dashboard-img"
          style={{ width: '16px', height: '16px' }}
        />
-       <Typography.Text className="dashboard-title">{title}</Typography.Text>
+       <Typography.Text className="dashboard-title" data-testid="dashboard-title">
+         {title}
+       </Typography.Text>
        {isDashboardLocked && <LockKeyhole size={14} />}
      </div>
      <div className="right-section">
@@ -334,13 +345,22 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
      <div className="menu-content">
        <section className="section-1">
          {(isAuthor || role === USER_ROLES.ADMIN) && (
-           <Button
-             type="text"
-             icon={<LockKeyhole size={14} />}
-             onClick={handleLockDashboardToggle}
+           <Tooltip
+             title={
+               selectedDashboard?.created_by === 'integration' &&
+               'Dashboards created by integrations cannot be unlocked'
+             }
            >
-             {isDashboardLocked ? 'Unlock Dashboard' : 'Lock Dashboard'}
-           </Button>
+             <Button
+               type="text"
+               icon={<LockKeyhole size={14} />}
+               disabled={selectedDashboard?.created_by === 'integration'}
+               onClick={handleLockDashboardToggle}
+               data-testid="lock-unlock-dashboard"
+             >
+               {isDashboardLocked ? 'Unlock Dashboard' : 'Lock Dashboard'}
+             </Button>
+           </Tooltip>
          )}

          {!isDashboardLocked && editDashboard && (
@@ -31,7 +31,14 @@ function TopNav(): JSX.Element | null {
    [location.pathname],
  );

- if (isSignUpPage || isDisabled || isRouteToSkip) {
+ const isNewAlertsLandingPage = useMemo(
+   () =>
+     matchPath(location.pathname, { path: ROUTES.ALERTS_NEW, exact: true }) &&
+     !location.search,
+   [location.pathname, location.search],
+ );
+
+ if (isSignUpPage || isDisabled || isRouteToSkip || isNewAlertsLandingPage) {
    return null;
  }

@@ -32,16 +32,6 @@ const useAnalytics = (): any => {
    }
  };

- // useEffect(() => {
- //   // Perform any setup or cleanup related to the analytics library
- //   // For example, initialize analytics library here
-
- //   // Clean-up function (optional)
- //   return () => {
- //   // Perform cleanup if needed
- //   };
- // }, []); // The empty dependency array ensures that this effect runs only once when the component mounts
-
  return { trackPageView, trackEvent };
};
@@ -1,3 +1,4 @@
+/* eslint-disable sonarjs/no-duplicate-string */
export const dashboardSuccessResponse = {
  status: 'success',
  data: [
@@ -48,3 +49,53 @@ export const dashboardEmptyState = {
  status: 'sucsess',
  data: [],
};
+
+export const getDashboardById = {
+  status: 'success',
+  data: {
+    id: 1,
+    uuid: '1',
+    created_at: '2022-11-16T13:29:47.064874419Z',
+    created_by: 'integration',
+    updated_at: '2024-05-21T06:41:30.546630961Z',
+    updated_by: 'thor@avengers.io',
+    isLocked: true,
+    data: {
+      collapsableRowsMigrated: true,
+      description: '',
+      name: '',
+      panelMap: {},
+      tags: ['linux'],
+      title: 'thor',
+      uploadedGrafana: false,
+      uuid: '',
+      version: '',
+      variables: {},
+    },
+  },
+};
+
+export const getNonIntegrationDashboardById = {
+  status: 'success',
+  data: {
+    id: 1,
+    uuid: '1',
+    created_at: '2022-11-16T13:29:47.064874419Z',
+    created_by: 'thor',
+    updated_at: '2024-05-21T06:41:30.546630961Z',
+    updated_by: 'thor@avengers.io',
+    isLocked: true,
+    data: {
+      collapsableRowsMigrated: true,
+      description: '',
+      name: '',
+      panelMap: {},
+      tags: ['linux'],
+      title: 'thor',
+      uploadedGrafana: false,
+      uuid: '',
+      version: '',
+      variables: {},
+    },
+  },
+};
@@ -1,7 +1,10 @@
import { rest } from 'msw';

import { billingSuccessResponse } from './__mockdata__/billing';
-import { dashboardSuccessResponse } from './__mockdata__/dashboards';
+import {
+  dashboardSuccessResponse,
+  getDashboardById,
+} from './__mockdata__/dashboards';
import { explorerView } from './__mockdata__/explorer_views';
import { inviteUser } from './__mockdata__/invite_user';
import { licensesSuccessResponse } from './__mockdata__/licenses';
@@ -133,7 +136,6 @@ export const handlers = [
    res(ctx.status(200), ctx.json(licensesSuccessResponse)),
  ),

- // ?licenseKey=58707e3d-3bdb-44e7-8c89-a9be237939f4
  rest.get('http://localhost/api/v1/billing', (req, res, ctx) =>
    res(ctx.status(200), ctx.json(billingSuccessResponse)),
  ),
@@ -142,6 +144,10 @@
    res(ctx.status(200), ctx.json(dashboardSuccessResponse)),
  ),

+ rest.get('http://localhost/api/v1/dashboards/4', (_, res, ctx) =>
+   res(ctx.status(200), ctx.json(getDashboardById)),
+ ),
+
  rest.get('http://localhost/api/v1/invite', (_, res, ctx) =>
    res(ctx.status(200), ctx.json(inviteUser)),
  ),

@@ -42,6 +42,7 @@ const mockStored = (role?: string): any =>
    accessJwt: '',
    refreshJwt: '',
  },
+ isLoggedIn: true,
  org: [
    {
      createdAt: 0,
go.mod (2 changed lines)
@@ -6,7 +6,7 @@ require (
  github.com/ClickHouse/clickhouse-go/v2 v2.20.0
  github.com/DATA-DOG/go-sqlmock v1.5.2
  github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
- github.com/SigNoz/signoz-otel-collector v0.102.2
+ github.com/SigNoz/signoz-otel-collector v0.102.3
  github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974
  github.com/SigNoz/zap_otlp/zap_otlp_sync v0.0.0-20230822164844-1b861a431974
  github.com/antonmedv/expr v1.15.3

go.sum (4 changed lines)
@@ -64,8 +64,8 @@ github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkb
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
github.com/SigNoz/prometheus v1.11.1 h1:roM8ugYf4UxaeKKujEeBvoX7ybq3IrS+TB26KiRtIJg=
github.com/SigNoz/prometheus v1.11.1/go.mod h1:uv4mQwZQtx7y4GQ6EdHOi8Wsk07uHNn2XHd1zM85m6I=
-github.com/SigNoz/signoz-otel-collector v0.102.2 h1:SmjsBZjMjTVVpuOlfJXlsDJQbdefQP/9Wz3CyzSuZuU=
-github.com/SigNoz/signoz-otel-collector v0.102.2/go.mod h1:ISAXYhZenojCWg6CdDJtPMpfS6Zwc08+uoxH25tc6Y0=
+github.com/SigNoz/signoz-otel-collector v0.102.3 h1:q6iS5kqqwopwC2pS2UvYL3IiJMP75UdyK6d+rculXn4=
+github.com/SigNoz/signoz-otel-collector v0.102.3/go.mod h1:61WqwhnrtFjwj1FyfDYMXjxFx8gWgKok1Xy1C6LbjWo=
github.com/SigNoz/zap_otlp v0.1.0 h1:T7rRcFN87GavY8lDGZj0Z3Xv6OhJA6Pj3I9dNPmqvRc=
github.com/SigNoz/zap_otlp v0.1.0/go.mod h1:lcHvbDbRgvDnPxo9lDlaL1JK2PyOyouP/C3ynnYIvyo=
github.com/SigNoz/zap_otlp/zap_otlp_encoder v0.0.0-20230822164844-1b861a431974 h1:PKVgdf83Yw+lZJbFtNGBgqXiXNf3+kOXW2qZ7Ms7OaY=
@@ -706,21 +706,25 @@ func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, erro
  return &services, nil
}

-func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time) (*map[string][]string, *map[string][]string, *model.ApiError) {
+func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time, services []string) (*map[string][]string, *model.ApiError) {

+ start = start.In(time.UTC)
+
  // The `top_level_operations` that have `time` >= start
  operations := map[string][]string{}
- // All top level operations for a service
- allOperations := map[string][]string{}
- query := fmt.Sprintf(`SELECT DISTINCT name, serviceName, time FROM %s.%s`, r.TraceDB, r.topLevelOperationsTable)
+ // We can't use the `end` because the `top_level_operations` table has the most recent instances of the operations
+ // We can only use the `start` time to filter the operations
+ query := fmt.Sprintf(`SELECT name, serviceName, max(time) as ts FROM %s.%s WHERE time >= @start`, r.TraceDB, r.topLevelOperationsTable)
+ if len(services) > 0 {
+   query += ` AND serviceName IN @services`
+ }
+ query += ` GROUP BY name, serviceName ORDER BY ts DESC LIMIT 5000`

- rows, err := r.db.Query(ctx, query)
+ rows, err := r.db.Query(ctx, query, clickhouse.Named("start", start), clickhouse.Named("services", services))

  if err != nil {
    zap.L().Error("Error in processing sql query", zap.Error(err))
-   return nil, nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
+   return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in processing sql query")}
  }

  defer rows.Close()
@@ -728,25 +732,17 @@ func (r *ClickHouseReader) GetTopLevelOperations(ctx context.Context, skipConfig
    var name, serviceName string
    var t time.Time
    if err := rows.Scan(&name, &serviceName, &t); err != nil {
-     return nil, nil, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error in reading data")}
+     return nil, &model.ApiError{Typ: model.ErrorInternal, Err: fmt.Errorf("error in reading data")}
    }
    if _, ok := operations[serviceName]; !ok {
-     operations[serviceName] = []string{}
-   }
-   if _, ok := allOperations[serviceName]; !ok {
-     allOperations[serviceName] = []string{}
+     operations[serviceName] = []string{"overflow_operation"}
    }
    if skipConfig.ShouldSkip(serviceName, name) {
      continue
    }
-   allOperations[serviceName] = append(allOperations[serviceName], name)
-   // We can't use the `end` because the `top_level_operations` table has the most recent instances of the operations
-   // We can only use the `start` time to filter the operations
-   if t.After(start) {
-     operations[serviceName] = append(operations[serviceName], name)
-   }
+   operations[serviceName] = append(operations[serviceName], name)
  }
- return &operations, &allOperations, nil
+ return &operations, nil
}

func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError) {
@@ -755,7 +751,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
    return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
  }

- topLevelOps, allTopLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig, *queryParams.Start, *queryParams.End)
+ topLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig, *queryParams.Start, *queryParams.End, nil)
  if apiErr != nil {
    return nil, apiErr
  }
@@ -779,7 +775,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G
    // the top level operations are high, we want to warn to let user know the issue
    // with the instrumentation
    serviceItem.DataWarning = model.DataWarning{
-     TopLevelOps: (*allTopLevelOps)[svc],
+     TopLevelOps: (*topLevelOps)[svc],
    }

    // default max_query_size = 262144
@@ -868,7 +864,7 @@ func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.G

func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError) {

- topLevelOps, _, apiErr := r.GetTopLevelOperations(ctx, skipConfig, *queryParams.Start, *queryParams.End)
+ topLevelOps, apiErr := r.GetTopLevelOperations(ctx, skipConfig, *queryParams.Start, *queryParams.End, nil)
  if apiErr != nil {
    return nil, apiErr
  }
@@ -5005,3 +5001,27 @@ func (r *ClickHouseReader) LiveTailLogsV3(ctx context.Context, query string, tim
      }
    }
  }
+
+func (r *ClickHouseReader) GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error) {
+  var minTime, maxTime time.Time
+
+  query := fmt.Sprintf("SELECT min(timestamp), max(timestamp) FROM %s.%s WHERE traceID IN ('%s')",
+    r.TraceDB, r.SpansTable, strings.Join(traceID, "','"))
+
+  zap.L().Debug("GetMinAndMaxTimestampForTraceID", zap.String("query", query))
+
+  err := r.db.QueryRow(ctx, query).Scan(&minTime, &maxTime)
+  if err != nil {
+    zap.L().Error("Error while executing query", zap.Error(err))
+    return 0, 0, err
+  }
+
+  if minTime.IsZero() || maxTime.IsZero() {
+    zap.L().Debug("minTime or maxTime is zero")
+    return 0, 0, nil
+  }
+
+  zap.L().Debug("GetMinAndMaxTimestampForTraceID", zap.Any("minTime", minTime), zap.Any("maxTime", maxTime))
+
+  return minTime.UnixNano(), maxTime.UnixNano(), nil
+}
@@ -29,12 +29,14 @@ import (
|
||||
logsv3 "go.signoz.io/signoz/pkg/query-service/app/logs/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/metrics"
|
||||
metricsv3 "go.signoz.io/signoz/pkg/query-service/app/metrics/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/preferences"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/querier"
|
||||
querierV2 "go.signoz.io/signoz/pkg/query-service/app/querier/v2"
|
||||
"go.signoz.io/signoz/pkg/query-service/app/queryBuilder"
|
||||
tracesV3 "go.signoz.io/signoz/pkg/query-service/app/traces/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/auth"
|
||||
"go.signoz.io/signoz/pkg/query-service/cache"
|
||||
"go.signoz.io/signoz/pkg/query-service/common"
|
||||
"go.signoz.io/signoz/pkg/query-service/constants"
|
||||
v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
|
||||
"go.signoz.io/signoz/pkg/query-service/postprocess"
|
||||
@@ -398,6 +400,22 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *AuthMiddleware) {
|
||||
|
||||
router.HandleFunc("/api/v1/disks", am.ViewAccess(aH.getDisks)).Methods(http.MethodGet)
|
||||
|
||||
// === Preference APIs ===
|
||||
|
||||
// user actions
|
||||
router.HandleFunc("/api/v1/user/preferences", am.ViewAccess(aH.getAllUserPreferences)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/user/preferences/{preferenceId}", am.ViewAccess(aH.getUserPreference)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/user/preferences/{preferenceId}", am.ViewAccess(aH.updateUserPreference)).Methods(http.MethodPut)
|
||||
|
||||
// org actions
|
||||
router.HandleFunc("/api/v1/org/preferences", am.AdminAccess(aH.getAllOrgPreferences)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.getOrgPreference)).Methods(http.MethodGet)
|
||||
|
||||
router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.updateOrgPreference)).Methods(http.MethodPut)
|
||||
|
||||
// === Authentication APIs ===
|
||||
router.HandleFunc("/api/v1/invite", am.AdminAccess(aH.inviteUser)).Methods(http.MethodPost)
|
||||
router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(aH.getInvite)).Methods(http.MethodGet)
|
||||
@@ -1329,8 +1347,44 @@ func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request)
|
||||
func (aH *APIHandler) getServicesTopLevelOps(w http.ResponseWriter, r *http.Request) {
|
||||
|
||||
var start, end time.Time
|
||||
var services []string
|
||||
|
||||
result, _, apiErr := aH.reader.GetTopLevelOperations(r.Context(), aH.skipConfig, start, end)
|
||||
type topLevelOpsParams struct {
|
||||
Service string `json:"service"`
|
||||
Start string `json:"start"`
|
||||
End string `json:"end"`
|
||||
}
|
||||
|
||||
var params topLevelOpsParams
|
||||
err := json.NewDecoder(r.Body).Decode(¶ms)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in getting req body for get top operations API", zap.Error(err))
|
||||
}
|
||||
|
||||
if params.Service != "" {
|
||||
services = []string{params.Service}
|
||||
}
|
||||
|
||||
startEpoch := params.Start
|
||||
if startEpoch != "" {
|
||||
startEpochInt, err := strconv.ParseInt(startEpoch, 10, 64)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading start time")
|
||||
return
|
||||
}
|
||||
start = time.Unix(0, startEpochInt)
|
||||
}
|
||||
endEpoch := params.End
|
||||
if endEpoch != "" {
|
||||
endEpochInt, err := strconv.ParseInt(endEpoch, 10, 64)
|
||||
if err != nil {
|
||||
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, "Error reading end time")
|
||||
return
|
||||
}
|
||||
end = time.Unix(0, endEpochInt)
|
||||
}
|
||||
|
||||
result, apiErr := aH.reader.GetTopLevelOperations(r.Context(), aH.skipConfig, start, end, services)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
@@ -2192,6 +2246,115 @@ func (aH *APIHandler) WriteJSON(w http.ResponseWriter, r *http.Request, response
|
||||
w.Write(resp)
|
||||
}
|
||||
|
||||
// Preferences
|
||||
|
||||
func (ah *APIHandler) getUserPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
user := common.GetUserFromContext(r.Context())
|
||||
|
||||
preference, apiErr := preferences.GetUserPreference(
|
||||
r.Context(), preferenceId, user.User.OrgId, user.User.Id,
|
||||
)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, preference)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) updateUserPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
user := common.GetUserFromContext(r.Context())
|
||||
req := preferences.UpdatePreference{}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
preference, apiErr := preferences.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, user.User.Id)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, preference)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getAllUserPreferences(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
user := common.GetUserFromContext(r.Context())
|
||||
preference, apiErr := preferences.GetAllUserPreferences(
|
||||
r.Context(), user.User.OrgId, user.User.Id,
|
||||
)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, preference)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getOrgPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
user := common.GetUserFromContext(r.Context())
|
||||
preference, apiErr := preferences.GetOrgPreference(
|
||||
r.Context(), preferenceId, user.User.OrgId,
|
||||
)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, preference)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) updateOrgPreference(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
req := preferences.UpdatePreference{}
|
||||
user := common.GetUserFromContext(r.Context())
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
|
||||
if err != nil {
|
||||
RespondError(w, model.BadRequest(err), nil)
|
||||
return
|
||||
}
|
||||
preference, apiErr := preferences.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, user.User.OrgId)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, preference)
|
||||
}
|
||||
|
||||
func (ah *APIHandler) getAllOrgPreferences(
|
||||
w http.ResponseWriter, r *http.Request,
|
||||
) {
|
||||
user := common.GetUserFromContext(r.Context())
|
||||
preference, apiErr := preferences.GetAllOrgPreferences(
|
||||
r.Context(), user.User.OrgId,
|
||||
)
|
||||
if apiErr != nil {
|
||||
RespondError(w, apiErr, nil)
|
||||
return
|
||||
}
|
||||
|
||||
ah.Respond(w, preference)
|
||||
}
|
||||
|
||||
// Integrations
|
||||
func (ah *APIHandler) RegisterIntegrationRoutes(router *mux.Router, am *AuthMiddleware) {
|
||||
subRouter := router.PathPrefix("/api/v1/integrations").Subrouter()
|
||||
@@ -3050,6 +3213,22 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
}
|
||||
|
||||
// WARN: Only works for AND operator in traces query
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
|
||||
isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
|
||||
if isUsed == true && len(traceIDs) > 0 {
|
||||
zap.L().Debug("traceID used as filter in traces query")
|
||||
// query signoz_spans table with traceID to get min and max timestamp
|
||||
min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
|
||||
if err == nil {
|
||||
// add timestamp filter to queryRange params
|
||||
tracesV3.AddTimestampFilters(min, max, queryRangeParams)
|
||||
zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result, errQuriesByName, err = aH.querier.QueryRange(ctx, queryRangeParams, spanKeys)
|
||||
|
||||
if err != nil {
|
||||
@@ -3319,6 +3498,22 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
|
||||
}
|
||||
}
|
||||
|
||||
// WARN: Only works for AND operator in traces query
|
||||
if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
// check if traceID is used as filter (with equal/similar operator) in traces query if yes add timestamp filter to queryRange params
|
||||
isUsed, traceIDs := tracesV3.TraceIdFilterUsedWithEqual(queryRangeParams)
|
||||
if isUsed == true && len(traceIDs) > 0 {
|
||||
zap.L().Debug("traceID used as filter in traces query")
|
||||
// query signoz_spans table with traceID to get min and max timestamp
|
||||
min, max, err := aH.reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
|
||||
if err == nil {
|
||||
// add timestamp filter to queryRange params
|
||||
tracesV3.AddTimestampFilters(min, max, queryRangeParams)
|
||||
zap.L().Debug("post adding timestamp filter in traces query", zap.Any("queryRangeParams", queryRangeParams))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, queryRangeParams, spanKeys)
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -110,6 +110,13 @@ service:

```

### If using non-default nginx log format, adjust log parsing regex

If you are using a [custom nginx log format](https://docs.nginx.com/nginx/admin-guide/monitoring/logging/#setting-up-the-access-log),
please adjust the regex used for parsing logs in the receivers named
`filelog/nginx-access-logs` and `filelog/nginx-error-logs` in collector config.
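As an illustration only (not part of this diff), a minimal sketch of what such an override could look like; the receiver name comes from the note above, while the log path and the regex's capture groups are placeholders that must be adapted to your actual `log_format`:

```yaml
receivers:
  filelog/nginx-access-logs:
    include:
      - /var/log/nginx/access.log   # adjust to your access-log path
    operators:
      # The named capture groups must mirror the fields of your custom log_format
      - type: regex_parser
        regex: '^(?P<remote_addr>\S+) \S+ (?P<remote_user>\S+) \[(?P<time_local>[^\]]+)\] "(?P<request>[^"]*)" (?P<status>\d+) (?P<body_bytes_sent>\d+)'
```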

#### Set Environment Variables

Set the following environment variables in your otel-collector environment:
pkg/query-service/app/preferences/map.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package preferences

var preferenceMap = map[string]Preference{
  "DASHBOARDS_LIST_VIEW": {
    Key:              "DASHBOARDS_LIST_VIEW",
    Name:             "Dashboards List View",
    Description:      "",
    ValueType:        "string",
    DefaultValue:     "grid",
    AllowedValues:    []interface{}{"grid", "list"},
    IsDiscreteValues: true,
    AllowedScopes:    []string{"user", "org"},
  },
  "LOGS_TOOLBAR_COLLAPSED": {
    Key:              "LOGS_TOOLBAR_COLLAPSED",
    Name:             "Logs toolbar",
    Description:      "",
    ValueType:        "boolean",
    DefaultValue:     false,
    AllowedValues:    []interface{}{true, false},
    IsDiscreteValues: true,
    AllowedScopes:    []string{"user", "org"},
  },
  "MAX_DEPTH_ALLOWED": {
    Key:              "MAX_DEPTH_ALLOWED",
    Name:             "Max Depth Allowed",
    Description:      "",
    ValueType:        "integer",
    DefaultValue:     10,
    IsDiscreteValues: false,
    Range: Range{
      Min: 0,
      Max: 100,
    },
    AllowedScopes: []string{"user", "org"},
  },
}
544
pkg/query-service/app/preferences/model.go
Normal file
544
pkg/query-service/app/preferences/model.go
Normal file
@@ -0,0 +1,544 @@
|
||||
package preferences
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
"go.signoz.io/signoz/ee/query-service/model"
|
||||
)
|
||||
|
||||
type Range struct {
|
||||
Min int64 `json:"min"`
|
||||
Max int64 `json:"max"`
|
||||
}
|
||||
|
||||
type Preference struct {
|
||||
Key string `json:"key"`
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description"`
|
||||
ValueType string `json:"valueType"`
|
||||
DefaultValue interface{} `json:"defaultValue"`
|
||||
AllowedValues []interface{} `json:"allowedValues"`
|
||||
IsDiscreteValues bool `json:"isDiscreteValues"`
|
||||
Range Range `json:"range"`
|
||||
AllowedScopes []string `json:"allowedScopes"`
|
||||
}
|
||||
|
||||
func (p *Preference) ErrorValueTypeMismatch() *model.ApiError {
|
||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not of expected type: %s", p.ValueType)}
|
||||
}
|
||||
|
||||
const (
|
||||
PreferenceValueTypeInteger string = "integer"
|
||||
PreferenceValueTypeFloat string = "float"
|
||||
PreferenceValueTypeString string = "string"
|
||||
PreferenceValueTypeBoolean string = "boolean"
|
||||
)
|
||||
|
||||
const (
|
||||
OrgAllowedScope string = "org"
|
||||
UserAllowedScope string = "user"
|
||||
)
|
||||
|
||||
func (p *Preference) checkIfInAllowedValues(preferenceValue interface{}) (bool, *model.ApiError) {
|
||||
|
||||
switch p.ValueType {
|
||||
case PreferenceValueTypeInteger:
|
||||
_, ok := preferenceValue.(int64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeFloat:
|
||||
_, ok := preferenceValue.(float64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeString:
|
||||
_, ok := preferenceValue.(string)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeBoolean:
|
||||
_, ok := preferenceValue.(bool)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
}
|
||||
isInAllowedValues := false
|
||||
for _, value := range p.AllowedValues {
|
||||
switch p.ValueType {
|
||||
case PreferenceValueTypeInteger:
|
||||
allowedValue, ok := value.(int64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
case PreferenceValueTypeFloat:
|
||||
allowedValue, ok := value.(float64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
case PreferenceValueTypeString:
|
||||
allowedValue, ok := value.(string)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
case PreferenceValueTypeBoolean:
|
||||
allowedValue, ok := value.(bool)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return isInAllowedValues, nil
|
||||
}

func (p *Preference) IsValidValue(preferenceValue interface{}) *model.ApiError {
    typeSafeValue := preferenceValue
    switch p.ValueType {
    case PreferenceValueTypeInteger:
        val, ok := preferenceValue.(int64)
        if !ok {
            floatVal, ok := preferenceValue.(float64)
            if !ok || floatVal != float64(int64(floatVal)) {
                return p.ErrorValueTypeMismatch()
            }
            val = int64(floatVal)
            typeSafeValue = val
        }
        if !p.IsDiscreteValues {
            if val < p.Range.Min || val > p.Range.Max {
                return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not in the range specified, min: %v , max:%v", p.Range.Min, p.Range.Max)}
            }
        }
    case PreferenceValueTypeString:
        _, ok := preferenceValue.(string)
        if !ok {
            return p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeFloat:
        _, ok := preferenceValue.(float64)
        if !ok {
            return p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeBoolean:
        _, ok := preferenceValue.(bool)
        if !ok {
            return p.ErrorValueTypeMismatch()
        }
    }

    // check the validity of the value being part of allowed values or the range specified if any
    if p.IsDiscreteValues {
        if p.AllowedValues != nil {
            isInAllowedValues, valueMisMatchErr := p.checkIfInAllowedValues(typeSafeValue)

            if valueMisMatchErr != nil {
                return valueMisMatchErr
            }
            if !isInAllowedValues {
                return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not in the list of allowedValues: %v", p.AllowedValues)}
            }
        }
    }
    return nil
}
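
A minimal sketch (not part of this diff) of how the integer branch above treats values decoded from JSON, where every number arrives as float64. The field names follow their usage in this file; the Preference literal and the type name of the Range literal are assumptions for illustration only.

// Illustrative only: an integer preference with a continuous range 1..100.
p := Preference{
    ValueType:        PreferenceValueTypeInteger,
    IsDiscreteValues: false,
    Range:            Range{Min: 1, Max: 100}, // type name of the Range literal is assumed
}
_ = p.IsValidValue(float64(7)) // nil: 7.0 is a whole number inside the range
_ = p.IsValidValue(7.5)        // *model.ApiError: not a whole number
_ = p.IsValidValue(int64(500)) // *model.ApiError: outside the configured range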

func (p *Preference) IsEnabledForScope(scope string) bool {
    isPreferenceEnabledForGivenScope := false
    if p.AllowedScopes != nil {
        for _, allowedScope := range p.AllowedScopes {
            if allowedScope == strings.ToLower(scope) {
                isPreferenceEnabledForGivenScope = true
            }
        }
    }
    return isPreferenceEnabledForGivenScope
}

func (p *Preference) SanitizeValue(preferenceValue interface{}) interface{} {
    switch p.ValueType {
    case PreferenceValueTypeBoolean:
        if preferenceValue == "1" || preferenceValue == true {
            return true
        } else {
            return false
        }
    default:
        return preferenceValue
    }
}
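
Preference values are persisted in SQLite TEXT columns (see InitDB below), so a boolean stored as the string "1" has to be normalised on the way out. A short sketch of the effect; the Preference literal is illustrative only.

b := Preference{ValueType: PreferenceValueTypeBoolean}
_ = b.SanitizeValue("1")  // true
_ = b.SanitizeValue(true) // true
_ = b.SanitizeValue("0")  // false: anything other than "1" or true
_ = b.SanitizeValue(nil)  // false for booleans; non-boolean value types pass through unchanged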

type AllPreferences struct {
    Preference
    Value interface{} `json:"value"`
}

type PreferenceKV struct {
    PreferenceId    string      `json:"preference_id" db:"preference_id"`
    PreferenceValue interface{} `json:"preference_value" db:"preference_value"`
}

type UpdatePreference struct {
    PreferenceValue interface{} `json:"preference_value"`
}

var db *sqlx.DB

func InitDB(datasourceName string) error {
    var err error
    db, err = sqlx.Open("sqlite3", datasourceName)

    if err != nil {
        return err
    }

    // create the user preference table
    tableSchema := `
    PRAGMA foreign_keys = ON;
    CREATE TABLE IF NOT EXISTS user_preference(
        preference_id TEXT NOT NULL,
        preference_value TEXT,
        user_id TEXT NOT NULL,
        PRIMARY KEY (preference_id,user_id),
        FOREIGN KEY (user_id)
            REFERENCES users(id)
                ON UPDATE CASCADE
                ON DELETE CASCADE
    );`

    _, err = db.Exec(tableSchema)
    if err != nil {
        return fmt.Errorf("error in creating user_preference table: %s", err.Error())
    }

    // create the org preference table
    tableSchema = `
    PRAGMA foreign_keys = ON;
    CREATE TABLE IF NOT EXISTS org_preference(
        preference_id TEXT NOT NULL,
        preference_value TEXT,
        org_id TEXT NOT NULL,
        PRIMARY KEY (preference_id,org_id),
        FOREIGN KEY (org_id)
            REFERENCES organizations(id)
                ON UPDATE CASCADE
                ON DELETE CASCADE
    );`

    _, err = db.Exec(tableSchema)
    if err != nil {
        return fmt.Errorf("error in creating org_preference table: %s", err.Error())
    }

    return nil
}

// org preference functions
func GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*PreferenceKV, *model.ApiError) {
    // check if the preference key exists or not
    preference, seen := preferenceMap[preferenceId]
    if !seen {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
    }

    // check if the preference is enabled for org scope or not
    isPreferenceEnabled := preference.IsEnabledForScope(OrgAllowedScope)
    if !isPreferenceEnabled {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at org scope: %s", preferenceId)}
    }

    // fetch the value from the database
    var orgPreference PreferenceKV
    query := `SELECT preference_id , preference_value FROM org_preference WHERE preference_id=$1 AND org_id=$2;`
    err := db.Get(&orgPreference, query, preferenceId, orgId)

    // if the value doesn't exist in db then return the default value
    if err != nil {
        if err == sql.ErrNoRows {
            return &PreferenceKV{
                PreferenceId:    preferenceId,
                PreferenceValue: preference.DefaultValue,
            }, nil
        }
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in fetching the org preference: %s", err.Error())}
    }

    // else return the value fetched from the org_preference table
    return &PreferenceKV{
        PreferenceId:    preferenceId,
        PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
    }, nil
}

func UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) (*PreferenceKV, *model.ApiError) {
    // check if the preference key exists or not
    preference, seen := preferenceMap[preferenceId]
    if !seen {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
    }

    // check if the preference is enabled at org scope or not
    isPreferenceEnabled := preference.IsEnabledForScope(OrgAllowedScope)
    if !isPreferenceEnabled {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at org scope: %s", preferenceId)}
    }

    err := preference.IsValidValue(preferenceValue)
    if err != nil {
        return nil, err
    }

    // update the values in the org_preference table and return the key and the value
    query := `INSERT INTO org_preference(preference_id,preference_value,org_id) VALUES($1,$2,$3)
        ON CONFLICT(preference_id,org_id) DO
        UPDATE SET preference_value= $2 WHERE preference_id=$1 AND org_id=$3;`

    _, dberr := db.Exec(query, preferenceId, preferenceValue, orgId)

    if dberr != nil {
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in setting the preference value: %s", dberr.Error())}
    }

    return &PreferenceKV{
        PreferenceId:    preferenceId,
        PreferenceValue: preferenceValue,
    }, nil
}
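
A hedged sketch of the round trip these two functions give an org-scoped preference; ctx and orgID are placeholders and "some_org_pref" is a hypothetical preference id. Because of the ON CONFLICT upsert above, repeated updates overwrite the same row instead of inserting duplicates.

if _, apiErr := UpdateOrgPreference(ctx, "some_org_pref", true, orgID); apiErr == nil {
    kv, _ := GetOrgPreference(ctx, "some_org_pref", orgID)
    _ = kv.PreferenceValue // the stored value, run through SanitizeValue for its value type
}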

func GetAllOrgPreferences(ctx context.Context, orgId string) (*[]AllPreferences, *model.ApiError) {
    // filter out all the org enabled preferences from the preference variable
    allOrgPreferences := []AllPreferences{}

    // fetch all the org preference values stored in org_preference table
    orgPreferenceValues := []PreferenceKV{}

    query := `SELECT preference_id,preference_value FROM org_preference WHERE org_id=$1;`
    err := db.Select(&orgPreferenceValues, query, orgId)

    if err != nil {
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all org preference values: %s", err)}
    }

    // create a map of key vs values from the above response
    preferenceValueMap := map[string]interface{}{}

    for _, preferenceValue := range orgPreferenceValues {
        preferenceValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
    }

    // update the filtered list below wherever a value is present in the map
    for _, preference := range preferenceMap {
        isEnabledForOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
        if isEnabledForOrgScope {
            preferenceWithValue := AllPreferences{}
            preferenceWithValue.Key = preference.Key
            preferenceWithValue.Name = preference.Name
            preferenceWithValue.Description = preference.Description
            preferenceWithValue.AllowedScopes = preference.AllowedScopes
            preferenceWithValue.AllowedValues = preference.AllowedValues
            preferenceWithValue.DefaultValue = preference.DefaultValue
            preferenceWithValue.Range = preference.Range
            preferenceWithValue.ValueType = preference.ValueType
            preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
            value, seen := preferenceValueMap[preference.Key]

            if seen {
                preferenceWithValue.Value = value
            } else {
                preferenceWithValue.Value = preference.DefaultValue
            }

            preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
            allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
        }
    }
    return &allOrgPreferences, nil
}

// user preference functions
func GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*PreferenceKV, *model.ApiError) {
    // check if the preference key exists
    preference, seen := preferenceMap[preferenceId]
    if !seen {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
    }

    preferenceValue := PreferenceKV{
        PreferenceId:    preferenceId,
        PreferenceValue: preference.DefaultValue,
    }

    // check if the preference is enabled at user scope
    isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(UserAllowedScope)
    if !isPreferenceEnabledAtUserScope {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at user scope: %s", preferenceId)}
    }

    isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
    // get the value from the org scope if enabled at org scope
    if isPreferenceEnabledAtOrgScope {
        orgPreference := PreferenceKV{}

        query := `SELECT preference_id , preference_value FROM org_preference WHERE preference_id=$1 AND org_id=$2;`

        err := db.Get(&orgPreference, query, preferenceId, orgId)

        // if there is an error other than sql.ErrNoRows, return from here
        if err != nil && err != sql.ErrNoRows {
            return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting org preference values: %s", err.Error())}
        }

        // if there is no error, update the preference value with the value from org preference
        if err == nil {
            preferenceValue.PreferenceValue = orgPreference.PreferenceValue
        }
    }

    // get the value from the user_preference table; if it exists return this value, else the one resolved in the step above
    userPreference := PreferenceKV{}

    query := `SELECT preference_id, preference_value FROM user_preference WHERE preference_id=$1 AND user_id=$2;`
    err := db.Get(&userPreference, query, preferenceId, userId)

    if err != nil && err != sql.ErrNoRows {
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting user preference values: %s", err.Error())}
    }

    if err == nil {
        preferenceValue.PreferenceValue = userPreference.PreferenceValue
    }

    return &PreferenceKV{
        PreferenceId:    preferenceValue.PreferenceId,
        PreferenceValue: preference.SanitizeValue(preferenceValue.PreferenceValue),
    }, nil
}
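
The resolution order implemented above is: default value, then the org_preference row (when the preference is also org-scoped), then the user_preference row. A hedged usage sketch with placeholder ids:

kv, apiErr := GetUserPreference(ctx, "some_user_pref", orgID, userID)
if apiErr != nil {
    // unknown preference id, not enabled at user scope, or a DB error
}
// On success, kv.PreferenceValue is the user override if one exists, else the org
// override (for org-scoped preferences), else the preference's default value.
_ = kv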

func UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) (*PreferenceKV, *model.ApiError) {
    // check if the preference id is valid
    preference, seen := preferenceMap[preferenceId]
    if !seen {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
    }

    // check if the preference is enabled at user scope
    isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(UserAllowedScope)
    if !isPreferenceEnabledAtUserScope {
        return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at user scope: %s", preferenceId)}
    }

    err := preference.IsValidValue(preferenceValue)
    if err != nil {
        return nil, err
    }
    // update the user preference values
    query := `INSERT INTO user_preference(preference_id,preference_value,user_id) VALUES($1,$2,$3)
        ON CONFLICT(preference_id,user_id) DO
        UPDATE SET preference_value= $2 WHERE preference_id=$1 AND user_id=$3;`

    _, dberrr := db.Exec(query, preferenceId, preferenceValue, userId)

    if dberrr != nil {
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in setting the preference value: %s", dberrr.Error())}
    }

    return &PreferenceKV{
        PreferenceId:    preferenceId,
        PreferenceValue: preferenceValue,
    }, nil
}

func GetAllUserPreferences(ctx context.Context, orgId string, userId string) (*[]AllPreferences, *model.ApiError) {
    allUserPreferences := []AllPreferences{}

    // fetch all the org preference values stored in org_preference table
    orgPreferenceValues := []PreferenceKV{}

    query := `SELECT preference_id,preference_value FROM org_preference WHERE org_id=$1;`
    err := db.Select(&orgPreferenceValues, query, orgId)

    if err != nil {
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all org preference values: %s", err)}
    }

    // create a map of key vs values from the above response
    preferenceOrgValueMap := map[string]interface{}{}

    for _, preferenceValue := range orgPreferenceValues {
        preferenceOrgValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
    }

    // fetch all the user preference values stored in user_preference table
    userPreferenceValues := []PreferenceKV{}

    query = `SELECT preference_id,preference_value FROM user_preference WHERE user_id=$1;`
    err = db.Select(&userPreferenceValues, query, userId)

    if err != nil {
        return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all user preference values: %s", err)}
    }

    // create a map of key vs values from the above response
    preferenceUserValueMap := map[string]interface{}{}

    for _, preferenceValue := range userPreferenceValues {
        preferenceUserValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
    }

    // update the filtered list below wherever a value is present in the maps
    for _, preference := range preferenceMap {
        isEnabledForUserScope := preference.IsEnabledForScope(UserAllowedScope)

        if isEnabledForUserScope {
            preferenceWithValue := AllPreferences{}
            preferenceWithValue.Key = preference.Key
            preferenceWithValue.Name = preference.Name
            preferenceWithValue.Description = preference.Description
            preferenceWithValue.AllowedScopes = preference.AllowedScopes
            preferenceWithValue.AllowedValues = preference.AllowedValues
            preferenceWithValue.DefaultValue = preference.DefaultValue
            preferenceWithValue.Range = preference.Range
            preferenceWithValue.ValueType = preference.ValueType
            preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
            preferenceWithValue.Value = preference.DefaultValue

            isEnabledForOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
            if isEnabledForOrgScope {
                value, seen := preferenceOrgValueMap[preference.Key]
                if seen {
                    preferenceWithValue.Value = value
                }
            }

            value, seen := preferenceUserValueMap[preference.Key]

            if seen {
                preferenceWithValue.Value = value
            }

            preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
            allUserPreferences = append(allUserPreferences, preferenceWithValue)
        }
    }
    return &allUserPreferences, nil
}

@@ -4,6 +4,7 @@ import (
    "bytes"
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "net"
@@ -27,6 +28,7 @@ import (
    "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline"
    "go.signoz.io/signoz/pkg/query-service/app/opamp"
    opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model"
    "go.signoz.io/signoz/pkg/query-service/app/preferences"
    "go.signoz.io/signoz/pkg/query-service/common"
    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"

@@ -94,6 +96,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
        return nil, err
    }

    if err := preferences.InitDB(constants.RELATIONAL_DATASOURCE_PATH); err != nil {
        return nil, err
    }

    localDB, err := dashboards.InitDB(constants.RELATIONAL_DATASOURCE_PATH)
    explorer.InitWithDSN(constants.RELATIONAL_DATASOURCE_PATH)

@@ -268,7 +274,21 @@ func (s *Server) createPublicServer(api *APIHandler) (*http.Server, error) {
    r.Use(s.analyticsMiddleware)
    r.Use(loggingMiddleware)

    am := NewAuthMiddleware(auth.GetUserFromRequest)
    // add auth middleware
    getUserFromRequest := func(r *http.Request) (*model.UserPayload, error) {
        user, err := auth.GetUserFromRequest(r)

        if err != nil {
            return nil, err
        }

        if user.User.OrgId == "" {
            return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims"))
        }

        return user, nil
    }
    am := NewAuthMiddleware(getUserFromRequest)

    api.RegisterRoutes(r, am)
    api.RegisterLogsRoutes(r, am)

pkg/query-service/app/traces/v3/utils.go (new file, 183 lines)
@@ -0,0 +1,183 @@
package v3

import (
    "strconv"

    v3 "go.signoz.io/signoz/pkg/query-service/model/v3"
    "go.signoz.io/signoz/pkg/query-service/utils"
    "go.uber.org/zap"
)

// check if traceId filter is used in traces query and return the list of traceIds
func TraceIdFilterUsedWithEqual(params *v3.QueryRangeParamsV3) (bool, []string) {
    compositeQuery := params.CompositeQuery
    if compositeQuery == nil {
        return false, []string{}
    }
    var traceIds []string
    var traceIdFilterUsed bool

    // Build queries for each builder query
    for queryName, query := range compositeQuery.BuilderQueries {
        if query.Expression != queryName && query.DataSource != v3.DataSourceTraces {
            continue
        }

        // check filter attribute
        if query.Filters != nil && len(query.Filters.Items) != 0 {
            for _, item := range query.Filters.Items {

                if item.Key.Key == "traceID" && (item.Operator == v3.FilterOperatorIn ||
                    item.Operator == v3.FilterOperatorEqual) {
                    traceIdFilterUsed = true
                    // validate value
                    var err error
                    val := item.Value
                    val, err = utils.ValidateAndCastValue(val, item.Key.DataType)
                    if err != nil {
                        zap.L().Error("invalid value for key", zap.String("key", item.Key.Key), zap.Error(err))
                        return false, []string{}
                    }
                    if val != nil {
                        fmtVal := extractFormattedStringValues(val)
                        traceIds = append(traceIds, fmtVal...)
                    }
                }
            }
        }

    }

    zap.L().Debug("traceIds", zap.Any("traceIds", traceIds))
    return traceIdFilterUsed, traceIds
}

func extractFormattedStringValues(v interface{}) []string {
    // if it's a pointer, convert it to a value
    v = getPointerValue(v)

    switch x := v.(type) {
    case string:
        return []string{x}

    case []interface{}:
        if len(x) == 0 {
            return []string{}
        }
        switch x[0].(type) {
        case string:
            values := []string{}
            for _, val := range x {
                values = append(values, val.(string))
            }
            return values
        default:
            return []string{}
        }
    default:
        return []string{}
    }
}

func getPointerValue(v interface{}) interface{} {
    switch x := v.(type) {
    case *uint8:
        return *x
    case *uint16:
        return *x
    case *uint32:
        return *x
    case *uint64:
        return *x
    case *int:
        return *x
    case *int8:
        return *x
    case *int16:
        return *x
    case *int32:
        return *x
    case *int64:
        return *x
    case *float32:
        return *x
    case *float64:
        return *x
    case *string:
        return *x
    case *bool:
        return *x
    case []interface{}:
        values := []interface{}{}
        for _, val := range x {
            values = append(values, getPointerValue(val))
        }
        return values
    default:
        return v
    }
}

func AddTimestampFilters(minTime int64, maxTime int64, params *v3.QueryRangeParamsV3) {
    if minTime == 0 && maxTime == 0 {
        return
    }

    compositeQuery := params.CompositeQuery
    if compositeQuery == nil {
        return
    }
    // Build queries for each builder query
    for queryName, query := range compositeQuery.BuilderQueries {
        if query.Expression != queryName && query.DataSource != v3.DataSourceTraces {
            continue
        }

        addTimeStampFilter := false

        // check filter attribute
        if query.Filters != nil && len(query.Filters.Items) != 0 {
            for _, item := range query.Filters.Items {
                if item.Key.Key == "traceID" && (item.Operator == v3.FilterOperatorIn ||
                    item.Operator == v3.FilterOperatorEqual) {
                    addTimeStampFilter = true
                }
            }
        }

        // add timestamp filter to the query only if a traceID filter with the equal/in operator is used
        if addTimeStampFilter {
            timeFilters := []v3.FilterItem{
                {
                    Key: v3.AttributeKey{
                        Key:      "timestamp",
                        Type:     v3.AttributeKeyTypeTag,
                        DataType: v3.AttributeKeyDataTypeString,
                        IsColumn: true,
                    },
                    Value:    strconv.FormatUint(uint64(minTime), 10),
                    Operator: v3.FilterOperatorGreaterThanOrEq,
                },
                {
                    Key: v3.AttributeKey{
                        Key:      "timestamp",
                        Type:     v3.AttributeKeyTypeTag,
                        DataType: v3.AttributeKeyDataTypeString,
                        IsColumn: true,
                    },
                    Value:    strconv.FormatUint(uint64(maxTime), 10),
                    Operator: v3.FilterOperatorLessThanOrEq,
                },
            }

            // add new timestamp filter to query
            if query.Filters == nil {
                query.Filters = &v3.FilterSet{
                    Items: timeFilters,
                }
            } else {
                query.Filters.Items = append(query.Filters.Items, timeFilters...)
            }
        }
    }
}
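
A hedged sketch of how these two helpers appear intended to be used together by a caller; reader, ctx and params are placeholders here, and GetMinAndMaxTimestampForTraceID is the Reader method added later in this diff.

if used, traceIDs := TraceIdFilterUsedWithEqual(params); used && len(traceIDs) > 0 {
    minTime, maxTime, err := reader.GetMinAndMaxTimestampForTraceID(ctx, traceIDs)
    if err == nil {
        // narrow every traces builder query to the trace's own time window
        AddTimestampFilters(minTime, maxTime, params)
    }
}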

@@ -467,6 +467,10 @@ func authenticateLogin(ctx context.Context, req *model.LoginRequest) (*model.Use
        return nil, errors.Wrap(err, "failed to validate refresh token")
    }

    if user.OrgId == "" {
        return nil, model.UnauthorizedError(errors.New("orgId is missing in the claims"))
    }

    return user, nil
}

@@ -505,6 +509,7 @@ func GenerateJWTForUser(user *model.User) (model.UserJwtObject, error) {
        "gid":   user.GroupId,
        "email": user.Email,
        "exp":   j.AccessJwtExpiry,
        "orgId": user.OrgId,
    })

    j.AccessJwt, err = token.SignedString([]byte(JwtSecret))
@@ -518,6 +523,7 @@ func GenerateJWTForUser(user *model.User) (model.UserJwtObject, error) {
        "gid":   user.GroupId,
        "email": user.Email,
        "exp":   j.RefreshJwtExpiry,
        "orgId": user.OrgId,
    })

    j.RefreshJwt, err = token.SignedString([]byte(JwtSecret))

@@ -20,6 +20,8 @@ var (
)

func ParseJWT(jwtStr string) (jwt.MapClaims, error) {
    // TODO[@vikrantgupta25] : to update this to the claims check function for better integrity of JWT
    // reference - https://pkg.go.dev/github.com/golang-jwt/jwt/v5#Parser.ParseWithClaims
    token, err := jwt.Parse(jwtStr, func(token *jwt.Token) (interface{}, error) {
        if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
            return nil, errors.Errorf("unknown signing algo: %v", token.Header["alg"])
@@ -35,6 +37,7 @@ func ParseJWT(jwtStr string) (jwt.MapClaims, error) {
    if !ok || !token.Valid {
        return nil, errors.Errorf("Not a valid jwt claim")
    }

    return claims, nil
}

@@ -47,11 +50,18 @@ func validateUser(tok string) (*model.UserPayload, error) {
    if !claims.VerifyExpiresAt(now, true) {
        return nil, model.ErrorTokenExpired
    }

    var orgId string
    if claims["orgId"] != nil {
        orgId = claims["orgId"].(string)
    }

    return &model.UserPayload{
        User: model.User{
            Id:      claims["id"].(string),
            GroupId: claims["gid"].(string),
            Email:   claims["email"].(string),
            OrgId:   orgId,
        },
    }, nil
}
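
A sketch of the claim set an access token carries after this change; values and the expiry variable are placeholders. Tokens minted before the change simply lack "orgId", in which case validateUser above leaves OrgId empty and the new auth middleware rejects the request, forcing a fresh login.

claims := jwt.MapClaims{
    "id":    "user-id",
    "gid":   "group-id",
    "email": "user@example.com",
    "exp":   accessJwtExpiry, // placeholder expiry timestamp
    "orgId": "org-id",        // new claim, read back in validateUser
}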

@@ -23,7 +23,7 @@ type Reader interface {
    GetInstantQueryMetricsResult(ctx context.Context, query *model.InstantQueryMetricsParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
    GetQueryRangeResult(ctx context.Context, query *model.QueryRangeParams) (*promql.Result, *stats.QueryStats, *model.ApiError)
    GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams, skipConfig *model.SkipConfig) (*[]model.ServiceOverviewItem, *model.ApiError)
    GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time) (*map[string][]string, *map[string][]string, *model.ApiError)
    GetTopLevelOperations(ctx context.Context, skipConfig *model.SkipConfig, start, end time.Time, services []string) (*map[string][]string, *model.ApiError)
    GetServices(ctx context.Context, query *model.GetServicesParams, skipConfig *model.SkipConfig) (*[]model.ServiceItem, *model.ApiError)
    GetTopOperations(ctx context.Context, query *model.GetTopOperationsParams) (*[]model.TopOperationsItem, *model.ApiError)
    GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error)
@@ -103,6 +103,8 @@ type Reader interface {
    CheckClickHouse(ctx context.Context) error

    GetMetricMetadata(context.Context, string, string) (*v3.MetricMetadataResponse, error)

    GetMinAndMaxTimestampForTraceID(ctx context.Context, traceID []string) (int64, int64, error)
}

type Querier interface {

@@ -638,6 +638,12 @@ type AlertsInfo struct {
    LogsBasedAlerts   int `json:"logsBasedAlerts"`
    MetricBasedAlerts int `json:"metricBasedAlerts"`
    TracesBasedAlerts int `json:"tracesBasedAlerts"`
    SlackChannels     int `json:"slackChannels"`
    WebHookChannels   int `json:"webHookChannels"`
    PagerDutyChannels int `json:"pagerDutyChannels"`
    OpsGenieChannels  int `json:"opsGenieChannels"`
    EmailChannels     int `json:"emailChannels"`
    MSTeamsChannels   int `json:"microsoftTeamsChannels"`
}

type SavedViewsInfo struct {

@@ -293,6 +293,22 @@ func createTelemetry() {
    if err == nil {
        channels, err := telemetry.reader.GetChannels()
        if err == nil {
            for _, channel := range *channels {
                switch channel.Type {
                case "slack":
                    alertsInfo.SlackChannels++
                case "webhook":
                    alertsInfo.WebHookChannels++
                case "pagerduty":
                    alertsInfo.PagerDutyChannels++
                case "opsgenie":
                    alertsInfo.OpsGenieChannels++
                case "email":
                    alertsInfo.EmailChannels++
                case "msteams":
                    alertsInfo.MSTeamsChannels++
                }
            }
            savedViewsInfo, err := telemetry.reader.GetSavedViewsInfo(ctx)
            if err == nil {
                dashboardsAlertsData := map[string]interface{}{
@@ -309,6 +325,12 @@ func createTelemetry() {
                    "totalSavedViews":   savedViewsInfo.TotalSavedViews,
                    "logsSavedViews":    savedViewsInfo.LogsSavedViews,
                    "tracesSavedViews":  savedViewsInfo.TracesSavedViews,
                    "slackChannels":     alertsInfo.SlackChannels,
                    "webHookChannels":   alertsInfo.WebHookChannels,
                    "pagerDutyChannels": alertsInfo.PagerDutyChannels,
                    "opsGenieChannels":  alertsInfo.OpsGenieChannels,
                    "emailChannels":     alertsInfo.EmailChannels,
                    "msteamsChannels":   alertsInfo.MSTeamsChannels,
                }
                // send event only if there are dashboards or alerts or channels
                if (dashboardsInfo.TotalDashboards > 0 || alertsInfo.TotalAlerts > 0 || len(*channels) > 0 || savedViewsInfo.TotalSavedViews > 0) && apiErr == nil {

@@ -192,7 +192,7 @@ services:
    <<: *db-depend

  otel-collector-migrator:
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.2}
    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.102.3}
    container_name: otel-migrator
    command:
      - "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
    #   condition: service_healthy

  otel-collector:
    image: signoz/signoz-otel-collector:0.102.2
    image: signoz/signoz-otel-collector:0.102.3
    container_name: signoz-otel-collector
    command:
      [