Mirror of https://github.com/SigNoz/signoz.git, synced 2025-12-29 07:27:07 +00:00.

Compare commits: update-que ... v0.76.2-be (7 commits)
| SHA1 |
|---|
| be44d6606a |
| bf7a31817f |
| 492d846fb5 |
| b6a121c2bb |
| e09519a748 |
| cd3645eb16 |
| e7f00ab0cd |
@@ -65,5 +65,6 @@
     "INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
     "METRICS_EXPLORER": "SigNoz | Metrics Explorer",
     "METRICS_EXPLORER_EXPLORER": "SigNoz | Metrics Explorer",
-    "METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer"
+    "METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
+    "API_MONITORING": "SigNoz | API Monitoring"
 }
@@ -1,5 +1,6 @@
 import { LoadingOutlined } from '@ant-design/icons';
 import { Select, Spin, Table, Typography } from 'antd';
+import logEvent from 'api/common/logEvent';
 import { ENTITY_VERSION_V4 } from 'constants/app';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import {
@@ -151,6 +152,7 @@ function AllEndPoints({
     if (groupBy.length === 0) {
       setSelectedEndPointName(record.endpointName); // this will open up the endpoint details tab
       setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
+      logEvent('API Monitoring: Endpoint name row clicked', {});
     } else {
       handleGroupByRowClick(record); // this will prepare the nested query payload
     }
@@ -392,6 +392,39 @@
   gap: 20px;
   padding-top: 20px;

+  .endpoint-meta-data {
+    display: flex;
+    gap: 8px;
+    .endpoint-meta-data-pill {
+      display: flex;
+      align-items: flex-start;
+      border-radius: 4px;
+      border: 1px solid var(--bg-slate-300);
+      width: fit-content;
+      .endpoint-meta-data-label {
+        display: flex;
+        padding: 6px 8px;
+        align-items: center;
+        gap: 4px;
+        border-right: 1px solid var(--bg-slate-300);
+        color: var(--text-vanilla-100);
+        background: var(--bg-slate-500);
+        height: calc(100% - 12px);
+      }
+
+      .endpoint-meta-data-value {
+        display: flex;
+        padding: 6px 8px;
+        justify-content: center;
+        align-items: center;
+        gap: 10px;
+        color: var(--text-vanilla-400);
+        background: var(--bg-slate-400);
+        height: calc(100% - 12px);
+      }
+    }
+  }
+
   .endpoint-details-filters-container {
     display: flex;
     flex-direction: row;
@@ -405,6 +438,13 @@
     }
   }

+  .ant-select-item,
+  .ant-select-item-option-content {
+    flex: auto;
+    white-space: normal;
+    overflow-wrap: break-word;
+  }
+
   .status-code-table-container {
     border-radius: 3px;
     border: 1px solid var(--bg-slate-500);
@@ -809,6 +849,13 @@
     width: 100%;
   }
 }

+.ant-select-item,
+.ant-select-item-option-content {
+  flex: auto;
+  white-space: normal;
+  overflow-wrap: break-word;
+}
+
 }
@@ -917,6 +964,20 @@ .lightMode {
   }
 }

+.endpoint-meta-data {
+  .endpoint-meta-data-pill {
+    .endpoint-meta-data-label {
+      color: var(--text-ink-300);
+      background: var(--bg-vanilla-100);
+    }
+
+    .endpoint-meta-data-value {
+      color: var(--text-ink-300);
+      background: var(--bg-vanilla-100);
+    }
+  }
+}
+
 .status-code-table-container {
   .ant-table {
     .ant-table-thead > tr > th {
@@ -2,6 +2,7 @@ import { ENTITY_VERSION_V4 } from 'constants/app';
 import { initialQueriesMap } from 'constants/queryBuilder';
 import {
   END_POINT_DETAILS_QUERY_KEYS_ARRAY,
+  extractPortAndEndpoint,
   getEndPointDetailsQueryPayload,
 } from 'container/ApiMonitoring/utils';
 import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2';
@@ -121,6 +122,11 @@ function EndPointDetails({
     [endPointDetailsDataQueries],
   );

+  const { endpoint, port } = useMemo(
+    () => extractPortAndEndpoint(endPointName),
+    [endPointName],
+  );
+
   return (
     <div className="endpoint-details-container">
       <div className="endpoint-details-filters-container">
@@ -129,6 +135,8 @@ function EndPointDetails({
           selectedEndPointName={endPointName}
           setSelectedEndPointName={setSelectedEndPointName}
           endPointDropDownDataQuery={endPointDropDownDataQuery}
+          parentContainerDiv=".endpoint-details-filters-container"
+          dropdownStyle={{ width: 'calc(100% - 36px)' }}
         />
       </div>
       <div className="endpoint-details-filters-container-search">
@@ -141,6 +149,16 @@ function EndPointDetails({
         />
       </div>
     </div>
+    <div className="endpoint-meta-data">
+      <div className="endpoint-meta-data-pill">
+        <div className="endpoint-meta-data-label">Endpoint</div>
+        <div className="endpoint-meta-data-value">{endpoint || '-'}</div>
+      </div>
+      <div className="endpoint-meta-data-pill">
+        <div className="endpoint-meta-data-label">Port</div>
+        <div className="endpoint-meta-data-value">{port || '-'}</div>
+      </div>
+    </div>
     <EndPointMetrics endPointMetricsDataQuery={endPointMetricsDataQuery} />
     {!isServicesFilterApplied && (
       <DependentServices
@@ -28,6 +28,8 @@ function EndPointDetailsZeroState({
       <EndPointsDropDown
         setSelectedEndPointName={setSelectedEndPointName}
         endPointDropDownDataQuery={endPointDropDownDataQuery}
+        parentContainerDiv=".end-point-details-zero-state-wrapper"
+        dropdownStyle={{ width: '60%' }}
       />
     </div>
   </div>
@@ -8,16 +8,22 @@ interface EndPointsDropDownProps {
   selectedEndPointName?: string;
   setSelectedEndPointName: (value: string) => void;
   endPointDropDownDataQuery: UseQueryResult<SuccessResponse<any>, unknown>;
+  parentContainerDiv?: string;
+  dropdownStyle?: React.CSSProperties;
 }

 const defaultProps = {
   selectedEndPointName: '',
+  parentContainerDiv: '',
+  dropdownStyle: {},
 };

 function EndPointsDropDown({
   selectedEndPointName,
   setSelectedEndPointName,
   endPointDropDownDataQuery,
+  parentContainerDiv,
+  dropdownStyle,
 }: EndPointsDropDownProps): JSX.Element {
   const { data, isLoading, isFetching } = endPointDropDownDataQuery;

@@ -39,6 +45,13 @@ function EndPointsDropDown({
       style={{ width: '100%' }}
       onChange={handleChange}
       options={formattedData}
+      getPopupContainer={
+        parentContainerDiv
+          ? (): HTMLElement =>
+              document.querySelector(parentContainerDiv) as HTMLElement
+          : (triggerNode): HTMLElement => triggerNode.parentNode as HTMLElement
+      }
+      dropdownStyle={dropdownStyle}
     />
   );
 }
@@ -1,6 +1,7 @@
 import { LoadingOutlined } from '@ant-design/icons';
 import { Spin, Table } from 'antd';
 import { ColumnType } from 'antd/lib/table';
+import logEvent from 'api/common/logEvent';
 import { ENTITY_VERSION_V4 } from 'constants/app';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import {
@@ -114,6 +115,7 @@ function ExpandedRow({
       onClick: (): void => {
         setSelectedEndPointName(record.endpointName);
         setSelectedView(VIEW_TYPES.ENDPOINT_DETAILS);
+        logEvent('API Monitoring: Endpoint name row clicked', {});
       },
       className: 'expanded-clickable-row',
     })}
@@ -3,6 +3,7 @@ import '../Explorer.styles.scss';
 import { LoadingOutlined } from '@ant-design/icons';
 import { Spin, Table, Typography } from 'antd';
 import axios from 'api';
+import logEvent from 'api/common/logEvent';
 import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
 import { AxiosError } from 'axios';
 import cx from 'classnames';
@@ -130,6 +131,7 @@ function DomainList({
           (item) => item.key === record.key,
         );
         setSelectedDomainIndex(dataIndex);
+        logEvent('API Monitoring: Domain name row clicked', {});
       }
     },
     className: 'expanded-clickable-row',
@@ -3,13 +3,14 @@ import './Explorer.styles.scss';
 import { FilterOutlined } from '@ant-design/icons';
 import * as Sentry from '@sentry/react';
 import { Switch, Typography } from 'antd';
+import logEvent from 'api/common/logEvent';
 import cx from 'classnames';
 import QuickFilters from 'components/QuickFilters/QuickFilters';
 import { QuickFiltersSource } from 'components/QuickFilters/types';
 import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
 import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
 import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
-import { useMemo, useState } from 'react';
+import { useEffect, useMemo, useState } from 'react';
 import { Query } from 'types/api/queryBuilder/queryBuilderData';
 import { DataSource } from 'types/common/queryBuilder';

@@ -21,6 +22,11 @@ function Explorer(): JSX.Element {

   const { currentQuery } = useQueryBuilder();

+  useEffect(() => {
+    // logEvent('API Monitoring: Landing page visited', {});
+    console.log('uncaught API Monitoring: Landing page visited');
+  }, []);
+
   const { handleChangeQueryData } = useQueryOperations({
     index: 0,
     query: currentQuery.builder.queryData[0],
@@ -64,7 +70,12 @@ function Explorer(): JSX.Element {
           style={{ marginLeft: 'auto' }}
           checked={showIP}
           onClick={(): void => {
-            setShowIP((showIP) => !showIP);
+            setShowIP((showIP): boolean => {
+              logEvent('API Monitoring: Show IP addresses clicked', {
+                showIP: !showIP,
+              });
+              return !showIP;
+            });
           }}
         />
       </div>
@@ -128,7 +128,10 @@ export const columnsConfig: ColumnType<APIDomainsRowData>[] = [
     sorter: false,
     align: 'right',
     className: `column`,
-    render: (lastUsed: number): string => getLastUsedRelativeTime(lastUsed),
+    render: (lastUsed: number | string): string =>
+      lastUsed === 'n/a' || lastUsed === '-'
+        ? '-'
+        : getLastUsedRelativeTime(lastUsed as number),
   },
   {
     title: (
@@ -217,9 +220,9 @@ interface APIMonitoringResponseRow {
   data: {
     endpoints: number;
     error_rate: number;
-    lastseen: number;
+    lastseen: number | string;
     [domainNameKey]: string;
-    p99: number;
+    p99: number | string;
     rps: number;
   };
 }
@@ -232,12 +235,12 @@ interface EndPointsResponseRow {

 export interface APIDomainsRowData {
   key: string;
-  domainName: React.ReactNode;
-  endpointCount: React.ReactNode;
-  rate: React.ReactNode;
-  errorRate: React.ReactNode;
-  latency: React.ReactNode;
-  lastUsed: React.ReactNode;
+  domainName: string;
+  endpointCount: number | string;
+  rate: number | string;
+  errorRate: number | string;
+  latency: number | string;
+  lastUsed: string;
 }

 // Rename this to a proper name
@@ -250,8 +253,16 @@ export const formatDataForTable = (
     endpointCount: domain.data.endpoints,
     rate: domain.data.rps,
     errorRate: domain.data.error_rate,
-    latency: Math.round(domain.data.p99 / 1000000), // Convert from nanoseconds to milliseconds
-    lastUsed: new Date(Math.floor(domain.data.lastseen / 1000000)).toISOString(), // Convert from nanoseconds to milliseconds
+    latency:
+      domain.data.p99 === 'n/a'
+        ? '-'
+        : Math.round(Number(domain.data.p99) / 1000000), // Convert from nanoseconds to milliseconds
+    lastUsed:
+      domain.data.lastseen === 'n/a'
+        ? '-'
+        : new Date(
+            Math.floor(Number(domain.data.lastseen) / 1000000),
+          ).toISOString(), // Convert from nanoseconds to milliseconds
   }));

 // Rename this to a proper name
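Both `// Convert from nanoseconds to milliseconds` comments above rely on the same arithmetic: ClickHouse reports `p99` and `lastseen` as epoch nanoseconds, and integer division by 1,000,000 yields the epoch milliseconds that `new Date(...)` expects. A minimal sketch of that conversion in Go, with a made-up timestamp (not a value from this PR):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Hypothetical lastseen value in epoch nanoseconds.
	lastSeenNs := int64(1741630000123456789)

	// Same arithmetic as Math.floor(domain.data.lastseen / 1000000):
	// nanoseconds / 1e6 = milliseconds, fractional part discarded.
	lastSeenMs := lastSeenNs / 1_000_000

	// time.UnixMilli is the counterpart of JavaScript's new Date(ms).
	fmt.Println(time.UnixMilli(lastSeenMs).UTC().Format(time.RFC3339Nano))
}
```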
@@ -128,6 +128,7 @@ const menuItems: SidebarItem[] = [
     key: ROUTES.API_MONITORING,
     label: 'API Monitoring',
     icon: <Binoculars size={16} />,
+    isNew: true,
   },
   {
     key: ROUTES.LIST_ALL_ALERT,
@@ -5982,10 +5982,10 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 	}

 	firstQueryLimit := req.Limit
-	samplesOrder := false
+	dataPointsOrder := false
 	var orderByClauseFirstQuery string
 	if req.OrderBy.ColumnName == "samples" {
-		samplesOrder = true
+		dataPointsOrder = true
 		orderByClauseFirstQuery = fmt.Sprintf("ORDER BY timeseries %s", req.OrderBy.Order)
 		if req.Limit < 50 {
 			firstQueryLimit = 50
@@ -5995,33 +5995,30 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 	}

 	// Determine which tables to use
-	start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.End)
-	sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.End)
+	start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.EndD)
+	sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.EndD)

 	metricsQuery := fmt.Sprintf(
 		`SELECT
-		metric_name,
-		ANY_VALUE(description) AS description,
-		ANY_VALUE(type) AS metric_type,
-		ANY_VALUE(unit) AS metric_unit,
-		uniq(fingerprint) AS timeseries,
+		t.metric_name AS metric_name,
+		ANY_VALUE(t.description) AS description,
+		ANY_VALUE(t.type) AS metric_type,
+		ANY_VALUE(t.unit) AS metric_unit,
+		uniq(t.fingerprint) AS timeseries,
 		uniq(metric_name) OVER() AS total
-		FROM %s.%s
+		FROM %s.%s AS t
 		WHERE unix_milli BETWEEN ? AND ?
 		AND NOT startsWith(metric_name, 'signoz_')
 		AND __normalized = true
 		%s
-		GROUP BY metric_name
+		GROUP BY t.metric_name
 		%s
 		LIMIT %d OFFSET %d;`,
 		signozMetricDBName, tsTable, whereClause, orderByClauseFirstQuery, firstQueryLimit, req.Offset)

 	args = append(args, start, end)
 	valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
 	begin := time.Now()
 	rows, err := r.db.Query(valueCtx, metricsQuery, args...)
 	duration := time.Since(begin)
 	zap.L().Info("Time taken to execute metrics query to fetch metrics with high time series", zap.String("query", metricsQuery), zap.Any("args", args), zap.Duration("duration", duration))
 	if err != nil {
 		zap.L().Error("Error executing metrics query", zap.Error(err))
 		return &metrics_explorer.SummaryListMetricsResponse{}, &model.ApiError{Typ: "ClickHouseError", Err: err}
@@ -6052,14 +6049,12 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 	// Build a comma-separated list of quoted metric names.
 	metricsList := "'" + strings.Join(metricNames, "', '") + "'"
 	// If samples are being sorted by datapoints, update the ORDER clause.
-	if samplesOrder {
+	if dataPointsOrder {
 		orderByClauseFirstQuery = fmt.Sprintf("ORDER BY s.samples %s", req.OrderBy.Order)
 	} else {
 		orderByClauseFirstQuery = ""
 	}

-	// reset the args for main query
-	args = make([]interface{}, 0)
 	var sampleQuery string
 	var sb strings.Builder

@@ -6067,19 +6062,20 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 		sb.WriteString(fmt.Sprintf(
 			`SELECT
 			s.samples,
-			s.metric_name
+			s.metric_name,
+			s.lastReceived
 			FROM (
 				SELECT
 				dm.metric_name,
-				%s AS samples
+				%s AS samples,
+				MAX(dm.unix_milli) AS lastReceived
 				FROM %s.%s AS dm
 				WHERE dm.metric_name IN (%s)
 				AND dm.fingerprint IN (
 					SELECT fingerprint
 					FROM %s.%s
 					WHERE metric_name IN (%s)
-					AND unix_milli BETWEEN ? AND ?
 					AND __normalized = true
 					%s
 					GROUP BY fingerprint
 				)
@@ -6093,27 +6089,26 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 			metricsList,
 			whereClause,
 		))
-		args = append(args, start, end)
+		args = append(args, req.Start, req.End)
 	} else {
 		// If no filters, it is a simpler query.
 		sb.WriteString(fmt.Sprintf(
 			`SELECT
 			s.samples,
-			s.metric_name
+			s.metric_name,
+			s.lastReceived
 			FROM (
 				SELECT
 				metric_name,
-				%s AS samples
+				%s AS samples,
+				MAX(unix_milli) AS lastReceived
 				FROM %s.%s
 				WHERE metric_name IN (%s)
 				AND unix_milli BETWEEN ? AND ?
 				GROUP BY metric_name
 			) AS s `,
 			countExp,
 			signozMetricDBName, sampleTable,
 			metricsList))
+		args = append(args, req.Start, req.End)
 	}

 	// Append ORDER BY clause if provided.
@@ -6125,10 +6120,9 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 	sb.WriteString(fmt.Sprintf("LIMIT %d;", req.Limit))
 	sampleQuery = sb.String()

 	begin = time.Now()
-	// Append the time boundaries for sampleQuery.
-	args = append(args, start, end)
 	rows, err = r.db.Query(valueCtx, sampleQuery, args...)
 	duration = time.Since(begin)
 	zap.L().Info("Time taken to execute samples query", zap.String("query", sampleQuery), zap.Any("args", args), zap.Duration("duration", duration))
 	if err != nil {
 		zap.L().Error("Error executing samples query", zap.Error(err))
 		return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
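The removal above works because earlier hunks in this function now append the time bounds while the query text is built. With positional `?` placeholders, the args slice must line up one-to-one, in order, with the placeholders in the final SQL, so appending `start, end` a second time would shift every binding. A minimal sketch of that invariant with hypothetical names and a hypothetical table (not the actual reader code):

```go
package queryargs

// buildSamplesQuery sketches how placeholder order pins argument order:
// the args slice is built in the same pass as the SQL text, so each
// appended value lines up with the ? it belongs to.
func buildSamplesQuery(metricName string, start, end int64) (string, []interface{}) {
	query := `SELECT metric_name, count() AS samples
FROM signoz_metrics.samples -- hypothetical table name
WHERE metric_name = ?            -- arg 1
  AND unix_milli BETWEEN ? AND ? -- args 2 and 3
GROUP BY metric_name`

	args := make([]interface{}, 0, 3)
	args = append(args, metricName) // arg 1
	args = append(args, start, end) // args 2 and 3
	return query, args
}
```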
@@ -6136,15 +6130,18 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 	defer rows.Close()

 	samplesMap := make(map[string]uint64)
+	lastReceivedMap := make(map[string]int64)

 	for rows.Next() {
 		var samples uint64
 		var metricName string
-		if err := rows.Scan(&samples, &metricName); err != nil {
+		var lastReceived int64
+		if err := rows.Scan(&samples, &metricName, &lastReceived); err != nil {
 			zap.L().Error("Error scanning sample row", zap.Error(err))
 			return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
 		}
 		samplesMap[metricName] = samples
+		lastReceivedMap[metricName] = lastReceived
 	}
 	if err := rows.Err(); err != nil {
 		zap.L().Error("Error iterating over sample rows", zap.Error(err))
@@ -6170,13 +6167,16 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, req *metrics_
 		}
 		if samples, exists := samplesMap[response.Metrics[i].MetricName]; exists {
 			response.Metrics[i].Samples = samples
+			if lastReceived, exists := lastReceivedMap[response.Metrics[i].MetricName]; exists {
+				response.Metrics[i].LastReceived = lastReceived
+			}
 			filteredMetrics = append(filteredMetrics, response.Metrics[i])
 		}
 	}
 	response.Metrics = filteredMetrics

 	// If ordering by samples, sort in-memory.
-	if samplesOrder {
+	if dataPointsOrder {
 		sort.Slice(response.Metrics, func(i, j int) bool {
 			return response.Metrics[i].Samples > response.Metrics[j].Samples
 		})
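This hunk closes the loop on `lastReceivedMap`: the per-metric timestamps scanned from the samples query are attached to the summary rows by metric name. The same pattern, reduced to a self-contained sketch with made-up data:

```go
package main

import "fmt"

type metricStat struct {
	MetricName   string
	Samples      uint64
	LastReceived int64
}

func main() {
	metrics := []metricStat{{MetricName: "http_requests_total"}, {MetricName: "cpu_usage"}}

	// Results of the second (samples) query, keyed by metric name.
	samplesMap := map[string]uint64{"http_requests_total": 42}
	lastReceivedMap := map[string]int64{"http_requests_total": 1741630000123}

	// Keep only metrics the samples query returned, attaching both values.
	var filtered []metricStat
	for _, m := range metrics {
		if samples, ok := samplesMap[m.MetricName]; ok {
			m.Samples = samples
			if lastReceived, ok := lastReceivedMap[m.MetricName]; ok {
				m.LastReceived = lastReceived
			}
			filtered = append(filtered, m)
		}
	}
	fmt.Printf("%+v\n", filtered)
}
```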
@@ -6194,7 +6194,7 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
 	if len(conditions) > 0 {
 		whereClause = "AND " + strings.Join(conditions, " AND ")
 	}
-	start, end, tsTable, _ := utils.WhichTSTableToUse(req.Start, req.End)
+	start, end, tsTable, _ := utils.WhichTSTableToUse(req.Start, req.EndD)

 	// Construct the query without backticks
 	query := fmt.Sprintf(`
@@ -6204,17 +6204,17 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
 	(total_value * 100.0 / total_time_series) AS percentage
 	FROM (
 		SELECT
 		metric_name,
 		uniq(fingerprint) AS total_value,
 		(SELECT uniq(fingerprint)
 		FROM %s.%s
 		WHERE unix_milli BETWEEN ? AND ? AND __normalized = true) AS total_time_series
 		FROM %s.%s
 		WHERE unix_milli BETWEEN ? AND ? AND NOT startsWith(metric_name, 'signoz_') AND __normalized = true %s
 		GROUP BY metric_name
 	)
 	ORDER BY percentage DESC
 	LIMIT %d;`,
 	signozMetricDBName,
 	tsTable,
 	signozMetricDBName,
|
||||
)
|
||||
|
||||
args = append(args,
|
||||
start, end, // For total_time_series subquery
|
||||
start, end, // For total_cardinality subquery
|
||||
start, end, // For main query
|
||||
)
|
||||
|
||||
valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
|
||||
begin := time.Now()
|
||||
rows, err := r.db.Query(valueCtx, query, args...)
|
||||
duration := time.Since(begin)
|
||||
zap.L().Info("Time taken to execute time series percentage query", zap.String("query", query), zap.Any("args", args), zap.Duration("duration", duration))
|
||||
if err != nil {
|
||||
zap.L().Error("Error executing time series percentage query", zap.Error(err), zap.String("query", query))
|
||||
zap.L().Error("Error executing cardinality query", zap.Error(err), zap.String("query", query))
|
||||
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var treeMap []metrics_explorer.TreeMapResponseItem
|
||||
var heatmap []metrics_explorer.TreeMapResponseItem
|
||||
for rows.Next() {
|
||||
var item metrics_explorer.TreeMapResponseItem
|
||||
if err := rows.Scan(&item.MetricName, &item.TotalValue, &item.Percentage); err != nil {
|
||||
zap.L().Error("Error scanning row", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
treeMap = append(treeMap, item)
|
||||
heatmap = append(heatmap, item)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
@@ -6254,7 +6251,7 @@ func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, r
|
||||
return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
|
||||
}
|
||||
|
||||
return &treeMap, nil
|
||||
return &heatmap, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError) {
|
||||
@@ -6267,30 +6264,27 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
 	}

 	// Determine time range and tables to use
-	start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.End)
-	sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.End)
+	start, end, tsTable, localTsTable := utils.WhichTSTableToUse(req.Start, req.EndD)
+	sampleTable, countExp := utils.WhichSampleTableToUse(req.Start, req.EndD)

 	queryLimit := 50 + req.Limit
-	metricsQuery := fmt.Sprintf(`
-		SELECT
-		metric_name,
-		uniq(fingerprint) AS timeSeries
-		FROM %s.%s
-		WHERE NOT startsWith(metric_name, 'signoz_')
+	metricsQuery := fmt.Sprintf(
+		`SELECT
+		ts.metric_name AS metric_name,
+		uniq(ts.fingerprint) AS timeSeries
+		FROM %s.%s AS ts
+		WHERE NOT startsWith(ts.metric_name, 'signoz_')
 		AND __normalized = true
 		AND unix_milli BETWEEN ? AND ?
 		%s
-		GROUP BY metric_name
+		GROUP BY ts.metric_name
 		ORDER BY timeSeries DESC
 		LIMIT %d;`,
 		signozMetricDBName, tsTable, whereClause, queryLimit,
 	)

 	valueCtx := context.WithValue(ctx, "clickhouse_max_threads", constants.MetricsExplorerClickhouseThreads)
 	begin := time.Now()
 	rows, err := r.db.Query(valueCtx, metricsQuery, start, end)
 	duration := time.Since(begin)
 	zap.L().Info("Time taken to execute metrics query to reduce search space", zap.String("query", metricsQuery), zap.Any("start", start), zap.Any("end", end), zap.Duration("duration", duration))
 	if err != nil {
 		zap.L().Error("Error executing metrics query", zap.Error(err))
 		return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
@@ -6351,13 +6345,12 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
 	if whereClause != "" {
 		sb.WriteString(fmt.Sprintf(
 			` AND dm.fingerprint IN (
-				SELECT fingerprint
-				FROM %s.%s
-				WHERE metric_name IN (%s)
-				AND unix_milli BETWEEN ? AND ?
-				AND __normalized = true
-				%s
-				GROUP BY fingerprint
+				SELECT ts.fingerprint
+				FROM %s.%s AS ts
+				WHERE ts.metric_name IN (%s)
+				AND __normalized = true
+				%s
+				GROUP BY ts.fingerprint
 			)`,
 			signozMetricDBName, localTsTable, metricsList, whereClause,
 		))
@@ -6377,18 +6370,10 @@ func (r *ClickHouseReader) GetMetricsSamplesPercentage(ctx context.Context, req
 	sampleQuery := sb.String()

-	// Add start and end time to args (only for sample table)
-	args = append(args,
-		req.Start, req.End, // For total_samples subquery
-		req.Start, req.End, // For main query
-		start, end, // For where clause time series fingerprint query
-		req.Limit,
-	)
+	args = append(args, start, end, start, end, req.Limit)

 	begin = time.Now()
 	// Execute the sample percentage query
 	rows, err = r.db.Query(valueCtx, sampleQuery, args...)
 	duration = time.Since(begin)
 	zap.L().Info("Time taken to execute samples percentage query", zap.String("query", sampleQuery), zap.Any("args", args), zap.Duration("duration", duration))
 	if err != nil {
 		zap.L().Error("Error executing samples query", zap.Error(err))
 		return nil, &model.ApiError{Typ: "ClickHouseError", Err: err}
@@ -225,21 +225,21 @@ func (receiver *SummaryService) GetMetricsTreemap(ctx context.Context, params *m
 	var response metrics_explorer.TreeMap
 	switch params.Treemap {
 	case metrics_explorer.TimeSeriesTeeMap:
-		ts, apiError := receiver.reader.GetMetricsTimeSeriesPercentage(ctx, params)
+		cardinality, apiError := receiver.reader.GetMetricsTimeSeriesPercentage(ctx, params)
 		if apiError != nil {
 			return nil, apiError
 		}
-		if ts != nil {
-			response.TimeSeries = *ts
+		if cardinality != nil {
+			response.TimeSeries = *cardinality
 		}
 		return &response, nil
 	case metrics_explorer.SamplesTreeMap:
-		samples, apiError := receiver.reader.GetMetricsSamplesPercentage(ctx, params)
+		dataPoints, apiError := receiver.reader.GetMetricsSamplesPercentage(ctx, params)
 		if apiError != nil {
 			return nil, apiError
 		}
-		if samples != nil {
-			response.Samples = *samples
+		if dataPoints != nil {
+			response.Samples = *dataPoints
 		}
 		return &response, nil
 	default:
@@ -89,17 +89,18 @@ func (aH *APIHandler) GetTreeMap(w http.ResponseWriter, r *http.Request) {
 	ctx := r.Context()
 	params, apiError := explorer.ParseTreeMapMetricsParams(r)
 	if apiError != nil {
-		zap.L().Error("error parsing tree map metric params", zap.Error(apiError.Err))
+		zap.L().Error("error parsing heatmap metric params", zap.Error(apiError.Err))
 		RespondError(w, apiError, nil)
 		return
 	}
 	result, apiError := aH.SummaryService.GetMetricsTreemap(ctx, params)
 	if apiError != nil {
-		zap.L().Error("error getting tree map data", zap.Error(apiError.Err))
+		zap.L().Error("error getting heatmap data", zap.Error(apiError.Err))
 		RespondError(w, apiError, nil)
 		return
 	}
 	aH.Respond(w, result)

 }

 func (aH *APIHandler) GetRelatedMetrics(w http.ResponseWriter, r *http.Request) {
@@ -20,7 +20,6 @@ import (
 	smtpservice "github.com/SigNoz/signoz/pkg/query-service/utils/smtpService"
 	"github.com/SigNoz/signoz/pkg/types"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/valuer"
 	"go.uber.org/zap"
 	"golang.org/x/crypto/bcrypt"
 )
@@ -88,18 +87,12 @@ func Invite(ctx context.Context, req *model.InviteRequest) (*model.InviteRespons
 	}

 	inv := &types.Invite{
-		Identifiable: types.Identifiable{
-			ID: valuer.GenerateUUID(),
-		},
-		TimeAuditable: types.TimeAuditable{
-			CreatedAt: time.Now(),
-			UpdatedAt: time.Now(),
-		},
-		Name:  req.Name,
-		Email: req.Email,
-		Token: token,
-		Role:  req.Role,
-		OrgID: au.OrgID,
+		Name:      req.Name,
+		Email:     req.Email,
+		Token:     token,
+		CreatedAt: time.Now(),
+		Role:      req.Role,
+		OrgID:     au.OrgID,
 	}

 	if err := dao.DB().CreateInviteEntry(ctx, inv); err != nil {
@@ -195,18 +188,12 @@ func inviteUser(ctx context.Context, req *model.InviteRequest, au *types.Gettabl
 	}

 	inv := &types.Invite{
-		Identifiable: types.Identifiable{
-			ID: valuer.GenerateUUID(),
-		},
-		TimeAuditable: types.TimeAuditable{
-			CreatedAt: time.Now(),
-			UpdatedAt: time.Now(),
-		},
-		Name:  req.Name,
-		Email: req.Email,
-		Token: token,
-		Role:  req.Role,
-		OrgID: au.OrgID,
+		Name:      req.Name,
+		Email:     req.Email,
+		Token:     token,
+		CreatedAt: time.Now(),
+		Role:      req.Role,
+		OrgID:     au.OrgID,
 	}

 	if err := dao.DB().CreateInviteEntry(ctx, inv); err != nil {
@@ -9,7 +9,7 @@ type SummaryListMetricsRequest struct {
 	Limit   int          `json:"limit"`
 	OrderBy v3.OrderBy   `json:"orderBy"`
 	Start   int64        `json:"start"`
-	End     int64        `json:"end"`
+	EndD    int64        `json:"end"`
 	Filters v3.FilterSet `json:"filters"`
 }

@@ -24,7 +24,7 @@ type TreeMapMetricsRequest struct {
 	Limit   int          `json:"limit"`
 	Treemap TreeMapType  `json:"treemap"`
 	Start   int64        `json:"start"`
-	End     int64        `json:"end"`
+	EndD    int64        `json:"end"`
 	Filters v3.FilterSet `json:"filters"`
 }
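Note that only the Go field name changes from `End` to `EndD`; the struct tag still reads `json:"end"`, and `encoding/json` matches incoming keys against the tag rather than the field name, so API clients keep sending `end` unchanged. A quick sketch with a hypothetical payload:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Reduced copy of the request struct above: the tag, not the
// field name, decides which JSON key feeds the field.
type summaryListMetricsRequest struct {
	Start int64 `json:"start"`
	EndD  int64 `json:"end"`
}

func main() {
	payload := []byte(`{"start": 1741620000000, "end": 1741630000000}`)
	var req summaryListMetricsRequest
	if err := json.Unmarshal(payload, &req); err != nil {
		panic(err)
	}
	fmt.Println(req.Start, req.EndD) // 1741620000000 1741630000000
}
```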
@@ -63,7 +63,6 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
 		sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlstore),
 		sqlmigration.NewUpdatePipelines(sqlstore),
 		sqlmigration.NewDropLicensesSitesFactory(sqlstore),
-		sqlmigration.NewUpdateInvitesFactory(sqlstore),
 	)
 }
@@ -1,139 +0,0 @@
-package sqlmigration
-
-import (
-	"context"
-	"database/sql"
-	"time"
-
-	"github.com/SigNoz/signoz/pkg/factory"
-	"github.com/SigNoz/signoz/pkg/sqlstore"
-	"github.com/SigNoz/signoz/pkg/types"
-	"github.com/SigNoz/signoz/pkg/valuer"
-	"github.com/uptrace/bun"
-	"github.com/uptrace/bun/migrate"
-)
-
-type updateInvites struct {
-	store sqlstore.SQLStore
-}
-
-type existingInvite struct {
-	bun.BaseModel `bun:"table:invites"`
-
-	OrgID     string    `bun:"org_id,type:text,notnull" json:"orgId"`
-	ID        int       `bun:"id,pk,autoincrement" json:"id"`
-	Name      string    `bun:"name,type:text,notnull" json:"name"`
-	Email     string    `bun:"email,type:text,notnull,unique" json:"email"`
-	Token     string    `bun:"token,type:text,notnull" json:"token"`
-	CreatedAt time.Time `bun:"created_at,notnull" json:"createdAt"`
-	Role      string    `bun:"role,type:text,notnull" json:"role"`
-}
-
-type newInvite struct {
-	bun.BaseModel `bun:"table:user_invite"`
-
-	types.Identifiable
-	types.TimeAuditable
-	Name  string `bun:"name,type:text,notnull" json:"name"`
-	Email string `bun:"email,type:text,notnull,unique" json:"email"`
-	Token string `bun:"token,type:text,notnull" json:"token"`
-	Role  string `bun:"role,type:text,notnull" json:"role"`
-	OrgID string `bun:"org_id,type:text,notnull" json:"orgId"`
-}
-
-func NewUpdateInvitesFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
-	return factory.
-		NewProviderFactory(
-			factory.MustNewName("update_invites"),
-			func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
-				return newUpdateInvites(ctx, ps, c, sqlstore)
-			})
-}
-
-func newUpdateInvites(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
-	return &updateInvites{store: store}, nil
-}
-
-func (migration *updateInvites) Register(migrations *migrate.Migrations) error {
-	if err := migrations.
-		Register(migration.Up, migration.Down); err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func (migration *updateInvites) Up(ctx context.Context, db *bun.DB) error {
-	tx, err := db.
-		BeginTx(ctx, nil)
-	if err != nil {
-		return err
-	}
-
-	defer tx.Rollback()
-
-	err = migration.
-		store.
-		Dialect().
-		RenameTableAndModifyModel(ctx, tx, new(existingInvite), new(newInvite), func(ctx context.Context) error {
-			existingInvites := make([]*existingInvite, 0)
-			err = tx.
-				NewSelect().
-				Model(&existingInvites).
-				Scan(ctx)
-			if err != nil {
-				if err != sql.ErrNoRows {
-					return err
-				}
-			}
-
-			if err == nil && len(existingInvites) > 0 {
-				newInvites := migration.
-					CopyOldInvitesToNewInvites(existingInvites)
-				_, err = tx.
-					NewInsert().
-					Model(&newInvites).
-					Exec(ctx)
-				if err != nil {
-					return err
-				}
-			}
-			return nil
-		})
-	if err != nil {
-		return err
-	}
-
-	err = tx.Commit()
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func (migration *updateInvites) Down(context.Context, *bun.DB) error {
-	return nil
-}
-
-func (migration *updateInvites) CopyOldInvitesToNewInvites(existingInvites []*existingInvite) []*newInvite {
-	newInvites := make([]*newInvite, 0)
-	for _, invite := range existingInvites {
-		newInvites = append(newInvites, &newInvite{
-			Identifiable: types.Identifiable{
-				ID: valuer.GenerateUUID(),
-			},
-			TimeAuditable: types.TimeAuditable{
-				CreatedAt: invite.CreatedAt,
-				UpdatedAt: time.Now(),
-			},
-			Name:  invite.Name,
-			Email: invite.Email,
-			Token: invite.Token,
-			Role:  invite.Role,
-			OrgID: invite.OrgID,
-		})
-	}
-
-	return newInvites
-}
@@ -2,7 +2,6 @@ package postgressqlstore

 import (
 	"context"
-	"reflect"

 	"github.com/uptrace/bun"
 )
@@ -152,60 +151,3 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
 	}
 	return true, nil
 }
-
-func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {
-
-	count := 0
-	err := bun.
-		NewSelect().
-		ColumnExpr("count(*)").
-		Table("pg_catalog.pg_tables").
-		Where("tablename = ?", bun.Dialect().Tables().Get(reflect.TypeOf(table)).Name).
-		Scan(ctx, &count)
-
-	if err != nil {
-		return false, err
-	}
-
-	if count == 0 {
-		return false, nil
-	}
-
-	return true, nil
-}
-
-func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
-	exists, err := dialect.TableExists(ctx, bun, newModel)
-	if err != nil {
-		return err
-	}
-	if exists {
-		return nil
-	}
-
-	_, err = bun.
-		NewCreateTable().
-		IfNotExists().
-		Model(newModel).
-		Exec(ctx)
-
-	if err != nil {
-		return err
-	}
-
-	err = cb(ctx)
-	if err != nil {
-		return err
-	}
-
-	_, err = bun.
-		NewDropTable().
-		IfExists().
-		Model(oldModel).
-		Exec(ctx)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
@@ -2,7 +2,6 @@ package sqlitesqlstore

 import (
 	"context"
-	"reflect"

 	"github.com/uptrace/bun"
 )
@@ -142,62 +141,3 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
 	}
 	return true, nil
 }
-
-func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table interface{}) (bool, error) {
-
-	count := 0
-	err := bun.
-		NewSelect().
-		ColumnExpr("count(*)").
-		Table("sqlite_master").
-		Where("type = ?", "table").
-		Where("name = ?", bun.Dialect().Tables().Get(reflect.TypeOf(table)).Name).
-		Scan(ctx, &count)
-
-	if err != nil {
-		return false, err
-	}
-
-	if count == 0 {
-		return false, nil
-	}
-
-	return true, nil
-}
-
-func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
-	exists, err := dialect.TableExists(ctx, bun, newModel)
-	if err != nil {
-		return err
-	}
-	if exists {
-		return nil
-	}
-
-	_, err = bun.
-		NewCreateTable().
-		IfNotExists().
-		Model(newModel).
-		ForeignKey(`("org_id") REFERENCES "organizations" ("id")`).
-		Exec(ctx)
-
-	if err != nil {
-		return err
-	}
-
-	err = cb(ctx)
-	if err != nil {
-		return err
-	}
-
-	_, err = bun.
-		NewDropTable().
-		IfExists().
-		Model(oldModel).
-		Exec(ctx)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
@@ -42,5 +42,4 @@ type SQLDialect interface {
 	GetColumnType(context.Context, bun.IDB, string, string) (string, error)
 	ColumnExists(context.Context, bun.IDB, string, string) (bool, error)
 	RenameColumn(context.Context, bun.IDB, string, string, string) (bool, error)
-	RenameTableAndModifyModel(context.Context, bun.IDB, interface{}, interface{}, func(context.Context) error) error
 }
@@ -28,7 +28,3 @@ func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table str
 func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
 	return true, nil
 }
-
-func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
-	return nil
-}
@@ -1,19 +1,21 @@
 package types

 import (
+	"time"
+
 	"github.com/uptrace/bun"
 )

 type Invite struct {
-	bun.BaseModel `bun:"table:user_invite"`
+	bun.BaseModel `bun:"table:invites"`

-	Identifiable
-	TimeAuditable
-	OrgID string `bun:"org_id,type:text,notnull" json:"orgId"`
-	Name  string `bun:"name,type:text,notnull" json:"name"`
-	Email string `bun:"email,type:text,notnull,unique" json:"email"`
-	Token string `bun:"token,type:text,notnull" json:"token"`
-	Role  string `bun:"role,type:text,notnull" json:"role"`
+	OrgID     string    `bun:"org_id,type:text,notnull" json:"orgId"`
+	ID        int       `bun:"id,pk,autoincrement" json:"id"`
+	Name      string    `bun:"name,type:text,notnull" json:"name"`
+	Email     string    `bun:"email,type:text,notnull,unique" json:"email"`
+	Token     string    `bun:"token,type:text,notnull" json:"token"`
+	CreatedAt time.Time `bun:"created_at,notnull" json:"createdAt"`
+	Role      string    `bun:"role,type:text,notnull" json:"role"`
 }

 type Group struct {
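This revert works because bun derives the target table from the `bun:"table:..."` tag on the embedded `BaseModel`; pointing it back at `invites` (with the integer primary key and `created_at` column restored) is enough to re-bind the model to the old schema. A minimal usage sketch, assuming an already-configured `*bun.DB` handle (not code from this changeset):

```go
package invitesketch

import (
	"context"

	"github.com/uptrace/bun"
)

// Reduced model: the generated SQL reads FROM "invites" purely
// because of the BaseModel tag; no table name appears at call sites.
type Invite struct {
	bun.BaseModel `bun:"table:invites"`

	ID    int    `bun:"id,pk,autoincrement"`
	Email string `bun:"email,notnull"`
}

// listInvites shows the tag at work when querying.
func listInvites(ctx context.Context, db *bun.DB) ([]Invite, error) {
	var invites []Invite
	err := db.NewSelect().Model(&invites).Scan(ctx)
	return invites, err
}
```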