Compare commits
89 Commits
| SHA1 |
|---|
| a118c3c8a1 |
| 9baf873521 |
| 12911db945 |
| bd149f4364 |
| c69b9ae62a |
| bc3f16d3de |
| 61bbd5551b |
| 286577d13d |
| dbd0701779 |
| 0c7a5ce3c7 |
| a92381df1b |
| eb1509d385 |
| 34e33af290 |
| c0004cd51c |
| 10bf545c65 |
| 7d2bcf11c3 |
| 3ff7ace54e |
| abdfe6ccc5 |
| aa398263fb |
| ace02486e0 |
| b318ba6b2f |
| de4be411f4 |
| 362f264bae |
| e94d984cdb |
| bf0267d579 |
| e4b3ea1f34 |
| 4ee6d4b546 |
| a7836c26d0 |
| 15eb5364d5 |
| 47bf512a33 |
| 2776bfa311 |
| 8c7ac88f84 |
| a08ad9e2cf |
| d312398f18 |
| d891c3e118 |
| 1e7b68203f |
| 3d152e23cd |
| 47cf1eebf7 |
| 6c84882dca |
| a4424eca0e |
| 77992a59bc |
| 3cbb071138 |
| 9cd6e5cabe |
| 13bec63fca |
| f2164a1a86 |
| 8a4f58e77b |
| 51a24673b9 |
| c94feb9af2 |
| a8668d19a8 |
| a8e81c9666 |
| 2eed75560d |
| 8d6fb7f897 |
| 4cd0088029 |
| 872c8adbbb |
| bba7344bae |
| 51fe634566 |
| af58d085a0 |
| 5b9b344816 |
| 1caa07e0af |
| ae23cec8d6 |
| 5afc04f205 |
| 6aed23ce66 |
| 007e2e7b78 |
| 762a3cdfcd |
| 308f8f8fed |
| 588bf2b93a |
| fff38b58d2 |
| cbd2036613 |
| 7ef72d4147 |
| 07af5c843a |
| e524ce5743 |
| 24e1346521 |
| 62e77613a6 |
| 56c0265660 |
| 91b1d08dff |
| 239c2cb859 |
| 4173258d0a |
| 1cbbdd8265 |
| 433f3f3d94 |
| fed23a6ab9 |
| b979c24cb4 |
| e4b41b1a27 |
| 44495b7669 |
| cc3133b2d6 |
| 9c83319143 |
| 571c08c58e |
| 092cfc7804 |
| 245050aac2 |
| 606fa6591d |
New file: .github/ISSUE_TEMPLATE/bug_report.md (33 lines, vendored)

```markdown
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

## Bug description

*Please describe.*
*If this affects the front-end, screenshots would be of great help.*

## Expected behavior



## How to reproduce

1.
2.
3.

## Version information
* **Signoz version**:
* **Browser version**:
* **Your OS and version**:

## Additional context


#### *Thank you* for your bug report – we love squashing them!
```
New file: .github/ISSUE_TEMPLATE/feature_request.md (27 lines, vendored)

```markdown
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

## Is your feature request related to a problem?

*Please describe.*

## Describe the solution you'd like



## Describe alternatives you've considered



## Additional context
Add any other context or screenshots about the feature request here.



#### *Thank you* for your feature request – we love each and every one!
```
New file: .github/ISSUE_TEMPLATE/performance-issue-report.md (33 lines, vendored)

```markdown
---
name: Performance issue report
about: Long response times, high resource usage? Ensuring that SigNoz is scalable
  is our top priority
title: ''
labels: ''
assignees: ''

---

## In what situation are you experiencing subpar performance?

*Please describe.*

## How to reproduce

1.
2.
3.

## Your Environment

- [ ] Linux
- [ ] Mac
- [ ] Windows

Please provide details of OS version etc.

## Additional context



#### *Thank you* for your performance issue report – we want SigNoz to be blazing fast!
```
Changed file (content suggests CONTRIBUTING.md):

```diff
@@ -1,6 +1,6 @@
 # How to Contribute
 
-You can always reach out to ankit@signoz.io to understand more about the repo and product. We are very responsive over email and [slack](https://signoz-community.slack.com/join/shared_invite/zt-kj26gm1u-Xe3CYxCu0bGXCrCqKipjOA#/).
+You can always reach out to ankit@signoz.io to understand more about the repo and product. We are very responsive over email and [slack](https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA).
 
 - You can create a PR (Pull Request)
 - If you find any bugs, please create an issue
```
Changed file (content suggests README.md):

```diff
@@ -8,13 +8,13 @@
   <img alt="License" src="https://img.shields.io/badge/license-MIT-brightgreen"> </a>
   <img alt="Downloads" src="https://img.shields.io/docker/pulls/signoz/frontend?label=Downloads"> </a>
   <img alt="GitHub issues" src="https://img.shields.io/github/issues/signoz/signoz"> </a>
-  <a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNoz_io&hashtags=opensource,signoz,observability">
+  <a href="https://twitter.com/intent/tweet?text=Monitor%20your%20applications%20and%20troubleshoot%20problems%20with%20SigNoz,%20an%20open-source%20alternative%20to%20DataDog,%20NewRelic.&url=https://signoz.io/&via=SigNozHQ&hashtags=opensource,signoz,observability">
   <img alt="tweet" src="https://img.shields.io/twitter/url/http/shields.io.svg?style=social"> </a>
 </p>
 
 ##
 
-SigNoz helps developer monitor applications and troubleshoot problems in their deployed applications. SigNoz uses distributed tracing to gain visibility into your software stack.
+SigNoz helps developers monitor applications and troubleshoot problems in their deployed applications. SigNoz uses distributed tracing to gain visibility into your software stack.
 
 👉 You can see metrics like p99 latency, error rates for your services, external API calls and individual end points.
 
```
New file: deploy/docker/clickhouse-setup/clickhouse-config.xml (517 lines)

```xml
<?xml version="1.0"?>
<yandex>
    <logger>
        <level>trace</level>
        <log>/var/log/clickhouse-server/clickhouse-server.log</log>
        <errorlog>/var/log/clickhouse-server/clickhouse-server.err.log</errorlog>
        <size>1000M</size>
        <count>10</count>
    </logger>

    <http_port>8123</http_port>
    <tcp_port>9000</tcp_port>

    <!-- For HTTPS and SSL over native protocol. -->
    <!--
    <https_port>8443</https_port>
    <tcp_ssl_port>9440</tcp_ssl_port>
    -->

    <!-- Used with https_port and tcp_ssl_port. Full ssl options list: https://github.com/yandex/ClickHouse/blob/master/contrib/libpoco/NetSSL_OpenSSL/include/Poco/Net/SSLManager.h#L71 -->
    <openSSL>
        <server> <!-- Used for https server AND secure tcp port -->
            <!-- openssl req -subj "/CN=localhost" -new -newkey rsa:2048 -days 365 -nodes -x509 -keyout /etc/clickhouse-server/server.key -out /etc/clickhouse-server/server.crt -->
            <certificateFile>/etc/clickhouse-server/server.crt</certificateFile>
            <privateKeyFile>/etc/clickhouse-server/server.key</privateKeyFile>
            <!-- openssl dhparam -out /etc/clickhouse-server/dhparam.pem 4096 -->
            <dhParamsFile>/etc/clickhouse-server/dhparam.pem</dhParamsFile>
            <verificationMode>none</verificationMode>
            <loadDefaultCAFile>true</loadDefaultCAFile>
            <cacheSessions>true</cacheSessions>
            <disableProtocols>sslv2,sslv3</disableProtocols>
            <preferServerCiphers>true</preferServerCiphers>
        </server>

        <client> <!-- Used for connecting to https dictionary source -->
            <loadDefaultCAFile>true</loadDefaultCAFile>
            <cacheSessions>true</cacheSessions>
            <disableProtocols>sslv2,sslv3</disableProtocols>
            <preferServerCiphers>true</preferServerCiphers>
            <!-- Use for self-signed: <verificationMode>none</verificationMode> -->
            <invalidCertificateHandler>
                <!-- Use for self-signed: <name>AcceptCertificateHandler</name> -->
                <name>RejectCertificateHandler</name>
            </invalidCertificateHandler>
        </client>
    </openSSL>

    <!-- Default root page on http[s] server. For example load UI from https://tabix.io/ when opening http://localhost:8123 -->
    <!--
    <http_server_default_response><![CDATA[<html ng-app="SMI2"><head><base href="http://ui.tabix.io/"></head><body><div ui-view="" class="content-ui"></div><script src="http://loader.tabix.io/master.js"></script></body></html>]]></http_server_default_response>
    -->

    <!-- Port for communication between replicas. Used for data exchange. -->
    <interserver_http_port>9009</interserver_http_port>

    <!-- Hostname that is used by other replicas to request this server.
         If not specified, than it is determined analoguous to 'hostname -f' command.
         This setting could be used to switch replication to another network interface.
    -->
    <!--
    <interserver_http_host>example.yandex.ru</interserver_http_host>
    -->

    <!-- Listen specified host. use :: (wildcard IPv6 address), if you want to accept connections both with IPv4 and IPv6 from everywhere. -->
    <listen_host>::</listen_host>
    <!-- Same for hosts with disabled ipv6: -->
    <!-- <listen_host>0.0.0.0</listen_host> -->

    <!-- Default values - try listen localhost on ipv4 and ipv6: -->
    <!-- <listen_host>0.0.0.0</listen_host> -->

    <max_connections>4096</max_connections>
    <keep_alive_timeout>3</keep_alive_timeout>

    <!-- Maximum number of concurrent queries. -->
    <max_concurrent_queries>100</max_concurrent_queries>

    <!-- Set limit on number of open files (default: maximum). This setting makes sense on Mac OS X because getrlimit() fails to retrieve
         correct maximum value. -->
    <!-- <max_open_files>262144</max_open_files> -->

    <!-- Size of cache of uncompressed blocks of data, used in tables of MergeTree family.
         In bytes. Cache is single for server. Memory is allocated only on demand.
         Cache is used when 'use_uncompressed_cache' user setting turned on (off by default).
         Uncompressed cache is advantageous only for very short queries and in rare cases.
    -->
    <uncompressed_cache_size>8589934592</uncompressed_cache_size>

    <!-- Approximate size of mark cache, used in tables of MergeTree family.
         In bytes. Cache is single for server. Memory is allocated only on demand.
         You should not lower this value.
    -->
    <mark_cache_size>5368709120</mark_cache_size>


    <!-- Path to data directory, with trailing slash. -->
    <path>/var/lib/clickhouse/</path>

    <!-- Path to temporary data for processing hard queries. -->
    <tmp_path>/var/lib/clickhouse/tmp/</tmp_path>

    <!-- Path to configuration file with users, access rights, profiles of settings, quotas. -->
    <users_config>users.xml</users_config>

    <!-- Default profile of settings.. -->
    <default_profile>default</default_profile>

    <!-- Default database. -->
    <default_database>default</default_database>

    <!-- Server time zone could be set here.

         Time zone is used when converting between String and DateTime types,
         when printing DateTime in text formats and parsing DateTime from text,
         it is used in date and time related functions, if specific time zone was not passed as an argument.

         Time zone is specified as identifier from IANA time zone database, like UTC or Africa/Abidjan.
         If not specified, system time zone at server startup is used.

         Please note, that server could display time zone alias instead of specified name.
         Example: W-SU is an alias for Europe/Moscow and Zulu is an alias for UTC.
    -->
    <!-- <timezone>Europe/Moscow</timezone> -->

    <!-- You can specify umask here (see "man umask"). Server will apply it on startup.
         Number is always parsed as octal. Default umask is 027 (other users cannot read logs, data files, etc; group can only read).
    -->
    <!-- <umask>022</umask> -->

    <!-- Configuration of clusters that could be used in Distributed tables.
         https://clickhouse.yandex/reference_en.html#Distributed
    -->
    <remote_servers incl="clickhouse_remote_servers" >
        <!-- Test only shard config for testing distributed storage -->
        <test_shard_localhost>
            <shard>
                <replica>
                    <host>localhost</host>
                    <port>9000</port>
                </replica>
            </shard>
        </test_shard_localhost>
    </remote_servers>


    <!-- If element has 'incl' attribute, then for it's value will be used corresponding substitution from another file.
         By default, path to file with substitutions is /etc/metrika.xml. It could be changed in config in 'include_from' element.
         Values for substitutions are specified in /yandex/name_of_substitution elements in that file.
    -->

    <!-- ZooKeeper is used to store metadata about replicas, when using Replicated tables.
         Optional. If you don't use replicated tables, you could omit that.

         See https://clickhouse.yandex/reference_en.html#Data%20replication
    -->
    <zookeeper incl="zookeeper-servers" optional="true" />

    <!-- Substitutions for parameters of replicated tables.
         Optional. If you don't use replicated tables, you could omit that.

         See https://clickhouse.yandex/reference_en.html#Creating%20replicated%20tables
    -->
    <macros incl="macros" optional="true" />


    <!-- Reloading interval for embedded dictionaries, in seconds. Default: 3600. -->
    <builtin_dictionaries_reload_interval>3600</builtin_dictionaries_reload_interval>


    <!-- Maximum session timeout, in seconds. Default: 3600. -->
    <max_session_timeout>3600</max_session_timeout>

    <!-- Default session timeout, in seconds. Default: 60. -->
    <default_session_timeout>60</default_session_timeout>

    <!-- Sending data to Graphite for monitoring. Several sections can be defined. -->
    <!--
        interval - send every X second
        root_path - prefix for keys
        hostname_in_path - append hostname to root_path (default = true)
        metrics - send data from table system.metrics
        events - send data from table system.events
        asynchronous_metrics - send data from table system.asynchronous_metrics
    -->
    <!--
    <graphite>
        <host>localhost</host>
        <port>42000</port>
        <timeout>0.1</timeout>
        <interval>60</interval>
        <root_path>one_min</root_path>
        <hostname_in_path>true<hostname_in_path>

        <metrics>true</metrics>
        <events>true</events>
        <asynchronous_metrics>true</asynchronous_metrics>
    </graphite>
    <graphite>
        <host>localhost</host>
        <port>42000</port>
        <timeout>0.1</timeout>
        <interval>1</interval>
        <root_path>one_sec</root_path>

        <metrics>true</metrics>
        <events>true</events>
        <asynchronous_metrics>false</asynchronous_metrics>
    </graphite>
    -->


    <!-- Query log. Used only for queries with setting log_queries = 1. -->
    <query_log>
        <!-- What table to insert data. If table is not exist, it will be created.
             When query log structure is changed after system update,
             then old table will be renamed and new table will be created automatically.
        -->
        <database>system</database>
        <table>query_log</table>

        <!-- Interval of flushing data. -->
        <flush_interval_milliseconds>7500</flush_interval_milliseconds>
    </query_log>


    <!-- Uncomment if use part_log
    <part_log>
        <database>system</database>
        <table>part_log</table>

        <flush_interval_milliseconds>7500</flush_interval_milliseconds>
    </part_log>
    -->


    <!-- Parameters for embedded dictionaries, used in Yandex.Metrica.
         See https://clickhouse.yandex/reference_en.html#Internal%20dictionaries
    -->

    <!-- Path to file with region hierarchy. -->
    <!-- <path_to_regions_hierarchy_file>/opt/geo/regions_hierarchy.txt</path_to_regions_hierarchy_file> -->

    <!-- Path to directory with files containing names of regions -->
    <!-- <path_to_regions_names_files>/opt/geo/</path_to_regions_names_files> -->


    <!-- Configuration of external dictionaries. See:
         https://clickhouse.yandex/reference_en.html#External%20Dictionaries
    -->
    <dictionaries_config>*_dictionary.xml</dictionaries_config>

    <!-- Uncomment if you want data to be compressed 30-100% better.
         Don't do that if you just started using ClickHouse.
    -->
    <compression incl="clickhouse_compression">
        <!--
        <!- - Set of variants. Checked in order. Last matching case wins. If nothing matches, lz4 will be used. - ->
        <case>

            <!- - Conditions. All must be satisfied. Some conditions may be omitted. - ->
            <min_part_size>10000000000</min_part_size> <!- - Min part size in bytes. - ->
            <min_part_size_ratio>0.01</min_part_size_ratio> <!- - Min size of part relative to whole table size. - ->

            <!- - What compression method to use. - ->
            <method>zstd</method>
        </case>
        -->
    </compression>

    <!-- Allow to execute distributed DDL queries (CREATE, DROP, ALTER, RENAME) on cluster.
         Works only if ZooKeeper is enabled. Comment it if such functionality isn't required. -->
    <distributed_ddl>
        <!-- Path in ZooKeeper to queue with DDL queries -->
        <path>/clickhouse/task_queue/ddl</path>
    </distributed_ddl>

    <!-- Settings to fine tune MergeTree tables. See documentation in source code, in MergeTreeSettings.h -->
    <!--
    <merge_tree>
        <max_suspicious_broken_parts>5</max_suspicious_broken_parts>
    </merge_tree>
    -->

    <!-- Protection from accidental DROP.
         If size of a MergeTree table is greater than max_table_size_to_drop (in bytes) than table could not be dropped with any DROP query.
         If you want do delete one table and don't want to restart clickhouse-server, you could create special file <clickhouse-path>/flags/force_drop_table and make DROP once.
         By default max_table_size_to_drop is 50GB, max_table_size_to_drop=0 allows to DROP any tables.
         Uncomment to disable protection.
    -->
    <!-- <max_table_size_to_drop>0</max_table_size_to_drop> -->

    <!-- Example of parameters for GraphiteMergeTree table engine -->
    <graphite_rollup>
        <!-- carbon -->
        <pattern>
            <regexp>^carbon\.</regexp>
            <function>any</function>
            <retention>
                <age>0</age>
                <precision>60</precision>
            </retention>
            <retention>
                <age>7776000</age>
                <precision>3600</precision>
            </retention>
            <retention>
                <age>10368000</age>
                <precision>21600</precision>
            </retention>
            <retention>
                <age>34560000</age>
                <precision>43200</precision>
            </retention>
            <retention>
                <age>63072000</age>
                <precision>86400</precision>
            </retention>
            <retention>
                <age>94608000</age>
                <precision>604800</precision>
            </retention>
        </pattern>
        <!-- collectd -->
        <pattern>
            <regexp>^collectd\.</regexp>
            <function>any</function>
            <retention>
                <age>0</age>
                <precision>10</precision>
            </retention>
            <retention>
                <age>43200</age>
                <precision>60</precision>
            </retention>
            <retention>
                <age>864000</age>
                <precision>900</precision>
            </retention>
            <retention>
                <age>1728000</age>
                <precision>1800</precision>
            </retention>
            <retention>
                <age>3456000</age>
                <precision>3600</precision>
            </retention>
            <retention>
                <age>10368000</age>
                <precision>21600</precision>
            </retention>
            <retention>
                <age>34560000</age>
                <precision>43200</precision>
            </retention>
            <retention>
                <age>63072000</age>
                <precision>86400</precision>
            </retention>
            <retention>
                <age>94608000</age>
                <precision>604800</precision>
            </retention>
        </pattern>
        <!-- high -->
        <pattern>
            <regexp>^high\.</regexp>
            <function>any</function>
            <retention>
                <age>0</age>
                <precision>10</precision>
            </retention>
            <retention>
                <age>172800</age>
                <precision>60</precision>
            </retention>
            <retention>
                <age>864000</age>
                <precision>900</precision>
            </retention>
            <retention>
                <age>1728000</age>
                <precision>1800</precision>
            </retention>
            <retention>
                <age>3456000</age>
                <precision>3600</precision>
            </retention>
            <retention>
                <age>10368000</age>
                <precision>21600</precision>
            </retention>
            <retention>
                <age>34560000</age>
                <precision>43200</precision>
            </retention>
            <retention>
                <age>63072000</age>
                <precision>86400</precision>
            </retention>
            <retention>
                <age>94608000</age>
                <precision>604800</precision>
            </retention>
        </pattern>
        <!-- medium -->
        <pattern>
            <regexp>^medium\.</regexp>
            <function>any</function>
            <retention>
                <age>0</age>
                <precision>60</precision>
            </retention>
            <retention>
                <age>864000</age>
                <precision>900</precision>
            </retention>
            <retention>
                <age>1728000</age>
                <precision>1800</precision>
            </retention>
            <retention>
                <age>3456000</age>
                <precision>3600</precision>
            </retention>
            <retention>
                <age>10368000</age>
                <precision>21600</precision>
            </retention>
            <retention>
                <age>34560000</age>
                <precision>43200</precision>
            </retention>
            <retention>
                <age>63072000</age>
                <precision>86400</precision>
            </retention>
            <retention>
                <age>94608000</age>
                <precision>604800</precision>
            </retention>
        </pattern>
        <!-- low -->
        <pattern>
            <regexp>^low\.</regexp>
            <function>any</function>
            <retention>
                <age>0</age>
                <precision>600</precision>
            </retention>
            <retention>
                <age>15552000</age>
                <precision>1800</precision>
            </retention>
            <retention>
                <age>31536000</age>
                <precision>3600</precision>
            </retention>
            <retention>
                <age>63072000</age>
                <precision>21600</precision>
            </retention>
            <retention>
                <age>126144000</age>
                <precision>43200</precision>
            </retention>
            <retention>
                <age>252288000</age>
                <precision>86400</precision>
            </retention>
            <retention>
                <age>315360000</age>
                <precision>604800</precision>
            </retention>
        </pattern>
        <!-- default -->
        <default>
            <function>any</function>
            <retention>
                <age>0</age>
                <precision>60</precision>
            </retention>
            <retention>
                <age>864000</age>
                <precision>900</precision>
            </retention>
            <retention>
                <age>1728000</age>
                <precision>1800</precision>
            </retention>
            <retention>
                <age>3456000</age>
                <precision>3600</precision>
            </retention>
            <retention>
                <age>10368000</age>
                <precision>21600</precision>
            </retention>
            <retention>
                <age>34560000</age>
                <precision>43200</precision>
            </retention>
            <retention>
                <age>63072000</age>
                <precision>86400</precision>
            </retention>
            <retention>
                <age>94608000</age>
                <precision>604800</precision>
            </retention>
        </default>
    </graphite_rollup>

    <!-- Directory in <clickhouse-path> containing schema files for various input formats.
         The directory will be created if it doesn't exist.
    -->
    <format_schema_path>/var/lib/clickhouse/format_schemas/</format_schema_path>
</yandex>
```
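The compose file in the next section health-checks ClickHouse over the HTTP port opened by this config (8123). A minimal sketch of probing the same endpoint by hand, assuming the default port mappings used below:

```bash
# ClickHouse answers "Ok." on its HTTP ping endpoint once it is up;
# this is the same check the docker-compose healthcheck below performs.
curl -s http://localhost:8123/ping
# The native TCP protocol (tcp_port 9000 above, published as host port 9001 below)
# is what query-service and the collector's clickhouse exporter connect to.
```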
New file: deploy/docker/clickhouse-setup/docker-compose.yaml (97 lines)

```yaml
version: "2.4"

services:
  clickhouse:
    image: yandex/clickhouse-server
    expose:
      - 8123
      - 9000
    ports:
      - 9001:9000
      - 8123:8123
    volumes:
      - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
      - ./docker-entrypoint-initdb.d/init-db.sql:/docker-entrypoint-initdb.d/init-db.sql
    healthcheck:
      # "clickhouse", "client", "-u ${CLICKHOUSE_USER}", "--password ${CLICKHOUSE_PASSWORD}", "-q 'SELECT 1'"
      test: ["CMD", "wget", "--spider", "-q", "localhost:8123/ping"]
      interval: 30s
      timeout: 5s
      retries: 3

  query-service:
    image: signoz/query-service:0.3.1
    container_name: query-service

    ports:
      - "8080:8080"

    environment:
      - ClickHouseUrl=tcp://clickhouse:9000
      - STORAGE=clickhouse
      - POSTHOG_API_KEY=H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w

    depends_on:
      clickhouse:
        condition: service_healthy

  frontend:
    image: signoz/frontend:0.3.1
    container_name: frontend

    depends_on:
      - query-service
    links:
      - "query-service"
    ports:
      - "3000:3000"
    volumes:
      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf


  otel-collector:
    image: signoz/otelcol:latest
    command: ["--config=/etc/otel-collector-config.yaml", "--mem-ballast-size-mib=683"]
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
    ports:
      - "1777:1777"    # pprof extension
      - "8887:8888"    # Prometheus metrics exposed by the agent
      - "14268:14268"  # Jaeger receiver
      - "55678"        # OpenCensus receiver
      - "55680:55680"  # OTLP HTTP/2.0 legacy port
      - "55681:55681"  # OTLP HTTP/1.0 receiver
      - "4317:4317"    # OTLP GRPC receiver
      - "55679:55679"  # zpages extension
      - "13133"        # health_check

    depends_on:
      clickhouse:
        condition: service_healthy

  hotrod:
    image: jaegertracing/example-hotrod:latest
    container_name: hotrod
    ports:
      - "9000:8080"
    command: ["all"]
    environment:
      - JAEGER_ENDPOINT=http://otel-collector:14268/api/traces


  load-hotrod:
    image: "grubykarol/locust:1.2.3-python3.9-alpine3.12"
    container_name: load-hotrod
    hostname: load-hotrod
    ports:
      - "8089:8089"
    environment:
      ATTACKED_HOST: http://hotrod:8080
      LOCUST_MODE: standalone
      NO_PROXY: standalone
      TASK_DELAY_FROM: 5
      TASK_DELAY_TO: 30
      QUIET_MODE: "${QUIET_MODE:-false}"
      LOCUST_OPTS: "--headless -u 10 -r 1"
    volumes:
      - ../common/locust-scripts:/locust
```
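A minimal bring-up sketch for the stack above, assuming it is checked out under deploy/docker/clickhouse-setup/ as in this PR:

```bash
cd deploy/docker/clickhouse-setup
sudo docker-compose up -d   # clickhouse must pass its healthcheck before query-service starts
sudo docker-compose ps      # frontend on :3000, query-service on :8080, hotrod demo on :9000
```

The `condition: service_healthy` entries mean the ping healthcheck gates the rest of the stack, so the first start can take a little while.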
New file, 27 lines (path not shown in the capture; content suggests the init-db.sql mounted by the compose file above):

```sql
CREATE TABLE IF NOT EXISTS signoz_index (
  timestamp DateTime64(9) CODEC(Delta, ZSTD(1)),
  traceID String CODEC(ZSTD(1)),
  spanID String CODEC(ZSTD(1)),
  parentSpanID String CODEC(ZSTD(1)),
  serviceName LowCardinality(String) CODEC(ZSTD(1)),
  name LowCardinality(String) CODEC(ZSTD(1)),
  kind Int32 CODEC(ZSTD(1)),
  durationNano UInt64 CODEC(ZSTD(1)),
  tags Array(String) CODEC(ZSTD(1)),
  tagsKeys Array(String) CODEC(ZSTD(1)),
  tagsValues Array(String) CODEC(ZSTD(1)),
  statusCode Int64 CODEC(ZSTD(1)),
  references String CODEC(ZSTD(1)),
  externalHttpMethod Nullable(String) CODEC(ZSTD(1)),
  externalHttpUrl Nullable(String) CODEC(ZSTD(1)),
  component Nullable(String) CODEC(ZSTD(1)),
  dbSystem Nullable(String) CODEC(ZSTD(1)),
  dbName Nullable(String) CODEC(ZSTD(1)),
  dbOperation Nullable(String) CODEC(ZSTD(1)),
  peerService Nullable(String) CODEC(ZSTD(1)),
  INDEX idx_tagsKeys tagsKeys TYPE bloom_filter(0.01) GRANULARITY 64,
  INDEX idx_tagsValues tagsValues TYPE bloom_filter(0.01) GRANULARITY 64,
  INDEX idx_duration durationNano TYPE minmax GRANULARITY 1
) ENGINE MergeTree()
PARTITION BY toDate(timestamp)
ORDER BY (serviceName, -toUnixTimestamp(timestamp))
```
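With this schema, the per-service latency numbers the README mentions (p99 and friends) reduce to a single aggregation over signoz_index. A hypothetical example, assuming it is run from the clickhouse-setup directory so compose can resolve the `clickhouse` service; the one-hour window is arbitrary:

```bash
# p99 latency (ms) and span counts per service over the last hour.
sudo docker-compose exec clickhouse clickhouse-client --query "
  SELECT serviceName,
         quantile(0.99)(durationNano) / 1e6 AS p99_ms,
         count() AS spans
  FROM signoz_index
  WHERE timestamp > now() - INTERVAL 1 HOUR
  GROUP BY serviceName
  ORDER BY p99_ms DESC"
```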
New file: deploy/docker/clickhouse-setup/otel-collector-config.yaml (39 lines)

```yaml
receivers:
  otlp:
    protocols:
      grpc:
      http:
  jaeger:
    protocols:
      grpc:
      thrift_http:
processors:
  batch:
    send_batch_size: 1000
    timeout: 10s
  memory_limiter:
    # Same as --mem-ballast-size-mib CLI argument
    ballast_size_mib: 683
    # 80% of maximum memory up to 2G
    limit_mib: 1500
    # 25% of limit up to 2G
    spike_limit_mib: 512
    check_interval: 5s
  # queued_retry:
  #   num_workers: 4
  #   queue_size: 100
  #   retry_on_failure: true
extensions:
  health_check: {}
  zpages: {}
exporters:
  clickhouse:
    datasource: tcp://clickhouse:9000

service:
  extensions: [health_check, zpages]
  pipelines:
    traces:
      receivers: [jaeger, otlp]
      processors: [batch]
      exporters: [clickhouse]
```
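Given the receivers above and the port mappings in the compose file, an instrumented application on the host can be pointed at the collector over either protocol; a sketch, assuming the default ports are free:

```bash
# OTLP gRPC, via the 4317:4317 mapping (standard OpenTelemetry SDK variable):
export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4317
# or the Jaeger thrift_http receiver, the same endpoint the hotrod demo uses in-network:
export JAEGER_ENDPOINT=http://localhost:14268/api/traces
```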
Changed file (content suggests deploy/docker/common/nginx-config.conf):

```diff
@@ -1,6 +1,16 @@
 server {
     listen 3000;
     server_name _;
+
+    gzip on;
+    gzip_static on;
+    gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
+    gzip_proxied any;
+    gzip_vary on;
+    gzip_comp_level 6;
+    gzip_buffers 16 8k;
+    gzip_http_version 1.1;
+
     location / {
         root /usr/share/nginx/html;
         index index.html index.htm;
```
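A quick way to confirm the new gzip directives are active, assuming the frontend is reachable on port 3000 as configured:

```bash
# nginx compresses text/html by default once gzip is on; the gzip_types line
# above extends that to CSS, JSON, JS and XML responses.
curl -sI -H 'Accept-Encoding: gzip' http://localhost:3000/ | grep -i '^content-encoding'
# expected output: content-encoding: gzip
```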
Changed file (content suggests the druid-kafka-setup docker-compose-tiny.yaml):

```diff
@@ -140,6 +140,11 @@ services:
     env_file:
       - environment_tiny/router
       - environment_tiny/common
+    healthcheck:
+      test: ["CMD", "wget", "--spider", "-q", "http://router:8888/druid/coordinator/v1/datasources/flattened_spans"]
+      interval: 30s
+      timeout: 5s
+      retries: 5
 
   flatten-processor:
     image: signoz/flattener-processor:0.2.0
@@ -158,7 +163,7 @@ services:
 
 
   query-service:
-    image: signoz.docker.scarf.sh/signoz/query-service:0.2.2
+    image: signoz.docker.scarf.sh/signoz/query-service:0.3.1
     container_name: query-service
 
     depends_on:
@@ -169,11 +174,15 @@ services:
     environment:
       - DruidClientUrl=http://router:8888
       - DruidDatasource=flattened_spans
+      - STORAGE=druid
       - POSTHOG_API_KEY=H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w
 
+    depends_on:
+      router:
+        condition: service_healthy
+
   frontend:
-    image: signoz/frontend:0.2.3
+    image: signoz/frontend:0.3.1
     container_name: frontend
 
     depends_on:
@@ -183,7 +192,7 @@ services:
     ports:
       - "3000:3000"
     volumes:
-      - ./nginx-config.conf:/etc/nginx/conf.d/default.conf
+      - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf
 
   create-supervisor:
     image: theithollow/hollowapp-blog:curl
@@ -260,5 +269,5 @@ services:
       QUIET_MODE: "${QUIET_MODE:-false}"
       LOCUST_OPTS: "--headless -u 10 -r 1"
     volumes:
-      - ./locust-scripts:/locust
+      - ../common/locust-scripts:/locust
 
```
Changed file (content suggests the druid-kafka-setup docker-compose.yaml):

```diff
@@ -135,6 +135,11 @@ services:
       - router
     env_file:
       - environment_small/router
+    healthcheck:
+      test: ["CMD", "wget", "--spider", "-q", "http://router:8888/druid/coordinator/v1/datasources/flattened_spans"]
+      interval: 30s
+      timeout: 5s
+      retries: 5
 
   flatten-processor:
     image: signoz/flattener-processor:0.2.0
@@ -153,7 +158,7 @@ services:
 
 
   query-service:
-    image: signoz.docker.scarf.sh/signoz/query-service:0.2.2
+    image: signoz.docker.scarf.sh/signoz/query-service:0.3.1
     container_name: query-service
 
     depends_on:
@@ -164,11 +169,15 @@ services:
     environment:
       - DruidClientUrl=http://router:8888
       - DruidDatasource=flattened_spans
+      - STORAGE=druid
       - POSTHOG_API_KEY=H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w
 
+    depends_on:
+      router:
+        condition: service_healthy
+
   frontend:
-    image: signoz/frontend:0.2.3
+    image: signoz/frontend:0.3.1
     container_name: frontend
 
     depends_on:
```
New file: deploy/docker/druid-kafka-setup/environment_tiny/common_test (26 lines)

```properties
# For S3 storage

druid_extensions_loadList=["druid-histogram", "druid-datasketches", "druid-lookups-cached-global", "postgresql-metadata-storage", "druid-kafka-indexing-service", "druid-s3-extensions"]


druid_storage_type=s3
druid_storage_bucket=solvzy-test3
druid_storage_baseKey=druid/segments

AWS_ACCESS_KEY_ID=AKIARKCF5OX3CMI3XRXC
AWS_SECRET_ACCESS_KEY=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
AWS_REGION=us-east-2

druid_indexer_logs_type=s3
druid_indexer_logs_s3Bucket=solvzy-test3
druid_indexer_logs_s3Prefix=druid/indexing-logs

# -----------------------------------------------------------
# For local storage
# druid_extensions_loadList=["druid-histogram", "druid-datasketches", "druid-lookups-cached-global", "postgresql-metadata-storage", "druid-kafka-indexing-service"]

# druid_storage_type=local
# druid_storage_storageDirectory=/opt/data/segments
# druid_indexer_logs_type=file
# druid_indexer_logs_directory=/opt/data/indexing-logs
```
Deleted file, 256 lines (content suggests a Kubernetes install script):

```bash
#!/bin/bash

set -o errexit

is_command_present() {
    type "$1" >/dev/null 2>&1
}

is_mac() {
    [[ $OSTYPE == darwin* ]]
}


check_k8s_setup() {
    echo "Checking your k8s setup status"
    if ! is_command_present kubectl; then
        echo "Please install kubectl on your machine"
        exit 1
    else

        if ! is_command_present jq; then
            install_jq
        fi
        clusters=`kubectl config view -o json | jq -r '."current-context"'`
        if [[ ! -n $clusters ]]; then
            echo "Please setup a k8s cluster & config kubectl to connect to it"
            exit 1
        fi
        k8s_minor_version=`kubectl version --short -o json | jq ."serverVersion.minor" | sed 's/[^0-9]*//g'`
        # if [[ $k8s_minor_version < 18 ]]; then
        #     echo "+++++++++++ ERROR ++++++++++++++++++++++"
        #     echo "SigNoz deployments require Kubernetes >= v1.18. Found version: v1.$k8s_minor_version"
        #     echo "+++++++++++ ++++++++++++++++++++++++++++"
        #     exit 1
        # fi;
    fi
}

install_jq(){
    if [ $package_manager == "brew" ]; then
        brew install jq
    elif [ $package_manager == "yum" ]; then
        yum_cmd="sudo yum --assumeyes --quiet"
        $yum_cmd install jq
    else
        apt_cmd="sudo apt-get --yes --quiet"
        $apt_cmd update
        $apt_cmd install jq
    fi
}


check_os() {
    if is_mac; then
        package_manager="brew"
        desired_os=1
        os="Mac"
        return
    fi

    os_name="$(cat /etc/*-release | awk -F= '$1 == "NAME" { gsub(/"/, ""); print $2; exit }')"

    case "$os_name" in
        Ubuntu*)
            desired_os=1
            os="ubuntu"
            package_manager="apt-get"
            ;;
        Debian*)
            desired_os=1
            os="debian"
            package_manager="apt-get"
            ;;
        Red\ Hat*)
            desired_os=1
            os="red hat"
            package_manager="yum"
            ;;
        CentOS*)
            desired_os=1
            os="centos"
            package_manager="yum"
            ;;
        *)
            desired_os=0
            os="Not Found"
    esac
}


echo_contact_support() {
    echo "Please contact <support@signoz.io> with your OS details and version${1:-.}"
}

bye() {  # Prints a friendly good bye message and exits the script.
    set +o errexit
    echo "Please share your email to receive support with the installation"
    read -rp 'Email: ' email

    while [[ $email == "" ]]
    do
        read -rp 'Email: ' email
    done

    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Support", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "email": "'"$email"'", "platform": "k8s", "k8s_minor_version": "'"$k8s_minor_version"'" } }'
    URL="https://app.posthog.com/capture"
    HEADER="Content-Type: application/json"


    if has_curl; then
        curl -sfL -d "$DATA" --header "$HEADER" "$URL" > /dev/null 2>&1
    elif has_wget; then
        wget -q --post-data="$DATA" --header="$HEADER" "$URL" > /dev/null 2>&1
    fi

    echo -e "\nExiting for now. Bye! \U1F44B\n"
    exit 1
}

deploy_app() {
    kubectl apply -f "$install_dir/config-template"
    kubectl apply -f "$install_dir"
}

wait_for_application_start() {
    local timeout=$1
    address=$custom_domain
    if [[ "$ssl_enable" == "true" ]]; then
        protocol="https"
    else
        protocol="http"
    fi
    # The while loop is important because for-loops don't work for dynamic values
    while [[ $timeout -gt 0 ]]; do
        if [[ $address == "" || $address == null ]]; then
            address=`kubectl get ingress appsmith-ingress -o json | jq -r '.status.loadBalancer.ingress[0].ip'`
        fi
        status_code="$(curl -s -o /dev/null -w "%{http_code}" $protocol://$address/api/v1 || true)"
        if [[ status_code -eq 401 ]]; then
            break
        else
            echo -ne "Waiting for all containers to start. This check will timeout in $timeout seconds...\r\c"
        fi
        ((timeout--))
        sleep 1
    done

    echo ""
}


echo -e "👋 Thank you for trying out SigNoz! "
echo ""


# Checking OS and assigning package manager
desired_os=0
os=""
echo -e "🕵️ Detecting your OS"
check_os
SIGNOZ_INSTALLATION_ID=$(curl -s 'https://api64.ipify.org')

# Run bye if failure happens
trap bye EXIT

DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Started", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "platform": "k8s", "k8s_minor_version": "'"$k8s_minor_version"'" } }'

URL="https://app.posthog.com/capture"
HEADER="Content-Type: application/json"

if has_curl; then
    curl -sfL -d "$DATA" --header "$HEADER" "$URL" > /dev/null 2>&1
elif has_wget; then
    wget -q --post-data="$DATA" --header="$HEADER" "$URL" > /dev/null 2>&1
fi

# Check for kubernetes setup
check_k8s_setup

echo ""
echo "Deploy Appmisth on your cluster"
echo ""

deploy_app

wait_for_application_start 60


if [[ $status_code -ne 200 ]]; then
    echo "+++++++++++ ERROR ++++++++++++++++++++++"
    echo "The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
    echo ""
    echo -e "sudo docker-compose -f docker/docker-compose-tiny.yaml ps -a"
    echo "Please read our troubleshooting guide https://signoz.io/docs/deployment/docker#troubleshooting"
    echo "or reach us on SigNoz for support https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
    echo "++++++++++++++++++++++++++++++++++++++++"

    SUPERVISORS="$(curl -so - http://localhost:8888/druid/indexer/v1/supervisor)"

    DATASOURCES="$(curl -so - http://localhost:8888/druid/coordinator/v1/datasources)"

    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error - Checks", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "platform": "k8s", "error": "Containers not started", "SUPERVISORS": '"$SUPERVISORS"', "DATASOURCES": '"$DATASOURCES"' } }'

    URL="https://app.posthog.com/capture"
    HEADER="Content-Type: application/json"

    if has_curl; then
        curl -sfL -d "$DATA" --header "$HEADER" "$URL" > /dev/null 2>&1
    elif has_wget; then
        wget -q --post-data="$DATA" --header="$HEADER" "$URL" > /dev/null 2>&1
    fi

    exit 1

else
    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Success", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'"} }'
    URL="https://app.posthog.com/capture"
    HEADER="Content-Type: application/json"

    if has_curl; then
        curl -sfL -d "$DATA" --header "$HEADER" "$URL" > /dev/null 2>&1
    elif has_wget; then
        wget -q --post-data="$DATA" --header="$HEADER" "$URL" > /dev/null 2>&1
    fi
    echo "++++++++++++++++++ SUCCESS ++++++++++++++++++++++"
    echo "Your installation is complete!"
    echo ""
    echo "Your frontend is running on 'http://localhost:3000'."

    echo ""
    echo "+++++++++++++++++++++++++++++++++++++++++++++++++"
    echo ""
    echo "Need help Getting Started?"
    echo "Join us on Slack https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
    echo ""
    echo "Please share your email to receive support & updates about SigNoz!"
    read -rp 'Email: ' email

    while [[ $email == "" ]]
    do
        read -rp 'Email: ' email
    done

    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Identify Successful Installation", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "email": "'"$email"'", "platform": "k8s" } }'
    URL="https://app.posthog.com/capture"
    HEADER="Content-Type: application/json"

    if has_curl; then
        curl -sfL -d "$DATA" --header "$HEADER" "$URL" > /dev/null 2>&1
    elif has_wget; then
        wget -q --post-data="$DATA" --header="$HEADER" "$URL" > /dev/null 2>&1
    fi

fi

echo -e "\nThank you!\n"
```
@@ -2,6 +2,16 @@
|
|||||||
|
|
||||||
set -o errexit
|
set -o errexit
|
||||||
|
|
||||||
|
# Regular Colors
|
||||||
|
Black='\033[0;30m' # Black
|
||||||
|
Red='\[\e[0;31m\]' # Red
|
||||||
|
Green='\033[0;32m' # Green
|
||||||
|
Yellow='\033[0;33m' # Yellow
|
||||||
|
Blue='\033[0;34m' # Blue
|
||||||
|
Purple='\033[0;35m' # Purple
|
||||||
|
Cyan='\033[0;36m' # Cyan
|
||||||
|
White='\033[0;37m' # White
|
||||||
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
is_command_present() {
|
is_command_present() {
|
||||||
type "$1" >/dev/null 2>&1
|
type "$1" >/dev/null 2>&1
|
||||||
@@ -88,7 +98,7 @@ check_os() {
|
|||||||
# The script should error out in case they aren't available
|
# The script should error out in case they aren't available
|
||||||
check_ports_occupied() {
|
check_ports_occupied() {
|
||||||
local port_check_output
|
local port_check_output
|
||||||
local ports_pattern="80|443"
|
local ports_pattern="80|3000|8080"
|
||||||
|
|
||||||
if is_mac; then
|
if is_mac; then
|
||||||
port_check_output="$(netstat -anp tcp | awk '$6 == "LISTEN" && $4 ~ /^.*\.('"$ports_pattern"')$/')"
|
port_check_output="$(netstat -anp tcp | awk '$6 == "LISTEN" && $4 ~ /^.*\.('"$ports_pattern"')$/')"
|
||||||
@@ -192,7 +202,7 @@ install_docker_compose() {
|
|||||||
echo ""
|
echo ""
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Docker Compose not found" } }'
|
DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Docker Compose not found", "setup_type": "'"$setup_type"'" } }'
|
||||||
URL="https://app.posthog.com/capture"
|
URL="https://app.posthog.com/capture"
|
||||||
HEADER="Content-Type: application/json"
|
HEADER="Content-Type: application/json"
|
||||||
|
|
||||||
@@ -212,8 +222,7 @@ install_docker_compose() {
|
|||||||
|
|
||||||
start_docker() {
|
start_docker() {
|
||||||
echo "Starting Docker ..."
|
echo "Starting Docker ..."
|
||||||
if [ $os == "Mac" ]
|
if [ $os = "Mac" ]; then
|
||||||
then
|
|
||||||
open --background -a Docker && while ! docker system info > /dev/null 2>&1; do sleep 1; done
|
open --background -a Docker && while ! docker system info > /dev/null 2>&1; do sleep 1; done
|
||||||
else
|
else
|
||||||
if ! sudo systemctl is-active docker.service > /dev/null; then
|
if ! sudo systemctl is-active docker.service > /dev/null; then
|
||||||
@@ -231,16 +240,17 @@ wait_for_containers_start() {
|
|||||||
if [[ status_code -eq 200 ]]; then
|
if [[ status_code -eq 200 ]]; then
|
||||||
break
|
break
|
||||||
else
|
else
|
||||||
|
if [ $setup_type == 'druid' ]; then
|
||||||
SUPERVISORS="$(curl -so - http://localhost:8888/druid/indexer/v1/supervisor)"
|
SUPERVISORS="$(curl -so - http://localhost:8888/druid/indexer/v1/supervisor)"
|
||||||
LEN_SUPERVISORS="${#SUPERVISORS}"
|
LEN_SUPERVISORS="${#SUPERVISORS}"
|
||||||
|
|
||||||
if [[ LEN_SUPERVISORS -ne 19 && $timeout -eq 50 ]];then
|
if [[ LEN_SUPERVISORS -ne 19 && $timeout -eq 50 ]];then
|
||||||
echo "No Supervisors found... Re-applying docker compose\n"
|
echo -e "\n🟠 Supervisors taking time to start ⏳ ... let's wait for some more time ⏱️\n\n"
|
||||||
sudo docker-compose -f ./docker/docker-compose-tiny.yaml up -d
|
sudo docker-compose -f ./docker/druid-kafka-setup/docker-compose-tiny.yaml up -d
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
echo -ne "Waiting for all containers to start. This check will timeout in $timeout seconds ...\r\c"
|
||||||
echo -ne "Waiting for all containers to start. This check will timeout in $timeout seconds...\r\c"
|
|
||||||
fi
|
fi
|
||||||
((timeout--))
|
((timeout--))
|
||||||
sleep 1
|
sleep 1
|
||||||
@@ -253,14 +263,18 @@ bye() { # Prints a friendly good bye message and exits the script.
|
|||||||
if [ "$?" -ne 0 ]; then
|
if [ "$?" -ne 0 ]; then
|
||||||
set +o errexit
|
set +o errexit
|
||||||
|
|
||||||
echo "The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
|
echo "🔴 The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
|
||||||
echo ""
|
echo ""
|
||||||
echo -e "sudo docker-compose -f docker/docker-compose-tiny.yaml ps -a"
|
if [ $setup_type == 'clickhouse' ]; then
|
||||||
|
echo -e "sudo docker-compose -f docker/clickhouse-setup/docker-compose.yaml ps -a"
|
||||||
|
else
|
||||||
|
echo -e "sudo docker-compose -f docker/druid-kafka-setup/docker-compose-tiny.yaml ps -a"
|
||||||
|
fi
|
||||||
# echo "Please read our troubleshooting guide https://signoz.io/docs/deployment/docker#troubleshooting"
|
# echo "Please read our troubleshooting guide https://signoz.io/docs/deployment/docker#troubleshooting"
|
||||||
echo "or reach us on SigNoz for support https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
|
echo "or reach us on SigNoz for support https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
|
||||||
echo "++++++++++++++++++++++++++++++++++++++++"
|
echo "++++++++++++++++++++++++++++++++++++++++"
|
||||||
|
|
||||||
echo "Please share your email to receive support with the installation"
|
echo -e "\n📨 Please share your email to receive support with the installation"
|
||||||
read -rp 'Email: ' email
|
read -rp 'Email: ' email
|
||||||
|
|
||||||
while [[ $email == "" ]]
|
while [[ $email == "" ]]
|
||||||
@@ -268,7 +282,7 @@ bye() { # Prints a friendly good bye message and exits the script.
         read -rp 'Email: ' email
     done

-    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Support", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "email": "'"$email"'" } }'
+    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Support", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "email": "'"$email"'", "setup_type": "'"$setup_type"'" } }'
     URL="https://app.posthog.com/capture"
     HEADER="Content-Type: application/json"

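Each of these PostHog payloads is posted with wget further down in the script. For anyone exercising the telemetry endpoint by hand, the equivalent curl call would be (curl usage here is an assumption; the script itself sticks to wget):

    curl -s -X POST -H "Content-Type: application/json" -d "$DATA" "$URL" > /dev/null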
@@ -294,17 +308,39 @@ echo ""
 # Checking OS and assigning package manager
 desired_os=0
 os=""
-echo -e "🕵️ Detecting your OS"
+echo -e "Detecting your OS ..."
 check_os


 SIGNOZ_INSTALLATION_ID=$(curl -s 'https://api64.ipify.org')

+echo ""
+
+echo -e "👉 ${RED}Two ways to go forward\n"
+echo -e "${RED}1) ClickHouse as database (default)\n"
+echo -e "${RED}2) Kafka + Druid setup to handle scale (recommended for production use)\n"
+read -p "⚙️ Enter your preference (1/2):" choice_setup
+
+while [[ $choice_setup != "1" && $choice_setup != "2" && $choice_setup != "" ]]
+do
+    # echo $choice_setup
+    echo -e "\n❌ ${CYAN}Please enter either 1 or 2"
+    read -p "⚙️ Enter your preference (1/2): " choice_setup
+    # echo $choice_setup
+done
+
+if [[ $choice_setup == "1" || $choice_setup == "" ]];then
+    setup_type='clickhouse'
+else
+    setup_type='druid'
+fi
+
+echo -e "\n✅ ${CYAN}You have chosen: ${setup_type} setup\n"
+
 # Run bye if failure happens
 trap bye EXIT


-DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Started", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'" } }'
+DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Started", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "setup_type": "'"$setup_type"'" } }'
 URL="https://app.posthog.com/capture"
 HEADER="Content-Type: application/json"

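Note: the ${RED} and ${CYAN} escape variables used by the new prompt are defined earlier in install.sh, outside this hunk. Typical definitions look like the sketch below; the exact values are an assumption, not part of this diff:

    # Assumed ANSI color definitions (declared elsewhere in install.sh, not in this hunk):
    RED='\033[0;31m'
    CYAN='\033[1;36m'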
@@ -316,7 +352,7 @@ fi


 if [[ $desired_os -eq 0 ]];then
-    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "OS Not Supported" } }'
+    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "OS Not Supported", "setup_type": "'"$setup_type"'" } }'
     URL="https://app.posthog.com/capture"
     HEADER="Content-Type: application/json"

@@ -340,7 +376,7 @@ if ! is_command_present docker; then
     echo "Docker Desktop must be installed manually on Mac OS to proceed. Docker can only be installed automatically on Ubuntu / openSUSE / SLES / Redhat / Cent OS"
     echo "https://docs.docker.com/docker-for-mac/install/"
     echo "++++++++++++++++++++++++++++++++++++++++++++++++"
-    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Docker not installed" } }'
+    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Docker not installed", "setup_type": "'"$setup_type"'" } }'
     URL="https://app.posthog.com/capture"
     HEADER="Content-Type: application/json"

@@ -358,43 +394,59 @@ if ! is_command_present docker-compose; then
     install_docker_compose
 fi

-# if ! is_command_present docker-compose; then
-#     install_docker_machine
-#     docker-machine create -d virtualbox --virtualbox-memory 3584 signoz
-
-# fi


 start_docker


+# sudo docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml up -d --remove-orphans || true


 echo ""
-echo "Pulling the latest container images for SigNoz. To run as sudo it will ask for system password."
+echo -e "\n🟡 Pulling the latest container images for SigNoz. To run as sudo it may ask for system password\n"
-sudo docker-compose -f ./docker/docker-compose-tiny.yaml pull
+if [ $setup_type == 'clickhouse' ]; then
+    sudo docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml pull
+else
+    sudo docker-compose -f ./docker/druid-kafka-setup/docker-compose-tiny.yaml pull
+fi


 echo ""
-echo "Starting the SigNoz containers. It may take a few minute ..."
+echo "🟡 Starting the SigNoz containers. It may take a few minutes ..."
 echo
 # The docker-compose command does some nasty stuff for the `--detach` functionality. So we add a `|| true` so that the
 # script doesn't exit because this command looks like it failed to do it's thing.
-sudo docker-compose -f ./docker/docker-compose-tiny.yaml up --detach --remove-orphans || true
+if [ $setup_type == 'clickhouse' ]; then
+    sudo docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml up --detach --remove-orphans || true
+else
+    sudo docker-compose -f ./docker/druid-kafka-setup/docker-compose-tiny.yaml up --detach --remove-orphans || true
+fi

 wait_for_containers_start 60
 echo ""

 if [[ $status_code -ne 200 ]]; then
     echo "+++++++++++ ERROR ++++++++++++++++++++++"
-    echo "The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
+    echo "🔴 The containers didn't seem to start correctly. Please run the following command to check containers that may have errored out:"
     echo ""
-    echo -e "sudo docker-compose -f docker/docker-compose-tiny.yaml ps -a"
-    echo "Please read our troubleshooting guide https://signoz.io/docs/deployment/docker#troubleshooting"
+    if [ $setup_type == 'clickhouse' ]; then
+        echo -e "sudo docker-compose -f docker/clickhouse-setup/docker-compose.yaml ps -a"
+    else
+        echo -e "sudo docker-compose -f docker/druid-kafka-setup/docker-compose-tiny.yaml ps -a"
+    fi
+    echo "Please read our troubleshooting guide https://signoz.io/docs/deployment/docker/#troubleshooting-of-common-issues"
     echo "or reach us on SigNoz for support https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
     echo "++++++++++++++++++++++++++++++++++++++++"

+    if [ $setup_type == 'clickhouse' ]; then
+        DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error - Checks", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Containers not started", "data": "some_checks", "setup_type": "'"$setup_type"'" } }'
+    else
     SUPERVISORS="$(curl -so - http://localhost:8888/druid/indexer/v1/supervisor)"

     DATASOURCES="$(curl -so - http://localhost:8888/druid/coordinator/v1/datasources)"

-    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error - Checks", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Containers not started", "SUPERVISORS": '"$SUPERVISORS"', "DATASOURCES": '"$DATASOURCES"' } }'
+    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Error - Checks", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "error": "Containers not started", "SUPERVISORS": '"$SUPERVISORS"', "DATASOURCES": '"$DATASOURCES"', "setup_type": "'"$setup_type"'" } }'
+    fi

     URL="https://app.posthog.com/capture"
     HEADER="Content-Type: application/json"
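The same clickhouse/druid branch now appears at every docker-compose call site (pull, up, ps, down). A minimal sketch of one way to collapse that duplication, using a hypothetical COMPOSE_FILE variable that this diff does not introduce:

    # Hypothetical refactor: resolve the compose file once, reuse it everywhere.
    if [ "$setup_type" = "clickhouse" ]; then
        COMPOSE_FILE="./docker/clickhouse-setup/docker-compose.yaml"
    else
        COMPOSE_FILE="./docker/druid-kafka-setup/docker-compose-tiny.yaml"
    fi

    sudo docker-compose -f "$COMPOSE_FILE" pull
    sudo docker-compose -f "$COMPOSE_FILE" up --detach --remove-orphans || true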
@@ -408,7 +460,7 @@ if [[ $status_code -ne 200 ]]; then
     exit 1

 else
-    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Success", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'"} }'
+    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Installation Success", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'"}, "setup_type": "'"$setup_type"'" }'
     URL="https://app.posthog.com/capture"
     HEADER="Content-Type: application/json"

@@ -418,17 +470,25 @@ else
         wget -q --post-data="$DATA" --header="$HEADER" "$URL" > /dev/null 2>&1
     fi
     echo "++++++++++++++++++ SUCCESS ++++++++++++++++++++++"
-    echo "Your installation is complete!"
     echo ""
-    echo "Your frontend is running on 'http://localhost:3000'."
+    echo "🟢 Your installation is complete!"
+    echo ""
+    echo -e "🟢 Your frontend is running on http://localhost:3000"
+    echo ""

+    if [ $setup_type == 'clickhouse' ]; then
+        echo "ℹ️ To bring down SigNoz and clean volumes : sudo docker-compose -f docker/clickhouse-setup/docker-compose.yaml down -v"
+    else
+        echo "ℹ️ To bring down SigNoz and clean volumes : sudo docker-compose -f docker/druid-kafka-setup/docker-compose-tiny.yaml down -v"
+    fi

     echo ""
     echo "+++++++++++++++++++++++++++++++++++++++++++++++++"
     echo ""
-    echo "Need help Getting Started?"
+    echo "👉 Need help Getting Started?"
-    echo "Join us on Slack https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
+    echo -e "Join us on Slack https://join.slack.com/t/signoz-community/shared_invite/zt-lrjknbbp-J_mI13rlw8pGF4EWBnorJA"
     echo ""
-    echo "Please share your email to receive support & updates about SigNoz!"
+    echo -e "\n📨 Please share your email to receive support & updates about SigNoz!"
     read -rp 'Email: ' email

     while [[ $email == "" ]]
@@ -436,7 +496,7 @@ else
         read -rp 'Email: ' email
     done

-    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Identify Successful Installation", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "email": "'"$email"'" } }'
+    DATA='{ "api_key": "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w", "type": "capture", "event": "Identify Successful Installation", "distinct_id": "'"$SIGNOZ_INSTALLATION_ID"'", "properties": { "os": "'"$os"'", "email": "'"$email"'", "setup_type": "'"$setup_type"'" } }'
     URL="https://app.posthog.com/capture"
     HEADER="Content-Type: application/json"

@@ -448,28 +508,4 @@ else

 fi

-echo -e "\nThank you!\n"
+echo -e "\n🙏 Thank you!\n"


-##### Changing default memory limit of docker ############
-# # Check if memory is less and Confirm to increase size of docker machine
-# # https://github.com/docker/machine/releases
-# # On OS X
-
-# $ curl -L https://github.com/docker/machine/releases/download/v0.16.2/docker-machine-`uname -s`-`uname -m` >/usr/local/bin/docker-machine && \
-#   chmod +x /usr/local/bin/docker-machine
-# # On Linux
-
-# $ curl -L https://github.com/docker/machine/releases/download/v0.16.2/docker-machine-`uname -s`-`uname -m` >/tmp/docker-machine &&
-#   chmod +x /tmp/docker-machine &&
-#   sudo cp /tmp/docker-machine /usr/local/bin/docker-machine
-
-# VBoxManage list vms
-# docker-machine stop
-# VBoxManage modifyvm default --cpus 2
-# VBoxManage modifyvm default --memory 4096
-# docker-machine start
-
-# VBoxManage showvminfo default | grep Memory
-# VBoxManage showvminfo default | grep CPU

@@ -25,6 +25,13 @@ data:
             "ServiceName",
             "References",
             "Tags",
+            "ExternalHttpMethod",
+            "ExternalHttpUrl",
+            "Component",
+            "DBSystem",
+            "DBName",
+            "DBOperation",
+            "PeerService",
             {
               "type": "string",
               "name": "TagsKeys",
@@ -66,3 +73,4 @@ data:
         }
       }
     }
+

@@ -8,14 +8,14 @@ metadata:
 data:
   otel-collector-config: |
     receivers:
-      jaeger:
-        protocols:
-          grpc:
-          thrift_http:
       otlp:
         protocols:
           grpc:
           http:
+      jaeger:
+        protocols:
+          grpc:
+          thrift_http:
     processors:
       batch:
         send_batch_size: 1000
@@ -36,9 +36,16 @@ data:
       health_check: {}
       zpages: {}
     exporters:
-      kafka:
+      kafka/traces:
         brokers:
           - signoz-kafka:9092
+        topic: 'otlp_spans'
+        protocol_version: 2.0.0
+
+      kafka/metrics:
+        brokers:
+          - signoz-kafka:9092
+        topic: 'otlp_metrics'
         protocol_version: 2.0.0
     service:
       extensions: [health_check, zpages]
@@ -46,8 +53,8 @@ data:
         traces:
           receivers: [jaeger, otlp]
           processors: [memory_limiter, batch, queued_retry]
-          exporters: [kafka]
+          exporters: [kafka/traces]
         metrics:
           receivers: [otlp]
           processors: [batch]
-          exporters: [kafka]
+          exporters: [kafka/metrics]
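Read together with the exporter hunk above, the collector now routes each signal to its own Kafka topic instead of a single shared kafka exporter. A consolidated view of the resulting configuration (indentation reconstructed; standard otel-collector layout assumed):

    exporters:
      kafka/traces:
        brokers:
          - signoz-kafka:9092
        topic: 'otlp_spans'
        protocol_version: 2.0.0
      kafka/metrics:
        brokers:
          - signoz-kafka:9092
        topic: 'otlp_metrics'
        protocol_version: 2.0.0
    service:
      pipelines:
        traces:
          exporters: [kafka/traces]
        metrics:
          exporters: [kafka/metrics]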
@@ -13,9 +13,9 @@ dependencies:
   version: 0.2.0
 - name: query-service
   repository: file://./signoz-charts/query-service
-  version: 0.2.2
+  version: 0.3.1
 - name: frontend
   repository: file://./signoz-charts/frontend
-  version: 0.2.3
+  version: 0.3.1
-digest: sha256:31c8e3a8a4c89d0e6071c6687f074e88b3eed8ce86310314e5b6f94e5d5017be
+digest: sha256:ed5735a81c416a15b1e498f86a2ddb550ca0da9f5f445891561be0ef5d01b3b2
-generated: "2021-05-18T16:54:30.24831+05:30"
+generated: "2021-06-08T22:35:14.109626+05:30"

@@ -37,7 +37,7 @@ dependencies:
   version: 0.2.0
 - name: query-service
   repository: "file://./signoz-charts/query-service"
-  version: 0.2.2
+  version: 0.3.1
 - name: frontend
   repository: "file://./signoz-charts/frontend"
-  version: 0.2.3
+  version: 0.3.1
@@ -14,8 +14,8 @@ type: application

 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
-version: 0.2.3
+version: 0.3.1

 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application.
-appVersion: 0.2.3
+appVersion: 0.3.1

@@ -9,6 +9,16 @@ data:
     server {
         listen {{ .Values.service.port }};
         server_name _;

+        gzip on;
+        gzip_static on;
+        gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
+        gzip_proxied any;
+        gzip_vary on;
+        gzip_comp_level 6;
+        gzip_buffers 16 8k;
+        gzip_http_version 1.1;
+
         location / {
             root /usr/share/nginx/html;
             index index.html index.htm;

@@ -14,8 +14,8 @@ type: application

 # This is the chart version. This version number should be incremented each time you make changes
 # to the chart and its templates, including the app version.
-version: 0.2.2
+version: 0.3.1

 # This is the version number of the application being deployed. This version number should be
 # incremented each time you make changes to the application.
-appVersion: 0.2.2
+appVersion: 0.3.1

@@ -36,7 +36,8 @@ spec:
           value: {{ .Values.configVars.DruidClientUrl }}
         - name: DruidDatasource
           value: {{ .Values.configVars.DruidDatasource }}
+        - name: STORAGE
+          value: {{ .Values.configVars.STORAGE }}

       # livenessProbe:
       #   httpGet:

@@ -16,6 +16,7 @@ fullnameOverride: ""
 configVars:
   DruidClientUrl: http://signoz-druid-router:8888
   DruidDatasource: flattened_spans
+  STORAGE: druid
   POSTHOG_API_KEY: "H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w"

@@ -10,6 +10,9 @@ kafka:
   zookeeperConnectionTimeoutMs: 6000

 druid:
+  image:
+    tag: 0.21.1-rc2
+
   configVars:

     # To store data on local disks attached
@@ -45,3 +48,4 @@ query-service:
   configVars:
     DruidClientUrl: http://signoz-druid-router:8888
     DruidDatasource: flattened_spans
+    STORAGE: druid

@@ -2,6 +2,15 @@ server {
     listen 3000;
     server_name _;

+    gzip on;
+    gzip_static on;
+    gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
+    gzip_proxied any;
+    gzip_vary on;
+    gzip_comp_level 6;
+    gzip_buffers 16 8k;
+    gzip_http_version 1.1;
+
     location / {
         root /usr/share/nginx/html;
         index index.html index.htm;

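The same gzip block lands in both the Helm frontend configmap above and this nginx config. One quick way to confirm compression is actually served, assuming the frontend is reachable on localhost:3000:

    curl -sI -H 'Accept-Encoding: gzip' http://localhost:3000 | grep -i content-encoding
    # Expected output: content-encoding: gzip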
@@ -15,7 +15,6 @@
     "@ant-design/icons": "^4.6.2",
     "@auth0/auth0-react": "^1.2.0",
     "@babel/core": "7.12.3",
-    "@material-ui/core": "^4.0.0",
     "@pmmmwh/react-refresh-webpack-plugin": "0.4.2",
     "@svgr/webpack": "5.4.0",
     "@testing-library/jest-dom": "^5.11.4",

@@ -48,8 +47,6 @@
     "chart.js": "^2.9.4",
     "css-loader": "4.3.0",
     "d3": "^6.2.0",
-    "d3-array": "^2.8.0",
-    "d3-ease": "^2.0.0",
     "d3-flame-graph": "^3.1.1",
     "d3-tip": "^0.9.1",
     "dotenv": "8.2.0",

@@ -72,11 +69,7 @@
     "jest-circus": "26.6.0",
     "jest-resolve": "26.6.0",
     "jest-watch-typeahead": "0.6.1",
-    "material-ui-chip-input": "^2.0.0-beta.2",
-    "mini-css-extract-plugin": "0.11.3",
-    "optimize-css-assets-webpack-plugin": "5.0.4",
     "pnp-webpack-plugin": "1.6.4",
-    "postcss-flexbugs-fixes": "4.2.1",
     "postcss-loader": "3.0.0",
     "postcss-normalize": "8.0.1",
     "postcss-preset-env": "6.7.0",

@@ -96,7 +89,6 @@
     "react-refresh": "^0.8.3",
     "react-router-dom": "^5.2.0",
     "react-vis": "^1.11.7",
-    "recharts": "^1.8.5",
     "redux": "^4.0.5",
     "redux-thunk": "^2.3.0",
     "resolve": "1.18.1",

@@ -142,6 +134,7 @@
     "@babel/preset-typescript": "^7.12.17",
     "autoprefixer": "^9.0.0",
     "babel-plugin-styled-components": "^1.12.0",
+    "compression-webpack-plugin": "^8.0.0",
     "copy-webpack-plugin": "^7.0.0",
     "gulp": "^4.0.2",
     "gulp-csso": "^4.0.1",

@@ -1,18 +1,8 @@
 @import "~antd/dist/antd.dark.css";
-@import "~antd/dist/antd.compact.css";

 .ant-space-item {
   margin-right: 0 !important;
 }
-/* #components-layout-demo-side .logo {
-  height: 32px;
-  margin: 16px;
-  background: rgba(255, 255, 255, 0.3);
-}
-
-.site-layout .site-layout-background {
-  background: #fff;
-} */
 .instrument-card{
   border-radius: 4px;
   background: #313131;

@@ -17,6 +17,7 @@ import {
   SettingsPage,
   IntstrumentationPage,
 } from "Src/pages";
+import { RouteProvider } from "./RouteProvider";

 const App = () => {
   const { status } = useThemeSwitcher();

@@ -30,10 +31,16 @@ const App = () => {
       <Suspense fallback={<Spin size="large" />}>
         <Route path={"/"}>
           <Switch>
+            <RouteProvider>
               <BaseLayout>
+                <Suspense fallback={<Spin size="large" />}>
                   <Route path={ROUTES.SIGN_UP} exact component={Signup} />
                   <Route path={ROUTES.APPLICATION} exact component={ServicesTable} />
-                  <Route path={ROUTES.SERVICE_METRICS} exact component={ServiceMetrics} />
+                  <Route
+                    path={ROUTES.SERVICE_METRICS}
+                    exact
+                    component={ServiceMetrics}
+                  />
                   <Route path={ROUTES.SERVICE_MAP} exact component={ServiceMap} />
                   <Route path={ROUTES.TRACES} exact component={TraceDetail} />
                   <Route path={ROUTES.TRACE_GRAPH} exact component={TraceGraph} />

@@ -59,7 +66,10 @@ const App = () => {
                     );
                   }}
                 />
+                </Suspense>
+
               </BaseLayout>
+            </RouteProvider>
           </Switch>
         </Route>
       </Suspense>

@@ -1,8 +1,11 @@
-import React, { ReactNode } from "react";
+import React, { ReactNode, useEffect } from "react";

 import { Layout } from "antd";
 import SideNav from "./Nav/SideNav";
 import TopNav from "./Nav/TopNav";
+import { useLocation } from "react-router-dom";
+import { useRoute } from "./RouteProvider";

 const { Content, Footer } = Layout;

 interface BaseLayoutProps {

@@ -10,6 +13,13 @@ interface BaseLayoutProps {
 }

 const BaseLayout: React.FC<BaseLayoutProps> = ({ children }) => {
+  const location = useLocation();
+  const { dispatch } = useRoute();
+
+  useEffect(() => {
+    dispatch({ type: "ROUTE_IS_LOADED", payload: location.pathname });
+  }, [location]);
+
   return (
     <Layout style={{ minHeight: "100vh" }}>
       <SideNav />

@@ -6,7 +6,7 @@ import { RouteComponentProps } from "react-router-dom";
 import styled from "styled-components";
 import ROUTES from "Src/constants/routes";

-import { metricItem } from "../../store/actions/metrics";
+import { metricItem } from "../../store/actions/MetricsActions";

 const ChartPopUpUnique = styled.div<{
   ycoordinate: number;

@@ -55,7 +55,7 @@ class ErrorRateChart extends React.Component<ErrorRateChartProps> {
     xcoordinate: 0,
     ycoordinate: 0,
     showpopUp: false,
-    firstpoint_ts: 0
+    firstpoint_ts: 0,
     // graphInfo:{}
   };

@@ -1,11 +1,9 @@
 import React from "react";
 import { Line as ChartJSLine } from "react-chartjs-2";
-import { ChartOptions } from "chart.js";
 import { withRouter } from "react-router";
 import { RouteComponentProps } from "react-router-dom";
-import styled from "styled-components";
 import { getOptions, borderColors } from "./graphConfig";
-import { externalMetricsItem } from "../../store/actions/metrics";
+import { externalMetricsItem } from "../../../store/actions/MetricsActions";
 import { uniqBy, filter } from "lodash";

 const theme = "dark";

@@ -2,7 +2,7 @@ import React from "react";
 import { Bar, Line as ChartJSLine } from "react-chartjs-2";
 import styled from "styled-components";

-import { customMetricsItem } from "../../store/actions/metrics";
+import { customMetricsItem } from "../../store/actions/MetricsActions";

 const GenVisualizationWrapper = styled.div`
   height: 160px;

@@ -4,9 +4,7 @@ import { ChartOptions } from "chart.js";
 import { withRouter } from "react-router";
 import { RouteComponentProps } from "react-router-dom";
 import styled from "styled-components";
-import ROUTES from "Src/constants/routes";
+import { metricItem } from "../../store/actions/MetricsActions";

-import { metricItem } from "../../store/actions/metrics";

 const ChartPopUpUnique = styled.div<{
   ycoordinate: number;

@@ -39,11 +37,8 @@ interface LatencyLineChartProps extends RouteComponentProps<any> {
   popupClickHandler: Function;
 }

-interface LatencyLineChart {
-  chartRef: any;
-}
-
 class LatencyLineChart extends React.Component<LatencyLineChartProps> {
+  private chartRef: React.RefObject<HTMLElement>;
   constructor(props: LatencyLineChartProps) {
     super(props);
     this.chartRef = React.createRef();

@@ -54,7 +49,6 @@ class LatencyLineChart extends React.Component<LatencyLineChartProps> {
     ycoordinate: 0,
     showpopUp: false,
     firstpoint_ts: 0,
-    // graphInfo:{}
   };

   onClickhandler = async (e: any, event: any) => {

@@ -69,7 +63,6 @@ class LatencyLineChart extends React.Component<LatencyLineChartProps> {
         ycoordinate: e.offsetY,
         showpopUp: true,
         firstpoint_ts: this.props.data[firstPoint._index].timestamp,
-        // graphInfo:{...event}
       });
     } else {
       // if clicked outside of the graph line, then firstpoint is undefined -> close popup.

@@ -80,15 +73,6 @@ class LatencyLineChart extends React.Component<LatencyLineChartProps> {
     }
   };

-  gotoTracesHandler = (xc: any) => {
-    this.props.history.push(ROUTES.TRACES);
-  };
-
-  gotoAlertsHandler = () => {
-    this.props.history.push(ROUTES.SERVICE_MAP);
-    // PNOTE - Keeping service map for now, will replace with alerts when alert page is made
-  };
-
   options_charts: ChartOptions = {
     onClick: this.onClickhandler,

@@ -161,9 +145,6 @@ class LatencyLineChart extends React.Component<LatencyLineChartProps> {
       xAxes: [
         {
           type: "time",
-          // time: {
-          //   unit: 'second'
-          // },
           distribution: "linear",
           //'linear': data are spread according to their time (distances can vary)
           // From https://www.chartjs.org/docs/latest/axes/cartesian/time.html

@@ -193,7 +174,6 @@ class LatencyLineChart extends React.Component<LatencyLineChartProps> {
         >
           View Traces
         </PopUpElements>
-        {/* <PopUpElements onClick={this.gotoAlertsHandler}>Set Alerts</PopUpElements> */}
       </ChartPopUpUnique>
     );
   } else return null;

@@ -239,7 +219,7 @@ class LatencyLineChart extends React.Component<LatencyLineChartProps> {
     <div>
       {this.GraphTracePopUp()}
       <div>
-        <div style={{textAlign: "center"}}>Application latency in ms</div>
+        <div style={{ textAlign: "center" }}>Application latency in ms</div>
         <ChartJSLine
           ref={this.chartRef}
           data={data_chartJS}

@@ -5,7 +5,7 @@ import { withRouter } from "react-router";
 import { RouteComponentProps } from "react-router-dom";
 import styled from "styled-components";

-import { metricItem } from "../../store/actions/metrics";
+import { metricItem } from "../../store/actions/MetricsActions";
 import ROUTES from "Src/constants/routes";

 const ChartPopUpUnique = styled.div<{

@@ -4,23 +4,24 @@ import { connect } from "react-redux";
 import { useParams, RouteComponentProps } from "react-router-dom";
 import { withRouter } from "react-router";
 import ROUTES from "Src/constants/routes";
+import { GlobalTime, updateTimeInterval } from "Src/store/actions";
 import {
-  getServicesMetrics,
   metricItem,
-  getTopEndpoints,
-  getDbOverViewMetrics,
-  getExternalMetrics,
   externalMetricsAvgDurationItem,
   externalErrCodeMetricsItem,
   externalMetricsItem,
   dbOverviewMetricsItem,
+  topEndpointListItem,
+} from "../../store/actions/MetricsActions";
+import {
+  getServicesMetrics,
+  getTopEndpoints,
+  getDbOverViewMetrics,
+  getExternalMetrics,
   getExternalAvgDurationMetrics,
   getExternalErrCodeMetrics,
-  topEndpointListItem,
+} from "../../store/actions/MetricsActions";
-  GlobalTime,
-  updateTimeInterval,
-} from "Src/store/actions";
 import { StoreState } from "../../store/reducers";
 import LatencyLineChart from "./LatencyLineChart";
 import RequestRateChart from "./RequestRateChart";
@@ -223,13 +224,13 @@ const mapStateToProps = (
   globalTime: GlobalTime;
 } => {
   return {
-    externalErrCodeMetrics: state.externalErrCodeMetrics,
+    externalErrCodeMetrics: state.metricsData.externalErrCodeMetricsItem,
-    serviceMetrics: state.serviceMetrics,
+    serviceMetrics: state.metricsData.metricItems,
-    topEndpointsList: state.topEndpointsList,
+    topEndpointsList: state.metricsData.topEndpointListItem,
-    externalMetrics: state.externalMetrics,
+    externalMetrics: state.metricsData.externalMetricsItem,
     globalTime: state.globalTime,
-    dbOverviewMetrics: state.dbOverviewMetrics,
+    dbOverviewMetrics: state.metricsData.dbOverviewMetricsItem,
-    externalAvgDurationMetrics: state.externalAvgDurationMetrics,
+    externalAvgDurationMetrics: state.metricsData.externalMetricsAvgDurationItem,
   };
 };

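These selector changes imply that all metric items now live under a single metricsData slice of the store. A minimal sketch of the state shape the new selectors assume; the field names are inferred from the selectors in this diff, and the actual reducer in store/reducers is not shown here:

    // Hypothetical sketch of the metricsData slice implied by the selectors above.
    interface MetricsDataState {
      metricItems: metricItem[];
      topEndpointListItem: topEndpointListItem[];
      externalMetricsItem: externalMetricsItem[];
      externalErrCodeMetricsItem: externalErrCodeMetricsItem[];
      externalMetricsAvgDurationItem: externalMetricsAvgDurationItem[];
      dbOverviewMetricsItem: dbOverviewMetricsItem[];
      customMetricsItem: customMetricsItem[];
      serviceList: servicesListItem[];
    }

    interface StoreState {
      metricsData: MetricsDataState;
      globalTime: GlobalTime;
      // ...other slices unchanged by this diff
    }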
@@ -1,17 +1,12 @@
 import React, { useEffect, useState } from "react";
-import { useLocation } from "react-router-dom";
 import { NavLink } from "react-router-dom";
 import { Button, Space, Spin, Table } from "antd";
 import styled from "styled-components";
 import { connect } from "react-redux";
 import { SKIP_ONBOARDING } from "Src/constants/onboarding";
 import ROUTES from "Src/constants/routes";
+import { getServicesList, GlobalTime } from "../../store/actions";
-import {
+import { servicesListItem } from "../../store/actions/MetricsActions";
-  getServicesList,
-  GlobalTime,
-  servicesListItem,
-} from "../../store/actions";
 import { StoreState } from "../../store/reducers";
 import { CustomModal } from "../../components/Modal";

@@ -75,7 +70,7 @@ const columns = [
     key: "errorRate",
     sorter: (a: any, b: any) => a.errorRate - b.errorRate,
     // sortDirections: ['descend', 'ascend'],
-    render: (value: number) => (value).toFixed(2),
+    render: (value: number) => value.toFixed(2),
   },
   {
     title: "Requests Per Second",

@@ -88,8 +83,6 @@ const columns = [
 ];

 const _ServicesTable = (props: ServicesTableProps) => {
-  const search = useLocation().search;
-  const time_interval = new URLSearchParams(search).get("time");
   const [initialDataFetch, setDataFetched] = useState(false);
   const [errorObject, setErrorObject] = useState({
     message: "",

@@ -210,7 +203,10 @@ const _ServicesTable = (props: ServicesTableProps) => {
 const mapStateToProps = (
   state: StoreState,
 ): { servicesList: servicesListItem[]; globalTime: GlobalTime } => {
-  return { servicesList: state.servicesList, globalTime: state.globalTime };
+  return {
+    servicesList: state.metricsData.serviceList,
+    globalTime: state.globalTime,
+  };
 };

 export const ServicesTable = connect(mapStateToProps, {

12 frontend/src/modules/Metrics/TopEndpointsTable.css Normal file
@@ -0,0 +1,12 @@
+@media only screen and (min-width: 768px) {
+  .topEndpointsButton {
+    white-space: nowrap;
+    padding: 0;
+  }
+
+  .topEndpointsButton span {
+    text-overflow: ellipsis;
+    overflow: hidden;
+    max-width: 120px;
+  }
+}
@@ -1,18 +1,22 @@
 import React from "react";
-import { Table, Button } from "antd";
+import { Table, Button, Tooltip } from "antd";
 import { connect } from "react-redux";
 import styled from "styled-components";
 import { useHistory, useParams } from "react-router-dom";
-import { topEndpointListItem } from "../../store/actions/metrics";
+import { topEndpointListItem } from "../../store/actions/MetricsActions";
 import { METRICS_PAGE_QUERY_PARAM } from "Src/constants/query";
 import { GlobalTime } from "Src/store/actions";
 import { StoreState } from "Src/store/reducers";
+import "./TopEndpointsTable.css";

 const Wrapper = styled.div`
   padding-top: 10px;
   padding-bottom: 10px;
-  padding-left: 20px;
+  padding-left: 8px;
-  padding-right: 20px;
+  padding-right: 8px;
+  @media only screen and (max-width: 767px) {
+    padding: 0;
+  }
   .ant-table table {
     font-size: 12px;
   }

@@ -22,6 +26,9 @@ const Wrapper = styled.div`
   .ant-table-thead > tr > th {
     padding: 10px;
   }
+  .ant-table-column-sorters {
+    padding: 6px;
+  }
 `;

 interface TopEndpointsTableProps {

@@ -58,9 +65,15 @@ const _TopEndpointsTable = (props: TopEndpointsTableProps) => {
       key: "name",

       render: (text: string) => (
-        <Button type="link" onClick={() => handleOnClick(text)}>
+        <Tooltip placement="topLeft" title={text}>
+          <Button
+            className="topEndpointsButton"
+            type="link"
+            onClick={() => handleOnClick(text)}
+          >
             {text}
           </Button>
+        </Tooltip>
       ),
     },
     {

@@ -72,10 +85,10 @@ const _TopEndpointsTable = (props: TopEndpointsTableProps) => {
       render: (value: number) => (value / 1000000).toFixed(2),
     },
     {
-      title: "P90 (in ms)",
+      title: "P95 (in ms)",
-      dataIndex: "p90",
+      dataIndex: "p95",
-      key: "p90",
+      key: "p95",
-      sorter: (a: any, b: any) => a.p90 - b.p90,
+      sorter: (a: any, b: any) => a.p95 - b.p95,
       // sortDirections: ['descend', 'ascend'],
       render: (value: number) => (value / 1000000).toFixed(2),
     },

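The new CSS file above clips long endpoint names with an ellipsis, and the Tooltip wrapper restores access to the full text on hover. Condensed from this diff, the two pieces work together like this:

    // Truncated label (via the .topEndpointsButton span ellipsis CSS),
    // full endpoint name shown in the antd Tooltip on hover.
    <Tooltip placement="topLeft" title={text}>
      <Button className="topEndpointsButton" type="link" onClick={() => handleOnClick(text)}>
        {text}
      </Button>
    </Tooltip>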
83 frontend/src/modules/RouteProvider.tsx Normal file
@@ -0,0 +1,83 @@
+import React, { useContext, createContext, ReactNode, Dispatch } from "react";
+import ROUTES from "Src/constants/routes";
+
+type State = {
+  [key: string]: {
+    route: string;
+    isLoaded: boolean;
+  };
+};
+
+enum ActionTypes {
+  UPDATE_IS_LOADED = "ROUTE_IS_LOADED",
+}
+
+type Action = {
+  type: ActionTypes;
+  payload: string;
+};
+
+interface ContextType {
+  state: State;
+  dispatch: Dispatch<Action>;
+}
+
+const RouteContext = createContext<ContextType | null>(null);
+
+interface RouteProviderProps {
+  children: ReactNode;
+}
+interface RouteObj {
+  [key: string]: {
+    route: string;
+    isLoaded: boolean;
+  };
+}
+
+const updateLocation = (state: State, action: Action): State => {
+  if (action.type === ActionTypes.UPDATE_IS_LOADED) {
+    /*
+      Update the isLoaded property in routes obj
+      if the route matches the current pathname
+
+      Why: Checkout this issue https://github.com/SigNoz/signoz/issues/110
+      To avoid calling the api's twice for Date picker,
+      We will only call once the route is changed
+    */
+    Object.keys(ROUTES).map((items) => {
+      state[items].isLoaded = state[items].route === action.payload;
+    });
+    return {
+      ...state,
+    };
+  }
+  return {
+    ...state,
+  };
+};
+
+const getInitialState = () => {
+  const routes: RouteObj = {};
+  Object.keys(ROUTES).map((items) => {
+    routes[items] = {
+      route: `${ROUTES[items]}`,
+      isLoaded: false,
+    };
+  });
+  return routes;
+};
+
+const RouteProvider: React.FC<RouteProviderProps> = ({ children }) => {
+  const [state, dispatch] = React.useReducer(updateLocation, getInitialState());
+  const value = { state, dispatch };
+  return <RouteContext.Provider value={value}>{children}</RouteContext.Provider>;
+};
+
+const useRoute = (): ContextType => {
+  const context = useContext(RouteContext);
+  if (context === undefined) {
+    throw new Error("useRoute must be used within a RouteProvider");
+  }
+  return context as ContextType;
+};
+export { RouteProvider, useRoute };
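A minimal usage sketch of the new context, mirroring how ServiceMap and TraceCustomVisualizations consume it later in this diff: fetches are gated so they fire only once the route has actually loaded (see SigNoz/signoz#110). One aside: since createContext is initialized with null, the undefined guard inside useRoute will not trip in practice when the hook is used outside a provider.

    import React, { useEffect } from "react";
    import { useRoute } from "../RouteProvider";

    // Sketch: only fetch once the SERVICE_MAP route reports loaded.
    const Example: React.FC = () => {
      const { state } = useRoute();

      useEffect(() => {
        if (state.SERVICE_MAP.isLoaded) {
          // trigger this route's API calls here
        }
      }, [state]);

      return null;
    };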
@@ -1,4 +1,4 @@
-import React, { useEffect, useRef, useState } from "react";
+import React, { useEffect, useRef } from "react";
 import { connect } from "react-redux";
 import { RouteComponentProps } from "react-router-dom";
 import {

@@ -14,6 +14,7 @@ import { StoreState } from "../../store/reducers";
 import { getZoomPx, getGraphData, getTooltip, transformLabel } from "./utils";
 import SelectService from "./SelectService";
 import { ForceGraph2D } from "react-force-graph";
+import { useRoute } from "../RouteProvider";

 const Container = styled.div`
   .force-graph-container .graph-tooltip {

@@ -53,6 +54,8 @@ export interface graphDataType {

 const ServiceMap = (props: ServiceMapProps) => {
   const fgRef = useRef();
+  const { state } = useRoute();
+
   const {
     getDetailedServiceMapItems,
     getServiceMapItems,

@@ -61,8 +64,14 @@ const ServiceMap = (props: ServiceMapProps) => {
   } = props;

   useEffect(() => {
+    /*
+      Call the apis only when the route is loaded.
+      Check this issue: https://github.com/SigNoz/signoz/issues/110
+    */
+    if (state.SERVICE_MAP.isLoaded) {
       getServiceMapItems(globalTime);
       getDetailedServiceMapItems(globalTime);
+    }
   }, [globalTime]);

   useEffect(() => {

@@ -83,6 +83,7 @@ export const getZoomPx = (): number => {
   } else if (width > 1700) {
     return 470;
   }
+  return 190;
 };

 export const getTooltip = (node: {

@@ -2,15 +2,11 @@ import React, { useState, useEffect } from "react";
|
|||||||
import GenericVisualizations from "../Metrics/GenericVisualization";
|
import GenericVisualizations from "../Metrics/GenericVisualization";
|
||||||
import { Select, Card, Space, Form } from "antd";
|
import { Select, Card, Space, Form } from "antd";
|
||||||
import { connect } from "react-redux";
|
import { connect } from "react-redux";
|
||||||
|
|
||||||
import { StoreState } from "../../store/reducers";
|
import { StoreState } from "../../store/reducers";
|
||||||
import {
|
import { GlobalTime, TraceFilters } from "../../store/actions";
|
||||||
customMetricsItem,
|
import { useRoute } from "../RouteProvider";
|
||||||
getFilteredTraceMetrics,
|
import { getFilteredTraceMetrics } from "../../store/actions/MetricsActions";
|
||||||
GlobalTime,
|
import { customMetricsItem } from "../../store/actions/MetricsActions";
|
||||||
TraceFilters,
|
|
||||||
} from "../../store/actions";
|
|
||||||
|
|
||||||
const { Option } = Select;
|
const { Option } = Select;
|
||||||
|
|
||||||
const entity = [
|
const entity = [
|
||||||
@@ -48,10 +44,10 @@ const aggregation_options = [
|
|||||||
{
|
{
|
||||||
linked_entity: "duration",
|
linked_entity: "duration",
|
||||||
default_selected: { title: "p99", dataindex: "p99" },
|
default_selected: { title: "p99", dataindex: "p99" },
|
||||||
// options_available: [ {title:'Avg', dataindex:'avg'}, {title:'Max', dataindex:'max'},{title:'Min', dataindex:'min'}, {title:'p50', dataindex:'p50'},{title:'p90', dataindex:'p90'}, {title:'p95', dataindex:'p95'}]
|
// options_available: [ {title:'Avg', dataindex:'avg'}, {title:'Max', dataindex:'max'},{title:'Min', dataindex:'min'}, {title:'p50', dataindex:'p50'},{title:'p95', dataindex:'p95'}, {title:'p95', dataindex:'p95'}]
|
||||||
options_available: [
|
options_available: [
|
||||||
{ title: "p50", dataindex: "p50" },
|
{ title: "p50", dataindex: "p50" },
|
||||||
{ title: "p90", dataindex: "p90" },
|
{ title: "p95", dataindex: "p95" },
|
||||||
{ title: "p99", dataindex: "p99" },
|
{ title: "p99", dataindex: "p99" },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
@@ -80,7 +76,10 @@ interface TraceCustomVisualizationsProps {
|
|||||||
const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {
|
const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {
|
||||||
const [selectedEntity, setSelectedEntity] = useState("calls");
|
const [selectedEntity, setSelectedEntity] = useState("calls");
|
||||||
const [selectedAggOption, setSelectedAggOption] = useState("count");
|
const [selectedAggOption, setSelectedAggOption] = useState("count");
|
||||||
const [selectedStep, setSelectedStep] = useState("60");
|
const { state } = useRoute();
|
||||||
|
const [form] = Form.useForm();
|
||||||
|
const selectedStep = "60";
|
||||||
|
|
||||||
// Step should be multiples of 60, 60 -> 1 min
|
// Step should be multiples of 60, 60 -> 1 min
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -109,21 +108,18 @@ const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {
			minTime: props.globalTime.minTime - 15 * 60 * 1000000000,
			maxTime: props.globalTime.maxTime + 15 * 60 * 1000000000,
		};

+		/*
+			Call the apis only when the route is loaded.
+			Check this issue: https://github.com/SigNoz/signoz/issues/110
+		*/
+		if (state.TRACES.isLoaded) {
			props.getFilteredTraceMetrics(request_string, plusMinus15);
+		}
	}, [selectedEntity, selectedAggOption, props.traceFilters, props.globalTime]);

	//Custom metrics API called if time, tracefilters, selected entity or agg option changes

-	const [form] = Form.useForm();
-
-	function handleChange(value: string) {
-		// console.log(value);
-	}
-
-	function handleFinish(value: string) {
-		// console.log(value);
-	}
-
	// PNOTE - Can also use 'coordinate' option in antd Select for implementing this - https://ant.design/components/select/
	const handleFormValuesChange = (changedValues: any) => {
		const formFieldName = Object.keys(changedValues)[0];
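The `if (state.TRACES.isLoaded)` gate above (repeated in `TraceFilter` and `UsageExplorer` later in this diff) keeps each page from firing its API calls before the route has finished loading, per issue #110. The `RouteProvider` internals are not part of this diff, so the following is only a minimal sketch, assuming a React context whose state carries an `isLoaded` flag per route:

import { createContext, useContext } from "react";

// Hypothetical state shape, matching how the components read it:
// state.TRACES.isLoaded and state.USAGE_EXPLORER.isLoaded.
interface RouteState {
	TRACES: { isLoaded: boolean };
	USAGE_EXPLORER: { isLoaded: boolean };
}

const RouteContext = createContext<{ state: RouteState } | null>(null);

// Components call useRoute() and gate their fetches on state.<ROUTE>.isLoaded.
export const useRoute = () => {
	const value = useContext(RouteContext);
	if (!value) {
		throw new Error("useRoute must be used within RouteProvider");
	}
	return value;
};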
@@ -152,11 +148,9 @@ const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {

	return (
		<Card>
-			{/* <Space direction="vertical"> */}
			<div>Custom Visualizations</div>
			<Form
				form={form}
-				onFinish={handleFinish}
				onValuesChange={handleFormValuesChange}
				initialValues={{
					agg_options: "Count",
@@ -189,7 +183,7 @@ const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {
				</Form.Item>

				<Form.Item name="chart_style">
-					<Select style={{ width: 120 }} onChange={handleChange} allowClear>
+					<Select style={{ width: 120 }} allowClear>
						<Option value="line">Line Chart</Option>
						<Option value="bar">Bar Chart</Option>
						<Option value="area">Area Chart</Option>
@@ -197,7 +191,7 @@ const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {
				</Form.Item>

				<Form.Item name="interval">
-					<Select style={{ width: 120 }} onChange={handleChange} allowClear>
+					<Select style={{ width: 120 }} allowClear>
						<Option value="1m">1 min</Option>
						<Option value="5m">5 min</Option>
						<Option value="30m">30 min</Option>
@@ -206,7 +200,7 @@ const _TraceCustomVisualizations = (props: TraceCustomVisualizationsProps) => {

				{/* Need heading for each option */}
				<Form.Item name="group_by">
-					<Select style={{ width: 120 }} onChange={handleChange} allowClear>
+					<Select style={{ width: 120 }} allowClear>
						<Option value="none">Group By</Option>
						<Option value="status">Status Code</Option>
						<Option value="protocol">Protocol</Option>
@@ -229,7 +223,7 @@ const mapStateToProps = (
	traceFilters: TraceFilters;
} => {
	return {
-		filteredTraceMetrics: state.filteredTraceMetrics,
+		filteredTraceMetrics: state.metricsData.customMetricsItem,
		globalTime: state.globalTime,
		traceFilters: state.traceFilters,
	};
@@ -18,6 +18,7 @@ import FormItem from "antd/lib/form/FormItem";
import api, { apiV1 } from "../../api";
import { useLocation } from "react-router-dom";
import { METRICS_PAGE_QUERY_PARAM } from "Src/constants/query";
+import { useRoute } from "../RouteProvider";

const { Option } = Select;

@@ -45,6 +46,7 @@ const _TraceFilter = (props: TraceFilterProps) => {
	const [tagKeyOptions, setTagKeyOptions] = useState<TagKeyOptionItem[]>([]);
	const location = useLocation();
	const urlParams = new URLSearchParams(location.search.split("?")[1]);
+	const { state } = useRoute();

	useEffect(() => {
		handleApplyFilterForm({
@@ -122,7 +124,13 @@ const _TraceFilter = (props: TraceFilterProps) => {
			"&tags=" +
			encodeURIComponent(JSON.stringify(props.traceFilters.tags));

+		/*
+			Call the apis only when the route is loaded.
+			Check this issue: https://github.com/SigNoz/signoz/issues/110
+		*/
+		if (state.TRACES.isLoaded) {
			props.fetchTraces(props.globalTime, request_string);
+		}
	}, [props.traceFilters, props.globalTime]);

	useEffect(() => {
@@ -36,10 +36,16 @@
	stroke-linecap: round;
	stroke-linejoin: round;
}

+/* Prevent text vertical shift on hover */
+.d3-flame-graph-label {
+	border: 1px dotted transparent;
+	cursor: pointer;
+}
+
/* Transparency simulates sub pixel border https://stackoverflow.com/questions/13891177/css-border-less-than-1px */

.d3-flame-graph-label:hover {
-	border: 1px dotted;
	border-color: rgba(255, 255, 255, 0.75);
}
/*
@@ -47,3 +53,7 @@
	border: 1px solid;
	border-color: rgba(255, 255, 255, 0.75);
} */
+
+.fade:not(.show) {
+	opacity: 0.5;
+}
@@ -13,7 +13,6 @@ import "./TraceGraph.css";
import { spanToTreeUtil } from "../../utils/spanToTree";
import { fetchTraceItem, spansWSameTraceIDResponse } from "../../store/actions";
import { StoreState } from "../../store/reducers";
-import { TraceGraphColumn } from "./TraceGraphColumn";
import SelectedSpanDetails from "./SelectedSpanDetails";

interface TraceGraphProps {
@@ -71,11 +70,7 @@ const _TraceGraph = (props: TraceGraphProps) => {

	return (
		<Row gutter={{ xs: 8, sm: 16, md: 24, lg: 32 }}>
-			{/*<Col md={8} sm={24}>*/}
-			{/* <TraceGraphColumn />*/}
-			{/*</Col>*/}
			<Col md={24} sm={24}>
-				{/* <Card style={{ width: 640 }}> */}
				<Space direction="vertical" size="middle" style={{ width: "100%" }}>
					<Card bodyStyle={{ padding: 80 }} style={{ height: 320 }}>
						<div
@@ -1,18 +1,19 @@
-import React, { useEffect, useMemo, useState } from "react";
+import React, { useEffect, useState } from "react";
import { Bar } from "react-chartjs-2";
-import { Card, Form, Select, Space } from "antd";
+import { Card, Select, Space } from "antd";
import { connect } from "react-redux";

import {
	getServicesList,
	getUsageData,
	GlobalTime,
-	servicesListItem,
	usageDataItem,
} from "../../store/actions";
import { StoreState } from "../../store/reducers";
import moment from "moment";
import { isOnboardingSkipped } from "../../utils/app";
+import { useRoute } from "../RouteProvider";
+import { servicesListItem } from "../../store/actions/MetricsActions";
const { Option } = Select;

interface UsageExplorerProps {
@@ -56,6 +57,8 @@ const _UsageExplorer = (props: UsageExplorerProps) => {
	const [selectedInterval, setSelectedInterval] = useState(interval[2]);
	const [selectedService, setSelectedService] = useState<string>("");

+	const { state } = useRoute();
+
	useEffect(() => {
		if (selectedTime && selectedInterval) {
			const maxTime = new Date().getTime() * 1000000;
@@ -71,7 +74,13 @@ const _UsageExplorer = (props: UsageExplorerProps) => {
	}, [selectedTime, selectedInterval, selectedService]);

	useEffect(() => {
+		/*
+			Call the apis only when the route is loaded.
+			Check this issue: https://github.com/SigNoz/signoz/issues/110
+		*/
+		if (state.USAGE_EXPLORER.isLoaded) {
			props.getServicesList(props.globalTime);
+		}
	}, []);

	const data = {
@@ -203,7 +212,7 @@ const mapStateToProps = (
		totalCount: totalCount,
		usageData: state.usageDate,
		globalTime: state.globalTime,
-		servicesList: state.servicesList,
+		servicesList: state.metricsData.serviceList,
	};
};

3 frontend/src/store/actions/MetricsActions/index.ts Normal file
@@ -0,0 +1,3 @@
+export * from "./metricsInterfaces";
+export * from "./metricsActionTypes";
+export * from "./metricsActions";
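This index file is a barrel: re-exporting the three modules lets consumers pull interfaces, the action-type enum, and the action creators from a single path. Illustrative usage (the import list is an example, not exhaustive):

// One path instead of three:
import {
	servicesListItem,
	MetricsActionTypes,
	getServicesList,
} from "../../store/actions/MetricsActions";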
32 frontend/src/store/actions/MetricsActions/metricsActionTypes.ts Normal file
@@ -0,0 +1,32 @@
+import {
+	externalErrCodeMetricsActions,
+	externalMetricsAvgDurationAction,
+	getDbOverViewMetricsAction,
+	getExternalMetricsAction,
+	getFilteredTraceMetricsAction,
+	getServiceMetricsAction,
+	getServicesListAction,
+	getTopEndpointsAction,
+} from "./metricsInterfaces";
+
+export enum MetricsActionTypes {
+	updateInput = "UPDATE_INPUT",
+	getServicesList = "GET_SERVICE_LIST",
+	getServiceMetrics = "GET_SERVICE_METRICS",
+	getAvgDurationMetrics = "GET_AVG_DURATION_METRICS",
+	getErrCodeMetrics = "GET_ERR_CODE_METRICS",
+	getDbOverviewMetrics = "GET_DB_OVERVIEW_METRICS",
+	getExternalMetrics = "GET_EXTERNAL_METRICS",
+	getTopEndpoints = "GET_TOP_ENDPOINTS",
+	getFilteredTraceMetrics = "GET_FILTERED_TRACE_METRICS",
+}
+
+export type MetricsActions =
+	| getServicesListAction
+	| getServiceMetricsAction
+	| getTopEndpointsAction
+	| getFilteredTraceMetricsAction
+	| getExternalMetricsAction
+	| externalErrCodeMetricsActions
+	| getDbOverViewMetricsAction
+	| externalMetricsAvgDurationAction;
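Every member of the `MetricsActions` union pairs one enum value with one payload type, so a `switch` on `action.type` narrows `action.payload` at compile time. The reducer added later in this PR opts for a looser `{ type: string; payload: any }` shape instead; this sketch shows the stricter handling the union makes possible:

import { MetricsActions, MetricsActionTypes } from "./metricsActionTypes";

// TypeScript narrows the payload type inside each case.
function describeAction(action: MetricsActions): string {
	switch (action.type) {
		case MetricsActionTypes.getServicesList:
			// payload is servicesListItem[] here
			return `services: ${action.payload.length}`;
		case MetricsActionTypes.getTopEndpoints:
			// payload is topEndpointListItem[] here
			return `endpoints: ${action.payload.length}`;
		default:
			return "other metrics action";
	}
}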
190 frontend/src/store/actions/MetricsActions/metricsActions.ts Normal file
@@ -0,0 +1,190 @@
+import { Dispatch } from "redux";
+import api, { apiV1 } from "../../../api";
+
+import { GlobalTime } from "../global";
+import { toUTCEpoch } from "../../../utils/timeUtils";
+import { MetricsActionTypes } from "./metricsActionTypes";
+import * as MetricsInterfaces from "./metricsInterfaces";
+
+export const getServicesList = (globalTime: GlobalTime) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/services?start=" + globalTime.minTime + "&end=" + globalTime.maxTime;
+
+		const response = await api.get<MetricsInterfaces.servicesListItem[]>(
+			apiV1 + request_string,
+		);
+
+		dispatch<MetricsInterfaces.getServicesListAction>({
+			type: MetricsActionTypes.getServicesList,
+			payload: response.data,
+			//PNOTE - response.data in the axios response has the actual API response
+		});
+	};
+};
+
+export const getDbOverViewMetrics = (
+	serviceName: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/service/dbOverview?service=" +
+			serviceName +
+			"&start=" +
+			globalTime.minTime +
+			"&end=" +
+			globalTime.maxTime +
+			"&step=60";
+		const response = await api.get<MetricsInterfaces.dbOverviewMetricsItem[]>(
+			apiV1 + request_string,
+		);
+		dispatch<MetricsInterfaces.getDbOverViewMetricsAction>({
+			type: MetricsActionTypes.getDbOverviewMetrics,
+			payload: response.data,
+		});
+	};
+};
+
+export const getExternalMetrics = (
+	serviceName: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/service/external?service=" +
+			serviceName +
+			"&start=" +
+			globalTime.minTime +
+			"&end=" +
+			globalTime.maxTime +
+			"&step=60";
+		const response = await api.get<MetricsInterfaces.externalMetricsItem[]>(
+			apiV1 + request_string,
+		);
+		dispatch<MetricsInterfaces.getExternalMetricsAction>({
+			type: MetricsActionTypes.getExternalMetrics,
+			payload: response.data,
+		});
+	};
+};
+
+export const getExternalAvgDurationMetrics = (
+	serviceName: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/service/externalAvgDuration?service=" +
+			serviceName +
+			"&start=" +
+			globalTime.minTime +
+			"&end=" +
+			globalTime.maxTime +
+			"&step=60";
+
+		const response = await api.get<
+			MetricsInterfaces.externalMetricsAvgDurationItem[]
+		>(apiV1 + request_string);
+		dispatch<MetricsInterfaces.externalMetricsAvgDurationAction>({
+			type: MetricsActionTypes.getAvgDurationMetrics,
+			payload: response.data,
+		});
+	};
+};
+export const getExternalErrCodeMetrics = (
+	serviceName: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/service/externalErrors?service=" +
+			serviceName +
+			"&start=" +
+			globalTime.minTime +
+			"&end=" +
+			globalTime.maxTime +
+			"&step=60";
+		const response = await api.get<
+			MetricsInterfaces.externalErrCodeMetricsItem[]
+		>(apiV1 + request_string);
+
+		dispatch<MetricsInterfaces.externalErrCodeMetricsActions>({
+			type: MetricsActionTypes.getErrCodeMetrics,
+			payload: response.data,
+		});
+	};
+};
+
+export const getServicesMetrics = (
+	serviceName: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/service/overview?service=" +
+			serviceName +
+			"&start=" +
+			globalTime.minTime +
+			"&end=" +
+			globalTime.maxTime +
+			"&step=60";
+		const response = await api.get<MetricsInterfaces.metricItem[]>(
+			apiV1 + request_string,
+		);
+
+		dispatch<MetricsInterfaces.getServiceMetricsAction>({
+			type: MetricsActionTypes.getServiceMetrics,
+			payload: response.data,
+			//PNOTE - response.data in the axios response has the actual API response
+		});
+	};
+};
+
+export const getTopEndpoints = (
+	serviceName: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/service/top_endpoints?service=" +
+			serviceName +
+			"&start=" +
+			globalTime.minTime +
+			"&end=" +
+			globalTime.maxTime;
+		const response = await api.get<MetricsInterfaces.topEndpointListItem[]>(
+			apiV1 + request_string,
+		);
+
+		dispatch<MetricsInterfaces.getTopEndpointsAction>({
+			type: MetricsActionTypes.getTopEndpoints,
+			payload: response.data,
+			//PNOTE - response.data in the axios response has the actual API response
+		});
+	};
+};
+
+export const getFilteredTraceMetrics = (
+	filter_params: string,
+	globalTime: GlobalTime,
+) => {
+	return async (dispatch: Dispatch) => {
+		let request_string =
+			"/spans/aggregates?start=" +
+			toUTCEpoch(globalTime.minTime) +
+			"&end=" +
+			toUTCEpoch(globalTime.maxTime) +
+			"&" +
+			filter_params;
+		const response = await api.get<MetricsInterfaces.customMetricsItem[]>(
+			apiV1 + request_string,
+		);
+
+		dispatch<MetricsInterfaces.getFilteredTraceMetricsAction>({
+			type: MetricsActionTypes.getFilteredTraceMetrics,
+			payload: response.data,
+			//PNOTE - response.data in the axios response has the actual API response
+		});
+	};
+};
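Each creator above is a redux-thunk: it returns an async function that builds the query string, issues the GET through the shared axios instance, and dispatches a typed action carrying `response.data`. A hedged sketch of how a component wires one in through `connect`, following the `mapDispatchToProps` shorthand used elsewhere in this codebase (component and prop names here are illustrative):

import React from "react";
import { connect } from "react-redux";
import { getServicesList, GlobalTime } from "../../store/actions";

interface Props {
	getServicesList: (globalTime: GlobalTime) => void;
	globalTime: GlobalTime;
}

// Clicking the button runs the thunk: a GET to <apiV1>/services,
// then a GET_SERVICE_LIST dispatch handled by metricsReducer.
const ServicesRefresher = (props: Props) => (
	<button onClick={() => props.getServicesList(props.globalTime)}>
		Refresh services
	</button>
);

export default connect(null, { getServicesList })(ServicesRefresher);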
98 frontend/src/store/actions/MetricsActions/metricsInterfaces.ts Normal file
@@ -0,0 +1,98 @@
+import { MetricsActionTypes } from "./metricsActionTypes";
+
+export interface servicesListItem {
+	serviceName: string;
+	p99: number;
+	avgDuration: number;
+	numCalls: number;
+	callRate: number;
+	numErrors: number;
+	errorRate: number;
+}
+
+export interface metricItem {
+	timestamp: number;
+	p50: number;
+	p95: number;
+	p99: number;
+	numCalls: number;
+	callRate: number;
+	numErrors: number;
+	errorRate: number;
+}
+
+export interface externalMetricsAvgDurationItem {
+	avgDuration: number;
+	timestamp: number;
+}
+
+export interface externalErrCodeMetricsItem {
+	externalHttpUrl: string;
+	numCalls: number;
+	timestamp: number;
+	callRate: number;
+}
+export interface topEndpointListItem {
+	p50: number;
+	p95: number;
+	p99: number;
+	numCalls: number;
+	name: string;
+}
+
+export interface externalMetricsItem {
+	avgDuration: number;
+	callRate: number;
+	externalHttpUrl: string;
+	numCalls: number;
+	timestamp: number;
+}
+
+export interface dbOverviewMetricsItem {
+	avgDuration: number;
+	callRate: number;
+	dbSystem: string;
+	numCalls: number;
+	timestamp: number;
+}
+
+export interface customMetricsItem {
+	timestamp: number;
+	value: number;
+}
+
+export interface getServicesListAction {
+	type: MetricsActionTypes.getServicesList;
+	payload: servicesListItem[];
+}
+
+export interface externalErrCodeMetricsActions {
+	type: MetricsActionTypes.getErrCodeMetrics;
+	payload: externalErrCodeMetricsItem[];
+}
+export interface externalMetricsAvgDurationAction {
+	type: MetricsActionTypes.getAvgDurationMetrics;
+	payload: externalMetricsAvgDurationItem[];
+}
+export interface getServiceMetricsAction {
+	type: MetricsActionTypes.getServiceMetrics;
+	payload: metricItem[];
+}
+export interface getExternalMetricsAction {
+	type: MetricsActionTypes.getExternalMetrics;
+	payload: externalMetricsItem[];
+}
+
+export interface getDbOverViewMetricsAction {
+	type: MetricsActionTypes.getDbOverviewMetrics;
+	payload: dbOverviewMetricsItem[];
+}
+export interface getTopEndpointsAction {
+	type: MetricsActionTypes.getTopEndpoints;
+	payload: topEndpointListItem[];
+}
+
+export interface getFilteredTraceMetricsAction {
+	type: MetricsActionTypes.getFilteredTraceMetrics;
+	payload: customMetricsItem[];
+}
@@ -2,6 +2,6 @@ export * from "./types";
export * from "./traceFilters";
export * from "./serviceMap";
export * from "./traces";
-export * from "./metrics";
+export * from "./MetricsActions";
export * from "./usage";
export * from "./global";
@@ -1,277 +0,0 @@
-import { Dispatch } from "redux";
-import api, { apiV1 } from "../../api";
-
-import { GlobalTime } from "./global";
-import { ActionTypes } from "./types";
-import { Token } from "../../utils/token";
-import { toUTCEpoch } from "../../utils/timeUtils";
-
-export interface servicesListItem {
-	serviceName: string;
-	p99: number;
-	avgDuration: number;
-	numCalls: number;
-	callRate: number;
-	numErrors: number;
-	errorRate: number;
-}
-
-export interface metricItem {
-	timestamp: number;
-	p50: number;
-	p95: number;
-	p99: number;
-	numCalls: number;
-	callRate: number;
-	numErrors: number;
-	errorRate: number;
-}
-
-export interface externalMetricsAvgDurationItem {
-	avgDuration: number;
-	timestamp: number;
-}
-
-export interface externalErrCodeMetricsItem {
-	errorRate: number;
-	externalHttpUrl: string;
-	numErrors: number;
-	timestamp: number;
-}
-export interface topEndpointListItem {
-	p50: number;
-	p90: number;
-	p99: number;
-	numCalls: number;
-	name: string;
-}
-
-export interface externalMetricsItem {
-	avgDuration: number;
-	callRate: number;
-	externalHttpUrl: string;
-	numCalls: number;
-	timestamp: number;
-}
-
-export interface dbOverviewMetricsItem {
-	avgDuration: number;
-	callRate: number;
-	dbSystem: string;
-	numCalls: number;
-	timestamp: number;
-}
-
-export interface customMetricsItem {
-	timestamp: number;
-	value: number;
-}
-
-export interface getServicesListAction {
-	type: ActionTypes.getServicesList;
-	payload: servicesListItem[];
-}
-
-export interface externalErrCodeMetricsActions {
-	type: ActionTypes.getErrCodeMetrics;
-	payload: externalErrCodeMetricsItem[];
-}
-export interface externalMetricsAvgDurationAction {
-	type: ActionTypes.getAvgDurationMetrics;
-	payload: externalMetricsAvgDurationItem[];
-}
-export interface getServiceMetricsAction {
-	type: ActionTypes.getServiceMetrics;
-	payload: metricItem[];
-}
-export interface getExternalMetricsAction {
-	type: ActionTypes.getExternalMetrics;
-	payload: externalMetricsItem[];
-}
-
-export interface getDbOverViewMetricsAction {
-	type: ActionTypes.getDbOverviewMetrics;
-	payload: dbOverviewMetricsItem[];
-}
-export interface getTopEndpointsAction {
-	type: ActionTypes.getTopEndpoints;
-	payload: topEndpointListItem[];
-}
-
-export interface getFilteredTraceMetricsAction {
-	type: ActionTypes.getFilteredTraceMetrics;
-	payload: customMetricsItem[];
-}
-
-export const getServicesList = (globalTime: GlobalTime) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/services?start=" + globalTime.minTime + "&end=" + globalTime.maxTime;
-
-		const response = await api.get<servicesListItem[]>(apiV1 + request_string);
-
-		dispatch<getServicesListAction>({
-			type: ActionTypes.getServicesList,
-			payload: response.data,
-			//PNOTE - response.data in the axios response has the actual API response
-		});
-	};
-};
-
-export const getDbOverViewMetrics = (
-	serviceName: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/service/dbOverview?service=" +
-			serviceName +
-			"&start=" +
-			globalTime.minTime +
-			"&end=" +
-			globalTime.maxTime +
-			"&step=60";
-		const response = await api.get<dbOverviewMetricsItem[]>(
-			apiV1 + request_string,
-		);
-		dispatch<getDbOverViewMetricsAction>({
-			type: ActionTypes.getDbOverviewMetrics,
-			payload: response.data,
-		});
-	};
-};
-
-export const getExternalMetrics = (
-	serviceName: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/service/external?service=" +
-			serviceName +
-			"&start=" +
-			globalTime.minTime +
-			"&end=" +
-			globalTime.maxTime +
-			"&step=60";
-		const response = await api.get<externalMetricsItem[]>(apiV1 + request_string);
-		dispatch<getExternalMetricsAction>({
-			type: ActionTypes.getExternalMetrics,
-			payload: response.data,
-		});
-	};
-};
-
-export const getExternalAvgDurationMetrics = (
-	serviceName: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/service/externalAvgDuration?service=" +
-			serviceName +
-			"&start=" +
-			globalTime.minTime +
-			"&end=" +
-			globalTime.maxTime +
-			"&step=60";
-
-		const response = await api.get<externalMetricsAvgDurationItem[]>(
-			apiV1 + request_string,
-		);
-		dispatch<externalMetricsAvgDurationAction>({
-			type: ActionTypes.getAvgDurationMetrics,
-			payload: response.data,
-		});
-	};
-};
-export const getExternalErrCodeMetrics = (
-	serviceName: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/service/externalErrors?service=" +
-			serviceName +
-			"&start=" +
-			globalTime.minTime +
-			"&end=" +
-			globalTime.maxTime +
-			"&step=60";
-		const response = await api.get<externalErrCodeMetricsItem[]>(
-			apiV1 + request_string,
-		);
-
-		dispatch<externalErrCodeMetricsActions>({
-			type: ActionTypes.getErrCodeMetrics,
-			payload: response.data,
-		});
-	};
-};
-
-export const getServicesMetrics = (
-	serviceName: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/service/overview?service=" +
-			serviceName +
-			"&start=" +
-			globalTime.minTime +
-			"&end=" +
-			globalTime.maxTime +
-			"&step=60";
-		const response = await api.get<metricItem[]>(apiV1 + request_string);
-
-		dispatch<getServiceMetricsAction>({
-			type: ActionTypes.getServiceMetrics,
-			payload: response.data,
-			//PNOTE - response.data in the axios response has the actual API response
-		});
-	};
-};
-
-export const getTopEndpoints = (
-	serviceName: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/service/top_endpoints?service=" +
-			serviceName +
-			"&start=" +
-			globalTime.minTime +
-			"&end=" +
-			globalTime.maxTime;
-		const response = await api.get<topEndpointListItem[]>(apiV1 + request_string);
-
-		dispatch<getTopEndpointsAction>({
-			type: ActionTypes.getTopEndpoints,
-			payload: response.data,
-			//PNOTE - response.data in the axios response has the actual API response
-		});
-	};
-};
-
-export const getFilteredTraceMetrics = (
-	filter_params: string,
-	globalTime: GlobalTime,
-) => {
-	return async (dispatch: Dispatch) => {
-		let request_string =
-			"/spans/aggregates?start=" +
-			toUTCEpoch(globalTime.minTime) +
-			"&end=" +
-			toUTCEpoch(globalTime.maxTime) +
-			"&" +
-			filter_params;
-		const response = await api.get<customMetricsItem[]>(apiV1 + request_string);
-
-		dispatch<getFilteredTraceMetricsAction>({
-			type: ActionTypes.getFilteredTraceMetrics,
-			payload: response.data,
-			//PNOTE - response.data in the axios response has the actual API response
-		});
-	};
-};
@@ -32,16 +32,4 @@ export const updateTraceFilters = (traceFilters: TraceFilters) => {
	};
};
-
-export interface updateInputTagAction {
-	type: ActionTypes.updateInput;
-	payload: string;
-}
-
-export const updateInputTag = (Input: string) => {
-	return {
-		type: ActionTypes.updateInput,
-		payload: Input,
-	};
-};

//named export when you want to export multiple functions from the same file
@@ -1,36 +1,18 @@
import { FetchTracesAction, FetchTraceItemAction } from "./traces";
import { updateTraceFiltersAction, updateInputTagAction } from "./traceFilters";
-import {
-	getServicesListAction,
-	getServiceMetricsAction,
-	externalErrCodeMetricsActions,
-	externalMetricsAvgDurationAction,
-	getExternalMetricsAction,
-	getTopEndpointsAction,
-	getFilteredTraceMetricsAction,
-	getDbOverViewMetricsAction,
-} from "./metrics";
import { serviceMapItemAction, servicesAction } from "./serviceMap";
import { getUsageDataAction } from "./usage";
import { updateTimeIntervalAction } from "./global";

export enum ActionTypes {
	updateTraceFilters = "UPDATE_TRACES_FILTER",
-	updateInput = "UPDATE_INPUT",
-	fetchTraces = "FETCH_TRACES",
-	fetchTraceItem = "FETCH_TRACE_ITEM",
-	getServicesList = "GET_SERVICE_LIST",
-	getServiceMetrics = "GET_SERVICE_METRICS",
-	getAvgDurationMetrics = "GET_AVG_DURATION_METRICS",
-	getErrCodeMetrics = "GET_ERR_CODE_METRICS",
-	getDbOverviewMetrics = "GET_DB_OVERVIEW_METRICS",
-	getExternalMetrics = "GET_EXTERNAL_METRICS",
-	getTopEndpoints = "GET_TOP_ENDPOINTS",
-	getUsageData = "GET_USAGE_DATE",
	updateTimeInterval = "UPDATE_TIME_INTERVAL",
-	getFilteredTraceMetrics = "GET_FILTERED_TRACE_METRICS",
	getServiceMapItems = "GET_SERVICE_MAP_ITEMS",
	getServices = "GET_SERVICES",
+	getUsageData = "GET_USAGE_DATE",
+	fetchTraces = "FETCH_TRACES",
+	fetchTraceItem = "FETCH_TRACE_ITEM",
}

export type Action =
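`ActionTypes` now carries only the trace, usage, time, and service-map members; the metrics members moved into `MetricsActionTypes`. Both are string enums, so the runtime `type` strings on dispatched actions are unchanged; only the import site moves. For example:

import { ActionTypes } from "./types";
import { MetricsActionTypes } from "./MetricsActions";

console.log(ActionTypes.fetchTraces); // "FETCH_TRACES"
console.log(MetricsActionTypes.getServicesList); // "GET_SERVICE_LIST"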
@@ -38,15 +20,7 @@ export type Action =
	| FetchTracesAction
	| updateTraceFiltersAction
	| updateInputTagAction
-	| getServicesListAction
-	| getServiceMetricsAction
-	| getTopEndpointsAction
	| getUsageDataAction
	| updateTimeIntervalAction
-	| getFilteredTraceMetricsAction
-	| getExternalMetricsAction
-	| externalErrCodeMetricsActions
-	| getDbOverViewMetricsAction
	| servicesAction
-	| serviceMapItemAction
-	| externalMetricsAvgDurationAction;
+	| serviceMapItemAction;
@@ -2,7 +2,6 @@ import { Dispatch } from "redux";
import api, { apiV1 } from "../../api";

import { ActionTypes } from "./types";
-import { GlobalTime } from "./global";
import { toUTCEpoch } from "../../utils/timeUtils";

export interface usageDataItem {
@@ -2,66 +2,35 @@ import { combineReducers } from "redux";
import {
	traceResponseNew,
	spansWSameTraceIDResponse,
-	servicesListItem,
-	metricItem,
-	topEndpointListItem,
-	externalMetricsItem,
-	externalMetricsAvgDurationItem,
	usageDataItem,
	GlobalTime,
-	externalErrCodeMetricsItem,
	serviceMapStore,
-	customMetricsItem,
	TraceFilters,
} from "../actions";
import { updateGlobalTimeReducer } from "./global";
-import {
-	filteredTraceMetricsReducer,
-	serviceMetricsReducer,
-	externalErrCodeMetricsReducer,
-	serviceTableReducer,
-	topEndpointsReducer,
-	dbOverviewMetricsReducer,
-	externalMetricsReducer,
-	externalAvgDurationMetricsReducer,
-} from "./metrics";
-import { traceFiltersReducer, inputsReducer } from "./traceFilters";
+import { MetricsInitialState, metricsReducer } from "./metrics";
+import TraceFilterReducer from "./traceFilters";
import { traceItemReducer, tracesReducer } from "./traces";
import { usageDataReducer } from "./usage";
import { ServiceMapReducer } from "./serviceMap";

export interface StoreState {
+	metricsData: MetricsInitialState;
	traceFilters: TraceFilters;
-	inputTag: string;
	traces: traceResponseNew;
	traceItem: spansWSameTraceIDResponse;
-	servicesList: servicesListItem[];
-	serviceMetrics: metricItem[];
-	topEndpointsList: topEndpointListItem[];
-	externalMetrics: externalMetricsItem[];
-	dbOverviewMetrics: externalMetricsItem[];
-	externalAvgDurationMetrics: externalMetricsAvgDurationItem[];
-	externalErrCodeMetrics: externalErrCodeMetricsItem[];
	usageDate: usageDataItem[];
	globalTime: GlobalTime;
-	filteredTraceMetrics: customMetricsItem[];
	serviceMap: serviceMapStore;
}

const reducers = combineReducers<StoreState>({
-	traceFilters: traceFiltersReducer,
-	inputTag: inputsReducer,
+	traceFilters: TraceFilterReducer,
	traces: tracesReducer,
	traceItem: traceItemReducer,
-	servicesList: serviceTableReducer,
-	serviceMetrics: serviceMetricsReducer,
-	dbOverviewMetrics: dbOverviewMetricsReducer,
-	topEndpointsList: topEndpointsReducer,
-	externalAvgDurationMetrics: externalAvgDurationMetricsReducer,
-	externalMetrics: externalMetricsReducer,
-	externalErrCodeMetrics: externalErrCodeMetricsReducer,
	usageDate: usageDataReducer,
	globalTime: updateGlobalTimeReducer,
-	filteredTraceMetrics: filteredTraceMetricsReducer,
+	metricsData: metricsReducer,
	serviceMap: ServiceMapReducer,
});

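The store now exposes all metrics data under one `metricsData` slice typed as `MetricsInitialState`, so the eight old top-level keys collapse into nested optional fields. A sketch of the selector shapes this implies, mirroring the `mapStateToProps` changes earlier in this diff (the `?? []` fallback is illustrative, since the fields are optional):

import { StoreState } from "../reducers";

// Before: state.servicesList, state.filteredTraceMetrics, ...
// After: one nested slice.
const selectServiceList = (state: StoreState) =>
	state.metricsData.serviceList ?? [];

const selectCustomMetrics = (state: StoreState) =>
	state.metricsData.customMetricsItem ?? [];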
@@ -1,6 +1,4 @@
import {
-	ActionTypes,
-	Action,
	servicesListItem,
	metricItem,
	topEndpointListItem,
@@ -9,10 +7,21 @@ import {
	externalMetricsItem,
	dbOverviewMetricsItem,
	externalMetricsAvgDurationItem,
-} from "../actions";
+} from "../actions/MetricsActions";
+import { MetricsActionTypes as ActionTypes } from "../actions/MetricsActions/metricsActionTypes";

-export const serviceTableReducer = (
-	state: servicesListItem[] = [
+export type MetricsInitialState = {
+	serviceList?: servicesListItem[];
+	metricItems?: metricItem[];
+	topEndpointListItem?: topEndpointListItem[];
+	externalMetricsAvgDurationItem?: externalMetricsAvgDurationItem[];
+	externalErrCodeMetricsItem?: externalErrCodeMetricsItem[];
+	externalMetricsItem?: externalMetricsItem[];
+	dbOverviewMetricsItem?: dbOverviewMetricsItem[];
+	customMetricsItem?: customMetricsItem[];
+};
+export const metricsInitialState: MetricsInitialState = {
+	serviceList: [
		{
			serviceName: "",
			p99: 0,
@@ -23,22 +32,11 @@ export const serviceTableReducer = (
			errorRate: 0,
		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getServicesList:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const serviceMetricsReducer = (
-	state: metricItem[] = [
+	metricItems: [
		{
			timestamp: 0,
			p50: 0,
-			p90: 0,
+			p95: 0,
			p99: 0,
			numCalls: 0,
			callRate: 0.0,
@@ -46,49 +44,22 @@ export const serviceMetricsReducer = (
			errorRate: 0,
		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getServiceMetrics:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const topEndpointsReducer = (
-	state: topEndpointListItem[] = [
-		{ p50: 0, p90: 0, p99: 0, numCalls: 0, name: "" },
+	topEndpointListItem: [
+		{
+			p50: 0,
+			p95: 0,
+			p99: 0,
+			numCalls: 0,
+			name: "",
+		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getTopEndpoints:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const externalAvgDurationMetricsReducer = (
-	state: externalMetricsAvgDurationItem[] = [
+	externalMetricsAvgDurationItem: [
		{
			avgDuration: 0,
			timestamp: 0,
		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getAvgDurationMetrics:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const externalErrCodeMetricsReducer = (
-	state: externalErrCodeMetricsItem[] = [
+	externalErrCodeMetricsItem: [
		{
			callRate: 0,
			externalHttpUrl: "",
@@ -96,18 +67,7 @@ export const externalErrCodeMetricsReducer = (
			timestamp: 0,
		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getErrCodeMetrics:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const externalMetricsReducer = (
-	state: externalMetricsItem[] = [
+	externalMetricsItem: [
		{
			avgDuration: 0,
			callRate: 0,
@@ -116,18 +76,7 @@ export const externalMetricsReducer = (
			timestamp: 0,
		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getExternalMetrics:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const dbOverviewMetricsReducer = (
-	state: dbOverviewMetricsItem[] = [
+	dbOverviewMetricsItem: [
		{
			avgDuration: 0,
			callRate: 0,
@@ -136,24 +85,68 @@ export const dbOverviewMetricsReducer = (
			timestamp: 0,
		},
	],
-	action: Action,
-) => {
-	switch (action.type) {
-		case ActionTypes.getDbOverviewMetrics:
-			return action.payload;
-		default:
-			return state;
-	}
-};
-
-export const filteredTraceMetricsReducer = (
-	state: customMetricsItem[] = [{ timestamp: 0, value: 0 }],
-	action: Action,
+	customMetricsItem: [
+		{
+			timestamp: 0,
+			value: 0,
+		},
+	],
+};
+
+type ActionType = {
+	type: string;
+	payload: any;
+};
+
+export const metricsReducer = (
+	state: MetricsInitialState = metricsInitialState,
+	action: ActionType,
) => {
	switch (action.type) {
		case ActionTypes.getFilteredTraceMetrics:
-			return action.payload;
+			return {
+				...state,
+				customMetricsItem: action.payload,
+			};
+		case ActionTypes.getServiceMetrics:
+			return {
+				...state,
+				metricItems: action.payload,
+			};
+		case ActionTypes.getDbOverviewMetrics:
+			return {
+				...state,
+				dbOverviewMetricsItem: action.payload,
+			};
+		case ActionTypes.getExternalMetrics:
+			return {
+				...state,
+				externalMetricsItem: action.payload,
+			};
+		case ActionTypes.getTopEndpoints:
+			return {
+				...state,
+				topEndpointListItem: action.payload,
+			};
+		case ActionTypes.getErrCodeMetrics:
+			return {
+				...state,
+				externalErrCodeMetricsItem: action.payload,
+			};
+		case ActionTypes.getAvgDurationMetrics:
+			return {
+				...state,
+				externalMetricsAvgDurationItem: action.payload,
+			};
+
+		case ActionTypes.getServicesList:
+			return {
+				...state,
+				serviceList: action.payload,
+			};
		default:
-			return state;
+			return {
+				...state,
+			};
	}
};
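Because every case spreads the previous state and overwrites exactly one field, a dispatch for one metrics slice leaves the others untouched (and the `default` branch returns a fresh copy rather than the same reference). A quick check under that assumption:

import { metricsReducer, metricsInitialState } from "./metrics";
import { MetricsActionTypes } from "../actions/MetricsActions/metricsActionTypes";

const next = metricsReducer(metricsInitialState, {
	type: MetricsActionTypes.getServicesList,
	payload: [],
});

console.log(next.serviceList); // []
console.log(next.customMetricsItem === metricsInitialState.customMetricsItem); // true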
@@ -1,12 +1,11 @@
import { ActionTypes, Action, serviceMapStore } from "../actions";

-export const ServiceMapReducer = (
-	state: serviceMapStore = {
+const initialState: serviceMapStore = {
	items: [],
	services: [],
-	},
-	action: Action,
-) => {
+};
+
+export const ServiceMapReducer = (state = initialState, action: Action) => {
	switch (action.type) {
		case ActionTypes.getServiceMapItems:
			return {
@@ -1,19 +1,17 @@
-import {
-	ActionTypes,
-	TraceFilters,
-	updateInputTagAction,
-	updateTraceFiltersAction,
-} from "../actions";
+import { ActionTypes, TraceFilters } from "../actions";

-export const traceFiltersReducer = (
-	state: TraceFilters = {
+type ACTION = {
+	type: ActionTypes;
+	payload: TraceFilters;
+};
+const initialState: TraceFilters = {
	service: "",
	tags: [],
	operation: "",
	latency: { min: "", max: "" },
-	},
-	action: updateTraceFiltersAction,
-) => {
+};
+const TraceFilterReducer = (state = initialState, action: ACTION) => {
	switch (action.type) {
		case ActionTypes.updateTraceFilters:
			return action.payload;
@@ -22,14 +20,4 @@ export const traceFiltersReducer = (
	}
};

-export const inputsReducer = (
-	state: string = "",
-	action: updateInputTagAction,
-) => {
-	switch (action.type) {
-		case ActionTypes.updateInput:
-			return action.payload;
-		default:
-			return state;
-	}
-};
+export default TraceFilterReducer;
@@ -1,18 +0,0 @@
-// dark-theme.less
-
-@import "~antd/lib/style/color/colorPalette.less";
-@import "~antd/dist/antd.less";
-@import "~antd/lib/style/themes/dark.less";
-
-// @primary-color: #00adb5;
-// @border-radius-base: 4px;
-
-// @component-background: #303030;
-// @body-background: #303030;
-// @popover-background: #303030;
-// @border-color-base: #6f6c6c;
-// @border-color-split: #424242;
-// @table-header-sort-active-bg: #424242;
-// @card-skeleton-bg: #424242;
-// @skeleton-color: #424242;
-// @table-header-sort-active-bg: #424242;
@@ -1,9 +0,0 @@
-/* light-theme.less */
-
-@import "~antd/lib/style/color/colorPalette.less";
-@import "~antd/dist/antd.less";
-@import "~antd/lib/style/themes/default.less";
-
-/* These are shared variables that can be extracted to their own file */
-@primary-color: #00adb5;
-@border-radius-base: 4px;
@@ -2,6 +2,7 @@
const { resolve } = require("path");
const HtmlWebpackPlugin = require("html-webpack-plugin");
console.log(resolve(__dirname, "./src/"));
+
module.exports = {
	mode: "development",
	devtool: "source-map",
@@ -53,7 +54,9 @@ module.exports = {
			},
		],
	},
-	plugins: [new HtmlWebpackPlugin({ template: "src/index.html.ejs" })],
+	plugins: [
+		new HtmlWebpackPlugin({ template: "src/index.html.ejs" }),
+	],
	performance: {
		hints: false,
	},
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
const { resolve } = require("path");
|
const { resolve } = require("path");
|
||||||
const HtmlWebpackPlugin = require("html-webpack-plugin");
|
const HtmlWebpackPlugin = require("html-webpack-plugin");
|
||||||
const CopyPlugin = require("copy-webpack-plugin");
|
const CopyPlugin = require("copy-webpack-plugin");
|
||||||
|
const CompressionPlugin = require("compression-webpack-plugin");
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
mode: "production",
|
mode: "production",
|
||||||
@@ -44,6 +45,9 @@ module.exports = {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
plugins: [
|
plugins: [
|
||||||
|
new CompressionPlugin({
|
||||||
|
exclude: /.map$/
|
||||||
|
}),
|
||||||
new HtmlWebpackPlugin({ template: "src/index.html.ejs" }),
|
new HtmlWebpackPlugin({ template: "src/index.html.ejs" }),
|
||||||
new CopyPlugin({
|
new CopyPlugin({
|
||||||
patterns: [{ from: resolve(__dirname, "public/"), to: "." }],
|
patterns: [{ from: resolve(__dirname, "public/"), to: "." }],
|
||||||
|
|||||||
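`CompressionPlugin` writes a pre-compressed `.gz` file alongside each emitted asset (source maps excluded via `exclude`), so a suitably configured web server can serve compressed bundles without gzipping per request. The PR only sets `exclude`; the other knobs below are shown for illustration with their commonly cited defaults:

new CompressionPlugin({
	exclude: /.map$/,   // as above: skip source maps
	algorithm: "gzip",  // compression algorithm
	threshold: 10240,   // only compress assets bigger than ~10 KB
	minRatio: 0.8,      // skip assets that barely shrink
}),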
1543 frontend/yarn.lock
File diff suppressed because it is too large.
10 node_modules/.yarn-integrity generated vendored Normal file
@@ -0,0 +1,10 @@
+{
+	"systemParams": "darwin-x64-83",
+	"modulesFolders": [],
+	"flags": [],
+	"linkedModules": [],
+	"topLevelPatterns": [],
+	"lockfileEntries": {},
+	"files": [],
+	"artifacts": {}
+}
Binary file not shown.
124 pkg/query-service/app/clickhouseReader/options.go Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
package clickhouseReader
|
||||||
|
|
import (
	"time"

	"github.com/jmoiron/sqlx"
)

type Encoding string

const (
	// EncodingJSON is used for spans encoded as JSON.
	EncodingJSON Encoding = "json"
	// EncodingProto is used for spans encoded as Protobuf.
	EncodingProto Encoding = "protobuf"
)

const (
	defaultDatasource        string        = "tcp://localhost:9000"
	defaultOperationsTable   string        = "signoz_operations"
	defaultIndexTable        string        = "signoz_index"
	defaultSpansTable        string        = "signoz_spans"
	defaultArchiveSpansTable string        = "signoz_archive_spans"
	defaultWriteBatchDelay   time.Duration = 5 * time.Second
	defaultWriteBatchSize    int           = 10000
	defaultEncoding          Encoding      = EncodingJSON
)

const (
	suffixEnabled         = ".enabled"
	suffixDatasource      = ".datasource"
	suffixOperationsTable = ".operations-table"
	suffixIndexTable      = ".index-table"
	suffixSpansTable      = ".spans-table"
	suffixWriteBatchDelay = ".write-batch-delay"
	suffixWriteBatchSize  = ".write-batch-size"
	suffixEncoding        = ".encoding"
)

// namespaceConfig is the ClickHouse plugin's internal configuration data
type namespaceConfig struct {
	namespace       string
	Enabled         bool
	Datasource      string
	OperationsTable string
	IndexTable      string
	SpansTable      string
	WriteBatchDelay time.Duration
	WriteBatchSize  int
	Encoding        Encoding
	Connector       Connector
}

// Connector defines how to connect to the database
type Connector func(cfg *namespaceConfig) (*sqlx.DB, error)

func defaultConnector(cfg *namespaceConfig) (*sqlx.DB, error) {
	db, err := sqlx.Open("clickhouse", cfg.Datasource)
	if err != nil {
		return nil, err
	}

	if err := db.Ping(); err != nil {
		return nil, err
	}

	return db, nil
}

// Options stores storage-plugin-related configs
type Options struct {
	primary *namespaceConfig

	others map[string]*namespaceConfig
}

// NewOptions creates a new Options struct.
func NewOptions(datasource string, primaryNamespace string, otherNamespaces ...string) *Options {

	if datasource == "" {
		datasource = defaultDatasource
	}

	options := &Options{
		primary: &namespaceConfig{
			namespace:       primaryNamespace,
			Enabled:         true,
			Datasource:      datasource,
			OperationsTable: defaultOperationsTable,
			IndexTable:      defaultIndexTable,
			SpansTable:      defaultSpansTable,
			WriteBatchDelay: defaultWriteBatchDelay,
			WriteBatchSize:  defaultWriteBatchSize,
			Encoding:        defaultEncoding,
			Connector:       defaultConnector,
		},
		others: make(map[string]*namespaceConfig, len(otherNamespaces)),
	}

	for _, namespace := range otherNamespaces {
		if namespace == archiveNamespace {
			options.others[namespace] = &namespaceConfig{
				namespace:       namespace,
				Datasource:      datasource,
				OperationsTable: "",
				IndexTable:      "",
				SpansTable:      defaultArchiveSpansTable,
				WriteBatchDelay: defaultWriteBatchDelay,
				WriteBatchSize:  defaultWriteBatchSize,
				Encoding:        defaultEncoding,
				Connector:       defaultConnector,
			}
		} else {
			options.others[namespace] = &namespaceConfig{namespace: namespace}
		}
	}

	return options
}

// getPrimary returns the primary namespace configuration
func (opt *Options) getPrimary() *namespaceConfig {
	return opt.primary
}
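For orientation, a minimal usage sketch of this options plumbing (the DSN below is hypothetical; primaryNamespace and archiveNamespace are the constants defined in reader.go, which follows):

	// Sketch only: mirrors what NewReader in reader.go does with this package.
	opts := NewOptions("tcp://clickhouse:9000", primaryNamespace, archiveNamespace)
	db, err := connect(opts.getPrimary()) // defaultConnector: sqlx.Open + db.Ping
	if err != nil {
		// handle the connection error
	}
	_ = db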
pkg/query-service/app/clickhouseReader/reader.go (new file, 714 lines)
@@ -0,0 +1,714 @@
package clickhouseReader

import (
	"context"
	"errors"
	"fmt"
	"os"
	"strconv"
	"time"

	_ "github.com/ClickHouse/clickhouse-go"
	"github.com/jmoiron/sqlx"

	"go.signoz.io/query-service/model"
	"go.uber.org/zap"
)

const (
	primaryNamespace = "clickhouse"
	archiveNamespace = "clickhouse-archive"

	minTimespanForProgressiveSearch       = time.Hour
	minTimespanForProgressiveSearchMargin = time.Minute
	maxProgressiveSteps                   = 4
)

var (
	ErrNoOperationsTable = errors.New("no operations table supplied")
	ErrNoIndexTable      = errors.New("no index table supplied")
	ErrStartTimeRequired = errors.New("start time is required for search queries")
)

// ClickHouseReader reads spans from ClickHouse
type ClickHouseReader struct {
	db              *sqlx.DB
	operationsTable string
	indexTable      string
	spansTable      string
}

// NewReader returns a ClickHouseReader for the database
func NewReader() *ClickHouseReader {

	datasource := os.Getenv("ClickHouseUrl")
	options := NewOptions(datasource, primaryNamespace, archiveNamespace)
	db, err := initialize(options)

	if err != nil {
		zap.S().Error(err)
	}
	return &ClickHouseReader{
		db:              db,
		operationsTable: options.primary.OperationsTable,
		indexTable:      options.primary.IndexTable,
		spansTable:      options.primary.SpansTable,
	}
}

func initialize(options *Options) (*sqlx.DB, error) {

	db, err := connect(options.getPrimary())
	if err != nil {
		return nil, fmt.Errorf("error connecting to primary db: %v", err)
	}

	return db, nil
}

func connect(cfg *namespaceConfig) (*sqlx.DB, error) {
	if cfg.Encoding != EncodingJSON && cfg.Encoding != EncodingProto {
		return nil, fmt.Errorf("unknown encoding %q, supported: %q, %q", cfg.Encoding, EncodingJSON, EncodingProto)
	}

	return cfg.Connector(cfg)
}
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, error) {

	if r.indexTable == "" {
		return nil, ErrNoIndexTable
	}

	serviceItems := []model.ServiceItem{}

	query := fmt.Sprintf("SELECT serviceName, quantile(0.99)(durationNano) as p99, avg(durationNano) as avgDuration, count(*) as numCalls FROM %s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' GROUP BY serviceName ORDER BY p99 DESC", r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err := r.db.Select(&serviceItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	////////////////// Below block gets 5xx counts of services
	serviceErrorItems := []model.ServiceItem{}

	query = fmt.Sprintf("SELECT serviceName, count(*) as numErrors FROM %s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND statusCode>=500 GROUP BY serviceName", r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err = r.db.Select(&serviceErrorItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	m5xx := make(map[string]int)

	for j := range serviceErrorItems {
		m5xx[serviceErrorItems[j].ServiceName] = serviceErrorItems[j].NumErrors
	}
	///////////////////////////////////////////

	////////////////// Below block gets 4xx counts of services

	service4xxItems := []model.ServiceItem{}

	query = fmt.Sprintf("SELECT serviceName, count(*) as num4xx FROM %s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND statusCode>=400 AND statusCode<500 GROUP BY serviceName", r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err = r.db.Select(&service4xxItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	m4xx := make(map[string]int)

	for j := range service4xxItems {
		// Fix: populate the 4xx map (the original wrote into m5xx here,
		// clobbering the 5xx counts and leaving m4xx empty).
		m4xx[service4xxItems[j].ServiceName] = service4xxItems[j].Num4XX
	}

	for i := range serviceItems {
		if val, ok := m5xx[serviceItems[i].ServiceName]; ok {
			serviceItems[i].NumErrors = val
		}
		if val, ok := m4xx[serviceItems[i].ServiceName]; ok {
			serviceItems[i].Num4XX = val
		}
		serviceItems[i].CallRate = float32(serviceItems[i].NumCalls) / float32(queryParams.Period)
		serviceItems[i].FourXXRate = float32(serviceItems[i].Num4XX) / float32(queryParams.Period)
		serviceItems[i].ErrorRate = float32(serviceItems[i].NumErrors) / float32(queryParams.Period)
	}

	return &serviceItems, nil
}
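Note: queryParams.Period here is the query window in seconds (the request-parser changes later in this diff set it to int(endTime.Unix() - startTime.Unix())), so CallRate, FourXXRate, and ErrorRate above are per-second rates; for example, 600 calls over a 300-second window gives CallRate = 2.0.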
func (r *ClickHouseReader) GetServiceOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error) {

	serviceOverviewItems := []model.ServiceOverviewItem{}

	query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, quantile(0.99)(durationNano) as p99, quantile(0.95)(durationNano) as p95, quantile(0.50)(durationNano) as p50, count(*) as numCalls FROM %s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND serviceName='%s' GROUP BY time ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10), queryParams.ServiceName)

	err := r.db.Select(&serviceOverviewItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	serviceErrorItems := []model.ServiceErrorItem{}

	query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, count(*) as numErrors FROM %s WHERE timestamp>='%s' AND timestamp<='%s' AND kind='2' AND serviceName='%s' AND statusCode>=500 GROUP BY time ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10), queryParams.ServiceName)

	err = r.db.Select(&serviceErrorItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	m := make(map[int64]int)

	for j := range serviceErrorItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceErrorItems[j].Time)
		m[int64(timeObj.UnixNano())] = serviceErrorItems[j].NumErrors
	}

	for i := range serviceOverviewItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceOverviewItems[i].Time)
		serviceOverviewItems[i].Timestamp = int64(timeObj.UnixNano())
		serviceOverviewItems[i].Time = ""

		if val, ok := m[serviceOverviewItems[i].Timestamp]; ok {
			serviceOverviewItems[i].NumErrors = val
		}
		serviceOverviewItems[i].ErrorRate = float32(serviceOverviewItems[i].NumErrors) * 100 / float32(serviceOverviewItems[i].NumCalls)
		serviceOverviewItems[i].CallRate = float32(serviceOverviewItems[i].NumCalls) / float32(queryParams.StepSeconds)
	}

	return &serviceOverviewItems, nil
}
func (r *ClickHouseReader) SearchSpans(ctx context.Context, queryParams *model.SpanSearchParams) (*[]model.SearchSpansResult, error) {

	query := fmt.Sprintf("SELECT timestamp, spanID, traceID, serviceName, name, kind, durationNano, tagsKeys, tagsValues FROM %s WHERE timestamp >= ? AND timestamp <= ?", r.indexTable)

	args := []interface{}{strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10)}

	if len(queryParams.ServiceName) != 0 {
		query = query + " AND serviceName = ?"
		args = append(args, queryParams.ServiceName)
	}

	if len(queryParams.OperationName) != 0 {
		query = query + " AND name = ?"
		args = append(args, queryParams.OperationName)
	}

	if len(queryParams.Kind) != 0 {
		query = query + " AND kind = ?"
		args = append(args, queryParams.Kind)
	}

	if len(queryParams.MinDuration) != 0 {
		query = query + " AND durationNano >= ?"
		args = append(args, queryParams.MinDuration)
	}
	if len(queryParams.MaxDuration) != 0 {
		query = query + " AND durationNano <= ?"
		args = append(args, queryParams.MaxDuration)
	}

	for _, item := range queryParams.Tags {

		if item.Key == "error" && item.Value == "true" {
			query = query + " AND ( has(tags, 'error:true') OR statusCode>=500)"
			continue
		}

		if item.Operator == "equals" {
			query = query + " AND has(tags, ?)"
			args = append(args, fmt.Sprintf("%s:%s", item.Key, item.Value))
		} else if item.Operator == "contains" {
			query = query + " AND tagsValues[indexOf(tagsKeys, ?)] ILIKE ?"
			args = append(args, item.Key)
			args = append(args, fmt.Sprintf("%%%s%%", item.Value))
		} else if item.Operator == "isnotnull" {
			query = query + " AND has(tagsKeys, ?)"
			args = append(args, item.Key)
		} else {
			return nil, fmt.Errorf("Tag Operator %s not supported", item.Operator)
		}
	}

	query = query + " ORDER BY timestamp DESC LIMIT 100"

	var searchScanResponses []model.SearchSpanReponseItem

	err := r.db.Select(&searchScanResponses, query, args...)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	searchSpansResult := []model.SearchSpansResult{
		{
			Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues"},
			Events:  make([][]interface{}, len(searchScanResponses)),
		},
	}

	for i, item := range searchScanResponses {
		spanEvents := item.GetValues()
		searchSpansResult[0].Events[i] = spanEvents
	}

	return &searchSpansResult, nil
}
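A hedged sketch of a caller driving the filter-building above; reader is assumed to be a *ClickHouseReader, only fields exercised by SearchSpans are set, and the surrounding imports (context, time) are assumed:

	params := &model.SpanSearchParams{
		Start:       time.Now().Add(-time.Hour),
		End:         time.Now(),
		ServiceName: "frontend", // appends " AND serviceName = ?"
	}
	// A tag filter {Key: "error", Value: "true"} short-circuits to
	// " AND ( has(tags, 'error:true') OR statusCode>=500)" with no bind arg.
	result, err := reader.SearchSpans(context.Background(), params)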
func (r *ClickHouseReader) GetServiceDBOverview(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceDBOverviewItem, error) {

	var serviceDBOverviewItems []model.ServiceDBOverviewItem

	query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, avg(durationNano) as avgDuration, count(1) as numCalls, dbSystem FROM %s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' AND kind='3' AND dbName IS NOT NULL GROUP BY time, dbSystem ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err := r.db.Select(&serviceDBOverviewItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	for i := range serviceDBOverviewItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceDBOverviewItems[i].Time)
		serviceDBOverviewItems[i].Timestamp = int64(timeObj.UnixNano())
		serviceDBOverviewItems[i].Time = ""
		serviceDBOverviewItems[i].CallRate = float32(serviceDBOverviewItems[i].NumCalls) / float32(queryParams.StepSeconds)
	}

	if serviceDBOverviewItems == nil {
		serviceDBOverviewItems = []model.ServiceDBOverviewItem{}
	}

	return &serviceDBOverviewItems, nil
}
func (r *ClickHouseReader) GetServiceExternalAvgDuration(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {

	var serviceExternalItems []model.ServiceExternalItem

	query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, avg(durationNano) as avgDuration FROM %s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' AND kind='3' AND externalHttpUrl IS NOT NULL GROUP BY time ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err := r.db.Select(&serviceExternalItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	for i := range serviceExternalItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceExternalItems[i].Time)
		serviceExternalItems[i].Timestamp = int64(timeObj.UnixNano())
		serviceExternalItems[i].Time = ""
		// Note: the query above selects no numCalls column, so NumCalls is zero
		// here and CallRate stays 0; only avgDuration is meaningful.
		serviceExternalItems[i].CallRate = float32(serviceExternalItems[i].NumCalls) / float32(queryParams.StepSeconds)
	}

	if serviceExternalItems == nil {
		serviceExternalItems = []model.ServiceExternalItem{}
	}

	return &serviceExternalItems, nil
}
func (r *ClickHouseReader) GetServiceExternalErrors(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {

	var serviceExternalErrorItems []model.ServiceExternalItem

	query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, avg(durationNano) as avgDuration, count(1) as numCalls, externalHttpUrl FROM %s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' AND kind='3' AND externalHttpUrl IS NOT NULL AND statusCode >= 500 GROUP BY time, externalHttpUrl ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err := r.db.Select(&serviceExternalErrorItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	var serviceExternalTotalItems []model.ServiceExternalItem

	queryTotal := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, avg(durationNano) as avgDuration, count(1) as numCalls, externalHttpUrl FROM %s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' AND kind='3' AND externalHttpUrl IS NOT NULL GROUP BY time, externalHttpUrl ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	errTotal := r.db.Select(&serviceExternalTotalItems, queryTotal)

	if errTotal != nil {
		// Fix: log the error from the second query (the original logged err,
		// which belongs to the first query and is nil by this point).
		zap.S().Debug("Error in processing sql query: ", errTotal)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	m := make(map[string]int)

	for j := range serviceExternalErrorItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceExternalErrorItems[j].Time)
		m[strconv.FormatInt(timeObj.UnixNano(), 10)+"-"+serviceExternalErrorItems[j].ExternalHttpUrl] = serviceExternalErrorItems[j].NumCalls
	}

	for i := range serviceExternalTotalItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceExternalTotalItems[i].Time)
		serviceExternalTotalItems[i].Timestamp = int64(timeObj.UnixNano())
		serviceExternalTotalItems[i].Time = ""
		// serviceExternalTotalItems[i].CallRate = float32(serviceExternalTotalItems[i].NumCalls) / float32(queryParams.StepSeconds)

		if val, ok := m[strconv.FormatInt(serviceExternalTotalItems[i].Timestamp, 10)+"-"+serviceExternalTotalItems[i].ExternalHttpUrl]; ok {
			serviceExternalTotalItems[i].NumErrors = val
			serviceExternalTotalItems[i].ErrorRate = float32(serviceExternalTotalItems[i].NumErrors) * 100 / float32(serviceExternalTotalItems[i].NumCalls)
		}
		serviceExternalTotalItems[i].CallRate = 0
		serviceExternalTotalItems[i].NumCalls = 0
	}

	if serviceExternalTotalItems == nil {
		serviceExternalTotalItems = []model.ServiceExternalItem{}
	}

	return &serviceExternalTotalItems, nil
}
func (r *ClickHouseReader) GetServiceExternal(ctx context.Context, queryParams *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {

	var serviceExternalItems []model.ServiceExternalItem

	query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %s minute) as time, avg(durationNano) as avgDuration, count(1) as numCalls, externalHttpUrl FROM %s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' AND kind='3' AND externalHttpUrl IS NOT NULL GROUP BY time, externalHttpUrl ORDER BY time DESC", strconv.Itoa(int(queryParams.StepSeconds/60)), r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err := r.db.Select(&serviceExternalItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	for i := range serviceExternalItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, serviceExternalItems[i].Time)
		serviceExternalItems[i].Timestamp = int64(timeObj.UnixNano())
		serviceExternalItems[i].Time = ""
		serviceExternalItems[i].CallRate = float32(serviceExternalItems[i].NumCalls) / float32(queryParams.StepSeconds)
	}

	if serviceExternalItems == nil {
		serviceExternalItems = []model.ServiceExternalItem{}
	}

	return &serviceExternalItems, nil
}
func (r *ClickHouseReader) GetTopEndpoints(ctx context.Context, queryParams *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, error) {

	var topEndpointsItems []model.TopEndpointsItem

	query := fmt.Sprintf("SELECT quantile(0.5)(durationNano) as p50, quantile(0.95)(durationNano) as p95, quantile(0.99)(durationNano) as p99, COUNT(1) as numCalls, name FROM %s WHERE timestamp >= '%s' AND timestamp <= '%s' AND kind='2' and serviceName='%s' GROUP BY name", r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10), queryParams.ServiceName)

	err := r.db.Select(&topEndpointsItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	if topEndpointsItems == nil {
		topEndpointsItems = []model.TopEndpointsItem{}
	}

	return &topEndpointsItems, nil
}
func (r *ClickHouseReader) GetUsage(ctx context.Context, queryParams *model.GetUsageParams) (*[]model.UsageItem, error) {

	var usageItems []model.UsageItem

	var query string
	if len(queryParams.ServiceName) != 0 {
		query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d HOUR) as time, count(1) as count FROM %s WHERE serviceName='%s' AND timestamp>='%s' AND timestamp<='%s' GROUP BY time ORDER BY time ASC", queryParams.StepHour, r.indexTable, queryParams.ServiceName, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))
	} else {
		query = fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d HOUR) as time, count(1) as count FROM %s WHERE timestamp>='%s' AND timestamp<='%s' GROUP BY time ORDER BY time ASC", queryParams.StepHour, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))
	}

	err := r.db.Select(&usageItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	for i := range usageItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, usageItems[i].Time)
		usageItems[i].Timestamp = int64(timeObj.UnixNano())
		usageItems[i].Time = ""
	}

	if usageItems == nil {
		usageItems = []model.UsageItem{}
	}

	return &usageItems, nil
}
func (r *ClickHouseReader) GetServicesList(ctx context.Context) (*[]string, error) {

	services := []string{}

	query := fmt.Sprintf(`SELECT DISTINCT serviceName FROM %s WHERE toDate(timestamp) > now() - INTERVAL 1 DAY`, r.indexTable)

	err := r.db.Select(&services, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	return &services, nil
}
func (r *ClickHouseReader) GetTags(ctx context.Context, serviceName string) (*[]model.TagItem, error) {

	tagItems := []model.TagItem{}

	query := fmt.Sprintf(`SELECT DISTINCT arrayJoin(tagsKeys) as tagKeys FROM %s WHERE serviceName='%s' AND toDate(timestamp) > now() - INTERVAL 1 DAY`, r.indexTable, serviceName)

	err := r.db.Select(&tagItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	return &tagItems, nil
}
func (r *ClickHouseReader) GetOperations(ctx context.Context, serviceName string) (*[]string, error) {

	operations := []string{}

	query := fmt.Sprintf(`SELECT DISTINCT(name) FROM %s WHERE serviceName='%s' AND toDate(timestamp) > now() - INTERVAL 1 DAY`, r.indexTable, serviceName)

	err := r.db.Select(&operations, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}
	return &operations, nil
}
func (r *ClickHouseReader) SearchTraces(ctx context.Context, traceId string) (*[]model.SearchSpansResult, error) {

	var searchScanResponses []model.SearchSpanReponseItem

	query := fmt.Sprintf("SELECT timestamp, spanID, traceID, serviceName, name, kind, durationNano, tagsKeys, tagsValues, references FROM %s WHERE traceID='%s'", r.indexTable, traceId)

	err := r.db.Select(&searchScanResponses, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	searchSpansResult := []model.SearchSpansResult{
		{
			Columns: []string{"__time", "SpanId", "TraceId", "ServiceName", "Name", "Kind", "DurationNano", "TagsKeys", "TagsValues", "References"},
			Events:  make([][]interface{}, len(searchScanResponses)),
		},
	}

	for i, item := range searchScanResponses {
		spanEvents := item.GetValues()
		searchSpansResult[0].Events[i] = spanEvents
	}

	return &searchSpansResult, nil
}
func (r *ClickHouseReader) GetServiceMapDependencies(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
	serviceMapDependencyItems := []model.ServiceMapDependencyItem{}

	query := fmt.Sprintf(`SELECT spanID, parentSpanID, serviceName FROM %s WHERE timestamp>='%s' AND timestamp<='%s'`, r.indexTable, strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10))

	err := r.db.Select(&serviceMapDependencyItems, query)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	serviceMap := make(map[string]*model.ServiceMapDependencyResponseItem)

	spanId2ServiceNameMap := make(map[string]string)
	for i := range serviceMapDependencyItems {
		spanId2ServiceNameMap[serviceMapDependencyItems[i].SpanId] = serviceMapDependencyItems[i].ServiceName
	}
	for i := range serviceMapDependencyItems {
		parent2childServiceName := spanId2ServiceNameMap[serviceMapDependencyItems[i].ParentSpanId] + "-" + spanId2ServiceNameMap[serviceMapDependencyItems[i].SpanId]
		if _, ok := serviceMap[parent2childServiceName]; !ok {
			serviceMap[parent2childServiceName] = &model.ServiceMapDependencyResponseItem{
				Parent:    spanId2ServiceNameMap[serviceMapDependencyItems[i].ParentSpanId],
				Child:     spanId2ServiceNameMap[serviceMapDependencyItems[i].SpanId],
				CallCount: 1,
			}
		} else {
			serviceMap[parent2childServiceName].CallCount++
		}
	}

	retMe := make([]model.ServiceMapDependencyResponseItem, 0, len(serviceMap))
	for _, dependency := range serviceMap {
		if dependency.Parent == "" {
			continue
		}
		retMe = append(retMe, *dependency)
	}

	return &retMe, nil
}
func (r *ClickHouseReader) SearchSpansAggregate(ctx context.Context, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {

	spanSearchAggregatesResponseItems := []model.SpanSearchAggregatesResponseItem{}

	aggregationQuery := ""
	if queryParams.Dimension == "duration" {
		switch queryParams.AggregationOption {
		case "avg":
			// Added: the allowedAggregations map later in this diff permits
			// "avg" for the duration dimension, but the original switch had no
			// case for it, which would have produced a malformed SELECT.
			aggregationQuery = " avg(durationNano) as value "
		case "p50":
			aggregationQuery = " quantile(0.50)(durationNano) as value "
		case "p95":
			aggregationQuery = " quantile(0.95)(durationNano) as value "
		case "p99":
			aggregationQuery = " quantile(0.99)(durationNano) as value "
		}
	} else if queryParams.Dimension == "calls" {
		aggregationQuery = " count(*) as value "
	}

	query := fmt.Sprintf("SELECT toStartOfInterval(timestamp, INTERVAL %d minute) as time, %s FROM %s WHERE timestamp >= ? AND timestamp <= ?", queryParams.StepSeconds/60, aggregationQuery, r.indexTable)

	args := []interface{}{strconv.FormatInt(queryParams.Start.UnixNano(), 10), strconv.FormatInt(queryParams.End.UnixNano(), 10)}

	if len(queryParams.ServiceName) != 0 {
		query = query + " AND serviceName = ?"
		args = append(args, queryParams.ServiceName)
	}

	if len(queryParams.OperationName) != 0 {
		query = query + " AND name = ?"
		args = append(args, queryParams.OperationName)
	}

	if len(queryParams.Kind) != 0 {
		query = query + " AND kind = ?"
		args = append(args, queryParams.Kind)
	}

	if len(queryParams.MinDuration) != 0 {
		query = query + " AND durationNano >= ?"
		args = append(args, queryParams.MinDuration)
	}
	if len(queryParams.MaxDuration) != 0 {
		query = query + " AND durationNano <= ?"
		args = append(args, queryParams.MaxDuration)
	}

	for _, item := range queryParams.Tags {

		if item.Key == "error" && item.Value == "true" {
			query = query + " AND ( has(tags, 'error:true') OR statusCode>=500)"
			continue
		}

		if item.Operator == "equals" {
			query = query + " AND has(tags, ?)"
			args = append(args, fmt.Sprintf("%s:%s", item.Key, item.Value))
		} else if item.Operator == "contains" {
			query = query + " AND tagsValues[indexOf(tagsKeys, ?)] ILIKE ?"
			args = append(args, item.Key)
			args = append(args, fmt.Sprintf("%%%s%%", item.Value))
		} else if item.Operator == "isnotnull" {
			query = query + " AND has(tagsKeys, ?)"
			args = append(args, item.Key)
		} else {
			return nil, fmt.Errorf("Tag Operator %s not supported", item.Operator)
		}
	}

	query = query + " GROUP BY time ORDER BY time"

	err := r.db.Select(&spanSearchAggregatesResponseItems, query, args...)

	zap.S().Info(query)

	if err != nil {
		zap.S().Debug("Error in processing sql query: ", err)
		return nil, fmt.Errorf("Error in processing sql query")
	}

	for i := range spanSearchAggregatesResponseItems {
		timeObj, _ := time.Parse(time.RFC3339Nano, spanSearchAggregatesResponseItems[i].Time)
		spanSearchAggregatesResponseItems[i].Timestamp = int64(timeObj.UnixNano())
		spanSearchAggregatesResponseItems[i].Time = ""
		if queryParams.AggregationOption == "rate_per_sec" {
			spanSearchAggregatesResponseItems[i].Value = float32(spanSearchAggregatesResponseItems[i].Value) / float32(queryParams.StepSeconds)
		}
	}

	return spanSearchAggregatesResponseItems, nil
}
pkg/query-service/app/druidReader/reader.go (new file, 99 lines)
@@ -0,0 +1,99 @@
package druidReader

import (
	"context"
	"os"

	"go.signoz.io/query-service/druidQuery"
	"go.signoz.io/query-service/godruid"
	"go.signoz.io/query-service/model"
)

type DruidReader struct {
	Client    *godruid.Client
	SqlClient *druidQuery.SqlClient
}

func NewReader() *DruidReader {

	initialize()
	druidClientUrl := os.Getenv("DruidClientUrl")

	client := godruid.Client{
		Url:   druidClientUrl,
		Debug: true,
	}

	sqlClient := druidQuery.SqlClient{
		Url:   druidClientUrl,
		Debug: true,
	}
	return &DruidReader{
		Client:    &client,
		SqlClient: &sqlClient,
	}
}

// initialize is a placeholder; no setup is needed for the Druid clients yet.
func initialize() {
}

func (druid *DruidReader) GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error) {
	return druidQuery.GetServiceOverview(druid.SqlClient, query)
}

func (druid *DruidReader) GetServices(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceItem, error) {
	return druidQuery.GetServices(druid.SqlClient, query)
}

func (druid *DruidReader) SearchSpans(ctx context.Context, query *model.SpanSearchParams) (*[]model.SearchSpansResult, error) {
	return druidQuery.SearchSpans(druid.Client, query)
}

func (druid *DruidReader) GetServiceDBOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceDBOverviewItem, error) {
	return druidQuery.GetServiceDBOverview(druid.SqlClient, query)
}

func (druid *DruidReader) GetServiceExternalAvgDuration(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
	return druidQuery.GetServiceExternalAvgDuration(druid.SqlClient, query)
}

func (druid *DruidReader) GetServiceExternalErrors(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
	return druidQuery.GetServiceExternalErrors(druid.SqlClient, query)
}

func (druid *DruidReader) GetServiceExternal(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
	return druidQuery.GetServiceExternal(druid.SqlClient, query)
}

func (druid *DruidReader) GetTopEndpoints(ctx context.Context, query *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, error) {
	return druidQuery.GetTopEndpoints(druid.SqlClient, query)
}

func (druid *DruidReader) GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error) {
	return druidQuery.GetUsage(druid.SqlClient, query)
}

func (druid *DruidReader) GetOperations(ctx context.Context, serviceName string) (*[]string, error) {
	return druidQuery.GetOperations(druid.SqlClient, serviceName)
}

func (druid *DruidReader) GetTags(ctx context.Context, serviceName string) (*[]model.TagItem, error) {
	return druidQuery.GetTags(druid.SqlClient, serviceName)
}

func (druid *DruidReader) GetServicesList(ctx context.Context) (*[]string, error) {
	return druidQuery.GetServicesList(druid.SqlClient)
}

func (druid *DruidReader) SearchTraces(ctx context.Context, traceId string) (*[]model.SearchSpansResult, error) {
	return druidQuery.SearchTraces(druid.Client, traceId)
}

func (druid *DruidReader) GetServiceMapDependencies(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
	return druidQuery.GetServiceMapDependencies(druid.SqlClient, query)
}

func (druid *DruidReader) SearchSpansAggregate(ctx context.Context, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {
	return druidQuery.SearchSpansAggregate(druid.Client, queryParams)
}
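Both concrete readers are meant to satisfy the Reader interface introduced in pkg/query-service/app/interface.go below; a compile-time conformance check (sketch only, it would live in package app) is:

	var _ Reader = (*clickhouseReader.ClickHouseReader)(nil)
	var _ Reader = (*druidReader.DruidReader)(nil)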
@@ -1,14 +1,13 @@
 package app
 
 import (
+	"context"
 	"encoding/json"
 	"fmt"
 	"net/http"
 
 	"github.com/gorilla/mux"
 	"github.com/posthog/posthog-go"
-	"go.signoz.io/query-service/druidQuery"
-	"go.signoz.io/query-service/godruid"
 	"go.uber.org/zap"
 )
@@ -23,17 +22,15 @@ type APIHandler struct {
 	// queryParser queryParser
 	basePath   string
 	apiPrefix  string
-	client     *godruid.Client
-	sqlClient  *druidQuery.SqlClient
+	reader     *Reader
 	pc         *posthog.Client
 	distinctId string
 }
 
 // NewAPIHandler returns an APIHandler
-func NewAPIHandler(client *godruid.Client, sqlClient *druidQuery.SqlClient, pc *posthog.Client, distinctId string) *APIHandler {
+func NewAPIHandler(reader *Reader, pc *posthog.Client, distinctId string) *APIHandler {
 	aH := &APIHandler{
-		client:     client,
-		sqlClient:  sqlClient,
+		reader:     reader,
 		pc:         pc,
 		distinctId: distinctId,
 	}
@@ -59,7 +56,7 @@ type structuredError struct {
 func (aH *APIHandler) RegisterRoutes(router *mux.Router) {
 
 	router.HandleFunc("/api/v1/user", aH.user).Methods(http.MethodPost)
-	router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet)
+	// router.HandleFunc("/api/v1/get_percentiles", aH.getApplicationPercentiles).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/services", aH.getServices).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/services/list", aH.getServicesList).Methods(http.MethodGet)
 	router.HandleFunc("/api/v1/service/overview", aH.getServiceOverview).Methods(http.MethodGet)
@@ -115,7 +112,7 @@ func (aH *APIHandler) getOperations(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	result, err := druidQuery.GetOperations(aH.sqlClient, serviceName)
+	result, err := (*aH.reader).GetOperations(context.Background(), serviceName)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -126,7 +123,7 @@ func (aH *APIHandler) getOperations(w http.ResponseWriter, r *http.Request) {
 
 func (aH *APIHandler) getServicesList(w http.ResponseWriter, r *http.Request) {
 
-	result, err := druidQuery.GetServicesList(aH.sqlClient)
+	result, err := (*aH.reader).GetServicesList(context.Background())
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -139,7 +136,7 @@ func (aH *APIHandler) searchTags(w http.ResponseWriter, r *http.Request) {
 
 	serviceName := r.URL.Query().Get("service")
 
-	result, err := druidQuery.GetTags(aH.sqlClient, serviceName)
+	result, err := (*aH.reader).GetTags(context.Background(), serviceName)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -155,7 +152,8 @@ func (aH *APIHandler) getTopEndpoints(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	result, err := druidQuery.GetTopEndpoints(aH.sqlClient, query)
+	result, err := (*aH.reader).GetTopEndpoints(context.Background(), query)
+
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -171,7 +169,7 @@ func (aH *APIHandler) getUsage(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	result, err := druidQuery.GetUsage(aH.sqlClient, query)
+	result, err := (*aH.reader).GetUsage(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -187,7 +185,8 @@ func (aH *APIHandler) getServiceDBOverview(w http.ResponseWriter, r *http.Reques
 		return
 	}
 
-	result, err := druidQuery.GetServiceDBOverview(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServiceDBOverview(context.Background(), query)
+
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -203,7 +202,7 @@ func (aH *APIHandler) getServiceExternal(w http.ResponseWriter, r *http.Request)
 		return
 	}
 
-	result, err := druidQuery.GetServiceExternal(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServiceExternal(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -219,7 +218,7 @@ func (aH *APIHandler) GetServiceExternalAvgDuration(w http.ResponseWriter, r *ht
 		return
 	}
 
-	result, err := druidQuery.GetServiceExternalAvgDuration(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServiceExternalAvgDuration(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -235,7 +234,7 @@ func (aH *APIHandler) getServiceExternalErrors(w http.ResponseWriter, r *http.Re
 		return
 	}
 
-	result, err := druidQuery.GetServiceExternalErrors(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServiceExternalErrors(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -251,7 +250,7 @@ func (aH *APIHandler) getServiceOverview(w http.ResponseWriter, r *http.Request)
 		return
 	}
 
-	result, err := druidQuery.GetServiceOverview(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServiceOverview(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -267,7 +266,7 @@ func (aH *APIHandler) getServices(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	result, err := druidQuery.GetServices(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServices(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -289,7 +288,7 @@ func (aH *APIHandler) serviceMapDependencies(w http.ResponseWriter, r *http.Requ
 		return
 	}
 
-	result, err := druidQuery.GetServiceMapDependencies(aH.sqlClient, query)
+	result, err := (*aH.reader).GetServiceMapDependencies(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -302,7 +301,7 @@ func (aH *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
 	vars := mux.Vars(r)
 	traceId := vars["traceId"]
 
-	result, err := druidQuery.SearchTraces(aH.client, traceId)
+	result, err := (*aH.reader).SearchTraces(context.Background(), traceId)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -310,6 +309,7 @@ func (aH *APIHandler) searchTraces(w http.ResponseWriter, r *http.Request) {
 	aH.writeJSON(w, r, result)
 
 }
+
 func (aH *APIHandler) searchSpansAggregates(w http.ResponseWriter, r *http.Request) {
 
 	query, err := parseSearchSpanAggregatesRequest(r)
@@ -317,7 +317,7 @@ func (aH *APIHandler) searchSpansAggregates(w http.ResponseWriter, r *http.Reque
 		return
 	}
 
-	result, err := druidQuery.SearchSpansAggregate(aH.client, query)
+	result, err := (*aH.reader).SearchSpansAggregate(context.Background(), query)
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -332,7 +332,9 @@ func (aH *APIHandler) searchSpans(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	result, err := druidQuery.SearchSpans(aH.client, query)
+	// result, err := druidQuery.SearchSpans(aH.client, query)
+	result, err := (*aH.reader).SearchSpans(context.Background(), query)
+
 	if aH.handleError(w, err, http.StatusBadRequest) {
 		return
 	}
@@ -340,20 +342,20 @@ func (aH *APIHandler) searchSpans(w http.ResponseWriter, r *http.Request) {
 	aH.writeJSON(w, r, result)
 }
 
-func (aH *APIHandler) getApplicationPercentiles(w http.ResponseWriter, r *http.Request) {
-	// vars := mux.Vars(r)
+// func (aH *APIHandler) getApplicationPercentiles(w http.ResponseWriter, r *http.Request) {
+// 	// vars := mux.Vars(r)
 
-	query, err := parseApplicationPercentileRequest(r)
-	if aH.handleError(w, err, http.StatusBadRequest) {
-		return
-	}
+// 	query, err := parseApplicationPercentileRequest(r)
+// 	if aH.handleError(w, err, http.StatusBadRequest) {
+// 		return
+// 	}
 
-	result, err := druidQuery.GetApplicationPercentiles(aH.client, query)
-	if aH.handleError(w, err, http.StatusBadRequest) {
-		return
-	}
-	aH.writeJSON(w, r, result)
-}
+// 	result, err := (*aH.reader).GetApplicationPercentiles(context.Background(), query)
+// 	if aH.handleError(w, err, http.StatusBadRequest) {
+// 		return
+// 	}
+// 	aH.writeJSON(w, r, result)
+// }
 
 func (aH *APIHandler) handleError(w http.ResponseWriter, err error, statusCode int) bool {
 	if err == nil {
pkg/query-service/app/interface.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package app

import (
	"context"

	"go.signoz.io/query-service/model"
)

type Reader interface {
	GetServiceOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error)
	GetServices(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceItem, error)
	// GetApplicationPercentiles(ctx context.Context, query *model.ApplicationPercentileParams) ([]godruid.Timeseries, error)
	SearchSpans(ctx context.Context, query *model.SpanSearchParams) (*[]model.SearchSpansResult, error)
	GetServiceDBOverview(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceDBOverviewItem, error)
	GetServiceExternalAvgDuration(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error)
	GetServiceExternalErrors(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error)
	GetServiceExternal(ctx context.Context, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error)
	GetTopEndpoints(ctx context.Context, query *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, error)
	GetUsage(ctx context.Context, query *model.GetUsageParams) (*[]model.UsageItem, error)
	GetOperations(ctx context.Context, serviceName string) (*[]string, error)
	GetTags(ctx context.Context, serviceName string) (*[]model.TagItem, error)
	GetServicesList(ctx context.Context) (*[]string, error)
	SearchTraces(ctx context.Context, traceID string) (*[]model.SearchSpansResult, error)
	GetServiceMapDependencies(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)
	SearchSpansAggregate(ctx context.Context, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error)
}
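Since APIHandler dereferences a *Reader, the wiring (presumably done in the server setup code, past the end of this excerpt) would look roughly like the sketch below; the STORAGE variable name is hypothetical:

	var reader Reader
	if os.Getenv("STORAGE") == "clickhouse" {
		reader = clickhouseReader.NewReader()
	} else {
		reader = druidReader.NewReader()
	}
	apiHandler := NewAPIHandler(&reader, pc, distinctId)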
@@ -16,7 +16,7 @@ var allowedDimesions = []string{"calls", "duration"}
|
|||||||
|
|
||||||
var allowedAggregations = map[string][]string{
|
var allowedAggregations = map[string][]string{
|
||||||
"calls": []string{"count", "rate_per_sec"},
|
"calls": []string{"count", "rate_per_sec"},
|
||||||
"duration": []string{"avg", "p50", "p90", "p99"},
|
"duration": []string{"avg", "p50", "p95", "p99"},
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseGetTopEndpointsRequest(r *http.Request) (*model.GetTopEndpointsParams, error) {
|
func parseGetTopEndpointsRequest(r *http.Request) (*model.GetTopEndpointsParams, error) {
|
||||||
@@ -38,6 +38,8 @@ func parseGetTopEndpointsRequest(r *http.Request) (*model.GetTopEndpointsParams,
|
|||||||
StartTime: startTime.Format(time.RFC3339Nano),
|
StartTime: startTime.Format(time.RFC3339Nano),
|
||||||
EndTime: endTime.Format(time.RFC3339Nano),
|
EndTime: endTime.Format(time.RFC3339Nano),
|
||||||
ServiceName: serviceName,
|
ServiceName: serviceName,
|
||||||
|
Start: startTime,
|
||||||
|
End: endTime,
|
||||||
}
|
}
|
||||||
|
|
||||||
return &getTopEndpointsParams, nil
|
return &getTopEndpointsParams, nil
|
||||||
@@ -64,12 +66,16 @@ func parseGetUsageRequest(r *http.Request) (*model.GetUsageParams, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
serviceName := r.URL.Query().Get("service")
|
serviceName := r.URL.Query().Get("service")
|
||||||
|
stepHour := stepInt / 3600
|
||||||
|
|
||||||
getUsageParams := model.GetUsageParams{
|
getUsageParams := model.GetUsageParams{
|
||||||
StartTime: startTime.Format(time.RFC3339Nano),
|
StartTime: startTime.Format(time.RFC3339Nano),
|
||||||
EndTime: endTime.Format(time.RFC3339Nano),
|
EndTime: endTime.Format(time.RFC3339Nano),
|
||||||
|
Start: startTime,
|
||||||
|
End: endTime,
|
||||||
ServiceName: serviceName,
|
ServiceName: serviceName,
|
||||||
Period: fmt.Sprintf("PT%dH", stepInt/3600),
|
Period: fmt.Sprintf("PT%dH", stepHour),
|
||||||
|
StepHour: stepHour,
|
||||||
}
|
}
|
||||||
|
|
||||||
return &getUsageParams, nil
|
return &getUsageParams, nil
|
||||||
@@ -101,7 +107,9 @@ func parseGetServiceExternalRequest(r *http.Request) (*model.GetServiceOverviewParams, error) {
 	}

 	getServiceOverviewParams := model.GetServiceOverviewParams{
+		Start:       startTime,
 		StartTime:   startTime.Format(time.RFC3339Nano),
+		End:         endTime,
 		EndTime:     endTime.Format(time.RFC3339Nano),
 		ServiceName: serviceName,
 		Period:      fmt.Sprintf("PT%dM", stepInt/60),

@@ -137,7 +145,9 @@ func parseGetServiceOverviewRequest(r *http.Request) (*model.GetServiceOverviewParams, error) {
 	}

 	getServiceOverviewParams := model.GetServiceOverviewParams{
+		Start:       startTime,
 		StartTime:   startTime.Format(time.RFC3339Nano),
+		End:         endTime,
 		EndTime:     endTime.Format(time.RFC3339Nano),
 		ServiceName: serviceName,
 		Period:      fmt.Sprintf("PT%dM", stepInt/60),

@@ -160,7 +170,9 @@ func parseGetServicesRequest(r *http.Request) (*model.GetServicesParams, error) {
 	}

 	getServicesParams := model.GetServicesParams{
+		Start:     startTime,
 		StartTime: startTime.Format(time.RFC3339Nano),
+		End:       endTime,
 		EndTime:   endTime.Format(time.RFC3339Nano),
 		Period:    int(endTime.Unix() - startTime.Unix()),
 	}

@@ -222,6 +234,8 @@ func parseSearchSpanAggregatesRequest(r *http.Request) (*model.SpanSearchAggregatesParams, error) {
 	}

 	params := &model.SpanSearchAggregatesParams{
+		Start:      startTime,
+		End:        endTime,
 		Intervals:  fmt.Sprintf("%s/%s", startTimeStr, endTimeStr),
 		GranOrigin: startTimeStr,
 		GranPeriod: granPeriod,

@@ -283,6 +297,8 @@ func parseSpanSearchRequest(r *http.Request) (*model.SpanSearchParams, error) {
 	// fmt.Println(startTimeStr)
 	params := &model.SpanSearchParams{
 		Intervals: fmt.Sprintf("%s/%s", startTimeStr, endTimeStr),
+		Start:     startTime,
+		End:       endTime,
 		Limit:     100,
 		Order:     "descending",
 	}
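All of these parsers now populate typed `Start`/`End` fields next to the preformatted `StartTime`/`EndTime` strings, so a SQL-backed reader can bind real timestamps while the Druid path keeps its RFC3339 strings. The diff does not show how `startTime` and `endTime` themselves are produced; assuming the query string carries epoch nanoseconds, a parser in this style would yield the `*time.Time` values used above (the helper and parameter names are illustrative):

```go
package main

import (
	"fmt"
	"net/http"
	"strconv"
	"time"
)

// parseTimeParam is a sketch, not part of the patch: it reads an
// epoch-nanosecond query parameter such as ?start=1620000000000000000
// and returns it as a *time.Time.
func parseTimeParam(r *http.Request, name string) (*time.Time, error) {
	raw := r.URL.Query().Get(name)
	ns, err := strconv.ParseInt(raw, 10, 64)
	if err != nil {
		return nil, fmt.Errorf("%s param is not an integer: %v", name, err)
	}
	t := time.Unix(0, ns)
	return &t, nil
}

func main() {
	r, _ := http.NewRequest("GET", "/services?start=1620000000000000000", nil)
	if start, err := parseTimeParam(r, "start"); err == nil {
		fmt.Println(start.UTC())
	}
}
```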
@@ -1,8 +1,10 @@
 package app

 import (
+	"fmt"
 	"net"
 	"net/http"
+	"os"
 	"time"

 	"github.com/google/uuid"

@@ -11,8 +13,8 @@ import (
 	"github.com/posthog/posthog-go"
 	"github.com/rs/cors"
 	"github.com/soheilhy/cmux"
-	"go.signoz.io/query-service/druidQuery"
-	"go.signoz.io/query-service/godruid"
+	"go.signoz.io/query-service/app/clickhouseReader"
+	"go.signoz.io/query-service/app/druidReader"
 	"go.signoz.io/query-service/healthcheck"
 	"go.signoz.io/query-service/utils"
 	"go.uber.org/zap"

@@ -20,7 +22,7 @@ import (

 type ServerOptions struct {
 	HTTPHostPort string
-	DruidClientUrl string
+	// DruidClientUrl string
 }

 // Server runs HTTP, Mux and a grpc server

@@ -28,10 +30,9 @@ type Server struct {
 	// logger *zap.Logger
 	// querySvc *querysvc.QueryService
 	// queryOptions *QueryOptions
-	serverOptions *ServerOptions

 	// tracer opentracing.Tracer // TODO make part of flags.Service
+	serverOptions *ServerOptions
 	conn net.Listener
 	// grpcConn net.Listener
 	httpConn net.Listener

@@ -64,6 +65,11 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 	// if err != nil {
 	// 	return nil, err
 	// }
+	httpServer, err := createHTTPServer()
+
+	if err != nil {
+		return nil, err
+	}

 	return &Server{
 		// logger: logger,

@@ -72,7 +78,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 		// tracer: tracer,
 		// grpcServer: grpcServer,
 		serverOptions: serverOptions,
-		httpServer: createHTTPServer(serverOptions.DruidClientUrl),
+		httpServer: httpServer,
 		separatePorts: true,
 		// separatePorts: grpcPort != httpPort,
 		unavailableChannel: make(chan healthcheck.Status),

@@ -82,22 +88,25 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 var posthogClient posthog.Client
 var distinctId string

-func createHTTPServer(druidClientUrl string) *http.Server {
+func createHTTPServer() (*http.Server, error) {

 	posthogClient = posthog.New("H-htDCae7CR3RV57gUzmol6IAKtm5IMCvbcm_fwnL-w")
 	distinctId = uuid.New().String()

-	client := godruid.Client{
-		Url:   druidClientUrl,
-		Debug: true,
-	}
-
-	sqlClient := druidQuery.SqlClient{
-		Url:   druidClientUrl,
-		Debug: true,
-	}
-
-	apiHandler := NewAPIHandler(&client, &sqlClient, &posthogClient, distinctId)
+	var reader Reader
+
+	storage := os.Getenv("STORAGE")
+	if storage == "druid" {
+		zap.S().Info("Using Apache Druid as datastore ...")
+		reader = druidReader.NewReader()
+	} else if storage == "clickhouse" {
+		zap.S().Info("Using ClickHouse as datastore ...")
+		reader = clickhouseReader.NewReader()
+	} else {
+		return nil, fmt.Errorf("Storage type: %s is not supported in query service", storage)
+	}
+
+	apiHandler := NewAPIHandler(&reader, &posthogClient, distinctId)

 	r := NewRouter()

 	r.Use(analyticsMiddleware)

@@ -118,7 +127,7 @@ func createHTTPServer(druidClientUrl string) *http.Server {

 	return &http.Server{
 		Handler: handler,
-	}
+	}, nil
 }

 func loggingMiddleware(next http.Handler) http.Handler {
@@ -11,92 +11,6 @@ import (
 	"go.uber.org/zap"
 )

-type ServiceItem struct {
-	ServiceName  string  `json:"serviceName"`
-	Percentile99 float32 `json:"p99"`
-	AvgDuration  float32 `json:"avgDuration"`
-	NumCalls     int     `json:"numCalls"`
-	CallRate     float32 `json:"callRate"`
-	NumErrors    int     `json:"numErrors"`
-	ErrorRate    float32 `json:"errorRate"`
-	Num4XX       int     `json:"num4XX"`
-	FourXXRate   float32 `json:"fourXXRate"`
-}
-type ServiceListErrorItem struct {
-	ServiceName string `json:"serviceName"`
-	NumErrors   int    `json:"numErrors"`
-	Num4xx      int    `json:"num4xx"`
-}
-
-type ServiceErrorItem struct {
-	Time      string `json:"time,omitempty"`
-	Timestamp int64  `json:"timestamp"`
-	NumErrors int    `json:"numErrors"`
-}
-
-type ServiceOverviewItem struct {
-	Time         string  `json:"time,omitempty"`
-	Timestamp    int64   `json:"timestamp"`
-	Percentile50 float32 `json:"p50"`
-	Percentile95 float32 `json:"p95"`
-	Percentile99 float32 `json:"p99"`
-	NumCalls     int     `json:"numCalls"`
-	CallRate     float32 `json:"callRate"`
-	NumErrors    int     `json:"numErrors"`
-	ErrorRate    float32 `json:"errorRate"`
-}
-
-type ServiceExternalItem struct {
-	Time            string  `json:"time,omitempty"`
-	Timestamp       int64   `json:"timestamp,omitempty"`
-	ExternalHttpUrl string  `json:"externalHttpUrl,omitempty"`
-	AvgDuration     float32 `json:"avgDuration,omitempty"`
-	NumCalls        int     `json:"numCalls,omitempty"`
-	CallRate        float32 `json:"callRate,omitempty"`
-	NumErrors       int     `json:"numErrors"`
-	ErrorRate       float32 `json:"errorRate"`
-}
-
-type ServiceDBOverviewItem struct {
-	Time        string  `json:"time,omitempty"`
-	Timestamp   int64   `json:"timestamp,omitempty"`
-	DBSystem    string  `json:"dbSystem,omitempty"`
-	AvgDuration float32 `json:"avgDuration,omitempty"`
-	NumCalls    int     `json:"numCalls,omitempty"`
-	CallRate    float32 `json:"callRate,omitempty"`
-}
-
-type ServiceMapDependencyItem struct {
-	SpanId       string `json:"spanId,omitempty"`
-	ParentSpanId string `json:"parentSpanId,omitempty"`
-	ServiceName  string `json:"serviceName,omitempty"`
-}
-
-type UsageItem struct {
-	Time      string `json:"time,omitempty"`
-	Timestamp int64  `json:"timestamp"`
-	Count     int64  `json:"count"`
-}
-
-type TopEnpointsItem struct {
-	Percentile50 float32 `json:"p50"`
-	Percentile90 float32 `json:"p90"`
-	Percentile99 float32 `json:"p99"`
-	NumCalls     int     `json:"numCalls"`
-	Name         string  `json:"name"`
-}
-
-type TagItem struct {
-	TagKeys  string `json:"tagKeys"`
-	TagCount int    `json:"tagCount"`
-}
-
-type ServiceMapDependencyResponseItem struct {
-	Parent    string `json:"parent,omitempty"`
-	Child     string `json:"child,omitempty"`
-	CallCount int    `json:"callCount,omitempty"`
-}
-
 func GetOperations(client *SqlClient, serviceName string) (*[]string, error) {

 	sqlQuery := fmt.Sprintf(`SELECT DISTINCT(Name) FROM %s WHERE ServiceName='%s' AND __time > CURRENT_TIMESTAMP - INTERVAL '1' DAY`, constants.DruidDatasource, serviceName)

@@ -155,7 +69,7 @@ func GetServicesList(client *SqlClient) (*[]string, error) {
 	return &servicesListReponse, nil
 }

-func GetTags(client *SqlClient, serviceName string) (*[]TagItem, error) {
+func GetTags(client *SqlClient, serviceName string) (*[]model.TagItem, error) {

 	var sqlQuery string

@@ -176,7 +90,7 @@ func GetTags(client *SqlClient, serviceName string) (*[]TagItem, error) {

 	// zap.S().Info(string(response))

-	res := new([]TagItem)
+	res := new([]model.TagItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -187,9 +101,9 @@ func GetTags(client *SqlClient, serviceName string) (*[]TagItem, error) {
 	return &tagResponse, nil
 }

-func GetTopEndpoints(client *SqlClient, query *model.GetTopEndpointsParams) (*[]TopEnpointsItem, error) {
+func GetTopEndpoints(client *SqlClient, query *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, error) {

-	sqlQuery := fmt.Sprintf(`SELECT APPROX_QUANTILE_DS("QuantileDuration", 0.5) as p50, APPROX_QUANTILE_DS("QuantileDuration", 0.9) as p90, APPROX_QUANTILE_DS("QuantileDuration", 0.99) as p99, COUNT(SpanId) as numCalls, Name FROM "%s" WHERE "__time" >= '%s' AND "__time" <= '%s' AND "Kind"='2' and "ServiceName"='%s' GROUP BY Name`, constants.DruidDatasource, query.StartTime, query.EndTime, query.ServiceName)
+	sqlQuery := fmt.Sprintf(`SELECT APPROX_QUANTILE_DS("QuantileDuration", 0.5) as p50, APPROX_QUANTILE_DS("QuantileDuration", 0.95) as p95, APPROX_QUANTILE_DS("QuantileDuration", 0.99) as p99, COUNT(SpanId) as numCalls, Name FROM "%s" WHERE "__time" >= '%s' AND "__time" <= '%s' AND "Kind"='2' and "ServiceName"='%s' GROUP BY Name`, constants.DruidDatasource, query.StartTime, query.EndTime, query.ServiceName)

 	// zap.S().Debug(sqlQuery)

@@ -202,7 +116,7 @@ func GetTopEndpoints(client *SqlClient, query *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, error) {

 	// zap.S().Info(string(response))

-	res := new([]TopEnpointsItem)
+	res := new([]model.TopEndpointsItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -213,7 +127,7 @@ func GetTopEndpoints(client *SqlClient, query *model.GetTopEndpointsParams) (*[]model.TopEndpointsItem, error) {
 	return &topEnpointsResponse, nil
 }

-func GetUsage(client *SqlClient, query *model.GetUsageParams) (*[]UsageItem, error) {
+func GetUsage(client *SqlClient, query *model.GetUsageParams) (*[]model.UsageItem, error) {

 	var sqlQuery string

@@ -236,7 +150,7 @@ func GetUsage(client *SqlClient, query *model.GetUsageParams) (*[]UsageItem, error) {

 	// zap.S().Info(string(response))

-	res := new([]UsageItem)
+	res := new([]model.UsageItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -253,7 +167,7 @@ func GetUsage(client *SqlClient, query *model.GetUsageParams) (*[]UsageItem, error) {
 	return &usageResponse, nil
 }

-func GetServiceExternalAvgDuration(client *SqlClient, query *model.GetServiceOverviewParams) (*[]ServiceExternalItem, error) {
+func GetServiceExternalAvgDuration(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT TIME_FLOOR(__time, '%s') as "time", AVG(DurationNano) as "avgDuration" FROM %s WHERE ServiceName='%s' AND Kind='3' AND ExternalHttpUrl != '' AND "__time" >= '%s' AND "__time" <= '%s'
 	GROUP BY TIME_FLOOR(__time, '%s')`, query.Period, constants.DruidDatasource, query.ServiceName, query.StartTime, query.EndTime, query.Period)

@@ -270,7 +184,7 @@ func GetServiceExternalAvgDuration(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	// responseStr := string(response)
 	// zap.S().Info(responseStr)

-	res := new([]ServiceExternalItem)
+	res := new([]model.ServiceExternalItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -289,7 +203,7 @@ func GetServiceExternalAvgDuration(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	return &servicesExternalResponse, nil
 }

-func GetServiceExternalErrors(client *SqlClient, query *model.GetServiceOverviewParams) (*[]ServiceExternalItem, error) {
+func GetServiceExternalErrors(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT TIME_FLOOR(__time, '%s') as "time", COUNT(SpanId) as "numCalls", ExternalHttpUrl as externalHttpUrl FROM %s WHERE ServiceName='%s' AND Kind='3' AND ExternalHttpUrl != '' AND StatusCode >= 500 AND "__time" >= '%s' AND "__time" <= '%s'
 	GROUP BY TIME_FLOOR(__time, '%s'), ExternalHttpUrl`, query.Period, constants.DruidDatasource, query.ServiceName, query.StartTime, query.EndTime, query.Period)

@@ -306,7 +220,7 @@ func GetServiceExternalErrors(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	// responseStr := string(response)
 	// zap.S().Info(responseStr)

-	res := new([]ServiceExternalItem)
+	res := new([]model.ServiceExternalItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -328,7 +242,7 @@ func GetServiceExternalErrors(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	// responseStr := string(response)
 	// zap.S().Info(responseStr)

-	resTotal := new([]ServiceExternalItem)
+	resTotal := new([]model.ServiceExternalItem)
 	err = json.Unmarshal(responseTotal, resTotal)
 	if err != nil {
 		zap.S().Error(err)

@@ -361,7 +275,7 @@ func GetServiceExternalErrors(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	return &servicesExternalResponse, nil
 }

-func GetServiceExternal(client *SqlClient, query *model.GetServiceOverviewParams) (*[]ServiceExternalItem, error) {
+func GetServiceExternal(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT TIME_FLOOR(__time, '%s') as "time", AVG(DurationNano) as "avgDuration", COUNT(SpanId) as "numCalls", ExternalHttpUrl as externalHttpUrl FROM %s WHERE ServiceName='%s' AND Kind='3' AND ExternalHttpUrl != ''
 	AND "__time" >= '%s' AND "__time" <= '%s'

@@ -379,7 +293,7 @@ func GetServiceExternal(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	// responseStr := string(response)
 	// zap.S().Info(responseStr)

-	res := new([]ServiceExternalItem)
+	res := new([]model.ServiceExternalItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -398,7 +312,7 @@ func GetServiceExternal(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceExternalItem, error) {
 	return &servicesExternalResponse, nil
 }

-func GetServiceDBOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]ServiceDBOverviewItem, error) {
+func GetServiceDBOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceDBOverviewItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT TIME_FLOOR(__time, '%s') as "time", AVG(DurationNano) as "avgDuration", COUNT(SpanId) as "numCalls", DBSystem as "dbSystem" FROM %s WHERE ServiceName='%s' AND Kind='3' AND DBName IS NOT NULL
 	AND "__time" >= '%s' AND "__time" <= '%s'

@@ -416,7 +330,7 @@ func GetServiceDBOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceDBOverviewItem, error) {
 	// responseStr := string(response)
 	// zap.S().Info(responseStr)

-	res := new([]ServiceDBOverviewItem)
+	res := new([]model.ServiceDBOverviewItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -435,7 +349,7 @@ func GetServiceDBOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceDBOverviewItem, error) {
 	return &servicesDBOverviewResponse, nil
 }

-func GetServiceOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]ServiceOverviewItem, error) {
+func GetServiceOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT TIME_FLOOR(__time, '%s') as "time", APPROX_QUANTILE_DS("QuantileDuration", 0.5) as p50, APPROX_QUANTILE_DS("QuantileDuration", 0.95) as p95,
 	APPROX_QUANTILE_DS("QuantileDuration", 0.99) as p99, COUNT("SpanId") as "numCalls" FROM "%s" WHERE "__time" >= '%s' and "__time" <= '%s' and "Kind"='2' and "ServiceName"='%s' GROUP BY TIME_FLOOR(__time, '%s') `, query.Period, constants.DruidDatasource, query.StartTime, query.EndTime, query.ServiceName, query.Period)

@@ -451,7 +365,7 @@ func GetServiceOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error) {

 	// zap.S().Info(string(response))

-	res := new([]ServiceOverviewItem)
+	res := new([]model.ServiceOverviewItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -471,7 +385,7 @@ func GetServiceOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error) {

 	// zap.S().Info(string(response))

-	resError := new([]ServiceErrorItem)
+	resError := new([]model.ServiceErrorItem)
 	err = json.Unmarshal(responseError, resError)
 	if err != nil {
 		zap.S().Error(err)

@@ -501,7 +415,7 @@ func GetServiceOverview(client *SqlClient, query *model.GetServiceOverviewParams) (*[]model.ServiceOverviewItem, error) {
 	return &servicesOverviewResponse, nil
 }

-func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]ServiceItem, error) {
+func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT APPROX_QUANTILE_DS("QuantileDuration", 0.99) as "p99", AVG("DurationNano") as "avgDuration", COUNT(SpanId) as numCalls, "ServiceName" as "serviceName" FROM %s WHERE "__time" >= '%s' and "__time" <= '%s' and "Kind"='2' GROUP BY "ServiceName" ORDER BY "p99" DESC`, constants.DruidDatasource, query.StartTime, query.EndTime)

@@ -516,7 +430,7 @@ func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceItem, error) {

 	// zap.S().Info(string(response))

-	res := new([]ServiceItem)
+	res := new([]model.ServiceItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -538,7 +452,7 @@ func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceItem, error) {

 	// zap.S().Info(string(response))

-	resError := new([]ServiceListErrorItem)
+	resError := new([]model.ServiceListErrorItem)
 	err = json.Unmarshal(responseError, resError)
 	if err != nil {
 		zap.S().Error(err)

@@ -555,7 +469,7 @@ func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceItem, error) {

 	////////////////// Below block gets 4xx of services

-	sqlQuery = fmt.Sprintf(`SELECT COUNT(SpanId) as numErrors, "ServiceName" as "serviceName" FROM %s WHERE "__time" >= '%s' and "__time" <= '%s' and "Kind"='2' and "StatusCode">=400 and "StatusCode" < 500 GROUP BY "ServiceName"`, constants.DruidDatasource, query.StartTime, query.EndTime)
+	sqlQuery = fmt.Sprintf(`SELECT COUNT(SpanId) as num4xx, "ServiceName" as "serviceName" FROM %s WHERE "__time" >= '%s' and "__time" <= '%s' and "Kind"='2' and "StatusCode">=400 and "StatusCode" < 500 GROUP BY "ServiceName"`, constants.DruidDatasource, query.StartTime, query.EndTime)

 	response4xx, err := client.Query(sqlQuery, "object")

@@ -568,7 +482,7 @@ func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceItem, error) {

 	// zap.S().Info(string(response))

-	res4xx := new([]ServiceListErrorItem)
+	res4xx := new([]model.ServiceListErrorItem)
 	err = json.Unmarshal(response4xx, res4xx)
 	if err != nil {
 		zap.S().Error(err)

@@ -601,7 +515,7 @@ func GetServices(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceItem, error) {
 	return &servicesResponse, nil
 }

-func GetServiceMapDependencies(client *SqlClient, query *model.GetServicesParams) (*[]ServiceMapDependencyResponseItem, error) {
+func GetServiceMapDependencies(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {

 	sqlQuery := fmt.Sprintf(`SELECT SpanId, ParentSpanId, ServiceName FROM %s WHERE "__time" >= '%s' AND "__time" <= '%s' ORDER BY __time DESC LIMIT 100000`, constants.DruidDatasource, query.StartTime, query.EndTime)

@@ -617,7 +531,7 @@ func GetServiceMapDependencies(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
 	// responseStr := string(response)
 	// zap.S().Info(responseStr)

-	res := new([]ServiceMapDependencyItem)
+	res := new([]model.ServiceMapDependencyItem)
 	err = json.Unmarshal(response, res)
 	if err != nil {
 		zap.S().Error(err)

@@ -626,7 +540,7 @@ func GetServiceMapDependencies(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
 	// resCount := len(*res)
 	// fmt.Println(resCount)

-	serviceMap := make(map[string]*ServiceMapDependencyResponseItem)
+	serviceMap := make(map[string]*model.ServiceMapDependencyResponseItem)

 	spanId2ServiceNameMap := make(map[string]string)
 	for i, _ := range *res {

@@ -635,7 +549,7 @@ func GetServiceMapDependencies(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
 	for i, _ := range *res {
 		parent2childServiceName := spanId2ServiceNameMap[(*res)[i].ParentSpanId] + "-" + spanId2ServiceNameMap[(*res)[i].SpanId]
 		if _, ok := serviceMap[parent2childServiceName]; !ok {
-			serviceMap[parent2childServiceName] = &ServiceMapDependencyResponseItem{
+			serviceMap[parent2childServiceName] = &model.ServiceMapDependencyResponseItem{
				Parent:    spanId2ServiceNameMap[(*res)[i].ParentSpanId],
				Child:     spanId2ServiceNameMap[(*res)[i].SpanId],
				CallCount: 1,

@@ -645,7 +559,7 @@ func GetServiceMapDependencies(client *SqlClient, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error) {
 		}
 	}

-	retMe := make([]ServiceMapDependencyResponseItem, 0, len(serviceMap))
+	retMe := make([]model.ServiceMapDependencyResponseItem, 0, len(serviceMap))
 	for _, dependency := range serviceMap {
 		if dependency.Parent == "" {
 			continue
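Besides moving the response types into the shared `model` package, note the one behavioral fix in this file: the 4xx query now aliases its count as `num4xx` instead of `numErrors`, matching the `json:"num4xx"` tag on `ServiceListErrorItem`. With the old alias, `json.Unmarshal` silently left `Num4xx` at zero. A self-contained demonstration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type ServiceListErrorItem struct {
	ServiceName string `json:"serviceName"`
	Num4xx      int    `json:"num4xx"`
}

func main() {
	// Druid returns one JSON object per row, keyed by the SQL alias.
	before := []byte(`[{"serviceName":"frontend","numErrors":7}]`) // old alias
	after := []byte(`[{"serviceName":"frontend","num4xx":7}]`)     // fixed alias

	var a, b []ServiceListErrorItem
	json.Unmarshal(before, &a)
	json.Unmarshal(after, &b)
	fmt.Println(a[0].Num4xx, b[0].Num4xx) // prints: 0 7
}
```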
@@ -27,11 +27,6 @@ type SpanSearchAggregatesDuratonReceivedItem struct {
 	Result DurationItem `json:"result"`
 }

-type SpanSearchAggregatesResponseItem struct {
-	Timestamp int64   `json:"timestamp"`
-	Value     float32 `json:"value"`
-}
-
 func buildFilters(queryParams *model.SpanSearchParams) (*godruid.Filter, error) {

 	var filter *godruid.Filter

@@ -181,7 +176,7 @@ func buildFiltersForSpansAggregates(queryParams *model.SpanSearchAggregatesParams) {

 }

-func SearchTraces(client *godruid.Client, traceId string) ([]godruid.ScanResult, error) {
+func SearchTraces(client *godruid.Client, traceId string) (*[]model.SearchSpansResult, error) {

 	filter := godruid.FilterSelector("TraceId", traceId)

@@ -206,10 +201,20 @@ func SearchTraces(client *godruid.Client, traceId string) (*[]model.SearchSpansResult, error) {

 	// fmt.Printf("query.QueryResult:\n%v", query.QueryResult)

-	return query.QueryResult, nil
+	var searchSpansResult []model.SearchSpansResult
+	searchSpansResult = make([]model.SearchSpansResult, len(query.QueryResult))
+
+	searchSpansResult[0].Columns = make([]string, len(query.QueryResult[0].Columns))
+	copy(searchSpansResult[0].Columns, query.QueryResult[0].Columns)
+
+	searchSpansResult[0].Events = make([][]interface{}, len(query.QueryResult[0].Events))
+	copy(searchSpansResult[0].Events, query.QueryResult[0].Events)
+
+	return &searchSpansResult, nil
+
 }

-func SearchSpansAggregate(client *godruid.Client, queryParams *model.SpanSearchAggregatesParams) ([]SpanSearchAggregatesResponseItem, error) {
+func SearchSpansAggregate(client *godruid.Client, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {

 	filter, err := buildFiltersForSpansAggregates(queryParams)
 	var needsPostAggregation bool = true

@@ -235,10 +240,10 @@ func SearchSpansAggregate(client *godruid.Client, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {
 		postAggregationString := `{"type":"quantilesDoublesSketchToQuantile","name":"value","field":{"type":"fieldAccess","fieldName":"quantile_agg"},"fraction":0.5}`
 		postAggregation = godruid.PostAggRawJson(postAggregationString)
 		break
-	case "p90":
+	case "p95":
 		aggregationString := `{ "type": "quantilesDoublesSketch", "fieldName": "QuantileDuration", "name": "quantile_agg", "k": 128}`
 		aggregation = godruid.AggRawJson(aggregationString)
-		postAggregationString := `{"type":"quantilesDoublesSketchToQuantile","name":"value","field":{"type":"fieldAccess","fieldName":"quantile_agg"},"fraction":0.9}`
+		postAggregationString := `{"type":"quantilesDoublesSketchToQuantile","name":"value","field":{"type":"fieldAccess","fieldName":"quantile_agg"},"fraction":0.95}`
 		postAggregation = godruid.PostAggRawJson(postAggregationString)
 		break
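The p90-to-p95 switch only touches the `fraction` field of the `quantilesDoublesSketchToQuantile` post-aggregation; the `quantilesDoublesSketch` aggregation itself is quantile-agnostic. A parameterized form of the same JSON, as an illustrative helper that is not in the patch:

```go
package main

import "fmt"

// quantilePostAgg builds the Druid post-aggregation used above for an
// arbitrary quantile; quantilePostAgg(0.95) reproduces the p95 branch.
func quantilePostAgg(fraction float64) string {
	return fmt.Sprintf(`{"type":"quantilesDoublesSketchToQuantile","name":"value","field":{"type":"fieldAccess","fieldName":"quantile_agg"},"fraction":%g}`, fraction)
}

func main() {
	fmt.Println(quantilePostAgg(0.95))
}
```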
@@ -293,7 +298,7 @@ func SearchSpansAggregate(client *godruid.Client, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {
 		return nil, fmt.Errorf("Error in unmarshalling response from druid")
 	}

-	var response []SpanSearchAggregatesResponseItem
+	var response []model.SpanSearchAggregatesResponseItem

 	for _, elem := range *receivedResponse {

@@ -304,7 +309,7 @@ func SearchSpansAggregate(client *godruid.Client, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {
 		if queryParams.AggregationOption == "rate_per_sec" {
 			value = elem.Result.Value * 1.0 / float32(queryParams.StepSeconds)
 		}
-		response = append(response, SpanSearchAggregatesResponseItem{
+		response = append(response, model.SpanSearchAggregatesResponseItem{
			Timestamp: timestamp,
			Value:     value,
		})

@@ -316,7 +321,7 @@ func SearchSpansAggregate(client *godruid.Client, queryParams *model.SpanSearchAggregatesParams) ([]model.SpanSearchAggregatesResponseItem, error) {
 	return nil, nil
 }

-func SearchSpans(client *godruid.Client, queryParams *model.SpanSearchParams) ([]godruid.ScanResult, error) {
+func SearchSpans(client *godruid.Client, queryParams *model.SpanSearchParams) (*[]model.SearchSpansResult, error) {

 	filter, err := buildFilters(queryParams)

@@ -347,7 +352,16 @@ func SearchSpans(client *godruid.Client, queryParams *model.SpanSearchParams) (*[]model.SearchSpansResult, error) {

 	// fmt.Printf("query.QueryResult:\n%v", query.QueryResult)

-	return query.QueryResult, nil
+	var searchSpansResult []model.SearchSpansResult
+	searchSpansResult = make([]model.SearchSpansResult, len(query.QueryResult))
+
+	searchSpansResult[0].Columns = make([]string, len(query.QueryResult[0].Columns))
+	copy(searchSpansResult[0].Columns, query.QueryResult[0].Columns)
+
+	searchSpansResult[0].Events = make([][]interface{}, len(query.QueryResult[0].Events))
+	copy(searchSpansResult[0].Events, query.QueryResult[0].Events)
+
+	return &searchSpansResult, nil
 }

 func GetApplicationPercentiles(client *godruid.Client, queryParams *model.ApplicationPercentileParams) ([]godruid.Timeseries, error) {
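`SearchTraces` and `SearchSpans` now return `*[]model.SearchSpansResult` instead of raw `[]godruid.ScanResult`, so handlers stop leaking godruid types. Note that both conversions copy only `QueryResult[0]`, which assumes Druid answers a scan query with a single batch (and would panic on an empty result). A generalized form of the same copy, as a sketch:

```go
// Sketch only: loops over every scan batch instead of just the first.
func toSearchSpansResults(scans []godruid.ScanResult) []model.SearchSpansResult {
	out := make([]model.SearchSpansResult, len(scans))
	for i, s := range scans {
		out[i].Columns = append([]string(nil), s.Columns...)
		out[i].Events = append([][]interface{}(nil), s.Events...)
	}
	return out
}
```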
@@ -3,10 +3,14 @@ module go.signoz.io/query-service
 go 1.14

 require (
+	github.com/ClickHouse/clickhouse-go v1.4.5
+	github.com/gogo/protobuf v1.2.1
 	github.com/google/uuid v1.1.1
 	github.com/gorilla/handlers v1.5.1
 	github.com/gorilla/mux v1.8.0
 	github.com/jaegertracing/jaeger v1.21.0
+	github.com/jmoiron/sqlx v1.3.4
+	github.com/opentracing/opentracing-go v1.1.0
 	github.com/ory/viper v1.7.5
 	github.com/posthog/posthog-go v0.0.0-20200525173953-e46dc8e6b89b
 	github.com/rs/cors v1.7.0
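Four new direct dependencies: `clickhouse-go` (the `database/sql` driver for ClickHouse), `sqlx` (struct scanning against the new `db:"..."` tags in the model package), plus `gogo/protobuf` and `opentracing-go`, presumably pulled in by the new reader code. A sketch of how the first two fit together; the DSN and database name are illustrative, not taken from this patch:

```go
package main

import (
	_ "github.com/ClickHouse/clickhouse-go" // registers the "clickhouse" driver
	"github.com/jmoiron/sqlx"
)

func main() {
	// sqlx.Open wraps database/sql; the driver name comes from the
	// blank import above. Host and database here are assumptions.
	db, err := sqlx.Open("clickhouse", "tcp://localhost:9000?database=signoz")
	if err != nil {
		panic(err)
	}
	defer db.Close()
}
```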
@@ -2,6 +2,8 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/ClickHouse/clickhouse-go v1.4.5 h1:FfhyEnv6/BaWldyjgT2k4gDDmeNwJ9C4NbY/MXxJlXk=
+github.com/ClickHouse/clickhouse-go v1.4.5/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
 github.com/DataDog/zstd v1.3.6-0.20190409195224-796139022798/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
 github.com/DataDog/zstd v1.4.4/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
 github.com/HdrHistogram/hdrhistogram-go v0.9.0/go.mod h1:nxrse8/Tzg2tg3DZcZjm6qEclQKK70g0KxO61gFFZD4=

@@ -40,6 +42,7 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
 github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
+github.com/bkaradzic/go-lz4 v1.0.0/go.mod h1:0YdlkowM3VswSROI7qDxhRvJ3sLhlFrRRwjwegp5jy4=
 github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
 github.com/bsm/sarama-cluster v2.1.13+incompatible/go.mod h1:r7ao+4tTNXvWm+VRpRJchr2kQhqxgmAp2iEX5W96gMM=
 github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=

@@ -50,6 +53,7 @@ github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghf
 github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80=
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
 github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
 github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=

@@ -166,6 +170,7 @@ github.com/go-openapi/validate v0.19.2/go.mod h1:1tRCw7m3jtI8eNWEEliiAqUIcBztB2K
 github.com/go-openapi/validate v0.19.3/go.mod h1:90Vh6jjkTn+OT1Eefm0ZixWNFjhtOH7vS9k0lo6zwJo=
 github.com/go-openapi/validate v0.19.8/go.mod h1:8DJv2CVJQ6kGNpFW6eV9N3JviE1C85nY1c2z52x1Gk4=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
+github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0=
 github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY=

@@ -277,8 +282,12 @@ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANyt
 github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
 github.com/jaegertracing/jaeger v1.21.0 h1:Fgre3vTI5E/cmkXKBXK7ksnzul5b/3gXjA3mQzt0+58=
 github.com/jaegertracing/jaeger v1.21.0/go.mod h1:PCTGGFohQBPQMR4j333V5lt6If7tj8aWJ+pQNgvZ+wU=
+github.com/jaegertracing/jaeger v1.22.0 h1:kFBhBn9XSB8V68DjD3t6qb/IUAJLLtyJ/27caGQOu7E=
 github.com/jcmturner/gofork v0.0.0-20190328161633-dc7c13fece03/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
+github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
+github.com/jmoiron/sqlx v1.3.4 h1:wv+0IJZfL5z0uZoUjlpKgHkgaFSYD+r9CfrXjEXsO7w=
+github.com/jmoiron/sqlx v1.3.4/go.mod h1:2BljVx/86SuTyjE+aPYlHCTNvZrnJXghYGpNiXLBMCQ=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=

@@ -304,6 +313,7 @@ github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
 github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
 github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=

@@ -329,6 +339,8 @@ github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcME
 github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
 github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
+github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
+github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
 github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
@@ -57,14 +57,12 @@ func main() {
 	logger := loggerMgr.Sugar()
 	logger.Debug("START!")

-	// v := initViper()
-
 	serverOptions := &app.ServerOptions{
 		// HTTPHostPort: v.GetString(app.HTTPHostPort),
 		// DruidClientUrl: v.GetString(app.DruidClientUrl),

 		HTTPHostPort: constants.HTTPHostPort,
-		DruidClientUrl: constants.DruidClientUrl,
+		// DruidClientUrl: constants.DruidClientUrl,
 	}

 	server, err := app.NewServer(serverOptions)
@@ -1,11 +1,16 @@
 package model

-import "fmt"
+import (
+	"fmt"
+	"time"
+)

 type GetTopEndpointsParams struct {
 	StartTime   string
 	EndTime     string
 	ServiceName string
+	Start       *time.Time
+	End         *time.Time
 }

 type GetUsageParams struct {

@@ -13,17 +18,24 @@ type GetUsageParams struct {
 	EndTime     string
 	ServiceName string
 	Period      string
+	StepHour    int
+	Start       *time.Time
+	End         *time.Time
 }

 type GetServicesParams struct {
 	StartTime string
 	EndTime   string
 	Period    int
+	Start     *time.Time
+	End       *time.Time
 }

 type GetServiceOverviewParams struct {
 	StartTime string
 	EndTime   string
+	Start     *time.Time
+	End       *time.Time
 	ServiceName string
 	Period      string
 	StepSeconds int

@@ -54,6 +66,8 @@ type SpanSearchAggregatesParams struct {
 	MinDuration string
 	MaxDuration string
 	Tags        []TagQuery
+	Start       *time.Time
+	End         *time.Time
 	GranOrigin  string
 	GranPeriod  string
 	Intervals   string

@@ -67,6 +81,8 @@ type SpanSearchParams struct {
 	OperationName string
 	Kind          string
 	Intervals     string
+	Start         *time.Time
+	End           *time.Time
 	MinDuration   string
 	MaxDuration   string
 	Limit         int64
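Every request-params struct gains `Start`/`End` as `*time.Time` alongside the existing string fields. The intent, judging by the rest of the diff, is that the Druid reader keeps consuming the `StartTime`/`EndTime` strings while a SQL reader binds the typed values directly. A hedged sketch of the latter; the table and column names are invented for illustration:

```go
// Sketch only: how a ClickHouse-style reader could consume the typed
// fields. "signoz_index" and the column names are assumptions.
func servicesQueryArgs(p *model.GetServicesParams) (string, []interface{}) {
	q := `SELECT serviceName, count() AS numCalls
	      FROM signoz_index
	      WHERE timestamp >= ? AND timestamp <= ?
	      GROUP BY serviceName`
	return q, []interface{}{p.Start.UnixNano(), p.End.UnixNano()}
}
```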
170
pkg/query-service/model/response.go
Normal file
170
pkg/query-service/model/response.go
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
package model
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type ServiceItem struct {
|
||||||
|
ServiceName string `json:"serviceName" db:"serviceName"`
|
||||||
|
Percentile99 float32 `json:"p99" db:"p99"`
|
||||||
|
AvgDuration float32 `json:"avgDuration" db:"avgDuration"`
|
||||||
|
NumCalls int `json:"numCalls" db:"numCalls"`
|
||||||
|
CallRate float32 `json:"callRate" db:"callRate"`
|
||||||
|
NumErrors int `json:"numErrors" db:"numErrors"`
|
||||||
|
ErrorRate float32 `json:"errorRate" db:"errorRate"`
|
||||||
|
Num4XX int `json:"num4XX" db:"num4xx"`
|
||||||
|
FourXXRate float32 `json:"fourXXRate" db:"fourXXRate"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ServiceListErrorItem struct {
|
||||||
|
ServiceName string `json:"serviceName"`
|
||||||
|
NumErrors int `json:"numErrors"`
|
||||||
|
Num4xx int `json:"num4xx"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ServiceErrorItem struct {
|
||||||
|
Time string `json:"time,omitempty" db:"time,omitempty"`
|
||||||
|
Timestamp int64 `json:"timestamp" db:"timestamp"`
|
||||||
|
NumErrors int `json:"numErrors" db:"numErrors"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ServiceOverviewItem struct {
|
||||||
|
Time string `json:"time,omitempty" db:"time,omitempty"`
|
||||||
|
Timestamp int64 `json:"timestamp" db:"timestamp"`
|
||||||
|
Percentile50 float32 `json:"p50" db:"p50"`
|
||||||
|
Percentile95 float32 `json:"p95" db:"p95"`
|
||||||
|
Percentile99 float32 `json:"p99" db:"p99"`
|
||||||
|
NumCalls int `json:"numCalls" db:"numCalls"`
|
||||||
|
CallRate float32 `json:"callRate" db:"callRate"`
|
||||||
|
NumErrors int `json:"numErrors" db:"numErrors"`
|
||||||
|
ErrorRate float32 `json:"errorRate" db:"errorRate"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SearchSpansResult struct {
|
||||||
|
Columns []string `json:"columns"`
|
||||||
|
Events [][]interface{} `json:"events"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type TraceResult struct {
|
||||||
|
Data []interface{} `json:"data" db:"data"`
|
||||||
|
Total int `json:"total" db:"total"`
|
||||||
|
Limit int `json:"limit" db:"limit"`
|
||||||
|
Offset int `json:"offset" db:"offset"`
|
||||||
|
}
|
||||||
|
type TraceResultItem struct {
|
||||||
|
TraceID string
|
||||||
|
Spans []TraceResultSpan
|
||||||
|
}
|
||||||
|
type TraceResultSpan struct {
|
||||||
|
Timestamp string `db:"timestamp"`
|
||||||
|
SpanID string `db:"spanID"`
|
||||||
|
TraceID string `db:"traceID"`
|
||||||
|
ServiceName string `db:"serviceName"`
|
||||||
|
Name string `db:"name"`
|
||||||
|
Kind int32 `db:"kind"`
|
||||||
|
DurationNano int64 `db:"durationNano"`
|
||||||
|
TagsKeys []string `db:"tagsKeys"`
|
||||||
|
TagsValues []string `db:"tagsValues"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SearchSpanReponseItem struct {
|
||||||
|
Timestamp string `db:"timestamp"`
|
||||||
|
SpanID string `db:"spanID"`
|
||||||
|
TraceID string `db:"traceID"`
|
||||||
|
ServiceName string `db:"serviceName"`
|
||||||
|
Name string `db:"name"`
|
||||||
|
Kind int32 `db:"kind"`
|
||||||
|
References string `db:"references,omitempty"`
|
||||||
|
DurationNano int64 `db:"durationNano"`
|
||||||
|
TagsKeys []string `db:"tagsKeys"`
|
||||||
|
TagsValues []string `db:"tagsValues"`
|
||||||
|
}
|
||||||
|
|
// OtelSpanRef describes a reference (e.g. CHILD_OF) from one span to another.
type OtelSpanRef struct {
	TraceId string `json:"traceId,omitempty"`
	SpanId  string `json:"spanId,omitempty"`
	RefType string `json:"refType,omitempty"`
}

// toString renders the reference in the {TraceId=..., SpanId=..., RefType=...}
// form embedded in span search results.
func (ref *OtelSpanRef) toString() string {
	return fmt.Sprintf(`{TraceId=%s, SpanId=%s, RefType=%s}`, ref.TraceId, ref.SpanId, ref.RefType)
}
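
// For reference (the IDs below are illustrative, not from the source), a
// decoded CHILD_OF reference renders as:
//
//	{TraceId=4bf92f3577b34da6, SpanId=00f067aa0ba902b7, RefType=CHILD_OF}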
// GetValues flattens a span row into the positional form used by
// SearchSpansResult.Events.
func (item *SearchSpanReponseItem) GetValues() []interface{} {

	// A parse failure is ignored and yields the zero time.
	timeObj, _ := time.Parse(time.RFC3339Nano, item.Timestamp)

	// References arrives as a JSON array; a decode failure leaves the slice empty.
	references := []OtelSpanRef{}
	json.Unmarshal([]byte(item.References), &references)

	referencesStringArray := []string{}
	for _, ref := range references {
		referencesStringArray = append(referencesStringArray, ref.toString())
	}

	// UnixNano() / 1000000 converts the span timestamp to epoch milliseconds.
	returnArray := []interface{}{int64(timeObj.UnixNano() / 1000000), item.SpanID, item.TraceID, item.ServiceName, item.Name, strconv.Itoa(int(item.Kind)), strconv.FormatInt(item.DurationNano, 10), item.TagsKeys, item.TagsValues, referencesStringArray}

	return returnArray
}
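
// A minimal sketch (not part of the source) of how these pieces compose:
// scanned rows are flattened via GetValues into the columnar
// SearchSpansResult. The helper name and the column labels below are
// assumptions, chosen to match the fields GetValues emits, in order.
func buildSearchSpansResult(items []SearchSpanReponseItem) SearchSpansResult {
	columns := []string{"timestamp", "spanID", "traceID", "serviceName", "name", "kind", "durationNano", "tagsKeys", "tagsValues", "references"}
	events := make([][]interface{}, 0, len(items))
	for i := range items {
		events = append(events, items[i].GetValues())
	}
	return SearchSpansResult{Columns: columns, Events: events}
}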
// ServiceExternalItem is one point in the time series of calls a service makes
// to an external HTTP endpoint. NumErrors and ErrorRate carry no omitempty, so
// zero error counts still appear in responses.
type ServiceExternalItem struct {
	Time            string  `json:"time,omitempty" db:"time,omitempty"`
	Timestamp       int64   `json:"timestamp,omitempty" db:"timestamp,omitempty"`
	ExternalHttpUrl string  `json:"externalHttpUrl,omitempty" db:"externalHttpUrl,omitempty"`
	AvgDuration     float32 `json:"avgDuration,omitempty" db:"avgDuration,omitempty"`
	NumCalls        int     `json:"numCalls,omitempty" db:"numCalls,omitempty"`
	CallRate        float32 `json:"callRate,omitempty" db:"callRate,omitempty"`
	NumErrors       int     `json:"numErrors" db:"numErrors"`
	ErrorRate       float32 `json:"errorRate" db:"errorRate"`
}

// ServiceDBOverviewItem is one point in the time series of a service's
// database calls, grouped by database system (e.g. mysql, redis).
type ServiceDBOverviewItem struct {
	Time        string  `json:"time,omitempty" db:"time,omitempty"`
	Timestamp   int64   `json:"timestamp,omitempty" db:"timestamp,omitempty"`
	DBSystem    string  `json:"dbSystem,omitempty" db:"dbSystem,omitempty"`
	AvgDuration float32 `json:"avgDuration,omitempty" db:"avgDuration,omitempty"`
	NumCalls    int     `json:"numCalls,omitempty" db:"numCalls,omitempty"`
	CallRate    float32 `json:"callRate,omitempty" db:"callRate,omitempty"`
}

// ServiceMapDependencyItem is a raw span/parent-span pair from which
// service-map edges are derived.
type ServiceMapDependencyItem struct {
	SpanId       string `json:"spanId,omitempty" db:"spanID,omitempty"`
	ParentSpanId string `json:"parentSpanId,omitempty" db:"parentSpanID,omitempty"`
	ServiceName  string `json:"serviceName,omitempty" db:"serviceName,omitempty"`
}

// UsageItem is a per-interval span count.
type UsageItem struct {
	Time      string `json:"time,omitempty" db:"time,omitempty"`
	Timestamp int64  `json:"timestamp" db:"timestamp"`
	Count     int64  `json:"count" db:"count"`
}

// TopEndpointsItem holds latency percentiles and call counts for one endpoint.
type TopEndpointsItem struct {
	Percentile50 float32 `json:"p50" db:"p50"`
	Percentile95 float32 `json:"p95" db:"p95"`
	Percentile99 float32 `json:"p99" db:"p99"`
	NumCalls     int     `json:"numCalls" db:"numCalls"`
	Name         string  `json:"name" db:"name"`
}

// TagItem pairs a tag key with its occurrence count.
type TagItem struct {
	TagKeys  string `json:"tagKeys" db:"tagKeys"`
	TagCount int    `json:"tagCount" db:"tagCount"`
}

// ServiceMapDependencyResponseItem is an aggregated service-map edge:
// CallCount calls from Parent to Child.
type ServiceMapDependencyResponseItem struct {
	Parent    string `json:"parent,omitempty" db:"parent,omitempty"`
	Child     string `json:"child,omitempty" db:"child,omitempty"`
	CallCount int    `json:"callCount,omitempty" db:"callCount,omitempty"`
}

// SpanSearchAggregatesResponseItem is one point of an aggregation computed
// over matching spans.
type SpanSearchAggregatesResponseItem struct {
	Timestamp int64   `json:"timestamp,omitempty" db:"timestamp"`
	Time      string  `json:"time,omitempty" db:"time"`
	Value     float32 `json:"value,omitempty" db:"value"`
}
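
// A minimal sketch (not part of the source) showing the effect of the
// omitempty tags used throughout this file: zero-valued optional fields are
// dropped when marshaling. The service names here are made up.
func exampleOmitempty() {
	edge := ServiceMapDependencyResponseItem{Parent: "frontend", Child: "cartservice"}
	b, _ := json.Marshal(edge)
	// Prints {"parent":"frontend","child":"cartservice"}; CallCount is zero,
	// so its key is omitted entirely.
	fmt.Println(string(b))
}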