Mirror of https://github.com/grafana/grafana.git

Commit 6220dec076: Merge remote-tracking branch 'upstream/master' into postgres-query-builder
@@ -12,6 +12,45 @@ aliases:
version: 2

jobs:
mysql-integration-test:
docker:
- image: circleci/golang:1.10
- image: circleci/mysql:5.6-ram
environment:
MYSQL_ROOT_PASSWORD: rootpass
MYSQL_DATABASE: grafana_tests
MYSQL_USER: grafana
MYSQL_PASSWORD: password
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
- run: sudo apt update
- run: sudo apt install -y mysql-client
- run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s
- run: cat docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass
- run:
name: mysql integration tests
command: 'GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/... ./pkg/tsdb/mysql/... '

postgres-integration-test:
docker:
- image: circleci/golang:1.10
- image: circleci/postgres:9.3-ram
environment:
POSTGRES_USER: grafanatest
POSTGRES_PASSWORD: grafanatest
POSTGRES_DB: grafanatest
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
- run: sudo apt update
- run: sudo apt install -y postgresql-client
- run: dockerize -wait tcp://127.0.0.1:5432 -timeout 120s
- run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f docker/blocks/postgres_tests/setup.sql'
- run:
name: postgres integration tests
command: 'GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/... ./pkg/tsdb/postgres/...'

codespell:
docker:
- image: circleci/python

@@ -188,6 +227,10 @@ workflows:
filters: *filter-not-release
- test-backend:
filters: *filter-not-release
- mysql-integration-test:
filters: *filter-not-release
- postgres-integration-test:
filters: *filter-not-release
- deploy-master:
requires:
- build-all
@@ -195,6 +238,8 @@ workflows:
- test-frontend
- codespell
- gometalinter
- mysql-integration-test
- postgres-integration-test
filters:
branches:
only: master
@@ -210,6 +255,10 @@ workflows:
filters: *filter-only-release
- test-backend:
filters: *filter-only-release
- mysql-integration-test:
filters: *filter-only-release
- postgres-integration-test:
filters: *filter-only-release
- deploy-release:
requires:
- build-all
@@ -217,4 +266,6 @@ workflows:
- test-frontend
- codespell
- gometalinter
- mysql-integration-test
- postgres-integration-test
filters: *filter-only-release
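For orientation, here is a hedged Go sketch of how a backend test might gate itself on the GRAFANA_TEST_DB variable that these CI jobs export. The helper and package names are hypothetical, not Grafana's actual test helpers.

```go
// Hypothetical sketch: skip database-backed tests unless GRAFANA_TEST_DB
// selects the matching backend, mirroring the CI jobs above.
package sqlstore

import (
	"os"
	"testing"
)

// requireTestDB marks the test as an integration test for one database backend.
func requireTestDB(t *testing.T, want string) {
	t.Helper()
	if got := os.Getenv("GRAFANA_TEST_DB"); got != want {
		t.Skipf("skipping %s integration test, GRAFANA_TEST_DB=%q", want, got)
	}
}

func TestMySQLIntegration(t *testing.T) {
	requireTestDB(t, "mysql") // only runs inside the mysql-integration-test job
	// ...connect to 127.0.0.1:3306 and exercise the sqlstore here...
}
```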
2 .gitignore (vendored)
@@ -66,3 +66,5 @@ debug.test
/vendor/**/.editorconfig
/vendor/**/appengine*
*.orig

/devenv/dashboards/bulk-testing/*.json
17 CHANGELOG.md
@@ -1,18 +1,35 @@
# 5.2.0 (unreleased)

### New Features

* **Elasticsearch**: Alerting support [#5893](https://github.com/grafana/grafana/issues/5893), thx [@WPH95](https://github.com/WPH95)
* **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541)

### Minor

* **Dashboard**: Modified time range and variables are now not saved by default [#10748](https://github.com/grafana/grafana/issues/10748), [#8805](https://github.com/grafana/grafana/issues/8805)
* **Graph**: Show invisible highest value bucket in histogram [#11498](https://github.com/grafana/grafana/issues/11498)
* **Dashboard**: Enable "Save As..." if user has edit permission [#11625](https://github.com/grafana/grafana/issues/11625)
* **Prometheus**: Query dates are now step-aligned [#10434](https://github.com/grafana/grafana/pull/10434)
* **Prometheus**: Table column order now changes when rearranging queries [#11690](https://github.com/grafana/grafana/issues/11690), thx [@mtanda](https://github.com/mtanda)
* **Variables**: Fix variable interpolation when using multiple formatting types [#11800](https://github.com/grafana/grafana/issues/11800), thx [@svenklemm](https://github.com/svenklemm)
* **Dashboard**: Fix date selector styling for dark/light theme in time picker control [#11616](https://github.com/grafana/grafana/issues/11616)
* **Discord**: Alert notification channel type for Discord, [#7964](https://github.com/grafana/grafana/issues/7964) thx [@jereksel](https://github.com/jereksel),
* **InfluxDB**: Support SELECT queries in templating query, [#5013](https://github.com/grafana/grafana/issues/5013)
* **InfluxDB**: Support count distinct aggregation [#11645](https://github.com/grafana/grafana/issues/11645), thx [@kichristensen](https://github.com/kichristensen)
* **Dashboard**: JSON Model under dashboard settings can now be updated & changes saved, [#1429](https://github.com/grafana/grafana/issues/1429), thx [@jereksel](https://github.com/jereksel)
* **Security**: Fix XSS vulnerabilities in dashboard links [#11813](https://github.com/grafana/grafana/pull/11813)
* **Singlestat**: Fix "time of last point" shows local time when dashboard timezone set to UTC [#10338](https://github.com/grafana/grafana/issues/10338)
* **Prometheus**: Add support for passing timeout parameter to Prometheus [#11788](https://github.com/grafana/grafana/pull/11788), thx [@mtanda](https://github.com/mtanda)
* **Login**: Add optional sign-out URL for generic OAuth [#9847](https://github.com/grafana/grafana/issues/9847), thx [@roidelapluie](https://github.com/roidelapluie)
* **Login**: Use proxy server from environment variable if available [#9703](https://github.com/grafana/grafana/issues/9703), thx [@iyeonok](https://github.com/iyeonok)
* **Invite users**: Friendlier error message when SMTP is not configured [#12087](https://github.com/grafana/grafana/issues/12087), thx [@thurt](https://github.com/thurt)
* **Graphite**: Don't send distributed tracing headers when using direct/browser access mode [#11494](https://github.com/grafana/grafana/issues/11494)
* **Sidenav**: Show create dashboard link for viewers if at least editor in one folder [#11858](https://github.com/grafana/grafana/issues/11858)
* **SQL**: Second epochs are now correctly converted to ms. [#12085](https://github.com/grafana/grafana/pull/12085)
* **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971)
* **Dashboard**: Hide grid controls in fullscreen/low-activity views [#11771](https://github.com/grafana/grafana/issues/11771)
* **Dashboard**: Validate uid when importing dashboards [#11515](https://github.com/grafana/grafana/issues/11515)

# 5.1.3 (2018-05-16)
10 Gopkg.lock (generated)
@@ -186,14 +186,14 @@
[[projects]]
name = "github.com/go-xorm/core"
packages = ["."]
revision = "f43c33d9a48db006417a7ac4c16b08897e3e1458"
version = "v0.5.8"
revision = "da1adaf7a28ca792961721a34e6e04945200c890"
version = "v0.5.7"

[[projects]]
name = "github.com/go-xorm/xorm"
packages = ["."]
revision = "fc1b13e0d8e240788213230aa5747eb557f80f41"
version = "v0.6.6"
revision = "1933dd69e294c0a26c0266637067f24dbb25770c"
version = "v0.6.4"

[[projects]]
branch = "master"
@@ -670,6 +670,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "cdeb99713eda72e1ea84b5e6b110819785823cec9bc38b147efa0b86949ecff0"
inputs-digest = "6c7ae4bcbe7fa4430d3bdbf204df1b7c59cba88151fbcefa167ce15e6351b6d3"
solver-name = "gps-cdcl"
solver-version = 1
@@ -85,11 +85,11 @@ ignored = [

[[constraint]]
name = "github.com/go-xorm/core"
version = "0.5.7"
version = "=0.5.7"

[[constraint]]
name = "github.com/go-xorm/xorm"
version = "0.6.4"
version = "=0.6.4"

[[constraint]]
name = "github.com/gorilla/websocket"
16 build.go
@@ -156,8 +156,8 @@ func makeLatestDistCopies() {
}

latestMapping := map[string]string{
"_amd64.deb": "dist/grafana_latest_amd64.deb",
".x86_64.rpm": "dist/grafana-latest-1.x86_64.rpm",
"_amd64.deb": "dist/grafana_latest_amd64.deb",
".x86_64.rpm": "dist/grafana-latest-1.x86_64.rpm",
".linux-amd64.tar.gz": "dist/grafana-latest.linux-x64.tar.gz",
}

@@ -232,7 +232,7 @@ func createDebPackages() {
previousPkgArch := pkgArch
if pkgArch == "armv7" {
pkgArch = "armhf"
}
}
createPackage(linuxPackageOptions{
packageType: "deb",
homeDir: "/usr/share/grafana",
@@ -256,8 +256,10 @@ func createDebPackages() {
func createRpmPackages() {
previousPkgArch := pkgArch
switch {
case pkgArch == "armv7" : pkgArch = "armhfp"
case pkgArch == "arm64" : pkgArch = "aarch64"
case pkgArch == "armv7":
pkgArch = "armhfp"
case pkgArch == "arm64":
pkgArch = "aarch64"
}
createPackage(linuxPackageOptions{
packageType: "rpm",
@@ -416,6 +418,10 @@ func test(pkg string) {

func build(binaryName, pkg string, tags []string) {
binary := fmt.Sprintf("./bin/%s-%s/%s", goos, goarch, binaryName)
if isDev {
//dont include os and arch in output path in dev environment
binary = fmt.Sprintf("./bin/%s", binaryName)
}

if goos == "windows" {
binary += ".exe"
@@ -237,6 +237,9 @@ disable_login_form = false
# Set to true to disable the signout link in the side menu. useful if you use auth.proxy
disable_signout_menu = false

# URL to redirect the user to after sign out
signout_redirect_url =

#################################### Anonymous Auth ######################
[auth.anonymous]
# enable anonymous access

@@ -217,6 +217,9 @@ log_queries =
# Set to true to disable the signout link in the side menu. useful if you use auth.proxy, defaults to false
;disable_signout_menu = false

# URL to redirect the user to after sign out
;signout_redirect_url =

#################################### Anonymous Auth ##########################
[auth.anonymous]
# enable anonymous access
11 devenv/README.md (new file)
@@ -0,0 +1,11 @@
This folder contains useful scripts and configuration for...

* Configuring datasources in Grafana
* Provisioning example dashboards in Grafana
* Running preconfigured datasources as Docker containers

Want to know more? Run the setup script:

```bash
./setup.sh
```
9 devenv/dashboards/bulk-testing/bulk-dashboards.yaml (new file)
@@ -0,0 +1,9 @@
apiVersion: 1

providers:
- name: 'Bulk dashboards'
folder: 'Bulk dashboards'
type: file
options:
path: devenv/dashboards/bulk-testing
1140 devenv/dashboards/bulk-testing/bulkdash.jsonnet (new file)
File diff suppressed because it is too large
73 devenv/datasources/default/default.yaml (new file)
@@ -0,0 +1,73 @@
apiVersion: 1

datasources:
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
jsonData:
graphiteVersion: "1.1"

- name: Prometheus
type: prometheus
access: proxy
isDefault: true
url: http://localhost:9090

- name: InfluxDB
type: influxdb
access: proxy
database: site
user: grafana
password: grafana
url: http://localhost:8086
jsonData:
timeInterval: "15s"

- name: OpenTsdb
type: opentsdb
access: proxy
url: http://localhost:4242
jsonData:
tsdbResolution: 1
tsdbVersion: 1

- name: Elastic
type: elasticsearch
access: proxy
database: "[metrics-]YYYY.MM.DD"
url: http://localhost:9200
jsonData:
interval: Daily
timeField: "@timestamp"

- name: MySQL
type: mysql
url: localhost:3306
database: grafana
user: grafana
password: password

- name: MSSQL
type: mssql
url: localhost:1433
database: grafana
user: grafana
password: "Password!"

- name: Postgres
type: postgres
url: localhost:5432
database: grafana
user: grafana
password: password
jsonData:
sslmode: "disable"

- name: Cloudwatch
type: cloudwatch
editable: true
jsonData:
authType: credentials
defaultRegion: eu-west-2
61 devenv/setup.sh (new executable file)
@@ -0,0 +1,61 @@
#!/bin/bash

bulkDashboard() {

requiresJsonnet

COUNTER=0
MAX=400
while [ $COUNTER -lt $MAX ]; do
jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }"
let COUNTER=COUNTER+1
done

ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
}

requiresJsonnet() {
if ! type "jsonnet" > /dev/null; then
echo "you need to install jsonnet to run this script"
echo "follow the instructions on https://github.com/google/jsonnet"
exit 1
fi
}

defaultDashboards() {
echo "not implemented yet"
}

defaultDatasources() {
echo "setting up all default datasources using provisioning"

ln -s -f -r ./datasources/default/default.yaml ../conf/provisioning/datasources/custom.yaml
}

usage() {
echo -e "setup.sh\n\tThis script sets up the Grafana development environment\n"
echo "Usage:"
echo "  bulk-dashboards - create and provision 400 dashboards"
echo "  default-datasources - provision all core datasources"
}

main() {
local cmd=$1

if [[ -z "$cmd" ]]; then
usage
exit 1
fi

if [[ $cmd == "bulk-dashboards" ]]; then
bulkDashboard
elif [[ $cmd == "default-datasources" ]]; then
defaultDatasources
elif [[ $cmd == "default-dashboards" ]]; then
bulkDashboard
else
usage
fi
}

main "$@"
15 docker/blocks/elastic6/docker-compose.yaml (new file)
@@ -0,0 +1,15 @@
# You need to run 'sysctl -w vm.max_map_count=262144' on the host machine

elasticsearch6:
image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.2.4
command: elasticsearch
ports:
- "11200:9200"
- "11300:9300"

fake-elastic6-data:
image: grafana/fake-data-gen
network_mode: bridge
environment:
FD_DATASOURCE: elasticsearch6
FD_PORT: 11200
2 docker/blocks/elastic6/elasticsearch.yml (new file)
@@ -0,0 +1,2 @@
script.inline: on
script.indexed: on
@@ -1,5 +1,5 @@
mysql:
image: mysql:latest
image: mysql:5.6
environment:
MYSQL_ROOT_PASSWORD: rootpass
MYSQL_DATABASE: grafana

@@ -1,3 +1,3 @@
FROM mysql:latest
FROM mysql:5.6
ADD setup.sql /docker-entrypoint-initdb.d
CMD ["mysqld"]
CMD ["mysqld"]

@@ -1,5 +1,5 @@
postgrestest:
image: postgres:latest
image: postgres:9.3
environment:
POSTGRES_USER: grafana
POSTGRES_PASSWORD: password
@@ -13,4 +13,4 @@
network_mode: bridge
environment:
FD_DATASOURCE: postgres
FD_PORT: 5432
FD_PORT: 5432

@@ -1,3 +1,3 @@
FROM postgres:latest
FROM postgres:9.3
ADD setup.sql /docker-entrypoint-initdb.d
CMD ["postgres"]
CMD ["postgres"]

@@ -1,3 +1,3 @@
CREATE DATABASE grafanadstest;
REVOKE CONNECT ON DATABASE grafanadstest FROM PUBLIC;
GRANT CONNECT ON DATABASE grafanadstest TO grafanatest;
GRANT CONNECT ON DATABASE grafanadstest TO grafanatest;
@@ -27,7 +27,9 @@ and the conditions that need to be met for the alert to change state and trigger
## Execution

The alert rules are evaluated in the Grafana backend in a scheduler and query execution engine that is part
of core Grafana. Only some data sources are supported right now. They include `Graphite`, `Prometheus`, `InfluxDB`, `OpenTSDB`, `MySQL`, `Postgres` and `Cloudwatch`.
of core Grafana. Only some data sources are supported right now. They include `Graphite`, `Prometheus`, `Elasticsearch`, `InfluxDB`, `OpenTSDB`, `MySQL`, `Postgres` and `Cloudwatch`.

> Alerting support for Elasticsearch is only available in Grafana v5.2 and above.

### Clustering

@@ -152,6 +154,8 @@ filters = alerting.scheduler:debug \
tsdb.prometheus:debug \
tsdb.opentsdb:debug \
tsdb.influxdb:debug \
tsdb.elasticsearch:debug \
tsdb.elasticsearch.client:debug \
```

If you want to log raw query sent to your TSDB and raw response in log you also have to set grafana.ini option `app_mode` to
@@ -14,11 +14,53 @@ weight = 2

<img class="screenshot" src="/assets/img/features/table-panel.png">

The new table panel is very flexible, supporting both multiple modes for time series as well as for
The table panel is very flexible, supporting both multiple modes for time series as well as for
table, annotation and raw JSON data. It also provides date formatting and value formatting and coloring options.

To view table panels in action and test different configurations with sample data, check out the [Table Panel Showcase in the Grafana Playground](http://play.grafana.org/dashboard/db/table-panel-showcase).

## Querying Data

The table panel displays the results of a query specified in the **Metrics** tab.
The result being displayed depends on the datasource and the query, but generally there is one row per datapoint, with extra columns for associated keys and values, as well as one column for the numeric value of the datapoint.
You can change the behavior in the section **Data to Table** below.

### Merge Multiple Queries per Table

> Only available in Grafana v5.0+.

Sometimes it is useful to display the results of multiple queries in the same table on corresponding rows, e.g., when comparing capacity and actual usage of resources.
In this example usage and capacity are metrics that will have corresponding datapoints, while their associated keys and values can be used to match them.
(This matching is only available with the **Table Transform** set to **Table**.)

In its simplest case, both queries return time-series data with a numeric value and a timestamp.
If the timestamps are the same, datapoints will be matched and rendered on the same row.
Some datasources return keys and values (labels, tags) associated with the datapoint.
These are matched as well if they are present in both results and have the same value.
The following datapoints will end up on the same row with one time column, two label columns ("host" and "job") and two value columns:

```
Datapoint for query A: {time: 1, host: "node-2", job: "job-8", value: 3}
Datapoint for query B: {time: 1, host: "node-2", value: 4}
```

The following two results cannot be matched and will be rendered on separate rows:

```
Different time
Datapoint for query A: {time: 1, host: "node-2", job: "job-8", value: 3}
Datapoint for query B: {time: 2, host: "node-2", value: 4}

Different label "host"
Datapoint for query A: {time: 1, host: "node-2", job: "job-8", value: 3}
Datapoint for query B: {time: 1, host: "node-9", value: 4}
```

You can still merge both of the above cases by changing the conflicting column's **Type** to **hidden** in the **Column Styles**.

Note that if each datapoint of your query results has multiple value fields like max, min, mean, etc., they will likely have different values and therefore will not match, and will render on separate rows.
If you intend for rows to be merged but see them rendered on separate rows, check the query results in the **Query Inspector** for field values being identical across datapoints that should be merged into a row.
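To make the matching rule concrete, here is a minimal, hedged Go sketch of the grouping described above; it is an illustration only, not Grafana's actual table-transform code, and the `Datapoint` type is hypothetical.

```go
// Illustration of the matching rule: two datapoints land on the same table row
// when their timestamps are equal and every label present in both agrees.
package main

import "fmt"

type Datapoint struct {
	Time   int64
	Labels map[string]string
	Value  float64
}

// sameRow reports whether a and b would be merged onto one row.
func sameRow(a, b Datapoint) bool {
	if a.Time != b.Time {
		return false
	}
	for k, v := range a.Labels {
		if other, ok := b.Labels[k]; ok && other != v {
			return false // label present in both results but with different values
		}
	}
	return true
}

func main() {
	a := Datapoint{Time: 1, Labels: map[string]string{"host": "node-2", "job": "job-8"}, Value: 3}
	b := Datapoint{Time: 1, Labels: map[string]string{"host": "node-2"}, Value: 4}
	fmt.Println(sameRow(a, b)) // true: same time, "host" matches, "job" only exists in A
}
```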
## Options overview

The table panel has many ways to manipulate your data for optimal presentation.
@@ -35,10 +35,15 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

`/api/alerts?dashboardId=1`

- **dashboardId** – Return alerts for a specified dashboard.
- **panelId** – Return alerts for a specified panel on a dashboard.
- **limit** - Limit response to x number of alerts.
- **dashboardId** – Limit response to alerts in specified dashboard(s). You can specify multiple dashboards, e.g. dashboardId=23&dashboardId=35 (see the request-building sketch below).
- **panelId** – Limit response to alert for a specified panel on a dashboard.
- **query** - Limit response to alerts having a name like this value.
- **state** - Return alerts with one or more of the following alert states: `ALL`,`no_data`, `paused`, `alerting`, `ok`, `pending`. To specify multiple states use the following format: `?state=paused&state=alerting`
- **limit** - Limit response to *X* number of alerts.
- **folderId** – Limit response to alerts of dashboards in specified folder(s). You can specify multiple folders, e.g. folderId=23&folderId=35.
- **dashboardQuery** - Limit response to alerts having a dashboard name like this value.
- **dashboardTag** - Limit response to alerts of dashboards with specified tags. To do an "AND" filtering with multiple tags, specify the tags parameter multiple times e.g. dashboardTag=tag1&dashboardTag=tag2.

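As an illustration of combining these filters, here is a hedged Go sketch that builds such a request with repeated dashboardId parameters. The endpoint and parameter names come from the list above; the host and API key are placeholders.

```go
// Hypothetical client-side sketch: build GET /api/alerts with repeated
// dashboardId values plus folder, tag and limit filters, as documented above.
package main

import (
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Add("dashboardId", "23")
	q.Add("dashboardId", "35") // repeated key: alerts from either dashboard
	q.Add("folderId", "3")
	q.Add("dashboardTag", "prod")
	q.Add("limit", "5")

	req, _ := http.NewRequest("GET", "http://localhost:3000/api/alerts?"+q.Encode(), nil)
	req.Header.Set("Authorization", "Bearer <api-key>") // placeholder API key
	fmt.Println(req.URL.String())
}
```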
**Example Response**:
23 package.json
@@ -18,10 +18,9 @@
"@types/react-dom": "^16.0.3",
"angular-mocks": "^1.6.6",
"autoprefixer": "^6.4.0",
"awesome-typescript-loader": "^3.2.3",
"awesome-typescript-loader": "^4.0.0",
"axios": "^0.17.1",
"babel-core": "^6.26.0",
"babel-loader": "^7.1.2",
"babel-plugin-syntax-dynamic-import": "^6.18.0",
"babel-preset-es2015": "^6.24.1",
"clean-webpack-plugin": "^0.1.19",
@@ -34,7 +33,7 @@
"expect.js": "~0.2.0",
"expose-loader": "^0.7.3",
"extract-text-webpack-plugin": "^3.0.0",
"file-loader": "^0.11.2",
"file-loader": "^1.1.11",
"gaze": "^1.1.2",
"glob": "~7.0.0",
"grunt": "1.0.1",
@@ -61,7 +60,6 @@
"husky": "^0.14.3",
"jest": "^22.0.4",
"jshint-stylish": "~2.2.1",
"json-loader": "^0.5.7",
"karma": "1.7.0",
"karma-chrome-launcher": "~2.2.0",
"karma-expect": "~1.1.3",
@@ -83,16 +81,15 @@
"postcss-loader": "^2.0.6",
"postcss-reporter": "^5.0.0",
"prettier": "1.9.2",
"react-hot-loader": "^4.0.1",
"react-hot-loader": "^4.2.0",
"react-test-renderer": "^16.0.0",
"sass-lint": "^1.10.2",
"sass-loader": "^6.0.6",
"sass-loader": "^7.0.1",
"sinon": "1.17.6",
"style-loader": "^0.20.3",
"style-loader": "^0.21.0",
"systemjs": "0.20.19",
"systemjs-plugin-css": "^0.1.36",
"ts-jest": "^22.0.0",
"ts-loader": "^3.2.0",
"tslint": "^5.8.0",
"tslint-loader": "^3.5.3",
"typescript": "^2.6.2",
@@ -105,7 +102,7 @@
},
"scripts": {
"dev": "webpack --progress --colors --config scripts/webpack/webpack.dev.js",
"start": "webpack-dev-server --progress --colors --config scripts/webpack/webpack.dev.js",
"start": "webpack-dev-server --progress --colors --config scripts/webpack/webpack.hot.js",
"watch": "webpack --progress --colors --watch --config scripts/webpack/webpack.dev.js",
"build": "grunt build",
"test": "grunt test",
@@ -137,11 +134,11 @@
},
"license": "Apache-2.0",
"dependencies": {
"angular": "^1.6.6",
"angular": "1.6.6",
"angular-bindonce": "^0.3.1",
"angular-native-dragdrop": "^1.2.2",
"angular-route": "^1.6.6",
"angular-sanitize": "^1.6.6",
"angular-route": "1.6.6",
"angular-sanitize": "1.6.6",
"babel-polyfill": "^6.26.0",
"baron": "^3.0.3",
"brace": "^0.10.0",
@@ -164,7 +161,7 @@
"prop-types": "^15.6.0",
"react": "^16.2.0",
"react-dom": "^16.2.0",
"react-grid-layout-grafana": "0.16.0",
"react-grid-layout": "0.16.6",
"react-highlight-words": "^0.10.0",
"react-popper": "^0.7.5",
"react-select": "^1.1.0",
@@ -2,12 +2,14 @@ package api

import (
"fmt"
"strconv"

"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/guardian"
"github.com/grafana/grafana/pkg/services/search"
)

func ValidateOrgAlert(c *m.ReqContext) {
@@ -46,12 +48,64 @@ func GetAlertStatesForDashboard(c *m.ReqContext) Response {

// GET /api/alerts
func GetAlerts(c *m.ReqContext) Response {
dashboardQuery := c.Query("dashboardQuery")
dashboardTags := c.QueryStrings("dashboardTag")
stringDashboardIDs := c.QueryStrings("dashboardId")
stringFolderIDs := c.QueryStrings("folderId")

dashboardIDs := make([]int64, 0)
for _, id := range stringDashboardIDs {
dashboardID, err := strconv.ParseInt(id, 10, 64)
if err == nil {
dashboardIDs = append(dashboardIDs, dashboardID)
}
}

if dashboardQuery != "" || len(dashboardTags) > 0 || len(stringFolderIDs) > 0 {
folderIDs := make([]int64, 0)
for _, id := range stringFolderIDs {
folderID, err := strconv.ParseInt(id, 10, 64)
if err == nil {
folderIDs = append(folderIDs, folderID)
}
}

searchQuery := search.Query{
Title: dashboardQuery,
Tags: dashboardTags,
SignedInUser: c.SignedInUser,
Limit: 1000,
OrgId: c.OrgId,
DashboardIds: dashboardIDs,
Type: string(search.DashHitDB),
FolderIds: folderIDs,
Permission: m.PERMISSION_EDIT,
}

err := bus.Dispatch(&searchQuery)
if err != nil {
return Error(500, "List alerts failed", err)
}

for _, d := range searchQuery.Result {
if d.Type == search.DashHitDB && d.Id > 0 {
dashboardIDs = append(dashboardIDs, d.Id)
}
}

// if we didn't find any dashboards, return empty result
if len(dashboardIDs) == 0 {
return JSON(200, []*m.AlertListItemDTO{})
}
}

query := m.GetAlertsQuery{
OrgId: c.OrgId,
DashboardId: c.QueryInt64("dashboardId"),
PanelId: c.QueryInt64("panelId"),
Limit: c.QueryInt64("limit"),
User: c.SignedInUser,
OrgId: c.OrgId,
DashboardIDs: dashboardIDs,
PanelId: c.QueryInt64("panelId"),
Limit: c.QueryInt64("limit"),
User: c.SignedInUser,
Query: c.Query("query"),
}

states := c.QueryStrings("state")
@@ -6,6 +6,7 @@ import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/search"

. "github.com/smartystreets/goconvey/convey"
)
@@ -64,6 +65,60 @@ func TestAlertingApiEndpoint(t *testing.T) {
})
})
})

loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/alerts?dashboardId=1", "/api/alerts", m.ROLE_EDITOR, func(sc *scenarioContext) {
var searchQuery *search.Query
bus.AddHandler("test", func(query *search.Query) error {
searchQuery = query
return nil
})

var getAlertsQuery *m.GetAlertsQuery
bus.AddHandler("test", func(query *m.GetAlertsQuery) error {
getAlertsQuery = query
return nil
})

sc.handlerFunc = GetAlerts
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()

So(searchQuery, ShouldBeNil)
So(getAlertsQuery, ShouldNotBeNil)
})

loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/alerts?dashboardId=1&dashboardId=2&folderId=3&dashboardTag=abc&dashboardQuery=dbQuery&limit=5&query=alertQuery", "/api/alerts", m.ROLE_EDITOR, func(sc *scenarioContext) {
var searchQuery *search.Query
bus.AddHandler("test", func(query *search.Query) error {
searchQuery = query
query.Result = search.HitList{
&search.Hit{Id: 1},
&search.Hit{Id: 2},
}
return nil
})

var getAlertsQuery *m.GetAlertsQuery
bus.AddHandler("test", func(query *m.GetAlertsQuery) error {
getAlertsQuery = query
return nil
})

sc.handlerFunc = GetAlerts
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()

So(searchQuery, ShouldNotBeNil)
So(searchQuery.DashboardIds[0], ShouldEqual, 1)
So(searchQuery.DashboardIds[1], ShouldEqual, 2)
So(searchQuery.FolderIds[0], ShouldEqual, 3)
So(searchQuery.Tags[0], ShouldEqual, "abc")
So(searchQuery.Title, ShouldEqual, "dbQuery")

So(getAlertsQuery, ShouldNotBeNil)
So(getAlertsQuery.DashboardIDs[0], ShouldEqual, 1)
So(getAlertsQuery.DashboardIDs[1], ShouldEqual, 2)
So(getAlertsQuery.Limit, ShouldEqual, 5)
So(getAlertsQuery.Query, ShouldEqual, "alertQuery")
})
})
}
@@ -92,17 +92,22 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
data.Theme = "light"
}

if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR {
if hasEditPermissionInFoldersQuery.Result {
children := []*dtos.NavLink{
{Text: "Dashboard", Icon: "gicon gicon-dashboard-new", Url: setting.AppSubUrl + "/dashboard/new"},
}

if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR {
children = append(children, &dtos.NavLink{Text: "Folder", SubTitle: "Create a new folder to organize your dashboards", Id: "folder", Icon: "gicon gicon-folder-new", Url: setting.AppSubUrl + "/dashboards/folder/new"})
children = append(children, &dtos.NavLink{Text: "Import", SubTitle: "Import dashboard from file or Grafana.com", Id: "import", Icon: "gicon gicon-dashboard-import", Url: setting.AppSubUrl + "/dashboard/import"})
}

data.NavTree = append(data.NavTree, &dtos.NavLink{
Text: "Create",
Id: "create",
Icon: "fa fa-fw fa-plus",
Url: setting.AppSubUrl + "/dashboard/new",
Children: []*dtos.NavLink{
{Text: "Dashboard", Icon: "gicon gicon-dashboard-new", Url: setting.AppSubUrl + "/dashboard/new"},
{Text: "Folder", SubTitle: "Create a new folder to organize your dashboards", Id: "folder", Icon: "gicon gicon-folder-new", Url: setting.AppSubUrl + "/dashboards/folder/new"},
{Text: "Import", SubTitle: "Import dashboard from file or Grafana.com", Id: "import", Icon: "gicon gicon-dashboard-import", Url: setting.AppSubUrl + "/dashboard/import"},
},
Text: "Create",
Id: "create",
Icon: "fa fa-fw fa-plus",
Url: setting.AppSubUrl + "/dashboard/new",
Children: children,
})
}
@@ -155,5 +155,9 @@ func Logout(c *m.ReqContext) {
c.SetCookie(setting.CookieUserName, "", -1, setting.AppSubUrl+"/")
c.SetCookie(setting.CookieRememberName, "", -1, setting.AppSubUrl+"/")
c.Session.Destory(c.Context)
c.Redirect(setting.AppSubUrl + "/login")
if setting.SignoutRedirectUrl != "" {
c.Redirect(setting.SignoutRedirectUrl)
} else {
c.Redirect(setting.AppSubUrl + "/login")
}
}
@@ -78,6 +78,7 @@ func OAuthLogin(ctx *m.ReqContext) {

// handle call back
tr := &http.Transport{
Proxy: http.ProxyFromEnvironment,
TLSClientConfig: &tls.Config{
InsecureSkipVerify: setting.OAuthService.OAuthInfos[name].TlsSkipVerify,
},
@@ -74,6 +74,9 @@ func AddOrgInvite(c *m.ReqContext, inviteDto dtos.AddInviteForm) Response {
}

if err := bus.Dispatch(&emailCmd); err != nil {
if err == m.ErrSmtpNotEnabled {
return Error(412, err.Error(), err)
}
return Error(500, "Failed to send email invite", err)
}
@@ -22,6 +22,7 @@ import (
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
_ "github.com/grafana/grafana/pkg/tsdb/cloudwatch"
_ "github.com/grafana/grafana/pkg/tsdb/elasticsearch"
_ "github.com/grafana/grafana/pkg/tsdb/graphite"
_ "github.com/grafana/grafana/pkg/tsdb/influxdb"
_ "github.com/grafana/grafana/pkg/tsdb/mysql"
@@ -332,6 +332,8 @@ func updateTotalStats() {
M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs))
}

var usageStatsURL = "https://stats.grafana.org/grafana-usage-report"

func sendUsageStats() {
if !setting.ReportingEnabled {
return
@@ -366,6 +368,12 @@ func sendUsageStats() {
metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers
metrics["stats.datasources.count"] = statsQuery.Result.Datasources
metrics["stats.stars.count"] = statsQuery.Result.Stars
metrics["stats.folders.count"] = statsQuery.Result.Folders
metrics["stats.dashboard_permissions.count"] = statsQuery.Result.DashboardPermissions
metrics["stats.folder_permissions.count"] = statsQuery.Result.FolderPermissions
metrics["stats.provisioned_dashboards.count"] = statsQuery.Result.ProvisionedDashboards
metrics["stats.snapshots.count"] = statsQuery.Result.Snapshots
metrics["stats.teams.count"] = statsQuery.Result.Teams

dsStats := models.GetDataSourceStatsQuery{}
if err := bus.Dispatch(&dsStats); err != nil {
@@ -386,9 +394,38 @@ func sendUsageStats() {
}
metrics["stats.ds.other.count"] = dsOtherCount

dsAccessStats := models.GetDataSourceAccessStatsQuery{}
if err := bus.Dispatch(&dsAccessStats); err != nil {
metricsLogger.Error("Failed to get datasource access stats", "error", err)
return
}

// send access counters for each data source
// but ignore any custom data sources
// as sending that name could be sensitive information
dsAccessOtherCount := make(map[string]int64)
for _, dsAccessStat := range dsAccessStats.Result {
if dsAccessStat.Access == "" {
continue
}

access := strings.ToLower(dsAccessStat.Access)

if models.IsKnownDataSourcePlugin(dsAccessStat.Type) {
metrics["stats.ds_access."+dsAccessStat.Type+"."+access+".count"] = dsAccessStat.Count
} else {
old := dsAccessOtherCount[access]
dsAccessOtherCount[access] = old + dsAccessStat.Count
}
}

for access, count := range dsAccessOtherCount {
metrics["stats.ds_access.other."+access+".count"] = count
}

out, _ := json.MarshalIndent(report, "", " ")
data := bytes.NewBuffer(out)

client := http.Client{Timeout: 5 * time.Second}
go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data)
go client.Post(usageStatsURL, "application/json", data)
}
222 pkg/metrics/metrics_test.go (new file)
@@ -0,0 +1,222 @@
package metrics

import (
"bytes"
"io/ioutil"
"runtime"
"sync"
"testing"
"time"

"net/http"
"net/http/httptest"

"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
. "github.com/smartystreets/goconvey/convey"
)

func TestMetrics(t *testing.T) {
Convey("Test send usage stats", t, func() {
var getSystemStatsQuery *models.GetSystemStatsQuery
bus.AddHandler("test", func(query *models.GetSystemStatsQuery) error {
query.Result = &models.SystemStats{
Dashboards: 1,
Datasources: 2,
Users: 3,
ActiveUsers: 4,
Orgs: 5,
Playlists: 6,
Alerts: 7,
Stars: 8,
Folders: 9,
DashboardPermissions: 10,
FolderPermissions: 11,
ProvisionedDashboards: 12,
Snapshots: 13,
Teams: 14,
}
getSystemStatsQuery = query
return nil
})

var getDataSourceStatsQuery *models.GetDataSourceStatsQuery
bus.AddHandler("test", func(query *models.GetDataSourceStatsQuery) error {
query.Result = []*models.DataSourceStats{
{
Type: models.DS_ES,
Count: 9,
},
{
Type: models.DS_PROMETHEUS,
Count: 10,
},
{
Type: "unknown_ds",
Count: 11,
},
{
Type: "unknown_ds2",
Count: 12,
},
}
getDataSourceStatsQuery = query
return nil
})

var getDataSourceAccessStatsQuery *models.GetDataSourceAccessStatsQuery
bus.AddHandler("test", func(query *models.GetDataSourceAccessStatsQuery) error {
query.Result = []*models.DataSourceAccessStats{
{
Type: models.DS_ES,
Access: "direct",
Count: 1,
},
{
Type: models.DS_ES,
Access: "proxy",
Count: 2,
},
{
Type: models.DS_PROMETHEUS,
Access: "proxy",
Count: 3,
},
{
Type: "unknown_ds",
Access: "proxy",
Count: 4,
},
{
Type: "unknown_ds2",
Access: "",
Count: 5,
},
{
Type: "unknown_ds3",
Access: "direct",
Count: 6,
},
{
Type: "unknown_ds4",
Access: "direct",
Count: 7,
},
{
Type: "unknown_ds5",
Access: "proxy",
Count: 8,
},
}
getDataSourceAccessStatsQuery = query
return nil
})

var wg sync.WaitGroup
var responseBuffer *bytes.Buffer
var req *http.Request
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
req = r
buf, err := ioutil.ReadAll(r.Body)
if err != nil {
t.Fatalf("Failed to read response body, err=%v", err)
}
responseBuffer = bytes.NewBuffer(buf)
wg.Done()
}))
usageStatsURL = ts.URL

sendUsageStats()

Convey("Given reporting not enabled and sending usage stats", func() {
setting.ReportingEnabled = false
sendUsageStats()

Convey("Should not gather stats or call http endpoint", func() {
So(getSystemStatsQuery, ShouldBeNil)
So(getDataSourceStatsQuery, ShouldBeNil)
So(getDataSourceAccessStatsQuery, ShouldBeNil)
So(req, ShouldBeNil)
})
})

Convey("Given reporting enabled and sending usage stats", func() {
setting.ReportingEnabled = true
setting.BuildVersion = "5.0.0"
wg.Add(1)
sendUsageStats()

Convey("Should gather stats and call http endpoint", func() {
if waitTimeout(&wg, 2*time.Second) {
t.Fatalf("Timed out waiting for http request")
}

So(getSystemStatsQuery, ShouldNotBeNil)
So(getDataSourceStatsQuery, ShouldNotBeNil)
So(getDataSourceAccessStatsQuery, ShouldNotBeNil)
So(req, ShouldNotBeNil)
So(req.Method, ShouldEqual, http.MethodPost)
So(req.Header.Get("Content-Type"), ShouldEqual, "application/json")

So(responseBuffer, ShouldNotBeNil)

j, err := simplejson.NewFromReader(responseBuffer)
So(err, ShouldBeNil)

So(j.Get("version").MustString(), ShouldEqual, "5_0_0")
So(j.Get("os").MustString(), ShouldEqual, runtime.GOOS)
So(j.Get("arch").MustString(), ShouldEqual, runtime.GOARCH)

metrics := j.Get("metrics")
So(metrics.Get("stats.dashboards.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Dashboards)
So(metrics.Get("stats.users.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Users)
So(metrics.Get("stats.orgs.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Orgs)
So(metrics.Get("stats.playlist.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Playlists)
So(metrics.Get("stats.plugins.apps.count").MustInt(), ShouldEqual, len(plugins.Apps))
So(metrics.Get("stats.plugins.panels.count").MustInt(), ShouldEqual, len(plugins.Panels))
So(metrics.Get("stats.plugins.datasources.count").MustInt(), ShouldEqual, len(plugins.DataSources))
So(metrics.Get("stats.alerts.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Alerts)
So(metrics.Get("stats.active_users.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.ActiveUsers)
So(metrics.Get("stats.datasources.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Datasources)
So(metrics.Get("stats.stars.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Stars)
So(metrics.Get("stats.folders.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Folders)
So(metrics.Get("stats.dashboard_permissions.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.DashboardPermissions)
So(metrics.Get("stats.folder_permissions.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.FolderPermissions)
So(metrics.Get("stats.provisioned_dashboards.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.ProvisionedDashboards)
So(metrics.Get("stats.snapshots.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Snapshots)
So(metrics.Get("stats.teams.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Teams)

So(metrics.Get("stats.ds."+models.DS_ES+".count").MustInt(), ShouldEqual, 9)
So(metrics.Get("stats.ds."+models.DS_PROMETHEUS+".count").MustInt(), ShouldEqual, 10)
So(metrics.Get("stats.ds.other.count").MustInt(), ShouldEqual, 11+12)

So(metrics.Get("stats.ds_access."+models.DS_ES+".direct.count").MustInt(), ShouldEqual, 1)
So(metrics.Get("stats.ds_access."+models.DS_ES+".proxy.count").MustInt(), ShouldEqual, 2)
So(metrics.Get("stats.ds_access."+models.DS_PROMETHEUS+".proxy.count").MustInt(), ShouldEqual, 3)
So(metrics.Get("stats.ds_access.other.direct.count").MustInt(), ShouldEqual, 6+7)
So(metrics.Get("stats.ds_access.other.proxy.count").MustInt(), ShouldEqual, 4+8)
})
})

Reset(func() {
ts.Close()
})
})
}

func waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool {
c := make(chan struct{})
go func() {
defer close(c)
wg.Wait()
}()
select {
case <-c:
return false // completed normally
case <-time.After(timeout):
return true // timed out
}
}
@@ -161,12 +161,13 @@ type SetAlertStateCommand struct {

//Queries
type GetAlertsQuery struct {
OrgId int64
State []string
DashboardId int64
PanelId int64
Limit int64
User *SignedInUser
OrgId int64
State []string
DashboardIDs []int64
PanelId int64
Limit int64
Query string
User *SignedInUser

Result []*AlertListItemDTO
}
@@ -1,14 +1,20 @@
package models

type SystemStats struct {
Dashboards int64
Datasources int64
Users int64
ActiveUsers int64
Orgs int64
Playlists int64
Alerts int64
Stars int64
Dashboards int64
Datasources int64
Users int64
ActiveUsers int64
Orgs int64
Playlists int64
Alerts int64
Stars int64
Snapshots int64
Teams int64
DashboardPermissions int64
FolderPermissions int64
Folders int64
ProvisionedDashboards int64
}

type DataSourceStats struct {
@@ -24,6 +30,16 @@ type GetDataSourceStatsQuery struct {
Result []*DataSourceStats
}

type DataSourceAccessStats struct {
Type string
Access string
Count int64
}

type GetDataSourceAccessStatsQuery struct {
Result []*DataSourceAccessStats
}

type AdminStats struct {
Users int `json:"users"`
Orgs int `json:"orgs"`
@@ -40,3 +56,11 @@ type AdminStats struct {
type GetAdminStatsQuery struct {
Result *AdminStats
}

type SystemUserCountStats struct {
Count int64
}

type GetSystemUserCountStatsQuery struct {
Result *SystemUserCountStats
}
@@ -47,9 +47,15 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
log.Error("Cannot read directory", "error", err)
}

absPath, err := filepath.Abs(path)
if err != nil {
log.Error("Could not create absolute path ", "path", path)
absPath = path // if .Abs returns an error we fall back to path
}

return &fileReader{
Cfg: cfg,
Path: path,
Path: absPath,
log: log,
dashboardService: dashboards.NewProvisioningService(),
}, nil
@@ -3,6 +3,7 @@ package dashboards
import (
"os"
"path/filepath"
"runtime"
"testing"
"time"

@@ -15,14 +16,59 @@ import (
)

var (
defaultDashboards = "./testdata/test-dashboards/folder-one"
brokenDashboards = "./testdata/test-dashboards/broken-dashboards"
oneDashboard = "./testdata/test-dashboards/one-dashboard"
containingId = "./testdata/test-dashboards/containing-id"
defaultDashboards = "testdata/test-dashboards/folder-one"
brokenDashboards = "testdata/test-dashboards/broken-dashboards"
oneDashboard = "testdata/test-dashboards/one-dashboard"
containingId = "testdata/test-dashboards/containing-id"

fakeService *fakeDashboardProvisioningService
)

func TestCreatingNewDashboardFileReader(t *testing.T) {
Convey("creating new dashboard file reader", t, func() {
cfg := &DashboardsAsConfig{
Name: "Default",
Type: "file",
OrgId: 1,
Folder: "",
Options: map[string]interface{}{},
}

Convey("using path parameter", func() {
cfg.Options["path"] = defaultDashboards
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)
So(reader.Path, ShouldNotEqual, "")
})

Convey("using folder as options", func() {
cfg.Options["folder"] = defaultDashboards
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)
So(reader.Path, ShouldNotEqual, "")
})

Convey("using full path", func() {
cfg.Options["folder"] = "/var/lib/grafana/dashboards"
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)

if runtime.GOOS != "windows" {
So(reader.Path, ShouldEqual, "/var/lib/grafana/dashboards")
}
So(filepath.IsAbs(reader.Path), ShouldBeTrue)
})

Convey("using relative path", func() {
cfg.Options["folder"] = defaultDashboards
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)

So(filepath.IsAbs(reader.Path), ShouldBeTrue)
})
})
}

func TestDashboardFileReader(t *testing.T) {
Convey("Dashboard file reader", t, func() {
bus.ClearBusHandlers()
@@ -170,30 +216,6 @@ func TestDashboardFileReader(t *testing.T) {
})
})

Convey("Can use bpth path and folder as dashboard path", func() {
cfg := &DashboardsAsConfig{
Name: "Default",
Type: "file",
OrgId: 1,
Folder: "",
Options: map[string]interface{}{},
}

Convey("using path parameter", func() {
cfg.Options["path"] = defaultDashboards
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)
So(reader.Path, ShouldEqual, defaultDashboards)
})

Convey("using folder as options", func() {
cfg.Options["folder"] = defaultDashboards
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)
So(reader.Path, ShouldEqual, defaultDashboards)
})
})

Reset(func() {
dashboards.NewProvisioningService = origNewDashboardProvisioningService
})
@@ -13,12 +13,12 @@ import (
var (
logger log.Logger = log.New("fake.log")

twoDatasourcesConfig = "./test-configs/two-datasources"
twoDatasourcesConfigPurgeOthers = "./test-configs/insert-two-delete-two"
doubleDatasourcesConfig = "./test-configs/double-default"
allProperties = "./test-configs/all-properties"
versionZero = "./test-configs/version-0"
brokenYaml = "./test-configs/broken-yaml"
twoDatasourcesConfig = "testdata/two-datasources"
twoDatasourcesConfigPurgeOthers = "testdata/insert-two-delete-two"
doubleDatasourcesConfig = "testdata/double-default"
allProperties = "testdata/all-properties"
versionZero = "testdata/version-0"
brokenYaml = "testdata/broken-yaml"

fakeRepo *fakeRepository
)
@@ -82,8 +82,16 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error {

builder.Write(`WHERE alert.org_id = ?`, query.OrgId)

if query.DashboardId != 0 {
builder.Write(` AND alert.dashboard_id = ?`, query.DashboardId)
if len(strings.TrimSpace(query.Query)) > 0 {
builder.Write(" AND alert.name "+dialect.LikeStr()+" ?", "%"+query.Query+"%")
}

if len(query.DashboardIDs) > 0 {
builder.sql.WriteString(` AND alert.dashboard_id IN (?` + strings.Repeat(",?", len(query.DashboardIDs)-1) + `) `)

for _, dbID := range query.DashboardIDs {
builder.AddParams(dbID)
}
}

if query.PanelId != 0 {
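The `IN (?,?,...)` construction above expands one placeholder per dashboard ID and appends the IDs as parameters. As a standalone, hedged illustration of the same pattern (a sketch in plain Go, not the SqlBuilder type used by the store):

```go
// Hypothetical sketch of the placeholder-expansion pattern used above:
// one "?" per ID, with the IDs collected as query parameters.
package main

import (
	"fmt"
	"strings"
)

func inClause(column string, ids []int64) (string, []interface{}) {
	if len(ids) == 0 {
		return "", nil
	}
	sql := " AND " + column + " IN (?" + strings.Repeat(",?", len(ids)-1) + ")"
	params := make([]interface{}, 0, len(ids))
	for _, id := range ids {
		params = append(params, id)
	}
	return sql, params
}

func main() {
	sql, params := inClause("alert.dashboard_id", []int64{1, 2, 5})
	fmt.Println(sql)    // AND alert.dashboard_id IN (?,?,?)
	fmt.Println(params) // [1 2 5]
}
```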
@@ -3,10 +3,11 @@ package sqlstore
import (
"testing"

"time"

"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
"time"
)

func mockTimeNow() {
@@ -99,7 +100,7 @@ func TestAlertingDataAccess(t *testing.T) {
})

Convey("Can read properties", func() {
alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
alertQuery := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
err2 := HandleAlertsQuery(&alertQuery)

alert := alertQuery.Result[0]
@@ -109,7 +110,7 @@ func TestAlertingDataAccess(t *testing.T) {
})

Convey("Viewer cannot read alerts", func() {
alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_VIEWER}}
alertQuery := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_VIEWER}}
err2 := HandleAlertsQuery(&alertQuery)

So(err2, ShouldBeNil)
@@ -134,7 +135,7 @@ func TestAlertingDataAccess(t *testing.T) {
})

Convey("Alerts should be updated", func() {
query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
query := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
err2 := HandleAlertsQuery(&query)

So(err2, ShouldBeNil)
@@ -183,7 +184,7 @@ func TestAlertingDataAccess(t *testing.T) {
Convey("Should save 3 dashboards", func() {
So(err, ShouldBeNil)

queryForDashboard := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
queryForDashboard := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
err2 := HandleAlertsQuery(&queryForDashboard)

So(err2, ShouldBeNil)
@@ -197,7 +198,7 @@ func TestAlertingDataAccess(t *testing.T) {
err = SaveAlerts(&cmd)

Convey("should delete the missing alert", func() {
query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
query := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
err2 := HandleAlertsQuery(&query)
So(err2, ShouldBeNil)
So(len(query.Result), ShouldEqual, 2)
@@ -232,7 +233,7 @@ func TestAlertingDataAccess(t *testing.T) {
So(err, ShouldBeNil)

Convey("Alerts should be removed", func() {
query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
query := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
err2 := HandleAlertsQuery(&query)

So(testDash.Id, ShouldEqual, 1)
@ -86,13 +86,13 @@ func (ss *SqlStore) Init() error {
}

func (ss *SqlStore) ensureAdminUser() error {
statsQuery := m.GetSystemStatsQuery{}
systemUserCountQuery := m.GetSystemUserCountStatsQuery{}

if err := bus.Dispatch(&statsQuery); err != nil {
if err := bus.Dispatch(&systemUserCountQuery); err != nil {
fmt.Errorf("Could not determine if admin user exists: %v", err)
}

if statsQuery.Result.Users > 0 {
if systemUserCountQuery.Result.Count > 0 {
return nil
}

@ -277,8 +277,8 @@ func InitTestDB(t *testing.T) *SqlStore {
t.Fatalf("Failed to init test database: %v", err)
}

//// sqlstore.engine.DatabaseTZ = time.UTC
//// sqlstore.engine.TZLocation = time.UTC
sqlstore.engine.DatabaseTZ = time.UTC
sqlstore.engine.TZLocation = time.UTC

return sqlstore
}
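ensureAdminUser now dispatches the lighter GetSystemUserCountStatsQuery instead of pulling full system stats just to check whether any user exists. A small sketch of the same check in isolation, assuming the GetSystemUserCountStats handler added further down is registered on the bus (the helper name is made up):

```go
package sqlstore

import (
	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
)

// hasAnyUser mirrors the ensureAdminUser check above: ask only for the user
// count rather than the full system stats.
func hasAnyUser() (bool, error) {
	query := m.GetSystemUserCountStatsQuery{}

	// Registered via bus.AddHandler("sql", GetSystemUserCountStats) in stats.go below.
	if err := bus.Dispatch(&query); err != nil {
		return false, err
	}

	return query.Result.Count > 0, nil
}
```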
@ -10,7 +10,9 @@ import (
func init() {
bus.AddHandler("sql", GetSystemStats)
bus.AddHandler("sql", GetDataSourceStats)
bus.AddHandler("sql", GetDataSourceAccessStats)
bus.AddHandler("sql", GetAdminStats)
bus.AddHandler("sql", GetSystemUserCountStats)
}

var activeUserTimeLimit = time.Hour * 24 * 30
@ -22,43 +24,51 @@ func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error {
return err
}

func GetDataSourceAccessStats(query *m.GetDataSourceAccessStatsQuery) error {
var rawSql = `SELECT COUNT(*) as count, type, access FROM data_source GROUP BY type, access`
query.Result = make([]*m.DataSourceAccessStats, 0)
err := x.SQL(rawSql).Find(&query.Result)
return err
}

func GetSystemStats(query *m.GetSystemStatsQuery) error {
var rawSql = `SELECT
(
SELECT COUNT(*)
FROM ` + dialect.Quote("user") + `
) AS users,
(
SELECT COUNT(*)
FROM ` + dialect.Quote("org") + `
) AS orgs,
(
SELECT COUNT(*)
FROM ` + dialect.Quote("dashboard") + `
) AS dashboards,
(
SELECT COUNT(*)
FROM ` + dialect.Quote("data_source") + `
) AS datasources,
(
SELECT COUNT(*) FROM ` + dialect.Quote("star") + `
) AS stars,
(
SELECT COUNT(*)
FROM ` + dialect.Quote("playlist") + `
) AS playlists,
(
SELECT COUNT(*)
FROM ` + dialect.Quote("alert") + `
) AS alerts,
(
SELECT COUNT(*) FROM ` + dialect.Quote("user") + ` where last_seen_at > ?
) as active_users
`
sb := &SqlBuilder{}
sb.Write("SELECT ")
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("user") + `) AS users,`)
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("org") + `) AS orgs,`)
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("dashboard") + `) AS dashboards,`)
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("data_source") + `) AS datasources,`)
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("star") + `) AS stars,`)
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("playlist") + `) AS playlists,`)
sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("alert") + `) AS alerts,`)

activeUserDeadlineDate := time.Now().Add(-activeUserTimeLimit)
sb.Write(`(SELECT COUNT(*) FROM `+dialect.Quote("user")+` where last_seen_at > ?) AS active_users,`, activeUserDeadlineDate)

sb.Write(`(SELECT COUNT(id) FROM `+dialect.Quote("dashboard")+` where is_folder = ?) AS folders,`, dialect.BooleanStr(true))

sb.Write(`(
SELECT COUNT(acl.id)
FROM `+dialect.Quote("dashboard_acl")+` as acl
inner join `+dialect.Quote("dashboard")+` as d
on d.id = acl.dashboard_id
WHERE d.is_folder = ?
) AS dashboard_permissions,`, dialect.BooleanStr(false))

sb.Write(`(
SELECT COUNT(acl.id)
FROM `+dialect.Quote("dashboard_acl")+` as acl
inner join `+dialect.Quote("dashboard")+` as d
on d.id = acl.dashboard_id
WHERE d.is_folder = ?
) AS folder_permissions,`, dialect.BooleanStr(true))

sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_provisioning") + `) AS provisioned_dashboards,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_snapshot") + `) AS snapshots,`)
sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("team") + `) AS teams`)

var stats m.SystemStats
_, err := x.SQL(rawSql, activeUserDeadlineDate).Get(&stats)
_, err := x.SQL(sb.GetSqlString(), sb.params...).Get(&stats)
if err != nil {
return err
}
@ -122,3 +132,16 @@ func GetAdminStats(query *m.GetAdminStatsQuery) error {
query.Result = &stats
return err
}

func GetSystemUserCountStats(query *m.GetSystemUserCountStatsQuery) error {
var rawSql = `SELECT COUNT(id) AS Count FROM ` + dialect.Quote("user")
var stats m.SystemUserCountStats
_, err := x.SQL(rawSql).Get(&stats)
if err != nil {
return err
}

query.Result = &stats

return err
}
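GetSystemStats now builds its query with the store's SqlBuilder instead of one long raw SQL string, so each fragment carries its own bound parameters (the active-user deadline, the is_folder flags). A rough sketch of the builder shape relied on here; this is not Grafana's actual SqlBuilder, only the minimal Write/GetSqlString/params contract the code above assumes:

```go
package sqlstore

import "strings"

// sqlBuilderSketch is an illustrative stand-in for the SqlBuilder used above:
// Write appends a SQL fragment together with any parameters bound to its
// placeholders, and GetSqlString returns the accumulated statement.
type sqlBuilderSketch struct {
	sql    strings.Builder
	params []interface{}
}

func (sb *sqlBuilderSketch) Write(sql string, params ...interface{}) {
	sb.sql.WriteString(sql)
	sb.params = append(sb.params, params...)
}

func (sb *sqlBuilderSketch) GetSqlString() string {
	return sb.sql.String()
}
```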
39
pkg/services/sqlstore/stats_test.go
Normal file
@ -0,0 +1,39 @@
package sqlstore

import (
	"testing"

	m "github.com/grafana/grafana/pkg/models"
	. "github.com/smartystreets/goconvey/convey"
)

func TestStatsDataAccess(t *testing.T) {

	Convey("Testing Stats Data Access", t, func() {
		InitTestDB(t)

		Convey("Get system stats should not result in error", func() {
			query := m.GetSystemStatsQuery{}
			err := GetSystemStats(&query)
			So(err, ShouldBeNil)
		})

		Convey("Get system user count stats should not result in error", func() {
			query := m.GetSystemUserCountStatsQuery{}
			err := GetSystemUserCountStats(&query)
			So(err, ShouldBeNil)
		})

		Convey("Get datasource stats should not result in error", func() {
			query := m.GetDataSourceStatsQuery{}
			err := GetDataSourceStats(&query)
			So(err, ShouldBeNil)
		})

		Convey("Get datasource access stats should not result in error", func() {
			query := m.GetDataSourceAccessStatsQuery{}
			err := GetDataSourceAccessStats(&query)
			So(err, ShouldBeNil)
		})
	})
}
@ -104,6 +104,7 @@ var (
DefaultTheme string
DisableLoginForm bool
DisableSignoutMenu bool
SignoutRedirectUrl string
ExternalUserMngLinkUrl string
ExternalUserMngLinkName string
ExternalUserMngInfo string
@ -600,6 +601,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
auth := iniFile.Section("auth")
DisableLoginForm = auth.Key("disable_login_form").MustBool(false)
DisableSignoutMenu = auth.Key("disable_signout_menu").MustBool(false)
SignoutRedirectUrl = auth.Key("signout_redirect_url").String()

// anonymous access
AnonymousEnabled = iniFile.Section("auth.anonymous").Key("enabled").MustBool(false)
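The new SignoutRedirectUrl setting is read from the auth section next to the existing login and signout toggles; a sample grafana.ini fragment (the URL itself is only an example):

```ini
[auth]
disable_login_form = false
disable_signout_menu = false
# New in this change: where to send users after they sign out.
signout_redirect_url = https://sso.example.com/logout
```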
@ -230,8 +230,8 @@ func parseMultiSelectValue(input string) []string {
// Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html
func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
regions := []string{
"ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1",
"eu-central-1", "eu-west-1", "eu-west-2", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2",
"ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1",
"eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2",
}

result := make([]suggestData, 0)
257
pkg/tsdb/elasticsearch/client/client.go
Normal file
@ -0,0 +1,257 @@
package es

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"path"
	"strconv"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/log"
	"github.com/grafana/grafana/pkg/tsdb"

	"github.com/grafana/grafana/pkg/models"
	"golang.org/x/net/context/ctxhttp"
)

const loggerName = "tsdb.elasticsearch.client"

var (
	clientLog = log.New(loggerName)
)

var newDatasourceHttpClient = func(ds *models.DataSource) (*http.Client, error) {
	return ds.GetHttpClient()
}

// Client represents a client which can interact with elasticsearch api
type Client interface {
	GetVersion() int
	GetTimeField() string
	GetMinInterval(queryInterval string) (time.Duration, error)
	ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
	MultiSearch() *MultiSearchRequestBuilder
}

// NewClient creates a new elasticsearch client
var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb.TimeRange) (Client, error) {
	version, err := ds.JsonData.Get("esVersion").Int()
	if err != nil {
		return nil, fmt.Errorf("elasticsearch version is required, err=%v", err)
	}

	timeField, err := ds.JsonData.Get("timeField").String()
	if err != nil {
		return nil, fmt.Errorf("elasticsearch time field name is required, err=%v", err)
	}

	indexInterval := ds.JsonData.Get("interval").MustString()
	ip, err := newIndexPattern(indexInterval, ds.Database)
	if err != nil {
		return nil, err
	}

	indices, err := ip.GetIndices(timeRange)
	if err != nil {
		return nil, err
	}

	clientLog.Debug("Creating new client", "version", version, "timeField", timeField, "indices", strings.Join(indices, ", "))

	switch version {
	case 2, 5, 56:
		return &baseClientImpl{
			ctx:       ctx,
			ds:        ds,
			version:   version,
			timeField: timeField,
			indices:   indices,
			timeRange: timeRange,
		}, nil
	}

	return nil, fmt.Errorf("elasticsearch version=%d is not supported", version)
}

type baseClientImpl struct {
	ctx       context.Context
	ds        *models.DataSource
	version   int
	timeField string
	indices   []string
	timeRange *tsdb.TimeRange
}

func (c *baseClientImpl) GetVersion() int {
	return c.version
}

func (c *baseClientImpl) GetTimeField() string {
	return c.timeField
}

func (c *baseClientImpl) GetMinInterval(queryInterval string) (time.Duration, error) {
	return tsdb.GetIntervalFrom(c.ds, simplejson.NewFromAny(map[string]interface{}{
		"interval": queryInterval,
	}), 5*time.Second)
}

func (c *baseClientImpl) getSettings() *simplejson.Json {
	return c.ds.JsonData
}

type multiRequest struct {
	header   map[string]interface{}
	body     interface{}
	interval tsdb.Interval
}

func (c *baseClientImpl) executeBatchRequest(uriPath string, requests []*multiRequest) (*http.Response, error) {
	bytes, err := c.encodeBatchRequests(requests)
	if err != nil {
		return nil, err
	}
	return c.executeRequest(http.MethodPost, uriPath, bytes)
}

func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte, error) {
	clientLog.Debug("Encoding batch requests to json", "batch requests", len(requests))
	start := time.Now()

	payload := bytes.Buffer{}
	for _, r := range requests {
		reqHeader, err := json.Marshal(r.header)
		if err != nil {
			return nil, err
		}
		payload.WriteString(string(reqHeader) + "\n")

		reqBody, err := json.Marshal(r.body)
		if err != nil {
			return nil, err
		}

		body := string(reqBody)
		body = strings.Replace(body, "$__interval_ms", strconv.FormatInt(r.interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1)
		body = strings.Replace(body, "$__interval", r.interval.Text, -1)

		payload.WriteString(body + "\n")
	}

	elapsed := time.Now().Sub(start)
	clientLog.Debug("Encoded batch requests to json", "took", elapsed)

	return payload.Bytes(), nil
}

func (c *baseClientImpl) executeRequest(method, uriPath string, body []byte) (*http.Response, error) {
	u, _ := url.Parse(c.ds.Url)
	u.Path = path.Join(u.Path, uriPath)

	var req *http.Request
	var err error
	if method == http.MethodPost {
		req, err = http.NewRequest(http.MethodPost, u.String(), bytes.NewBuffer(body))
	} else {
		req, err = http.NewRequest(http.MethodGet, u.String(), nil)
	}
	if err != nil {
		return nil, err
	}

	clientLog.Debug("Executing request", "url", req.URL.String(), "method", method)

	req.Header.Set("User-Agent", "Grafana")
	req.Header.Set("Content-Type", "application/json")

	if c.ds.BasicAuth {
		clientLog.Debug("Request configured to use basic authentication")
		req.SetBasicAuth(c.ds.BasicAuthUser, c.ds.BasicAuthPassword)
	}

	if !c.ds.BasicAuth && c.ds.User != "" {
		clientLog.Debug("Request configured to use basic authentication")
		req.SetBasicAuth(c.ds.User, c.ds.Password)
	}

	httpClient, err := newDatasourceHttpClient(c.ds)
	if err != nil {
		return nil, err
	}

	start := time.Now()
	defer func() {
		elapsed := time.Now().Sub(start)
		clientLog.Debug("Executed request", "took", elapsed)
	}()
	return ctxhttp.Do(c.ctx, httpClient, req)
}

func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) {
	clientLog.Debug("Executing multisearch", "search requests", len(r.Requests))

	multiRequests := c.createMultiSearchRequests(r.Requests)
	res, err := c.executeBatchRequest("_msearch", multiRequests)
	if err != nil {
		return nil, err
	}

	clientLog.Debug("Received multisearch response", "code", res.StatusCode, "status", res.Status, "content-length", res.ContentLength)

	start := time.Now()
	clientLog.Debug("Decoding multisearch json response")

	var msr MultiSearchResponse
	defer res.Body.Close()
	dec := json.NewDecoder(res.Body)
	err = dec.Decode(&msr)
	if err != nil {
		return nil, err
	}

	elapsed := time.Now().Sub(start)
	clientLog.Debug("Decoded multisearch json response", "took", elapsed)

	msr.status = res.StatusCode

	return &msr, nil
}

func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchRequest) []*multiRequest {
	multiRequests := []*multiRequest{}

	for _, searchReq := range searchRequests {
		mr := multiRequest{
			header: map[string]interface{}{
				"search_type":        "query_then_fetch",
				"ignore_unavailable": true,
				"index":              strings.Join(c.indices, ","),
			},
			body:     searchReq,
			interval: searchReq.Interval,
		}

		if c.version == 2 {
			mr.header["search_type"] = "count"
		}

		if c.version >= 56 {
			maxConcurrentShardRequests := c.getSettings().Get("maxConcurrentShardRequests").MustInt(256)
			mr.header["max_concurrent_shard_requests"] = maxConcurrentShardRequests
		}

		multiRequests = append(multiRequests, &mr)
	}

	return multiRequests
}

func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
	return NewMultiSearchRequestBuilder(c.GetVersion())
}
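The new es.Client ties the pieces of this package together: NewClient resolves the Elasticsearch version, time field and concrete indices from the datasource, MultiSearch builds the batched requests, and ExecuteMultisearch posts them to _msearch. A condensed usage sketch, modeled on createMultisearchForTest in the tests below; the datasource URL and index pattern are placeholders:

```go
package es

import (
	"context"
	"fmt"
	"time"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
)

// runSampleMultisearch is an illustrative sketch, not part of this change.
func runSampleMultisearch() error {
	ds := &models.DataSource{
		Url:      "http://localhost:9200", // placeholder
		Database: "[metrics-]YYYY.MM.DD",
		JsonData: simplejson.NewFromAny(map[string]interface{}{
			"esVersion": 5,
			"timeField": "@timestamp",
			"interval":  "Daily",
		}),
	}

	// The time range is passed as epoch-millisecond strings, as the tests below do.
	to := time.Now()
	from := to.Add(-6 * time.Hour)
	timeRange := tsdb.NewTimeRange(
		fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)),
		fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)),
	)

	c, err := NewClient(context.Background(), ds, timeRange)
	if err != nil {
		return err
	}

	// One date-histogram search, mirroring createMultisearchForTest below.
	msb := c.MultiSearch()
	s := msb.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
	s.Agg().DateHistogram("2", "@timestamp", func(a *DateHistogramAgg, ab AggBuilder) {
		a.Interval = "$__interval"
	})

	req, err := msb.Build()
	if err != nil {
		return err
	}

	res, err := c.ExecuteMultisearch(req)
	if err != nil {
		return err
	}

	fmt.Println("responses:", len(res.Responses))
	return nil
}
```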
304
pkg/tsdb/elasticsearch/client/client_test.go
Normal file
@ -0,0 +1,304 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestClient(t *testing.T) {
|
||||
Convey("Test elasticsearch client", t, func() {
|
||||
Convey("NewClient", func() {
|
||||
Convey("When no version set should return error", func() {
|
||||
ds := &models.DataSource{
|
||||
JsonData: simplejson.NewFromAny(make(map[string]interface{})),
|
||||
}
|
||||
|
||||
_, err := NewClient(nil, ds, nil)
|
||||
So(err, ShouldNotBeNil)
|
||||
})
|
||||
|
||||
Convey("When no time field name set should return error", func() {
|
||||
ds := &models.DataSource{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 5,
|
||||
}),
|
||||
}
|
||||
|
||||
_, err := NewClient(nil, ds, nil)
|
||||
So(err, ShouldNotBeNil)
|
||||
})
|
||||
|
||||
Convey("When unspported version set should return error", func() {
|
||||
ds := &models.DataSource{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 6,
|
||||
"timeField": "@timestamp",
|
||||
}),
|
||||
}
|
||||
|
||||
_, err := NewClient(nil, ds, nil)
|
||||
So(err, ShouldNotBeNil)
|
||||
})
|
||||
|
||||
Convey("When version 2 should return v2 client", func() {
|
||||
ds := &models.DataSource{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 2,
|
||||
"timeField": "@timestamp",
|
||||
}),
|
||||
}
|
||||
|
||||
c, err := NewClient(nil, ds, nil)
|
||||
So(err, ShouldBeNil)
|
||||
So(c.GetVersion(), ShouldEqual, 2)
|
||||
})
|
||||
|
||||
Convey("When version 5 should return v5 client", func() {
|
||||
ds := &models.DataSource{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 5,
|
||||
"timeField": "@timestamp",
|
||||
}),
|
||||
}
|
||||
|
||||
c, err := NewClient(nil, ds, nil)
|
||||
So(err, ShouldBeNil)
|
||||
So(c.GetVersion(), ShouldEqual, 5)
|
||||
})
|
||||
|
||||
Convey("When version 56 should return v5.6 client", func() {
|
||||
ds := &models.DataSource{
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 56,
|
||||
"timeField": "@timestamp",
|
||||
}),
|
||||
}
|
||||
|
||||
c, err := NewClient(nil, ds, nil)
|
||||
So(err, ShouldBeNil)
|
||||
So(c.GetVersion(), ShouldEqual, 56)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Given a fake http client", func() {
|
||||
var responseBuffer *bytes.Buffer
|
||||
var req *http.Request
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
req = r
|
||||
buf, err := ioutil.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read response body, err=%v", err)
|
||||
}
|
||||
responseBuffer = bytes.NewBuffer(buf)
|
||||
}))
|
||||
|
||||
currentNewDatasourceHttpClient := newDatasourceHttpClient
|
||||
|
||||
newDatasourceHttpClient = func(ds *models.DataSource) (*http.Client, error) {
|
||||
return ts.Client(), nil
|
||||
}
|
||||
|
||||
from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
|
||||
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
|
||||
fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
|
||||
toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))
|
||||
timeRange := tsdb.NewTimeRange(fromStr, toStr)
|
||||
|
||||
Convey("and a v2.x client", func() {
|
||||
ds := models.DataSource{
|
||||
Database: "[metrics-]YYYY.MM.DD",
|
||||
Url: ts.URL,
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 2,
|
||||
"timeField": "@timestamp",
|
||||
"interval": "Daily",
|
||||
}),
|
||||
}
|
||||
|
||||
c, err := NewClient(context.Background(), &ds, timeRange)
|
||||
So(err, ShouldBeNil)
|
||||
So(c, ShouldNotBeNil)
|
||||
|
||||
Convey("When executing multi search", func() {
|
||||
ms, err := createMultisearchForTest(c)
|
||||
So(err, ShouldBeNil)
|
||||
c.ExecuteMultisearch(ms)
|
||||
|
||||
Convey("Should send correct request and payload", func() {
|
||||
So(req, ShouldNotBeNil)
|
||||
So(req.Method, ShouldEqual, http.MethodPost)
|
||||
So(req.URL.Path, ShouldEqual, "/_msearch")
|
||||
|
||||
So(responseBuffer, ShouldNotBeNil)
|
||||
|
||||
headerBytes, err := responseBuffer.ReadBytes('\n')
|
||||
So(err, ShouldBeNil)
|
||||
bodyBytes := responseBuffer.Bytes()
|
||||
|
||||
jHeader, err := simplejson.NewJson(headerBytes)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
jBody, err := simplejson.NewJson(bodyBytes)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
fmt.Println("body", string(headerBytes))
|
||||
|
||||
So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
|
||||
So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
|
||||
So(jHeader.Get("search_type").MustString(), ShouldEqual, "count")
|
||||
So(jHeader.Get("max_concurrent_shard_requests").MustInt(10), ShouldEqual, 10)
|
||||
|
||||
Convey("and replace $__interval variable", func() {
|
||||
So(jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString(), ShouldEqual, "15000*@hostname")
|
||||
})
|
||||
|
||||
Convey("and replace $__interval_ms variable", func() {
|
||||
So(jBody.GetPath("aggs", "2", "date_histogram", "interval").MustString(), ShouldEqual, "15s")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and a v5.x client", func() {
|
||||
ds := models.DataSource{
|
||||
Database: "[metrics-]YYYY.MM.DD",
|
||||
Url: ts.URL,
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 5,
|
||||
"maxConcurrentShardRequests": 100,
|
||||
"timeField": "@timestamp",
|
||||
"interval": "Daily",
|
||||
}),
|
||||
}
|
||||
|
||||
c, err := NewClient(context.Background(), &ds, timeRange)
|
||||
So(err, ShouldBeNil)
|
||||
So(c, ShouldNotBeNil)
|
||||
|
||||
Convey("When executing multi search", func() {
|
||||
ms, err := createMultisearchForTest(c)
|
||||
So(err, ShouldBeNil)
|
||||
c.ExecuteMultisearch(ms)
|
||||
|
||||
Convey("Should send correct request and payload", func() {
|
||||
So(req, ShouldNotBeNil)
|
||||
So(req.Method, ShouldEqual, http.MethodPost)
|
||||
So(req.URL.Path, ShouldEqual, "/_msearch")
|
||||
|
||||
So(responseBuffer, ShouldNotBeNil)
|
||||
|
||||
headerBytes, err := responseBuffer.ReadBytes('\n')
|
||||
So(err, ShouldBeNil)
|
||||
bodyBytes := responseBuffer.Bytes()
|
||||
|
||||
jHeader, err := simplejson.NewJson(headerBytes)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
jBody, err := simplejson.NewJson(bodyBytes)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
fmt.Println("body", string(headerBytes))
|
||||
|
||||
So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
|
||||
So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
|
||||
So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
|
||||
So(jHeader.Get("max_concurrent_shard_requests").MustInt(10), ShouldEqual, 10)
|
||||
|
||||
Convey("and replace $__interval variable", func() {
|
||||
So(jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString(), ShouldEqual, "15000*@hostname")
|
||||
})
|
||||
|
||||
Convey("and replace $__interval_ms variable", func() {
|
||||
So(jBody.GetPath("aggs", "2", "date_histogram", "interval").MustString(), ShouldEqual, "15s")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and a v5.6 client", func() {
|
||||
ds := models.DataSource{
|
||||
Database: "[metrics-]YYYY.MM.DD",
|
||||
Url: ts.URL,
|
||||
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||
"esVersion": 56,
|
||||
"maxConcurrentShardRequests": 100,
|
||||
"timeField": "@timestamp",
|
||||
"interval": "Daily",
|
||||
}),
|
||||
}
|
||||
|
||||
c, err := NewClient(context.Background(), &ds, timeRange)
|
||||
So(err, ShouldBeNil)
|
||||
So(c, ShouldNotBeNil)
|
||||
|
||||
Convey("When executing multi search", func() {
|
||||
ms, err := createMultisearchForTest(c)
|
||||
So(err, ShouldBeNil)
|
||||
c.ExecuteMultisearch(ms)
|
||||
|
||||
Convey("Should send correct request and payload", func() {
|
||||
So(req, ShouldNotBeNil)
|
||||
So(req.Method, ShouldEqual, http.MethodPost)
|
||||
So(req.URL.Path, ShouldEqual, "/_msearch")
|
||||
|
||||
So(responseBuffer, ShouldNotBeNil)
|
||||
|
||||
headerBytes, err := responseBuffer.ReadBytes('\n')
|
||||
So(err, ShouldBeNil)
|
||||
bodyBytes := responseBuffer.Bytes()
|
||||
|
||||
jHeader, err := simplejson.NewJson(headerBytes)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
jBody, err := simplejson.NewJson(bodyBytes)
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
fmt.Println("body", string(headerBytes))
|
||||
|
||||
So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15")
|
||||
So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true)
|
||||
So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch")
|
||||
So(jHeader.Get("max_concurrent_shard_requests").MustInt(), ShouldEqual, 100)
|
||||
|
||||
Convey("and replace $__interval variable", func() {
|
||||
So(jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString(), ShouldEqual, "15000*@hostname")
|
||||
})
|
||||
|
||||
Convey("and replace $__interval_ms variable", func() {
|
||||
So(jBody.GetPath("aggs", "2", "date_histogram", "interval").MustString(), ShouldEqual, "15s")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Reset(func() {
|
||||
newDatasourceHttpClient = currentNewDatasourceHttpClient
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func createMultisearchForTest(c Client) (*MultiSearchRequest, error) {
|
||||
msb := c.MultiSearch()
|
||||
s := msb.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
|
||||
s.Agg().DateHistogram("2", "@timestamp", func(a *DateHistogramAgg, ab AggBuilder) {
|
||||
a.Interval = "$__interval"
|
||||
|
||||
ab.Metric("1", "avg", "@hostname", func(a *MetricAggregation) {
|
||||
a.Settings["script"] = "$__interval_ms*@hostname"
|
||||
})
|
||||
})
|
||||
return msb.Build()
|
||||
}
|
312
pkg/tsdb/elasticsearch/client/index_pattern.go
Normal file
@ -0,0 +1,312 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
)
|
||||
|
||||
const (
|
||||
noInterval = ""
|
||||
intervalHourly = "hourly"
|
||||
intervalDaily = "daily"
|
||||
intervalWeekly = "weekly"
|
||||
intervalMonthly = "monthly"
|
||||
intervalYearly = "yearly"
|
||||
)
|
||||
|
||||
type indexPattern interface {
|
||||
GetIndices(timeRange *tsdb.TimeRange) ([]string, error)
|
||||
}
|
||||
|
||||
var newIndexPattern = func(interval string, pattern string) (indexPattern, error) {
|
||||
if interval == noInterval {
|
||||
return &staticIndexPattern{indexName: pattern}, nil
|
||||
}
|
||||
|
||||
return newDynamicIndexPattern(interval, pattern)
|
||||
}
|
||||
|
||||
type staticIndexPattern struct {
|
||||
indexName string
|
||||
}
|
||||
|
||||
func (ip *staticIndexPattern) GetIndices(timeRange *tsdb.TimeRange) ([]string, error) {
|
||||
return []string{ip.indexName}, nil
|
||||
}
|
||||
|
||||
type intervalGenerator interface {
|
||||
Generate(from, to time.Time) []time.Time
|
||||
}
|
||||
|
||||
type dynamicIndexPattern struct {
|
||||
interval string
|
||||
pattern string
|
||||
intervalGenerator intervalGenerator
|
||||
}
|
||||
|
||||
func newDynamicIndexPattern(interval, pattern string) (*dynamicIndexPattern, error) {
|
||||
var generator intervalGenerator
|
||||
|
||||
switch strings.ToLower(interval) {
|
||||
case intervalHourly:
|
||||
generator = &hourlyInterval{}
|
||||
case intervalDaily:
|
||||
generator = &dailyInterval{}
|
||||
case intervalWeekly:
|
||||
generator = &weeklyInterval{}
|
||||
case intervalMonthly:
|
||||
generator = &monthlyInterval{}
|
||||
case intervalYearly:
|
||||
generator = &yearlyInterval{}
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported interval '%s'", interval)
|
||||
}
|
||||
|
||||
return &dynamicIndexPattern{
|
||||
interval: interval,
|
||||
pattern: pattern,
|
||||
intervalGenerator: generator,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (ip *dynamicIndexPattern) GetIndices(timeRange *tsdb.TimeRange) ([]string, error) {
|
||||
from := timeRange.GetFromAsTimeUTC()
|
||||
to := timeRange.GetToAsTimeUTC()
|
||||
intervals := ip.intervalGenerator.Generate(from, to)
|
||||
indices := make([]string, 0)
|
||||
|
||||
for _, t := range intervals {
|
||||
indices = append(indices, formatDate(t, ip.pattern))
|
||||
}
|
||||
|
||||
return indices, nil
|
||||
}
|
||||
|
||||
type hourlyInterval struct{}
|
||||
|
||||
func (i *hourlyInterval) Generate(from, to time.Time) []time.Time {
|
||||
intervals := []time.Time{}
|
||||
start := time.Date(from.Year(), from.Month(), from.Day(), from.Hour(), 0, 0, 0, time.UTC)
|
||||
end := time.Date(to.Year(), to.Month(), to.Day(), to.Hour(), 0, 0, 0, time.UTC)
|
||||
|
||||
intervals = append(intervals, start)
|
||||
|
||||
for start.Before(end) {
|
||||
start = start.Add(time.Hour)
|
||||
intervals = append(intervals, start)
|
||||
}
|
||||
|
||||
return intervals
|
||||
}
|
||||
|
||||
type dailyInterval struct{}
|
||||
|
||||
func (i *dailyInterval) Generate(from, to time.Time) []time.Time {
|
||||
intervals := []time.Time{}
|
||||
start := time.Date(from.Year(), from.Month(), from.Day(), 0, 0, 0, 0, time.UTC)
|
||||
end := time.Date(to.Year(), to.Month(), to.Day(), 0, 0, 0, 0, time.UTC)
|
||||
|
||||
intervals = append(intervals, start)
|
||||
|
||||
for start.Before(end) {
|
||||
start = start.Add(24 * time.Hour)
|
||||
intervals = append(intervals, start)
|
||||
}
|
||||
|
||||
return intervals
|
||||
}
|
||||
|
||||
type weeklyInterval struct{}
|
||||
|
||||
func (i *weeklyInterval) Generate(from, to time.Time) []time.Time {
|
||||
intervals := []time.Time{}
|
||||
start := time.Date(from.Year(), from.Month(), from.Day(), 0, 0, 0, 0, time.UTC)
|
||||
end := time.Date(to.Year(), to.Month(), to.Day(), 0, 0, 0, 0, time.UTC)
|
||||
|
||||
for start.Weekday() != time.Monday {
|
||||
start = start.Add(-24 * time.Hour)
|
||||
}
|
||||
|
||||
for end.Weekday() != time.Monday {
|
||||
end = end.Add(-24 * time.Hour)
|
||||
}
|
||||
|
||||
year, week := start.ISOWeek()
|
||||
intervals = append(intervals, start)
|
||||
|
||||
for start.Before(end) {
|
||||
start = start.Add(24 * time.Hour)
|
||||
nextYear, nextWeek := start.ISOWeek()
|
||||
if nextYear != year || nextWeek != week {
|
||||
intervals = append(intervals, start)
|
||||
}
|
||||
year = nextYear
|
||||
week = nextWeek
|
||||
}
|
||||
|
||||
return intervals
|
||||
}
|
||||
|
||||
type monthlyInterval struct{}
|
||||
|
||||
func (i *monthlyInterval) Generate(from, to time.Time) []time.Time {
|
||||
intervals := []time.Time{}
|
||||
start := time.Date(from.Year(), from.Month(), 1, 0, 0, 0, 0, time.UTC)
|
||||
end := time.Date(to.Year(), to.Month(), 1, 0, 0, 0, 0, time.UTC)
|
||||
|
||||
month := start.Month()
|
||||
intervals = append(intervals, start)
|
||||
|
||||
for start.Before(end) {
|
||||
start = start.Add(24 * time.Hour)
|
||||
nextMonth := start.Month()
|
||||
if nextMonth != month {
|
||||
intervals = append(intervals, start)
|
||||
}
|
||||
month = nextMonth
|
||||
}
|
||||
|
||||
return intervals
|
||||
}
|
||||
|
||||
type yearlyInterval struct{}
|
||||
|
||||
func (i *yearlyInterval) Generate(from, to time.Time) []time.Time {
|
||||
intervals := []time.Time{}
|
||||
start := time.Date(from.Year(), 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
end := time.Date(to.Year(), 1, 1, 0, 0, 0, 0, time.UTC)
|
||||
|
||||
year := start.Year()
|
||||
intervals = append(intervals, start)
|
||||
|
||||
for start.Before(end) {
|
||||
start = start.Add(24 * time.Hour)
|
||||
nextYear := start.Year()
|
||||
if nextYear != year {
|
||||
intervals = append(intervals, start)
|
||||
}
|
||||
year = nextYear
|
||||
}
|
||||
|
||||
return intervals
|
||||
}
|
||||
|
||||
var datePatternRegex = regexp.MustCompile("(LT|LL?L?L?|l{1,4}|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|Q)")
|
||||
|
||||
var datePatternReplacements = map[string]string{
|
||||
"M": "1", // stdNumMonth 1 2 ... 11 12
|
||||
"MM": "01", // stdZeroMonth 01 02 ... 11 12
|
||||
"MMM": "Jan", // stdMonth Jan Feb ... Nov Dec
|
||||
"MMMM": "January", // stdLongMonth January February ... November December
|
||||
"D": "2", // stdDay 1 2 ... 30 30
|
||||
"DD": "02", // stdZeroDay 01 02 ... 30 31
|
||||
"DDD": "<stdDayOfYear>", // Day of the year 1 2 ... 364 365
|
||||
"DDDD": "<stdDayOfYearZero>", // Day of the year 001 002 ... 364 365 @todo****
|
||||
"d": "<stdDayOfWeek>", // Numeric representation of day of the week 0 1 ... 5 6
|
||||
"dd": "Mon", // ***Su Mo ... Fr Sa @todo
|
||||
"ddd": "Mon", // Sun Mon ... Fri Sat
|
||||
"dddd": "Monday", // stdLongWeekDay Sunday Monday ... Friday Saturday
|
||||
"e": "<stdDayOfWeek>", // Numeric representation of day of the week 0 1 ... 5 6 @todo
|
||||
"E": "<stdDayOfWeekISO>", // ISO-8601 numeric representation of the day of the week (added in PHP 5.1.0) 1 2 ... 6 7 @todo
|
||||
"w": "<stdWeekOfYear>", // 1 2 ... 52 53
|
||||
"ww": "<stdWeekOfYear>", // ***01 02 ... 52 53 @todo
|
||||
"W": "<stdWeekOfYear>", // 1 2 ... 52 53
|
||||
"WW": "<stdWeekOfYear>", // ***01 02 ... 52 53 @todo
|
||||
"YY": "06", // stdYear 70 71 ... 29 30
|
||||
"YYYY": "2006", // stdLongYear 1970 1971 ... 2029 2030
|
||||
"gg": "<stdIsoYearShort>", // ISO-8601 year number 70 71 ... 29 30
|
||||
"gggg": "<stdIsoYear>", // ***1970 1971 ... 2029 2030
|
||||
"GG": "<stdIsoYearShort>", //70 71 ... 29 30
|
||||
"GGGG": "<stdIsoYear>", // ***1970 1971 ... 2029 2030
|
||||
"Q": "<stdQuarter>", // 1, 2, 3, 4
|
||||
"A": "PM", // stdPM AM PM
|
||||
"a": "pm", // stdpm am pm
|
||||
"H": "<stdHourNoZero>", // stdHour 0 1 ... 22 23
|
||||
"HH": "15", // 00 01 ... 22 23
|
||||
"h": "3", // stdHour12 1 2 ... 11 12
|
||||
"hh": "03", // stdZeroHour12 01 02 ... 11 12
|
||||
"m": "4", // stdZeroMinute 0 1 ... 58 59
|
||||
"mm": "04", // stdZeroMinute 00 01 ... 58 59
|
||||
"s": "5", // stdSecond 0 1 ... 58 59
|
||||
"ss": "05", // stdZeroSecond ***00 01 ... 58 59
|
||||
"z": "MST", //EST CST ... MST PST
|
||||
"zz": "MST", //EST CST ... MST PST
|
||||
"Z": "Z07:00", // stdNumColonTZ -07:00 -06:00 ... +06:00 +07:00
|
||||
"ZZ": "-0700", // stdNumTZ -0700 -0600 ... +0600 +0700
|
||||
"X": "<stdUnix>", // Seconds since unix epoch 1360013296
|
||||
"LT": "3:04 PM", // 8:30 PM
|
||||
"L": "01/02/2006", //09/04/1986
|
||||
"l": "1/2/2006", //9/4/1986
|
||||
"ll": "Jan 2 2006", //Sep 4 1986
|
||||
"lll": "Jan 2 2006 3:04 PM", //Sep 4 1986 8:30 PM
|
||||
"llll": "Mon, Jan 2 2006 3:04 PM", //Thu, Sep 4 1986 8:30 PM
|
||||
}
|
||||
|
||||
func formatDate(t time.Time, pattern string) string {
|
||||
var datePattern string
|
||||
parts := strings.Split(strings.TrimLeft(pattern, "["), "]")
|
||||
base := parts[0]
|
||||
if len(parts) == 2 {
|
||||
datePattern = parts[1]
|
||||
} else {
|
||||
datePattern = base
|
||||
base = ""
|
||||
}
|
||||
|
||||
formatted := t.Format(patternToLayout(datePattern))
|
||||
|
||||
if strings.Contains(formatted, "<std") {
|
||||
isoYear, isoWeek := t.ISOWeek()
|
||||
isoYearShort := fmt.Sprintf("%d", isoYear)[2:4]
|
||||
formatted = strings.Replace(formatted, "<stdIsoYear>", fmt.Sprintf("%d", isoYear), -1)
|
||||
formatted = strings.Replace(formatted, "<stdIsoYearShort>", isoYearShort, -1)
|
||||
formatted = strings.Replace(formatted, "<stdWeekOfYear>", fmt.Sprintf("%d", isoWeek), -1)
|
||||
|
||||
formatted = strings.Replace(formatted, "<stdUnix>", fmt.Sprintf("%d", t.Unix()), -1)
|
||||
|
||||
day := t.Weekday()
|
||||
dayOfWeekIso := int(day)
|
||||
if day == time.Sunday {
|
||||
dayOfWeekIso = 7
|
||||
}
|
||||
|
||||
formatted = strings.Replace(formatted, "<stdDayOfWeek>", fmt.Sprintf("%d", day), -1)
|
||||
formatted = strings.Replace(formatted, "<stdDayOfWeekISO>", fmt.Sprintf("%d", dayOfWeekIso), -1)
|
||||
formatted = strings.Replace(formatted, "<stdDayOfYear>", fmt.Sprintf("%d", t.YearDay()), -1)
|
||||
|
||||
quarter := 4
|
||||
|
||||
switch t.Month() {
|
||||
case time.January, time.February, time.March:
|
||||
quarter = 1
|
||||
case time.April, time.May, time.June:
|
||||
quarter = 2
|
||||
case time.July, time.August, time.September:
|
||||
quarter = 3
|
||||
}
|
||||
|
||||
formatted = strings.Replace(formatted, "<stdQuarter>", fmt.Sprintf("%d", quarter), -1)
|
||||
formatted = strings.Replace(formatted, "<stdHourNoZero>", fmt.Sprintf("%d", t.Hour()), -1)
|
||||
}
|
||||
|
||||
return base + formatted
|
||||
}
|
||||
|
||||
func patternToLayout(pattern string) string {
|
||||
var match [][]string
|
||||
if match = datePatternRegex.FindAllStringSubmatch(pattern, -1); match == nil {
|
||||
return pattern
|
||||
}
|
||||
|
||||
for i := range match {
|
||||
if replace, ok := datePatternReplacements[match[i][0]]; ok {
|
||||
pattern = strings.Replace(pattern, match[i][0], replace, 1)
|
||||
}
|
||||
}
|
||||
|
||||
return pattern
|
||||
}
|
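index_pattern.go resolves a moment.js-style index pattern such as [data-]YYYY.MM.DD into the concrete indices that cover the query's time range, using the token table in datePatternReplacements. A small sketch of exercising it directly; the pattern and dates are only examples:

```go
package es

import (
	"fmt"
	"time"

	"github.com/grafana/grafana/pkg/tsdb"
)

// printDailyIndices is an illustrative sketch, not part of this change.
func printDailyIndices() error {
	// Two days in May 2018, expressed as epoch-millisecond strings like the tests below.
	from := time.Date(2018, 5, 14, 0, 0, 0, 0, time.UTC)
	to := time.Date(2018, 5, 15, 0, 0, 0, 0, time.UTC)
	timeRange := tsdb.NewTimeRange(
		fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)),
		fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)),
	)

	ip, err := newIndexPattern(intervalDaily, "[data-]YYYY.MM.DD")
	if err != nil {
		return err
	}

	indices, err := ip.GetIndices(timeRange)
	if err != nil {
		return err
	}

	// Expected output: [data-2018.05.14 data-2018.05.15]
	fmt.Println(indices)
	return nil
}
```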
244
pkg/tsdb/elasticsearch/client/index_pattern_test.go
Normal file
@ -0,0 +1,244 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestIndexPattern(t *testing.T) {
|
||||
Convey("Static index patterns", t, func() {
|
||||
indexPatternScenario(noInterval, "data-*", nil, func(indices []string) {
|
||||
So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "data-*")
|
||||
})
|
||||
|
||||
indexPatternScenario(noInterval, "es-index-name", nil, func(indices []string) {
|
||||
So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "es-index-name")
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Dynamic index patterns", t, func() {
|
||||
from := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
|
||||
to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
|
||||
|
||||
indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) {
|
||||
//So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "data-2018.05.15.17")
|
||||
})
|
||||
|
||||
indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) {
|
||||
So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "data-2018.05.15")
|
||||
})
|
||||
|
||||
indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) {
|
||||
So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "data-2018.20")
|
||||
})
|
||||
|
||||
indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) {
|
||||
So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "data-2018.05")
|
||||
})
|
||||
|
||||
indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) {
|
||||
So(indices, ShouldHaveLength, 1)
|
||||
So(indices[0], ShouldEqual, "data-2018")
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Hourly interval", t, func() {
|
||||
Convey("Should return 1 interval", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&hourlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 2 intervals", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 2, 0, 6, 0, 0, time.UTC)
|
||||
intervals := (&hourlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 2)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 10 intervals", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 2, 8, 6, 0, 0, time.UTC)
|
||||
intervals := (&hourlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 10)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
|
||||
So(intervals[4], ShouldEqual, time.Date(2018, 1, 2, 3, 0, 0, 0, time.UTC))
|
||||
So(intervals[9], ShouldEqual, time.Date(2018, 1, 2, 8, 0, 0, 0, time.UTC))
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Daily interval", t, func() {
|
||||
Convey("Should return 1 day", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&dailyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 2 days", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 2, 0, 6, 0, 0, time.UTC)
|
||||
intervals := (&dailyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 2)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 32 days", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 2, 1, 8, 6, 0, 0, time.UTC)
|
||||
intervals := (&dailyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 32)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[30], ShouldEqual, time.Date(2018, 1, 31, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[31], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Weekly interval", t, func() {
|
||||
Convey("Should return 1 week (1)", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&weeklyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 1 week (2)", func() {
|
||||
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2017, 1, 1, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&weeklyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 2 weeks (1)", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 10, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&weeklyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 2)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 2 weeks (2)", func() {
|
||||
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2017, 1, 8, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&weeklyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 2)
|
||||
So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 3 weeks (1)", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 21, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&weeklyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 3)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[2], ShouldEqual, time.Date(2018, 1, 15, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 3 weeks (2)", func() {
|
||||
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2017, 1, 9, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&weeklyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 3)
|
||||
So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[2], ShouldEqual, time.Date(2017, 1, 9, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Monthly interval", t, func() {
|
||||
Convey("Should return 1 month", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&monthlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 2 months", func() {
|
||||
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 2, 2, 0, 6, 0, 0, time.UTC)
|
||||
intervals := (&monthlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 2)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 14 months", func() {
|
||||
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 2, 1, 8, 6, 0, 0, time.UTC)
|
||||
intervals := (&monthlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 14)
|
||||
So(intervals[0], ShouldEqual, time.Date(2017, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[13], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Yearly interval", t, func() {
|
||||
Convey("Should return 1 year (hour diff)", func() {
|
||||
from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 2, 1, 23, 6, 0, 0, time.UTC)
|
||||
intervals := (&yearlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 1 year (month diff)", func() {
|
||||
from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 12, 31, 23, 59, 59, 0, time.UTC)
|
||||
intervals := (&yearlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 1)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 2 years", func() {
|
||||
from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2019, 1, 1, 23, 59, 59, 0, time.UTC)
|
||||
intervals := (&yearlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 2)
|
||||
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[1], ShouldEqual, time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
|
||||
Convey("Should return 5 years", func() {
|
||||
from := time.Date(2014, 1, 1, 23, 1, 1, 0, time.UTC)
|
||||
to := time.Date(2018, 11, 1, 23, 59, 59, 0, time.UTC)
|
||||
intervals := (&yearlyInterval{}).Generate(from, to)
|
||||
So(intervals, ShouldHaveLength, 5)
|
||||
So(intervals[0], ShouldEqual, time.Date(2014, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
So(intervals[4], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func indexPatternScenario(interval string, pattern string, timeRange *tsdb.TimeRange, fn func(indices []string)) {
|
||||
Convey(fmt.Sprintf("Index pattern (interval=%s, index=%s", interval, pattern), func() {
|
||||
ip, err := newIndexPattern(interval, pattern)
|
||||
So(err, ShouldBeNil)
|
||||
So(ip, ShouldNotBeNil)
|
||||
indices, err := ip.GetIndices(timeRange)
|
||||
So(err, ShouldBeNil)
|
||||
fn(indices)
|
||||
})
|
||||
}
|
311
pkg/tsdb/elasticsearch/client/models.go
Normal file
@ -0,0 +1,311 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
)
|
||||
|
||||
// SearchRequest represents a search request
|
||||
type SearchRequest struct {
|
||||
Index string
|
||||
Interval tsdb.Interval
|
||||
Size int
|
||||
Sort map[string]interface{}
|
||||
Query *Query
|
||||
Aggs AggArray
|
||||
CustomProps map[string]interface{}
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the request.
|
||||
func (r *SearchRequest) MarshalJSON() ([]byte, error) {
|
||||
root := make(map[string]interface{})
|
||||
|
||||
root["size"] = r.Size
|
||||
if len(r.Sort) > 0 {
|
||||
root["sort"] = r.Sort
|
||||
}
|
||||
|
||||
for key, value := range r.CustomProps {
|
||||
root[key] = value
|
||||
}
|
||||
|
||||
root["query"] = r.Query
|
||||
|
||||
if len(r.Aggs) > 0 {
|
||||
root["aggs"] = r.Aggs
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
// SearchResponseHits represents search response hits
|
||||
type SearchResponseHits struct {
|
||||
Hits []map[string]interface{}
|
||||
Total int64
|
||||
}
|
||||
|
||||
// SearchResponse represents a search response
|
||||
type SearchResponse struct {
|
||||
Error map[string]interface{} `json:"error"`
|
||||
Aggregations map[string]interface{} `json:"aggregations"`
|
||||
Hits *SearchResponseHits `json:"hits"`
|
||||
}
|
||||
|
||||
// func (r *Response) getErrMsg() string {
|
||||
// var msg bytes.Buffer
|
||||
// errJson := simplejson.NewFromAny(r.Err)
|
||||
// errType, err := errJson.Get("type").String()
|
||||
// if err == nil {
|
||||
// msg.WriteString(fmt.Sprintf("type:%s", errType))
|
||||
// }
|
||||
|
||||
// reason, err := errJson.Get("type").String()
|
||||
// if err == nil {
|
||||
// msg.WriteString(fmt.Sprintf("reason:%s", reason))
|
||||
// }
|
||||
// return msg.String()
|
||||
// }
|
||||
|
||||
// MultiSearchRequest represents a multi search request
|
||||
type MultiSearchRequest struct {
|
||||
Requests []*SearchRequest
|
||||
}
|
||||
|
||||
// MultiSearchResponse represents a multi search response
|
||||
type MultiSearchResponse struct {
|
||||
status int `json:"status,omitempty"`
|
||||
Responses []*SearchResponse `json:"responses"`
|
||||
}
|
||||
|
||||
// Query represents a query
|
||||
type Query struct {
|
||||
Bool *BoolQuery `json:"bool"`
|
||||
}
|
||||
|
||||
// BoolQuery represents a bool query
|
||||
type BoolQuery struct {
|
||||
Filters []Filter
|
||||
}
|
||||
|
||||
// NewBoolQuery create a new bool query
|
||||
func NewBoolQuery() *BoolQuery {
|
||||
return &BoolQuery{Filters: make([]Filter, 0)}
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the boolean query.
|
||||
func (q *BoolQuery) MarshalJSON() ([]byte, error) {
|
||||
root := make(map[string]interface{})
|
||||
|
||||
if len(q.Filters) > 0 {
|
||||
if len(q.Filters) == 1 {
|
||||
root["filter"] = q.Filters[0]
|
||||
} else {
|
||||
root["filter"] = q.Filters
|
||||
}
|
||||
}
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
// Filter represents a search filter
|
||||
type Filter interface{}
|
||||
|
||||
// QueryStringFilter represents a query string search filter
|
||||
type QueryStringFilter struct {
|
||||
Filter
|
||||
Query string
|
||||
AnalyzeWildcard bool
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the query string filter.
|
||||
func (f *QueryStringFilter) MarshalJSON() ([]byte, error) {
|
||||
root := map[string]interface{}{
|
||||
"query_string": map[string]interface{}{
|
||||
"query": f.Query,
|
||||
"analyze_wildcard": f.AnalyzeWildcard,
|
||||
},
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
// RangeFilter represents a range search filter
|
||||
type RangeFilter struct {
|
||||
Filter
|
||||
Key string
|
||||
Gte string
|
||||
Lte string
|
||||
Format string
|
||||
}
|
||||
|
||||
// DateFormatEpochMS represents a date format of epoch milliseconds (epoch_millis)
|
||||
const DateFormatEpochMS = "epoch_millis"
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the query string filter.
|
||||
func (f *RangeFilter) MarshalJSON() ([]byte, error) {
|
||||
root := map[string]map[string]map[string]interface{}{
|
||||
"range": {
|
||||
f.Key: {
|
||||
"lte": f.Lte,
|
||||
"gte": f.Gte,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if f.Format != "" {
|
||||
root["range"][f.Key]["format"] = f.Format
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
// Aggregation represents an aggregation
|
||||
type Aggregation interface{}
|
||||
|
||||
// Agg represents a key and aggregation
|
||||
type Agg struct {
|
||||
Key string
|
||||
Aggregation *aggContainer
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the agg
|
||||
func (a *Agg) MarshalJSON() ([]byte, error) {
|
||||
root := map[string]interface{}{
|
||||
a.Key: a.Aggregation,
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
// AggArray represents a collection of key/aggregation pairs
|
||||
type AggArray []*Agg
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the agg
|
||||
func (a AggArray) MarshalJSON() ([]byte, error) {
|
||||
aggsMap := make(map[string]Aggregation)
|
||||
|
||||
for _, subAgg := range a {
|
||||
aggsMap[subAgg.Key] = subAgg.Aggregation
|
||||
}
|
||||
|
||||
return json.Marshal(aggsMap)
|
||||
}
|
||||
|
||||
type aggContainer struct {
|
||||
Type string
|
||||
Aggregation Aggregation
|
||||
Aggs AggArray
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the aggregation container
|
||||
func (a *aggContainer) MarshalJSON() ([]byte, error) {
|
||||
root := map[string]interface{}{
|
||||
a.Type: a.Aggregation,
|
||||
}
|
||||
|
||||
if len(a.Aggs) > 0 {
|
||||
root["aggs"] = a.Aggs
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
type aggDef struct {
|
||||
key string
|
||||
aggregation *aggContainer
|
||||
builders []AggBuilder
|
||||
}
|
||||
|
||||
func newAggDef(key string, aggregation *aggContainer) *aggDef {
|
||||
return &aggDef{
|
||||
key: key,
|
||||
aggregation: aggregation,
|
||||
builders: make([]AggBuilder, 0),
|
||||
}
|
||||
}
|
||||
|
||||
// HistogramAgg represents a histogram aggregation
|
||||
type HistogramAgg struct {
|
||||
Interval int `json:"interval,omitempty"`
|
||||
Field string `json:"field"`
|
||||
MinDocCount int `json:"min_doc_count"`
|
||||
Missing *int `json:"missing,omitempty"`
|
||||
}
|
||||
|
||||
// DateHistogramAgg represents a date histogram aggregation
|
||||
type DateHistogramAgg struct {
|
||||
Field string `json:"field"`
|
||||
Interval string `json:"interval,omitempty"`
|
||||
MinDocCount int `json:"min_doc_count"`
|
||||
Missing *string `json:"missing,omitempty"`
|
||||
ExtendedBounds *ExtendedBounds `json:"extended_bounds"`
|
||||
Format string `json:"format"`
|
||||
}
|
||||
|
||||
// FiltersAggregation represents a filters aggregation
|
||||
type FiltersAggregation struct {
|
||||
Filters map[string]interface{} `json:"filters"`
|
||||
}
|
||||
|
||||
// TermsAggregation represents a terms aggregation
|
||||
type TermsAggregation struct {
|
||||
Field string `json:"field"`
|
||||
Size int `json:"size"`
|
||||
Order map[string]interface{} `json:"order"`
|
||||
MinDocCount *int `json:"min_doc_count,omitempty"`
|
||||
Missing *string `json:"missing,omitempty"`
|
||||
}
|
||||
|
||||
// ExtendedBounds represents extended bounds
|
||||
type ExtendedBounds struct {
|
||||
Min string `json:"min"`
|
||||
Max string `json:"max"`
|
||||
}
|
||||
|
||||
// GeoHashGridAggregation represents a geo hash grid aggregation
|
||||
type GeoHashGridAggregation struct {
|
||||
Field string `json:"field"`
|
||||
Precision int `json:"precision"`
|
||||
}
|
||||
|
||||
// MetricAggregation represents a metric aggregation
|
||||
type MetricAggregation struct {
|
||||
Field string
|
||||
Settings map[string]interface{}
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the metric aggregation
|
||||
func (a *MetricAggregation) MarshalJSON() ([]byte, error) {
|
||||
root := map[string]interface{}{
|
||||
"field": a.Field,
|
||||
}
|
||||
|
||||
for k, v := range a.Settings {
|
||||
if k != "" && v != nil {
|
||||
root[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
||||
|
||||
// PipelineAggregation represents a pipeline aggregation
|
||||
type PipelineAggregation struct {
|
||||
BucketPath string
|
||||
Settings map[string]interface{}
|
||||
}
|
||||
|
||||
// MarshalJSON returns the JSON encoding of the pipeline aggregation
|
||||
func (a *PipelineAggregation) MarshalJSON() ([]byte, error) {
|
||||
root := map[string]interface{}{
|
||||
"buckets_path": a.BucketPath,
|
||||
}
|
||||
|
||||
for k, v := range a.Settings {
|
||||
if k != "" && v != nil {
|
||||
root[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
return json.Marshal(root)
|
||||
}
|
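The types above (Agg, aggContainer, AggArray and the concrete aggregations) nest to produce Elasticsearch's `aggs` tree when serialized. Below is a minimal sketch, not part of the commit, assuming it lives in the same `es` package; the bucket/metric IDs and field names are made up for illustration.

```go
package es

import (
	"encoding/json"
	"fmt"
)

// aggMarshalSketch marshals a date_histogram bucket agg ("2") with an avg
// metric ("1") as its child, mirroring the nesting handled by the
// MarshalJSON methods above.
func aggMarshalSketch() {
	aggs := AggArray{
		&Agg{
			Key: "2",
			Aggregation: &aggContainer{
				Type:        "date_histogram",
				Aggregation: &DateHistogramAgg{Field: "@timestamp", Interval: "30s"},
				Aggs: AggArray{
					&Agg{
						Key: "1",
						Aggregation: &aggContainer{
							Type:        "avg",
							Aggregation: &MetricAggregation{Field: "@value"},
						},
					},
				},
			},
		},
	}

	body, _ := json.Marshal(aggs)
	fmt.Println(string(body))
	// Roughly: {"2":{"aggs":{"1":{"avg":{"field":"@value"}}},"date_histogram":{"field":"@timestamp",...}}}
}
```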
451
pkg/tsdb/elasticsearch/client/search_request.go
Normal file
@ -0,0 +1,451 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
)
|
||||
|
||||
// SearchRequestBuilder represents a builder which can build a search request
|
||||
type SearchRequestBuilder struct {
|
||||
version int
|
||||
interval tsdb.Interval
|
||||
index string
|
||||
size int
|
||||
sort map[string]interface{}
|
||||
queryBuilder *QueryBuilder
|
||||
aggBuilders []AggBuilder
|
||||
customProps map[string]interface{}
|
||||
}
|
||||
|
||||
// NewSearchRequestBuilder creates a new search request builder
|
||||
func NewSearchRequestBuilder(version int, interval tsdb.Interval) *SearchRequestBuilder {
|
||||
builder := &SearchRequestBuilder{
|
||||
version: version,
|
||||
interval: interval,
|
||||
sort: make(map[string]interface{}),
|
||||
customProps: make(map[string]interface{}),
|
||||
aggBuilders: make([]AggBuilder, 0),
|
||||
}
|
||||
return builder
|
||||
}
|
||||
|
||||
// Build builds and returns a search request
|
||||
func (b *SearchRequestBuilder) Build() (*SearchRequest, error) {
|
||||
sr := SearchRequest{
|
||||
Index: b.index,
|
||||
Interval: b.interval,
|
||||
Size: b.size,
|
||||
Sort: b.sort,
|
||||
CustomProps: b.customProps,
|
||||
}
|
||||
|
||||
if b.queryBuilder != nil {
|
||||
q, err := b.queryBuilder.Build()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sr.Query = q
|
||||
}
|
||||
|
||||
if len(b.aggBuilders) > 0 {
|
||||
sr.Aggs = make(AggArray, 0)
|
||||
|
||||
for _, ab := range b.aggBuilders {
|
||||
aggArray, err := ab.Build()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, agg := range aggArray {
|
||||
sr.Aggs = append(sr.Aggs, agg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &sr, nil
|
||||
}
|
||||
|
||||
// Size sets the size of the search request
|
||||
func (b *SearchRequestBuilder) Size(size int) *SearchRequestBuilder {
|
||||
b.size = size
|
||||
return b
|
||||
}
|
||||
|
||||
// SortDesc adds a descending sort to the search request
|
||||
func (b *SearchRequestBuilder) SortDesc(field, unmappedType string) *SearchRequestBuilder {
|
||||
props := map[string]string{
|
||||
"order": "desc",
|
||||
}
|
||||
|
||||
if unmappedType != "" {
|
||||
props["unmapped_type"] = unmappedType
|
||||
}
|
||||
|
||||
b.sort[field] = props
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
// AddDocValueField adds a doc value field to the search request
|
||||
func (b *SearchRequestBuilder) AddDocValueField(field string) *SearchRequestBuilder {
|
||||
// the "fields" property is not supported in version >= 5; docvalue_fields is used instead
|
||||
if b.version < 5 {
|
||||
b.customProps["fields"] = []string{"*", "_source"}
|
||||
}
|
||||
|
||||
b.customProps["script_fields"] = make(map[string]interface{})
|
||||
|
||||
if b.version < 5 {
|
||||
b.customProps["fielddata_fields"] = []string{field}
|
||||
} else {
|
||||
b.customProps["docvalue_fields"] = []string{field}
|
||||
}
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
// Query creates and returns a query builder
|
||||
func (b *SearchRequestBuilder) Query() *QueryBuilder {
|
||||
if b.queryBuilder == nil {
|
||||
b.queryBuilder = NewQueryBuilder()
|
||||
}
|
||||
return b.queryBuilder
|
||||
}
|
||||
|
||||
// Agg initiates and returns a new aggregation builder
|
||||
func (b *SearchRequestBuilder) Agg() AggBuilder {
|
||||
aggBuilder := newAggBuilder()
|
||||
b.aggBuilders = append(b.aggBuilders, aggBuilder)
|
||||
return aggBuilder
|
||||
}
|
||||
|
||||
// MultiSearchRequestBuilder represents a builder which can build a multi search request
|
||||
type MultiSearchRequestBuilder struct {
|
||||
version int
|
||||
requestBuilders []*SearchRequestBuilder
|
||||
}
|
||||
|
||||
// NewMultiSearchRequestBuilder creates a new multi search request builder
|
||||
func NewMultiSearchRequestBuilder(version int) *MultiSearchRequestBuilder {
|
||||
return &MultiSearchRequestBuilder{
|
||||
version: version,
|
||||
}
|
||||
}
|
||||
|
||||
// Search initiates and returns a new search request builder
|
||||
func (m *MultiSearchRequestBuilder) Search(interval tsdb.Interval) *SearchRequestBuilder {
|
||||
b := NewSearchRequestBuilder(m.version, interval)
|
||||
m.requestBuilders = append(m.requestBuilders, b)
|
||||
return b
|
||||
}
|
||||
|
||||
// Build builds and returns a multi search request
|
||||
func (m *MultiSearchRequestBuilder) Build() (*MultiSearchRequest, error) {
|
||||
requests := []*SearchRequest{}
|
||||
for _, sb := range m.requestBuilders {
|
||||
searchRequest, err := sb.Build()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
requests = append(requests, searchRequest)
|
||||
}
|
||||
|
||||
return &MultiSearchRequest{
|
||||
Requests: requests,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// QueryBuilder represents a query builder
|
||||
type QueryBuilder struct {
|
||||
boolQueryBuilder *BoolQueryBuilder
|
||||
}
|
||||
|
||||
// NewQueryBuilder creates a new query builder
|
||||
func NewQueryBuilder() *QueryBuilder {
|
||||
return &QueryBuilder{}
|
||||
}
|
||||
|
||||
// Build builds and returns a query
|
||||
func (b *QueryBuilder) Build() (*Query, error) {
|
||||
q := Query{}
|
||||
|
||||
if b.boolQueryBuilder != nil {
|
||||
b, err := b.boolQueryBuilder.Build()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
q.Bool = b
|
||||
}
|
||||
|
||||
return &q, nil
|
||||
}
|
||||
|
||||
// Bool creates and returns a bool query builder
|
||||
func (b *QueryBuilder) Bool() *BoolQueryBuilder {
|
||||
if b.boolQueryBuilder == nil {
|
||||
b.boolQueryBuilder = NewBoolQueryBuilder()
|
||||
}
|
||||
return b.boolQueryBuilder
|
||||
}
|
||||
|
||||
// BoolQueryBuilder represents a bool query builder
|
||||
type BoolQueryBuilder struct {
|
||||
filterQueryBuilder *FilterQueryBuilder
|
||||
}
|
||||
|
||||
// NewBoolQueryBuilder creates a new bool query builder
|
||||
func NewBoolQueryBuilder() *BoolQueryBuilder {
|
||||
return &BoolQueryBuilder{}
|
||||
}
|
||||
|
||||
// Filter creates and returns a filter query builder
|
||||
func (b *BoolQueryBuilder) Filter() *FilterQueryBuilder {
|
||||
if b.filterQueryBuilder == nil {
|
||||
b.filterQueryBuilder = NewFilterQueryBuilder()
|
||||
}
|
||||
return b.filterQueryBuilder
|
||||
}
|
||||
|
||||
// Build builds and returns a bool query
|
||||
func (b *BoolQueryBuilder) Build() (*BoolQuery, error) {
|
||||
boolQuery := BoolQuery{}
|
||||
|
||||
if b.filterQueryBuilder != nil {
|
||||
filters, err := b.filterQueryBuilder.Build()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
boolQuery.Filters = filters
|
||||
}
|
||||
|
||||
return &boolQuery, nil
|
||||
}
|
||||
|
||||
// FilterQueryBuilder represents a filter query builder
|
||||
type FilterQueryBuilder struct {
|
||||
filters []Filter
|
||||
}
|
||||
|
||||
// NewFilterQueryBuilder creates a new filter query builder
|
||||
func NewFilterQueryBuilder() *FilterQueryBuilder {
|
||||
return &FilterQueryBuilder{
|
||||
filters: make([]Filter, 0),
|
||||
}
|
||||
}
|
||||
|
||||
// Build builds and returns the filters
|
||||
func (b *FilterQueryBuilder) Build() ([]Filter, error) {
|
||||
return b.filters, nil
|
||||
}
|
||||
|
||||
// AddDateRangeFilter adds a new time range filter
|
||||
func (b *FilterQueryBuilder) AddDateRangeFilter(timeField, lte, gte, format string) *FilterQueryBuilder {
|
||||
b.filters = append(b.filters, &RangeFilter{
|
||||
Key: timeField,
|
||||
Lte: lte,
|
||||
Gte: gte,
|
||||
Format: format,
|
||||
})
|
||||
return b
|
||||
}
|
||||
|
||||
// AddQueryStringFilter adds a new query string filter
|
||||
func (b *FilterQueryBuilder) AddQueryStringFilter(querystring string, analyseWildcard bool) *FilterQueryBuilder {
|
||||
if len(strings.TrimSpace(querystring)) == 0 {
|
||||
return b
|
||||
}
|
||||
|
||||
b.filters = append(b.filters, &QueryStringFilter{
|
||||
Query: querystring,
|
||||
AnalyzeWildcard: analyseWildcard,
|
||||
})
|
||||
return b
|
||||
}
|
||||
|
||||
// AggBuilder represents an aggregation builder
|
||||
type AggBuilder interface {
|
||||
Histogram(key, field string, fn func(a *HistogramAgg, b AggBuilder)) AggBuilder
|
||||
DateHistogram(key, field string, fn func(a *DateHistogramAgg, b AggBuilder)) AggBuilder
|
||||
Terms(key, field string, fn func(a *TermsAggregation, b AggBuilder)) AggBuilder
|
||||
Filters(key string, fn func(a *FiltersAggregation, b AggBuilder)) AggBuilder
|
||||
GeoHashGrid(key, field string, fn func(a *GeoHashGridAggregation, b AggBuilder)) AggBuilder
|
||||
Metric(key, metricType, field string, fn func(a *MetricAggregation)) AggBuilder
|
||||
Pipeline(key, pipelineType, bucketPath string, fn func(a *PipelineAggregation)) AggBuilder
|
||||
Build() (AggArray, error)
|
||||
}
|
||||
|
||||
type aggBuilderImpl struct {
|
||||
AggBuilder
|
||||
aggDefs []*aggDef
|
||||
}
|
||||
|
||||
func newAggBuilder() *aggBuilderImpl {
|
||||
return &aggBuilderImpl{
|
||||
aggDefs: make([]*aggDef, 0),
|
||||
}
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) Build() (AggArray, error) {
|
||||
aggs := make(AggArray, 0)
|
||||
|
||||
for _, aggDef := range b.aggDefs {
|
||||
agg := &Agg{
|
||||
Key: aggDef.key,
|
||||
Aggregation: aggDef.aggregation,
|
||||
}
|
||||
|
||||
for _, cb := range aggDef.builders {
|
||||
childAggs, err := cb.Build()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, childAgg := range childAggs {
|
||||
agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAgg)
|
||||
}
|
||||
}
|
||||
|
||||
aggs = append(aggs, agg)
|
||||
}
|
||||
|
||||
return aggs, nil
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) Histogram(key, field string, fn func(a *HistogramAgg, b AggBuilder)) AggBuilder {
|
||||
innerAgg := &HistogramAgg{
|
||||
Field: field,
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: "histogram",
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
|
||||
if fn != nil {
|
||||
builder := newAggBuilder()
|
||||
aggDef.builders = append(aggDef.builders, builder)
|
||||
fn(innerAgg, builder)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogramAgg, b AggBuilder)) AggBuilder {
|
||||
innerAgg := &DateHistogramAgg{
|
||||
Field: field,
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: "date_histogram",
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
|
||||
if fn != nil {
|
||||
builder := newAggBuilder()
|
||||
aggDef.builders = append(aggDef.builders, builder)
|
||||
fn(innerAgg, builder)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b AggBuilder)) AggBuilder {
|
||||
innerAgg := &TermsAggregation{
|
||||
Field: field,
|
||||
Order: make(map[string]interface{}),
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: "terms",
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
|
||||
if fn != nil {
|
||||
builder := newAggBuilder()
|
||||
aggDef.builders = append(aggDef.builders, builder)
|
||||
fn(innerAgg, builder)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) Filters(key string, fn func(a *FiltersAggregation, b AggBuilder)) AggBuilder {
|
||||
innerAgg := &FiltersAggregation{
|
||||
Filters: make(map[string]interface{}),
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: "filters",
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
if fn != nil {
|
||||
builder := newAggBuilder()
|
||||
aggDef.builders = append(aggDef.builders, builder)
|
||||
fn(innerAgg, builder)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAggregation, b AggBuilder)) AggBuilder {
|
||||
innerAgg := &GeoHashGridAggregation{
|
||||
Field: field,
|
||||
Precision: 5,
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: "geohash_grid",
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
|
||||
if fn != nil {
|
||||
builder := newAggBuilder()
|
||||
aggDef.builders = append(aggDef.builders, builder)
|
||||
fn(innerAgg, builder)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) Metric(key, metricType, field string, fn func(a *MetricAggregation)) AggBuilder {
|
||||
innerAgg := &MetricAggregation{
|
||||
Field: field,
|
||||
Settings: make(map[string]interface{}),
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: metricType,
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
|
||||
if fn != nil {
|
||||
fn(innerAgg)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *aggBuilderImpl) Pipeline(key, pipelineType, bucketPath string, fn func(a *PipelineAggregation)) AggBuilder {
|
||||
innerAgg := &PipelineAggregation{
|
||||
BucketPath: bucketPath,
|
||||
Settings: make(map[string]interface{}),
|
||||
}
|
||||
aggDef := newAggDef(key, &aggContainer{
|
||||
Type: pipelineType,
|
||||
Aggregation: innerAgg,
|
||||
})
|
||||
|
||||
if fn != nil {
|
||||
fn(innerAgg)
|
||||
}
|
||||
|
||||
b.aggDefs = append(b.aggDefs, aggDef)
|
||||
|
||||
return b
|
||||
}
|
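To make the builder API above concrete, here is a hedged sketch (not part of the commit) of how a search request could be assembled and serialized. It assumes the same `es` package; the field names, the 15s interval and the `$timeFrom`/`$timeTo` placeholders mirror the tests that follow.

```go
package es

import (
	"encoding/json"
	"time"

	"github.com/grafana/grafana/pkg/tsdb"
)

// buildExampleRequest sets size, sort and a doc value field, adds a bool
// filter with a time range and a query string, and defines a date_histogram
// bucket agg with an avg metric as child before marshalling the request.
func buildExampleRequest() ([]byte, error) {
	b := NewSearchRequestBuilder(5, tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
	b.Size(500)
	b.SortDesc("@timestamp", "boolean")
	b.AddDocValueField("@timestamp")

	filters := b.Query().Bool().Filter()
	filters.AddDateRangeFilter("@timestamp", "$timeTo", "$timeFrom", DateFormatEpochMS)
	filters.AddQueryStringFilter("hostname:server1", true)

	b.Agg().DateHistogram("2", "@timestamp", func(a *DateHistogramAgg, ab AggBuilder) {
		a.Interval = "$__interval" // illustrative; normally derived from the query interval
		ab.Metric("1", "avg", "@value", nil)
	})

	sr, err := b.Build()
	if err != nil {
		return nil, err
	}
	return json.Marshal(sr)
}
```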
473
pkg/tsdb/elasticsearch/client/search_request_test.go
Normal file
@ -0,0 +1,473 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestSearchRequest(t *testing.T) {
|
||||
Convey("Test elasticsearch search request", t, func() {
|
||||
timeField := "@timestamp"
|
||||
Convey("Given new search request builder for es version 5", func() {
|
||||
b := NewSearchRequestBuilder(5, tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have size of zero", func() {
|
||||
So(sr.Size, ShouldEqual, 0)
|
||||
})
|
||||
|
||||
Convey("Should have no sorting", func() {
|
||||
So(sr.Sort, ShouldHaveLength, 0)
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
So(json.Get("size").MustInt(500), ShouldEqual, 0)
|
||||
So(json.Get("sort").Interface(), ShouldBeNil)
|
||||
So(json.Get("aggs").Interface(), ShouldBeNil)
|
||||
So(json.Get("query").Interface(), ShouldBeNil)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When adding size, sort, filters", func() {
|
||||
b.Size(200)
|
||||
b.SortDesc(timeField, "boolean")
|
||||
filters := b.Query().Bool().Filter()
|
||||
filters.AddDateRangeFilter(timeField, "$timeTo", "$timeFrom", DateFormatEpochMS)
|
||||
filters.AddQueryStringFilter("test", true)
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have correct size", func() {
|
||||
So(sr.Size, ShouldEqual, 200)
|
||||
})
|
||||
|
||||
Convey("Should have correct sorting", func() {
|
||||
sort, ok := sr.Sort[timeField].(map[string]string)
|
||||
So(ok, ShouldBeTrue)
|
||||
So(sort["order"], ShouldEqual, "desc")
|
||||
So(sort["unmapped_type"], ShouldEqual, "boolean")
|
||||
})
|
||||
|
||||
Convey("Should have range filter", func() {
|
||||
f, ok := sr.Query.Bool.Filters[0].(*RangeFilter)
|
||||
So(ok, ShouldBeTrue)
|
||||
So(f.Gte, ShouldEqual, "$timeFrom")
|
||||
So(f.Lte, ShouldEqual, "$timeTo")
|
||||
So(f.Format, ShouldEqual, "epoch_millis")
|
||||
})
|
||||
|
||||
Convey("Should have query string filter", func() {
|
||||
f, ok := sr.Query.Bool.Filters[1].(*QueryStringFilter)
|
||||
So(ok, ShouldBeTrue)
|
||||
So(f.Query, ShouldEqual, "test")
|
||||
So(f.AnalyzeWildcard, ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
So(json.Get("size").MustInt(0), ShouldEqual, 200)
|
||||
|
||||
sort := json.GetPath("sort", timeField)
|
||||
So(sort.Get("order").MustString(), ShouldEqual, "desc")
|
||||
So(sort.Get("unmapped_type").MustString(), ShouldEqual, "boolean")
|
||||
|
||||
timeRangeFilter := json.GetPath("query", "bool", "filter").GetIndex(0).Get("range").Get(timeField)
|
||||
So(timeRangeFilter.Get("gte").MustString(""), ShouldEqual, "$timeFrom")
|
||||
So(timeRangeFilter.Get("lte").MustString(""), ShouldEqual, "$timeTo")
|
||||
So(timeRangeFilter.Get("format").MustString(""), ShouldEqual, DateFormatEpochMS)
|
||||
|
||||
queryStringFilter := json.GetPath("query", "bool", "filter").GetIndex(1).Get("query_string")
|
||||
So(queryStringFilter.Get("analyze_wildcard").MustBool(false), ShouldEqual, true)
|
||||
So(queryStringFilter.Get("query").MustString(""), ShouldEqual, "test")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When adding doc value field", func() {
|
||||
b.AddDocValueField(timeField)
|
||||
|
||||
Convey("should set correct props", func() {
|
||||
So(b.customProps["fields"], ShouldBeNil)
|
||||
|
||||
scriptFields, ok := b.customProps["script_fields"].(map[string]interface{})
|
||||
So(ok, ShouldBeTrue)
|
||||
So(scriptFields, ShouldHaveLength, 0)
|
||||
|
||||
docValueFields, ok := b.customProps["docvalue_fields"].([]string)
|
||||
So(ok, ShouldBeTrue)
|
||||
So(docValueFields, ShouldHaveLength, 1)
|
||||
So(docValueFields[0], ShouldEqual, timeField)
|
||||
})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
scriptFields, err := json.Get("script_fields").Map()
|
||||
So(err, ShouldBeNil)
|
||||
So(scriptFields, ShouldHaveLength, 0)
|
||||
|
||||
_, err = json.Get("fields").StringArray()
|
||||
So(err, ShouldNotBeNil)
|
||||
|
||||
docValueFields, err := json.Get("docvalue_fields").StringArray()
|
||||
So(err, ShouldBeNil)
|
||||
So(docValueFields, ShouldHaveLength, 1)
|
||||
So(docValueFields[0], ShouldEqual, timeField)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and adding multiple top level aggs", func() {
|
||||
aggBuilder := b.Agg()
|
||||
aggBuilder.Terms("1", "@hostname", nil)
|
||||
aggBuilder.DateHistogram("2", "@timestamp", nil)
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have 2 top level aggs", func() {
|
||||
aggs := sr.Aggs
|
||||
So(aggs, ShouldHaveLength, 2)
|
||||
So(aggs[0].Key, ShouldEqual, "1")
|
||||
So(aggs[0].Aggregation.Type, ShouldEqual, "terms")
|
||||
So(aggs[1].Key, ShouldEqual, "2")
|
||||
So(aggs[1].Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(json.Get("aggs").MustMap(), ShouldHaveLength, 2)
|
||||
So(json.GetPath("aggs", "1", "terms", "field").MustString(), ShouldEqual, "@hostname")
|
||||
So(json.GetPath("aggs", "2", "date_histogram", "field").MustString(), ShouldEqual, "@timestamp")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and adding top level agg with child agg", func() {
|
||||
aggBuilder := b.Agg()
|
||||
aggBuilder.Terms("1", "@hostname", func(a *TermsAggregation, ib AggBuilder) {
|
||||
ib.DateHistogram("2", "@timestamp", nil)
|
||||
})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have 1 top level agg and one child agg", func() {
|
||||
aggs := sr.Aggs
|
||||
So(aggs, ShouldHaveLength, 1)
|
||||
|
||||
topAgg := aggs[0]
|
||||
So(topAgg.Key, ShouldEqual, "1")
|
||||
So(topAgg.Aggregation.Type, ShouldEqual, "terms")
|
||||
So(topAgg.Aggregation.Aggs, ShouldHaveLength, 1)
|
||||
|
||||
childAgg := aggs[0].Aggregation.Aggs[0]
|
||||
So(childAgg.Key, ShouldEqual, "2")
|
||||
So(childAgg.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
So(json.Get("aggs").MustMap(), ShouldHaveLength, 1)
|
||||
firstLevelAgg := json.GetPath("aggs", "1")
|
||||
secondLevelAgg := firstLevelAgg.GetPath("aggs", "2")
|
||||
So(firstLevelAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@hostname")
|
||||
So(secondLevelAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and adding two top level aggs with child agg", func() {
|
||||
aggBuilder := b.Agg()
|
||||
aggBuilder.Histogram("1", "@hostname", func(a *HistogramAgg, ib AggBuilder) {
|
||||
ib.DateHistogram("2", "@timestamp", nil)
|
||||
})
|
||||
aggBuilder.Filters("3", func(a *FiltersAggregation, ib AggBuilder) {
|
||||
ib.Terms("4", "@test", nil)
|
||||
})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have 2 top level aggs with one child agg each", func() {
|
||||
aggs := sr.Aggs
|
||||
So(aggs, ShouldHaveLength, 2)
|
||||
|
||||
topAggOne := aggs[0]
|
||||
So(topAggOne.Key, ShouldEqual, "1")
|
||||
So(topAggOne.Aggregation.Type, ShouldEqual, "histogram")
|
||||
So(topAggOne.Aggregation.Aggs, ShouldHaveLength, 1)
|
||||
|
||||
topAggOnechildAgg := topAggOne.Aggregation.Aggs[0]
|
||||
So(topAggOnechildAgg.Key, ShouldEqual, "2")
|
||||
So(topAggOnechildAgg.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
|
||||
topAggTwo := aggs[1]
|
||||
So(topAggTwo.Key, ShouldEqual, "3")
|
||||
So(topAggTwo.Aggregation.Type, ShouldEqual, "filters")
|
||||
So(topAggTwo.Aggregation.Aggs, ShouldHaveLength, 1)
|
||||
|
||||
topAggTwochildAgg := topAggTwo.Aggregation.Aggs[0]
|
||||
So(topAggTwochildAgg.Key, ShouldEqual, "4")
|
||||
So(topAggTwochildAgg.Aggregation.Type, ShouldEqual, "terms")
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
topAggOne := json.GetPath("aggs", "1")
|
||||
So(topAggOne.GetPath("histogram", "field").MustString(), ShouldEqual, "@hostname")
|
||||
topAggOnechildAgg := topAggOne.GetPath("aggs", "2")
|
||||
So(topAggOnechildAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp")
|
||||
|
||||
topAggTwo := json.GetPath("aggs", "3")
|
||||
topAggTwochildAgg := topAggTwo.GetPath("aggs", "4")
|
||||
So(topAggTwo.GetPath("filters").MustArray(), ShouldHaveLength, 0)
|
||||
So(topAggTwochildAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@test")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and adding top level agg with child agg with child agg", func() {
|
||||
aggBuilder := b.Agg()
|
||||
aggBuilder.Terms("1", "@hostname", func(a *TermsAggregation, ib AggBuilder) {
|
||||
ib.Terms("2", "@app", func(a *TermsAggregation, ib AggBuilder) {
|
||||
ib.DateHistogram("3", "@timestamp", nil)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have 1 top level agg with one child having a child", func() {
|
||||
aggs := sr.Aggs
|
||||
So(aggs, ShouldHaveLength, 1)
|
||||
|
||||
topAgg := aggs[0]
|
||||
So(topAgg.Key, ShouldEqual, "1")
|
||||
So(topAgg.Aggregation.Type, ShouldEqual, "terms")
|
||||
So(topAgg.Aggregation.Aggs, ShouldHaveLength, 1)
|
||||
|
||||
childAgg := topAgg.Aggregation.Aggs[0]
|
||||
So(childAgg.Key, ShouldEqual, "2")
|
||||
So(childAgg.Aggregation.Type, ShouldEqual, "terms")
|
||||
|
||||
childChildAgg := childAgg.Aggregation.Aggs[0]
|
||||
So(childChildAgg.Key, ShouldEqual, "3")
|
||||
So(childChildAgg.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
topAgg := json.GetPath("aggs", "1")
|
||||
So(topAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@hostname")
|
||||
|
||||
childAgg := topAgg.GetPath("aggs", "2")
|
||||
So(childAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@app")
|
||||
|
||||
childChildAgg := childAgg.GetPath("aggs", "3")
|
||||
So(childChildAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("and adding bucket and metric aggs", func() {
|
||||
aggBuilder := b.Agg()
|
||||
aggBuilder.Terms("1", "@hostname", func(a *TermsAggregation, ib AggBuilder) {
|
||||
ib.Terms("2", "@app", func(a *TermsAggregation, ib AggBuilder) {
|
||||
ib.Metric("4", "avg", "@value", nil)
|
||||
ib.DateHistogram("3", "@timestamp", func(a *DateHistogramAgg, ib AggBuilder) {
|
||||
ib.Metric("4", "avg", "@value", nil)
|
||||
ib.Metric("5", "max", "@value", nil)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("Should have 1 top level agg with one child having a child", func() {
|
||||
aggs := sr.Aggs
|
||||
So(aggs, ShouldHaveLength, 1)
|
||||
|
||||
topAgg := aggs[0]
|
||||
So(topAgg.Key, ShouldEqual, "1")
|
||||
So(topAgg.Aggregation.Type, ShouldEqual, "terms")
|
||||
So(topAgg.Aggregation.Aggs, ShouldHaveLength, 1)
|
||||
|
||||
childAgg := topAgg.Aggregation.Aggs[0]
|
||||
So(childAgg.Key, ShouldEqual, "2")
|
||||
So(childAgg.Aggregation.Type, ShouldEqual, "terms")
|
||||
|
||||
childChildOneAgg := childAgg.Aggregation.Aggs[0]
|
||||
So(childChildOneAgg.Key, ShouldEqual, "4")
|
||||
So(childChildOneAgg.Aggregation.Type, ShouldEqual, "avg")
|
||||
|
||||
childChildTwoAgg := childAgg.Aggregation.Aggs[1]
|
||||
So(childChildTwoAgg.Key, ShouldEqual, "3")
|
||||
So(childChildTwoAgg.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
|
||||
childChildTwoChildOneAgg := childChildTwoAgg.Aggregation.Aggs[0]
|
||||
So(childChildTwoChildOneAgg.Key, ShouldEqual, "4")
|
||||
So(childChildTwoChildOneAgg.Aggregation.Type, ShouldEqual, "avg")
|
||||
|
||||
childChildTwoChildTwoAgg := childChildTwoAgg.Aggregation.Aggs[1]
|
||||
So(childChildTwoChildTwoAgg.Key, ShouldEqual, "5")
|
||||
So(childChildTwoChildTwoAgg.Aggregation.Type, ShouldEqual, "max")
|
||||
})
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
termsAgg := json.GetPath("aggs", "1")
|
||||
So(termsAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@hostname")
|
||||
|
||||
termsAggTwo := termsAgg.GetPath("aggs", "2")
|
||||
So(termsAggTwo.GetPath("terms", "field").MustString(), ShouldEqual, "@app")
|
||||
|
||||
termsAggTwoAvg := termsAggTwo.GetPath("aggs", "4")
|
||||
So(termsAggTwoAvg.GetPath("avg", "field").MustString(), ShouldEqual, "@value")
|
||||
|
||||
dateHistAgg := termsAggTwo.GetPath("aggs", "3")
|
||||
So(dateHistAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp")
|
||||
|
||||
avgAgg := dateHistAgg.GetPath("aggs", "4")
|
||||
So(avgAgg.GetPath("avg", "field").MustString(), ShouldEqual, "@value")
|
||||
|
||||
maxAgg := dateHistAgg.GetPath("aggs", "5")
|
||||
So(maxAgg.GetPath("max", "field").MustString(), ShouldEqual, "@value")
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Given new search request builder for es version 2", func() {
|
||||
b := NewSearchRequestBuilder(2, tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
|
||||
|
||||
Convey("When adding doc value field", func() {
|
||||
b.AddDocValueField(timeField)
|
||||
|
||||
Convey("should set correct props", func() {
|
||||
fields, ok := b.customProps["fields"].([]string)
|
||||
So(ok, ShouldBeTrue)
|
||||
So(fields, ShouldHaveLength, 2)
|
||||
So(fields[0], ShouldEqual, "*")
|
||||
So(fields[1], ShouldEqual, "_source")
|
||||
|
||||
scriptFields, ok := b.customProps["script_fields"].(map[string]interface{})
|
||||
So(ok, ShouldBeTrue)
|
||||
So(scriptFields, ShouldHaveLength, 0)
|
||||
|
||||
fieldDataFields, ok := b.customProps["fielddata_fields"].([]string)
|
||||
So(ok, ShouldBeTrue)
|
||||
So(fieldDataFields, ShouldHaveLength, 1)
|
||||
So(fieldDataFields[0], ShouldEqual, timeField)
|
||||
})
|
||||
|
||||
Convey("When building search request", func() {
|
||||
sr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
Convey("When marshal to JSON should generate correct json", func() {
|
||||
body, err := json.Marshal(sr)
|
||||
So(err, ShouldBeNil)
|
||||
json, err := simplejson.NewJson([]byte(body))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
scriptFields, err := json.Get("script_fields").Map()
|
||||
So(err, ShouldBeNil)
|
||||
So(scriptFields, ShouldHaveLength, 0)
|
||||
|
||||
fields, err := json.Get("fields").StringArray()
|
||||
So(err, ShouldBeNil)
|
||||
So(fields, ShouldHaveLength, 2)
|
||||
So(fields[0], ShouldEqual, "*")
|
||||
So(fields[1], ShouldEqual, "_source")
|
||||
|
||||
fieldDataFields, err := json.Get("fielddata_fields").StringArray()
|
||||
So(err, ShouldBeNil)
|
||||
So(fieldDataFields, ShouldHaveLength, 1)
|
||||
So(fieldDataFields[0], ShouldEqual, timeField)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestMultiSearchRequest(t *testing.T) {
|
||||
Convey("Test elasticsearch multi search request", t, func() {
|
||||
Convey("Given new multi search request builder", func() {
|
||||
b := NewMultiSearchRequestBuilder(0)
|
||||
|
||||
Convey("When adding one search request", func() {
|
||||
b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
|
||||
|
||||
Convey("When building search request should contain one search request", func() {
|
||||
mr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
So(mr.Requests, ShouldHaveLength, 1)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When adding two search requests", func() {
|
||||
b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
|
||||
b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
|
||||
|
||||
Convey("When building search request should contain two search requests", func() {
|
||||
mr, err := b.Build()
|
||||
So(err, ShouldBeNil)
|
||||
So(mr.Requests, ShouldHaveLength, 2)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
45
pkg/tsdb/elasticsearch/elasticsearch.go
Normal file
@ -0,0 +1,45 @@
|
||||
package elasticsearch
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/grafana/grafana/pkg/log"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
)
|
||||
|
||||
// ElasticsearchExecutor represents a handler for elasticsearch datasource requests
|
||||
type ElasticsearchExecutor struct{}
|
||||
|
||||
var (
|
||||
glog log.Logger
|
||||
intervalCalculator tsdb.IntervalCalculator
|
||||
)
|
||||
|
||||
// NewElasticsearchExecutor creates a new elasticsearch executor
|
||||
func NewElasticsearchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
|
||||
return &ElasticsearchExecutor{}, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
glog = log.New("tsdb.elasticsearch")
|
||||
intervalCalculator = tsdb.NewIntervalCalculator(nil)
|
||||
tsdb.RegisterTsdbQueryEndpoint("elasticsearch", NewElasticsearchExecutor)
|
||||
}
|
||||
|
||||
// Query handles an elasticsearch datasource request
|
||||
func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
|
||||
if len(tsdbQuery.Queries) == 0 {
|
||||
return nil, fmt.Errorf("query contains no queries")
|
||||
}
|
||||
|
||||
client, err := es.NewClient(ctx, dsInfo, tsdbQuery.TimeRange)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query := newTimeSeriesQuery(client, tsdbQuery, intervalCalculator)
|
||||
return query.execute()
|
||||
}
|
77
pkg/tsdb/elasticsearch/models.go
Normal file
@ -0,0 +1,77 @@
|
||||
package elasticsearch
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
// Query represents the time series query model of the datasource
|
||||
type Query struct {
|
||||
TimeField string `json:"timeField"`
|
||||
RawQuery string `json:"query"`
|
||||
BucketAggs []*BucketAgg `json:"bucketAggs"`
|
||||
Metrics []*MetricAgg `json:"metrics"`
|
||||
Alias string `json:"alias"`
|
||||
Interval string
|
||||
RefID string
|
||||
}
|
||||
|
||||
// BucketAgg represents a bucket aggregation of the time series query model of the datasource
|
||||
type BucketAgg struct {
|
||||
Field string `json:"field"`
|
||||
ID string `json:"id"`
|
||||
Settings *simplejson.Json `json:"settings"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
// MetricAgg represents a metric aggregation of the time series query model of the datasource
|
||||
type MetricAgg struct {
|
||||
Field string `json:"field"`
|
||||
Hide bool `json:"hide"`
|
||||
ID string `json:"id"`
|
||||
PipelineAggregate string `json:"pipelineAgg"`
|
||||
Settings *simplejson.Json `json:"settings"`
|
||||
Meta *simplejson.Json `json:"meta"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
var metricAggType = map[string]string{
|
||||
"count": "Count",
|
||||
"avg": "Average",
|
||||
"sum": "Sum",
|
||||
"max": "Max",
|
||||
"min": "Min",
|
||||
"extended_stats": "Extended Stats",
|
||||
"percentiles": "Percentiles",
|
||||
"cardinality": "Unique Count",
|
||||
"moving_avg": "Moving Average",
|
||||
"derivative": "Derivative",
|
||||
"raw_document": "Raw Document",
|
||||
}
|
||||
|
||||
var extendedStats = map[string]string{
|
||||
"avg": "Avg",
|
||||
"min": "Min",
|
||||
"max": "Max",
|
||||
"sum": "Sum",
|
||||
"count": "Count",
|
||||
"std_deviation": "Std Dev",
|
||||
"std_deviation_bounds_upper": "Std Dev Upper",
|
||||
"std_deviation_bounds_lower": "Std Dev Lower",
|
||||
}
|
||||
|
||||
var pipelineAggType = map[string]string{
|
||||
"moving_avg": "moving_avg",
|
||||
"derivative": "derivative",
|
||||
}
|
||||
|
||||
func isPipelineAgg(metricType string) bool {
|
||||
if _, ok := pipelineAggType[metricType]; ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func describeMetric(metricType, field string) string {
|
||||
text := metricAggType[metricType]
|
||||
return text + " " + field
|
||||
}
|
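A small, hypothetical illustration of the lookup tables and helpers above, assuming it lives in the same `elasticsearch` package:

```go
package elasticsearch

import "fmt"

// metricNameSketch shows how metric types map to display names and how
// pipeline aggregations are detected. The values are illustrative only.
func metricNameSketch() {
	fmt.Println(describeMetric("avg", "@value")) // "Average @value"
	fmt.Println(metricAggType["cardinality"])    // "Unique Count"
	fmt.Println(isPipelineAgg("moving_avg"))     // true
	fmt.Println(isPipelineAgg("avg"))            // false
}
```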
530
pkg/tsdb/elasticsearch/response_parser.go
Normal file
@ -0,0 +1,530 @@
|
||||
package elasticsearch
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/null"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
)
|
||||
|
||||
type responseParser struct {
|
||||
Responses []*es.SearchResponse
|
||||
Targets []*Query
|
||||
}
|
||||
|
||||
var newResponseParser = func(responses []*es.SearchResponse, targets []*Query) *responseParser {
|
||||
return &responseParser{
|
||||
Responses: responses,
|
||||
Targets: targets,
|
||||
}
|
||||
}
|
||||
|
||||
func (rp *responseParser) getTimeSeries() (*tsdb.Response, error) {
|
||||
result := &tsdb.Response{}
|
||||
result.Results = make(map[string]*tsdb.QueryResult)
|
||||
|
||||
if rp.Responses == nil {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
for i, res := range rp.Responses {
|
||||
target := rp.Targets[i]
|
||||
|
||||
if res.Error != nil {
|
||||
result.Results[target.RefID] = getErrorFromElasticResponse(res)
|
||||
continue
|
||||
}
|
||||
|
||||
queryRes := tsdb.NewQueryResult()
|
||||
props := make(map[string]string)
|
||||
table := tsdb.Table{
|
||||
Columns: make([]tsdb.TableColumn, 0),
|
||||
Rows: make([]tsdb.RowValues, 0),
|
||||
}
|
||||
err := rp.processBuckets(res.Aggregations, target, &queryRes.Series, &table, props, 0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rp.nameSeries(&queryRes.Series, target)
|
||||
rp.trimDatapoints(&queryRes.Series, target)
|
||||
|
||||
if len(table.Rows) > 0 {
|
||||
queryRes.Tables = append(queryRes.Tables, &table)
|
||||
}
|
||||
|
||||
result.Results[target.RefID] = queryRes
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Query, series *tsdb.TimeSeriesSlice, table *tsdb.Table, props map[string]string, depth int) error {
|
||||
var err error
|
||||
maxDepth := len(target.BucketAggs) - 1
|
||||
|
||||
aggIDs := make([]string, 0)
|
||||
for k := range aggs {
|
||||
aggIDs = append(aggIDs, k)
|
||||
}
|
||||
sort.Strings(aggIDs)
|
||||
for _, aggID := range aggIDs {
|
||||
v := aggs[aggID]
|
||||
aggDef, _ := findAgg(target, aggID)
|
||||
esAgg := simplejson.NewFromAny(v)
|
||||
if aggDef == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if depth == maxDepth {
|
||||
if aggDef.Type == "date_histogram" {
|
||||
err = rp.processMetrics(esAgg, target, series, props)
|
||||
} else {
|
||||
err = rp.processAggregationDocs(esAgg, aggDef, target, table, props)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
for _, b := range esAgg.Get("buckets").MustArray() {
|
||||
bucket := simplejson.NewFromAny(b)
|
||||
newProps := make(map[string]string, 0)
|
||||
|
||||
for k, v := range props {
|
||||
newProps[k] = v
|
||||
}
|
||||
|
||||
if key, err := bucket.Get("key").String(); err == nil {
|
||||
newProps[aggDef.Field] = key
|
||||
} else if key, err := bucket.Get("key").Int64(); err == nil {
|
||||
newProps[aggDef.Field] = strconv.FormatInt(key, 10)
|
||||
}
|
||||
|
||||
if key, err := bucket.Get("key_as_string").String(); err == nil {
|
||||
newProps[aggDef.Field] = key
|
||||
}
|
||||
err = rp.processBuckets(bucket.MustMap(), target, series, table, newProps, depth+1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
for k, v := range esAgg.Get("buckets").MustMap() {
|
||||
bucket := simplejson.NewFromAny(v)
|
||||
newProps := make(map[string]string, 0)
|
||||
|
||||
for k, v := range props {
|
||||
newProps[k] = v
|
||||
}
|
||||
|
||||
newProps["filter"] = k
|
||||
|
||||
err = rp.processBuckets(bucket.MustMap(), target, series, table, newProps, depth+1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return nil
|
||||
|
||||
}
|
||||
|
||||
func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *tsdb.TimeSeriesSlice, props map[string]string) error {
|
||||
for _, metric := range target.Metrics {
|
||||
if metric.Hide {
|
||||
continue
|
||||
}
|
||||
|
||||
switch metric.Type {
|
||||
case "count":
|
||||
newSeries := tsdb.TimeSeries{
|
||||
Tags: make(map[string]string),
|
||||
}
|
||||
|
||||
for _, v := range esAgg.Get("buckets").MustArray() {
|
||||
bucket := simplejson.NewFromAny(v)
|
||||
value := castToNullFloat(bucket.Get("doc_count"))
|
||||
key := castToNullFloat(bucket.Get("key"))
|
||||
newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
|
||||
}
|
||||
|
||||
for k, v := range props {
|
||||
newSeries.Tags[k] = v
|
||||
}
|
||||
newSeries.Tags["metric"] = "count"
|
||||
*series = append(*series, &newSeries)
|
||||
|
||||
case "percentiles":
|
||||
buckets := esAgg.Get("buckets").MustArray()
|
||||
if len(buckets) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
firstBucket := simplejson.NewFromAny(buckets[0])
|
||||
percentiles := firstBucket.GetPath(metric.ID, "values").MustMap()
|
||||
|
||||
percentileKeys := make([]string, 0)
|
||||
for k := range percentiles {
|
||||
percentileKeys = append(percentileKeys, k)
|
||||
}
|
||||
sort.Strings(percentileKeys)
|
||||
for _, percentileName := range percentileKeys {
|
||||
newSeries := tsdb.TimeSeries{
|
||||
Tags: make(map[string]string),
|
||||
}
|
||||
for k, v := range props {
|
||||
newSeries.Tags[k] = v
|
||||
}
|
||||
newSeries.Tags["metric"] = "p" + percentileName
|
||||
newSeries.Tags["field"] = metric.Field
|
||||
for _, v := range buckets {
|
||||
bucket := simplejson.NewFromAny(v)
|
||||
value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName))
|
||||
key := castToNullFloat(bucket.Get("key"))
|
||||
newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
|
||||
}
|
||||
*series = append(*series, &newSeries)
|
||||
}
|
||||
case "extended_stats":
|
||||
buckets := esAgg.Get("buckets").MustArray()
|
||||
|
||||
metaKeys := make([]string, 0)
|
||||
meta := metric.Meta.MustMap()
|
||||
for k := range meta {
|
||||
metaKeys = append(metaKeys, k)
|
||||
}
|
||||
sort.Strings(metaKeys)
|
||||
for _, statName := range metaKeys {
|
||||
v := meta[statName]
|
||||
if enabled, ok := v.(bool); !ok || !enabled {
|
||||
continue
|
||||
}
|
||||
|
||||
newSeries := tsdb.TimeSeries{
|
||||
Tags: make(map[string]string),
|
||||
}
|
||||
for k, v := range props {
|
||||
newSeries.Tags[k] = v
|
||||
}
|
||||
newSeries.Tags["metric"] = statName
|
||||
newSeries.Tags["field"] = metric.Field
|
||||
|
||||
for _, v := range buckets {
|
||||
bucket := simplejson.NewFromAny(v)
|
||||
key := castToNullFloat(bucket.Get("key"))
|
||||
var value null.Float
|
||||
if statName == "std_deviation_bounds_upper" {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "upper"))
|
||||
} else if statName == "std_deviation_bounds_lower" {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "lower"))
|
||||
} else {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, statName))
|
||||
}
|
||||
newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
|
||||
}
|
||||
*series = append(*series, &newSeries)
|
||||
}
|
||||
default:
|
||||
newSeries := tsdb.TimeSeries{
|
||||
Tags: make(map[string]string),
|
||||
}
|
||||
for k, v := range props {
|
||||
newSeries.Tags[k] = v
|
||||
}
|
||||
|
||||
newSeries.Tags["metric"] = metric.Type
|
||||
newSeries.Tags["field"] = metric.Field
|
||||
for _, v := range esAgg.Get("buckets").MustArray() {
|
||||
bucket := simplejson.NewFromAny(v)
|
||||
key := castToNullFloat(bucket.Get("key"))
|
||||
valueObj, err := bucket.Get(metric.ID).Map()
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
var value null.Float
|
||||
if _, ok := valueObj["normalized_value"]; ok {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, "normalized_value"))
|
||||
} else {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, "value"))
|
||||
}
|
||||
newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
|
||||
}
|
||||
*series = append(*series, &newSeries)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef *BucketAgg, target *Query, table *tsdb.Table, props map[string]string) error {
|
||||
propKeys := make([]string, 0)
|
||||
for k := range props {
|
||||
propKeys = append(propKeys, k)
|
||||
}
|
||||
sort.Strings(propKeys)
|
||||
|
||||
if len(table.Columns) == 0 {
|
||||
for _, propKey := range propKeys {
|
||||
table.Columns = append(table.Columns, tsdb.TableColumn{Text: propKey})
|
||||
}
|
||||
table.Columns = append(table.Columns, tsdb.TableColumn{Text: aggDef.Field})
|
||||
}
|
||||
|
||||
addMetricValue := func(values *tsdb.RowValues, metricName string, value null.Float) {
|
||||
found := false
|
||||
for _, c := range table.Columns {
|
||||
if c.Text == metricName {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
table.Columns = append(table.Columns, tsdb.TableColumn{Text: metricName})
|
||||
}
|
||||
*values = append(*values, value)
|
||||
}
|
||||
|
||||
for _, v := range esAgg.Get("buckets").MustArray() {
|
||||
bucket := simplejson.NewFromAny(v)
|
||||
values := make(tsdb.RowValues, 0)
|
||||
|
||||
for _, propKey := range propKeys {
|
||||
values = append(values, props[propKey])
|
||||
}
|
||||
|
||||
if key, err := bucket.Get("key").String(); err == nil {
|
||||
values = append(values, key)
|
||||
} else {
|
||||
values = append(values, castToNullFloat(bucket.Get("key")))
|
||||
}
|
||||
|
||||
for _, metric := range target.Metrics {
|
||||
switch metric.Type {
|
||||
case "count":
|
||||
addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count")))
|
||||
break
|
||||
case "extended_stats":
|
||||
metaKeys := make([]string, 0)
|
||||
meta := metric.Meta.MustMap()
|
||||
for k := range meta {
|
||||
metaKeys = append(metaKeys, k)
|
||||
}
|
||||
sort.Strings(metaKeys)
|
||||
for _, statName := range metaKeys {
|
||||
v := meta[statName]
|
||||
if enabled, ok := v.(bool); !ok || !enabled {
|
||||
continue
|
||||
}
|
||||
|
||||
var value null.Float
|
||||
if statName == "std_deviation_bounds_upper" {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "upper"))
|
||||
} else if statName == "std_deviation_bounds_lower" {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "lower"))
|
||||
} else {
|
||||
value = castToNullFloat(bucket.GetPath(metric.ID, statName))
|
||||
}
|
||||
|
||||
addMetricValue(&values, rp.getMetricName(metric.Type), value)
|
||||
break
|
||||
}
|
||||
default:
|
||||
metricName := rp.getMetricName(metric.Type)
|
||||
otherMetrics := make([]*MetricAgg, 0)
|
||||
|
||||
for _, m := range target.Metrics {
|
||||
if m.Type == metric.Type {
|
||||
otherMetrics = append(otherMetrics, m)
|
||||
}
|
||||
}
|
||||
|
||||
if len(otherMetrics) > 1 {
|
||||
metricName += " " + metric.Field
|
||||
}
|
||||
|
||||
addMetricValue(&values, metricName, castToNullFloat(bucket.GetPath(metric.ID, "value")))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
table.Rows = append(table.Rows, values)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) {
|
||||
var histogram *BucketAgg
|
||||
for _, bucketAgg := range target.BucketAggs {
|
||||
if bucketAgg.Type == "date_histogram" {
|
||||
histogram = bucketAgg
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if histogram == nil {
|
||||
return
|
||||
}
|
||||
|
||||
trimEdges, err := histogram.Settings.Get("trimEdges").Int()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, s := range *series {
|
||||
if len(s.Points) > trimEdges*2 {
|
||||
s.Points = s.Points[trimEdges : len(s.Points)-trimEdges]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (rp *responseParser) nameSeries(seriesList *tsdb.TimeSeriesSlice, target *Query) {
|
||||
set := make(map[string]string)
|
||||
for _, v := range *seriesList {
|
||||
if metricType, exists := v.Tags["metric"]; exists {
|
||||
if _, ok := set[metricType]; !ok {
|
||||
set[metricType] = ""
|
||||
}
|
||||
}
|
||||
}
|
||||
metricTypeCount := len(set)
|
||||
for _, series := range *seriesList {
|
||||
series.Name = rp.getSeriesName(series, target, metricTypeCount)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
var aliasPatternRegex = regexp.MustCompile(`\{\{([\s\S]+?)\}\}`)
|
||||
|
||||
func (rp *responseParser) getSeriesName(series *tsdb.TimeSeries, target *Query, metricTypeCount int) string {
|
||||
metricType := series.Tags["metric"]
|
||||
metricName := rp.getMetricName(metricType)
|
||||
delete(series.Tags, "metric")
|
||||
|
||||
field := ""
|
||||
if v, ok := series.Tags["field"]; ok {
|
||||
field = v
|
||||
delete(series.Tags, "field")
|
||||
}
|
||||
|
||||
if target.Alias != "" {
|
||||
seriesName := target.Alias
|
||||
|
||||
subMatches := aliasPatternRegex.FindAllStringSubmatch(target.Alias, -1)
|
||||
for _, subMatch := range subMatches {
|
||||
group := subMatch[0]
|
||||
|
||||
if len(subMatch) > 1 {
|
||||
group = subMatch[1]
|
||||
}
|
||||
|
||||
if strings.Index(group, "term ") == 0 {
|
||||
seriesName = strings.Replace(seriesName, subMatch[0], series.Tags[group[5:]], 1)
|
||||
}
|
||||
if v, ok := series.Tags[group]; ok {
|
||||
seriesName = strings.Replace(seriesName, subMatch[0], v, 1)
|
||||
}
|
||||
if group == "metric" {
|
||||
seriesName = strings.Replace(seriesName, subMatch[0], metricName, 1)
|
||||
}
|
||||
if group == "field" {
|
||||
seriesName = strings.Replace(seriesName, subMatch[0], field, 1)
|
||||
}
|
||||
}
|
||||
|
||||
return seriesName
|
||||
}
|
||||
// todo, if field and pipelineAgg
|
||||
if field != "" && isPipelineAgg(metricType) {
|
||||
found := false
|
||||
for _, metric := range target.Metrics {
|
||||
if metric.ID == field {
|
||||
metricName += " " + describeMetric(metric.Type, field)
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
metricName = "Unset"
|
||||
}
|
||||
} else if field != "" {
|
||||
metricName += " " + field
|
||||
}
|
||||
|
||||
if len(series.Tags) == 0 {
|
||||
return metricName
|
||||
}
|
||||
|
||||
name := ""
|
||||
for _, v := range series.Tags {
|
||||
name += v + " "
|
||||
}
|
||||
|
||||
if metricTypeCount == 1 {
|
||||
return strings.TrimSpace(name)
|
||||
}
|
||||
|
||||
return strings.TrimSpace(name) + " " + metricName
|
||||
|
||||
}
|
||||
|
||||
func (rp *responseParser) getMetricName(metric string) string {
|
||||
if text, ok := metricAggType[metric]; ok {
|
||||
return text
|
||||
}
|
||||
|
||||
if text, ok := extendedStats[metric]; ok {
|
||||
return text
|
||||
}
|
||||
|
||||
return metric
|
||||
}
|
||||
|
||||
func castToNullFloat(j *simplejson.Json) null.Float {
|
||||
f, err := j.Float64()
|
||||
if err == nil {
|
||||
return null.FloatFrom(f)
|
||||
}
|
||||
|
||||
if s, err := j.String(); err == nil {
|
||||
if strings.ToLower(s) == "nan" {
|
||||
return null.NewFloat(0, false)
|
||||
}
|
||||
|
||||
if v, err := strconv.ParseFloat(s, 64); err == nil {
|
||||
return null.FloatFromPtr(&v)
|
||||
}
|
||||
}
|
||||
|
||||
return null.NewFloat(0, false)
|
||||
}
|
||||
|
||||
func findAgg(target *Query, aggID string) (*BucketAgg, error) {
|
||||
for _, v := range target.BucketAggs {
|
||||
if aggID == v.ID {
|
||||
return v, nil
|
||||
}
|
||||
}
|
||||
return nil, errors.New("can't find aggDef, aggID: " + aggID)
|
||||
}
|
||||
|
||||
func getErrorFromElasticResponse(response *es.SearchResponse) *tsdb.QueryResult {
|
||||
result := tsdb.NewQueryResult()
|
||||
json := simplejson.NewFromAny(response.Error)
|
||||
reason := json.Get("reason").MustString()
|
||||
rootCauseReason := json.Get("root_cause").GetIndex(0).Get("reason").MustString()
|
||||
|
||||
if rootCauseReason != "" {
|
||||
result.ErrorString = rootCauseReason
|
||||
} else if reason != "" {
|
||||
result.ErrorString = reason
|
||||
} else {
|
||||
result.ErrorString = "Unknown elasticsearch error response"
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
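As a sketch of the alias handling in getSeriesName above (not part of the commit, same `elasticsearch` package assumed), the `{{...}}` placeholders captured by aliasPatternRegex are substituted with series tags, the metric name, or the field:

```go
package elasticsearch

import "fmt"

// aliasPatternSketch lists the placeholders getSeriesName would substitute
// in a made-up alias: {{term <tag>}}, {{metric}}, {{field}} or a plain tag name.
func aliasPatternSketch() {
	alias := "{{term host}} - {{metric}} of {{field}}"
	for _, m := range aliasPatternRegex.FindAllStringSubmatch(alias, -1) {
		fmt.Printf("placeholder=%q group=%q\n", m[0], m[1])
	}
	// placeholder="{{term host}}" group="term host"
	// placeholder="{{metric}}" group="metric"
	// placeholder="{{field}}" group="field"
}
```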
880
pkg/tsdb/elasticsearch/response_parser_test.go
Normal file
@ -0,0 +1,880 @@
|
||||
package elasticsearch
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/null"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestResponseParser(t *testing.T) {
|
||||
Convey("Elasticsearch response parser test", t, func() {
|
||||
Convey("Simple query and count", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "count", "id": "1" }],
|
||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"doc_count": 10,
|
||||
"key": 1000
|
||||
},
|
||||
{
|
||||
"doc_count": 15,
|
||||
"key": 2000
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 1)
|
||||
series := queryRes.Series[0]
|
||||
So(series.Name, ShouldEqual, "Count")
|
||||
So(series.Points, ShouldHaveLength, 2)
|
||||
So(series.Points[0][0].Float64, ShouldEqual, 10)
|
||||
So(series.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(series.Points[1][0].Float64, ShouldEqual, 15)
|
||||
So(series.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("Simple query count & avg aggregation", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "count", "id": "1" }, {"type": "avg", "field": "value", "id": "2" }],
|
||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"3": {
|
||||
"buckets": [
|
||||
{
|
||||
"2": { "value": 88 },
|
||||
"doc_count": 10,
|
||||
"key": 1000
|
||||
},
|
||||
{
|
||||
"2": { "value": 99 },
|
||||
"doc_count": 15,
|
||||
"key": 2000
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 2)
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "Count")
|
||||
So(seriesOne.Points, ShouldHaveLength, 2)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 10)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesOne.Points[1][0].Float64, ShouldEqual, 15)
|
||||
So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "Average value")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 2)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 88)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesTwo.Points[1][0].Float64, ShouldEqual, 99)
|
||||
So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("Single group by query one metric", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "count", "id": "1" }],
|
||||
"bucketAggs": [
|
||||
{ "type": "terms", "field": "host", "id": "2" },
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
|
||||
},
|
||||
"doc_count": 4,
|
||||
"key": "server1"
|
||||
},
|
||||
{
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
|
||||
},
|
||||
"doc_count": 10,
|
||||
"key": "server2"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 2)
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "server1")
|
||||
So(seriesOne.Points, ShouldHaveLength, 2)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
|
||||
So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "server2")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 2)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
|
||||
So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("Single group by query two metrics", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "count", "id": "1" }, { "type": "avg", "field": "@value", "id": "4" }],
|
||||
"bucketAggs": [
|
||||
{ "type": "terms", "field": "host", "id": "2" },
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"3": {
|
||||
"buckets": [
|
||||
{ "4": { "value": 10 }, "doc_count": 1, "key": 1000 },
|
||||
{ "4": { "value": 12 }, "doc_count": 3, "key": 2000 }
|
||||
]
|
||||
},
|
||||
"doc_count": 4,
|
||||
"key": "server1"
|
||||
},
|
||||
{
|
||||
"3": {
|
||||
"buckets": [
|
||||
{ "4": { "value": 20 }, "doc_count": 1, "key": 1000 },
|
||||
{ "4": { "value": 32 }, "doc_count": 3, "key": 2000 }
|
||||
]
|
||||
},
|
||||
"doc_count": 10,
|
||||
"key": "server2"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 4)
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "server1 Count")
|
||||
So(seriesOne.Points, ShouldHaveLength, 2)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
|
||||
So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "server1 Average @value")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 2)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 10)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesTwo.Points[1][0].Float64, ShouldEqual, 12)
|
||||
So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesThree := queryRes.Series[2]
|
||||
So(seriesThree.Name, ShouldEqual, "server2 Count")
|
||||
So(seriesThree.Points, ShouldHaveLength, 2)
|
||||
So(seriesThree.Points[0][0].Float64, ShouldEqual, 1)
|
||||
So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesThree.Points[1][0].Float64, ShouldEqual, 3)
|
||||
So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesFour := queryRes.Series[3]
|
||||
So(seriesFour.Name, ShouldEqual, "server2 Average @value")
|
||||
So(seriesFour.Points, ShouldHaveLength, 2)
|
||||
So(seriesFour.Points[0][0].Float64, ShouldEqual, 20)
|
||||
So(seriesFour.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesFour.Points[1][0].Float64, ShouldEqual, 32)
|
||||
So(seriesFour.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("With percentiles", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "percentiles", "settings": { "percents": [75, 90] }, "id": "1" }],
|
||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"3": {
|
||||
"buckets": [
|
||||
{
|
||||
"1": { "values": { "75": 3.3, "90": 5.5 } },
|
||||
"doc_count": 10,
|
||||
"key": 1000
|
||||
},
|
||||
{
|
||||
"1": { "values": { "75": 2.3, "90": 4.5 } },
|
||||
"doc_count": 15,
|
||||
"key": 2000
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 2)
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "p75")
|
||||
So(seriesOne.Points, ShouldHaveLength, 2)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 3.3)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesOne.Points[1][0].Float64, ShouldEqual, 2.3)
|
||||
So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "p90")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 2)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 5.5)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesTwo.Points[1][0].Float64, ShouldEqual, 4.5)
|
||||
So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("With extended stats", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "extended_stats", "meta": { "max": true, "std_deviation_bounds_upper": true, "std_deviation_bounds_lower": true }, "id": "1" }],
|
||||
"bucketAggs": [
|
||||
{ "type": "terms", "field": "host", "id": "3" },
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
|
||||
]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"3": {
|
||||
"buckets": [
|
||||
{
|
||||
"key": "server1",
|
||||
"4": {
|
||||
"buckets": [
|
||||
{
|
||||
"1": {
|
||||
"max": 10.2,
|
||||
"min": 5.5,
|
||||
"std_deviation_bounds": { "upper": 3, "lower": -2 }
|
||||
},
|
||||
"doc_count": 10,
|
||||
"key": 1000
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"key": "server2",
|
||||
"4": {
|
||||
"buckets": [
|
||||
{
|
||||
"1": {
|
||||
"max": 15.5,
|
||||
"min": 3.4,
|
||||
"std_deviation_bounds": { "upper": 4, "lower": -1 }
|
||||
},
|
||||
"doc_count": 10,
|
||||
"key": 1000
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 6)
|
||||
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "server1 Max")
|
||||
So(seriesOne.Points, ShouldHaveLength, 1)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 10.2)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "server1 Std Dev Lower")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 1)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, -2)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
|
||||
seriesThree := queryRes.Series[2]
|
||||
So(seriesThree.Name, ShouldEqual, "server1 Std Dev Upper")
|
||||
So(seriesThree.Points, ShouldHaveLength, 1)
|
||||
So(seriesThree.Points[0][0].Float64, ShouldEqual, 3)
|
||||
So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
|
||||
seriesFour := queryRes.Series[3]
|
||||
So(seriesFour.Name, ShouldEqual, "server2 Max")
|
||||
So(seriesFour.Points, ShouldHaveLength, 1)
|
||||
So(seriesFour.Points[0][0].Float64, ShouldEqual, 15.5)
|
||||
So(seriesFour.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
|
||||
seriesFive := queryRes.Series[4]
|
||||
So(seriesFive.Name, ShouldEqual, "server2 Std Dev Lower")
|
||||
So(seriesFive.Points, ShouldHaveLength, 1)
|
||||
So(seriesFive.Points[0][0].Float64, ShouldEqual, -1)
|
||||
So(seriesFive.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
|
||||
seriesSix := queryRes.Series[5]
|
||||
So(seriesSix.Name, ShouldEqual, "server2 Std Dev Upper")
|
||||
So(seriesSix.Points, ShouldHaveLength, 1)
|
||||
So(seriesSix.Points[0][0].Float64, ShouldEqual, 4)
|
||||
So(seriesSix.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
})
|
||||
|
||||
Convey("Single group by with alias pattern", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"alias": "{{term @host}} {{metric}} and {{not_exist}} {{@host}}",
|
||||
"metrics": [{ "type": "count", "id": "1" }],
|
||||
"bucketAggs": [
|
||||
{ "type": "terms", "field": "@host", "id": "2" },
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
|
||||
},
|
||||
"doc_count": 4,
|
||||
"key": "server1"
|
||||
},
|
||||
{
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
|
||||
},
|
||||
"doc_count": 10,
|
||||
"key": "server2"
|
||||
},
|
||||
{
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
|
||||
},
|
||||
"doc_count": 10,
|
||||
"key": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 3)
|
||||
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "server1 Count and {{not_exist}} server1")
|
||||
So(seriesOne.Points, ShouldHaveLength, 2)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
|
||||
So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "server2 Count and {{not_exist}} server2")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 2)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
|
||||
So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesThree := queryRes.Series[2]
|
||||
So(seriesThree.Name, ShouldEqual, "0 Count and {{not_exist}} 0")
|
||||
So(seriesThree.Points, ShouldHaveLength, 2)
|
||||
So(seriesThree.Points[0][0].Float64, ShouldEqual, 2)
|
||||
So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesThree.Points[1][0].Float64, ShouldEqual, 8)
|
||||
So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("Histogram response", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "count", "id": "1" }],
|
||||
"bucketAggs": [{ "type": "histogram", "field": "bytes", "id": "3" }]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }, { "doc_count": 2, "key": 3000 }]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Tables, ShouldHaveLength, 1)
|
||||
|
||||
rows := queryRes.Tables[0].Rows
|
||||
So(rows, ShouldHaveLength, 3)
|
||||
cols := queryRes.Tables[0].Columns
|
||||
So(cols, ShouldHaveLength, 2)
|
||||
|
||||
So(cols[0].Text, ShouldEqual, "bytes")
|
||||
So(cols[1].Text, ShouldEqual, "Count")
|
||||
|
||||
So(rows[0][0].(null.Float).Float64, ShouldEqual, 1000)
|
||||
So(rows[0][1].(null.Float).Float64, ShouldEqual, 1)
|
||||
So(rows[1][0].(null.Float).Float64, ShouldEqual, 2000)
|
||||
So(rows[1][1].(null.Float).Float64, ShouldEqual, 3)
|
||||
So(rows[2][0].(null.Float).Float64, ShouldEqual, 3000)
|
||||
So(rows[2][1].(null.Float).Float64, ShouldEqual, 2)
|
||||
})
|
||||
|
||||
Convey("With two filters agg", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "count", "id": "1" }],
|
||||
"bucketAggs": [
|
||||
{
|
||||
"type": "filters",
|
||||
"id": "2",
|
||||
"settings": {
|
||||
"filters": [{ "query": "@metric:cpu" }, { "query": "@metric:logins.count" }]
|
||||
}
|
||||
},
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": {
|
||||
"@metric:cpu": {
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
|
||||
}
|
||||
},
|
||||
"@metric:logins.count": {
|
||||
"3": {
|
||||
"buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 2)
|
||||
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "@metric:cpu")
|
||||
So(seriesOne.Points, ShouldHaveLength, 2)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
|
||||
So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "@metric:logins.count")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 2)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
|
||||
So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
|
||||
So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
|
||||
})
|
||||
|
||||
Convey("With dropfirst and last aggregation", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
|
||||
"bucketAggs": [
|
||||
{
|
||||
"type": "date_histogram",
|
||||
"field": "@timestamp",
|
||||
"id": "2",
|
||||
"settings": { "trimEdges": 1 }
|
||||
}
|
||||
]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"1": { "value": 1000 },
|
||||
"key": 1,
|
||||
"doc_count": 369
|
||||
},
|
||||
{
|
||||
"1": { "value": 2000 },
|
||||
"key": 2,
|
||||
"doc_count": 200
|
||||
},
|
||||
{
|
||||
"1": { "value": 2000 },
|
||||
"key": 3,
|
||||
"doc_count": 200
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Series, ShouldHaveLength, 2)
|
||||
|
||||
seriesOne := queryRes.Series[0]
|
||||
So(seriesOne.Name, ShouldEqual, "Average")
|
||||
So(seriesOne.Points, ShouldHaveLength, 1)
|
||||
So(seriesOne.Points[0][0].Float64, ShouldEqual, 2000)
|
||||
So(seriesOne.Points[0][1].Float64, ShouldEqual, 2)
|
||||
|
||||
seriesTwo := queryRes.Series[1]
|
||||
So(seriesTwo.Name, ShouldEqual, "Count")
|
||||
So(seriesTwo.Points, ShouldHaveLength, 1)
|
||||
So(seriesTwo.Points[0][0].Float64, ShouldEqual, 200)
|
||||
So(seriesTwo.Points[0][1].Float64, ShouldEqual, 2)
|
||||
})
|
||||
|
||||
Convey("No group by time", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
|
||||
"bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"1": { "value": 1000 },
|
||||
"key": "server-1",
|
||||
"doc_count": 369
|
||||
},
|
||||
{
|
||||
"1": { "value": 2000 },
|
||||
"key": "server-2",
|
||||
"doc_count": 200
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Tables, ShouldHaveLength, 1)
|
||||
|
||||
rows := queryRes.Tables[0].Rows
|
||||
So(rows, ShouldHaveLength, 2)
|
||||
cols := queryRes.Tables[0].Columns
|
||||
So(cols, ShouldHaveLength, 3)
|
||||
|
||||
So(cols[0].Text, ShouldEqual, "host")
|
||||
So(cols[1].Text, ShouldEqual, "Average")
|
||||
So(cols[2].Text, ShouldEqual, "Count")
|
||||
|
||||
So(rows[0][0].(string), ShouldEqual, "server-1")
|
||||
So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000)
|
||||
So(rows[0][2].(null.Float).Float64, ShouldEqual, 369)
|
||||
So(rows[1][0].(string), ShouldEqual, "server-2")
|
||||
So(rows[1][1].(null.Float).Float64, ShouldEqual, 2000)
|
||||
So(rows[1][2].(null.Float).Float64, ShouldEqual, 200)
|
||||
})
|
||||
|
||||
Convey("Multiple metrics of same type", func() {
|
||||
targets := map[string]string{
|
||||
"A": `{
|
||||
"timeField": "@timestamp",
|
||||
"metrics": [{ "type": "avg", "field": "test", "id": "1" }, { "type": "avg", "field": "test2", "id": "2" }],
|
||||
"bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }]
|
||||
}`,
|
||||
}
|
||||
response := `{
|
||||
"responses": [
|
||||
{
|
||||
"aggregations": {
|
||||
"2": {
|
||||
"buckets": [
|
||||
{
|
||||
"1": { "value": 1000 },
|
||||
"2": { "value": 3000 },
|
||||
"key": "server-1",
|
||||
"doc_count": 369
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}`
|
||||
rp, err := newResponseParserForTest(targets, response)
|
||||
So(err, ShouldBeNil)
|
||||
result, err := rp.getTimeSeries()
|
||||
So(err, ShouldBeNil)
|
||||
So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
queryRes := result.Results["A"]
|
||||
So(queryRes, ShouldNotBeNil)
|
||||
So(queryRes.Tables, ShouldHaveLength, 1)
|
||||
|
||||
rows := queryRes.Tables[0].Rows
|
||||
So(rows, ShouldHaveLength, 1)
|
||||
cols := queryRes.Tables[0].Columns
|
||||
So(cols, ShouldHaveLength, 3)
|
||||
|
||||
So(cols[0].Text, ShouldEqual, "host")
|
||||
So(cols[1].Text, ShouldEqual, "Average test")
|
||||
So(cols[2].Text, ShouldEqual, "Average test2")
|
||||
|
||||
So(rows[0][0].(string), ShouldEqual, "server-1")
|
||||
So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000)
|
||||
So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000)
|
||||
})
|
||||
|
||||
// Convey("Raw documents query", func() {
|
||||
// targets := map[string]string{
|
||||
// "A": `{
|
||||
// "timeField": "@timestamp",
|
||||
// "metrics": [{ "type": "raw_document", "id": "1" }]
|
||||
// }`,
|
||||
// }
|
||||
// response := `{
|
||||
// "responses": [
|
||||
// {
|
||||
// "hits": {
|
||||
// "total": 100,
|
||||
// "hits": [
|
||||
// {
|
||||
// "_id": "1",
|
||||
// "_type": "type",
|
||||
// "_index": "index",
|
||||
// "_source": { "sourceProp": "asd" },
|
||||
// "fields": { "fieldProp": "field" }
|
||||
// },
|
||||
// {
|
||||
// "_source": { "sourceProp": "asd2" },
|
||||
// "fields": { "fieldProp": "field2" }
|
||||
// }
|
||||
// ]
|
||||
// }
|
||||
// }
|
||||
// ]
|
||||
// }`
|
||||
// rp, err := newResponseParserForTest(targets, response)
|
||||
// So(err, ShouldBeNil)
|
||||
// result, err := rp.getTimeSeries()
|
||||
// So(err, ShouldBeNil)
|
||||
// So(result.Results, ShouldHaveLength, 1)
|
||||
|
||||
// queryRes := result.Results["A"]
|
||||
// So(queryRes, ShouldNotBeNil)
|
||||
// So(queryRes.Tables, ShouldHaveLength, 1)
|
||||
|
||||
// rows := queryRes.Tables[0].Rows
|
||||
// So(rows, ShouldHaveLength, 1)
|
||||
// cols := queryRes.Tables[0].Columns
|
||||
// So(cols, ShouldHaveLength, 3)
|
||||
|
||||
// So(cols[0].Text, ShouldEqual, "host")
|
||||
// So(cols[1].Text, ShouldEqual, "Average test")
|
||||
// So(cols[2].Text, ShouldEqual, "Average test2")
|
||||
|
||||
// So(rows[0][0].(string), ShouldEqual, "server-1")
|
||||
// So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000)
|
||||
// So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000)
|
||||
// })
|
||||
})
|
||||
}
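
// Helper note (added): newResponseParserForTest below wraps the JSON "targets" map in a
// tsdb.TsdbQuery and unmarshals the canned response body, so each test case above only has
// to describe its inputs declaratively.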

func newResponseParserForTest(tsdbQueries map[string]string, responseBody string) (*responseParser, error) {
	from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
	to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
	fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
	toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))
	tsdbQuery := &tsdb.TsdbQuery{
		Queries:   []*tsdb.Query{},
		TimeRange: tsdb.NewTimeRange(fromStr, toStr),
	}

	for refID, tsdbQueryBody := range tsdbQueries {
		tsdbQueryJSON, err := simplejson.NewJson([]byte(tsdbQueryBody))
		if err != nil {
			return nil, err
		}

		tsdbQuery.Queries = append(tsdbQuery.Queries, &tsdb.Query{
			Model: tsdbQueryJSON,
			RefId: refID,
		})
	}

	var response es.MultiSearchResponse
	err := json.Unmarshal([]byte(responseBody), &response)
	if err != nil {
		return nil, err
	}

	tsQueryParser := newTimeSeriesQueryParser()
	queries, err := tsQueryParser.parse(tsdbQuery)
	if err != nil {
		return nil, err
	}

	return newResponseParser(response.Responses, queries), nil
}
318 pkg/tsdb/elasticsearch/time_series_query.go Normal file
@ -0,0 +1,318 @@
package elasticsearch

import (
	"fmt"
	"strconv"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)

type timeSeriesQuery struct {
	client             es.Client
	tsdbQuery          *tsdb.TsdbQuery
	intervalCalculator tsdb.IntervalCalculator
}

var newTimeSeriesQuery = func(client es.Client, tsdbQuery *tsdb.TsdbQuery, intervalCalculator tsdb.IntervalCalculator) *timeSeriesQuery {
	return &timeSeriesQuery{
		client:             client,
		tsdbQuery:          tsdbQuery,
		intervalCalculator: intervalCalculator,
	}
}

func (e *timeSeriesQuery) execute() (*tsdb.Response, error) {
	result := &tsdb.Response{}
	result.Results = make(map[string]*tsdb.QueryResult)

	tsQueryParser := newTimeSeriesQueryParser()
	queries, err := tsQueryParser.parse(e.tsdbQuery)
	if err != nil {
		return nil, err
	}

	ms := e.client.MultiSearch()

	from := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetFromAsMsEpoch())
	to := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetToAsMsEpoch())

	for _, q := range queries {
		minInterval, err := e.client.GetMinInterval(q.Interval)
		if err != nil {
			return nil, err
		}
		interval := e.intervalCalculator.Calculate(e.tsdbQuery.TimeRange, minInterval)

		b := ms.Search(interval)
		b.Size(0)
		filters := b.Query().Bool().Filter()
		filters.AddDateRangeFilter(e.client.GetTimeField(), to, from, es.DateFormatEpochMS)

		if q.RawQuery != "" {
			filters.AddQueryStringFilter(q.RawQuery, true)
		}

		if len(q.BucketAggs) == 0 {
			if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" {
				result.Results[q.RefID] = &tsdb.QueryResult{
					RefId:       q.RefID,
					Error:       fmt.Errorf("invalid query, missing metrics and aggregations"),
					ErrorString: "invalid query, missing metrics and aggregations",
				}
				continue
			}
			metric := q.Metrics[0]
			b.Size(metric.Settings.Get("size").MustInt(500))
			b.SortDesc("@timestamp", "boolean")
			b.AddDocValueField("@timestamp")
			continue
		}

		aggBuilder := b.Agg()

		// iterate backwards to create aggregations bottom-down
		for _, bucketAgg := range q.BucketAggs {
			switch bucketAgg.Type {
			case "date_histogram":
				aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to)
			case "histogram":
				aggBuilder = addHistogramAgg(aggBuilder, bucketAgg)
			case "filters":
				aggBuilder = addFiltersAgg(aggBuilder, bucketAgg)
			case "terms":
				aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics)
			case "geohash_grid":
				aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg)
			}
		}

		for _, m := range q.Metrics {
			if m.Type == "count" {
				continue
			}

			if isPipelineAgg(m.Type) {
				if _, err := strconv.Atoi(m.PipelineAggregate); err == nil {
					aggBuilder.Pipeline(m.ID, m.Type, m.PipelineAggregate, func(a *es.PipelineAggregation) {
						a.Settings = m.Settings.MustMap()
					})
				} else {
					continue
				}
			} else {
				aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
					a.Settings = m.Settings.MustMap()
				})
			}
		}
	}

	req, err := ms.Build()
	if err != nil {
		return nil, err
	}

	res, err := e.client.ExecuteMultisearch(req)
	if err != nil {
		return nil, err
	}

	rp := newResponseParser(res.Responses, queries)
	return rp.getTimeSeries()
}
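
// Illustration (added; not part of the change): for a query with a terms bucket agg ("2")
// wrapping a date_histogram ("3") and a count metric, the builder calls above produce a
// multi-search request whose aggregation section is shaped roughly like
//
//	"aggs": {
//	  "2": {
//	    "terms": { "field": "host", ... },
//	    "aggs": { "3": { "date_histogram": { "field": "@timestamp", ... } } }
//	  }
//	}
//
// The exact JSON is produced by es.MultiSearchRequestBuilder; this is only a sketch.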

func addDateHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, timeFrom, timeTo string) es.AggBuilder {
	aggBuilder.DateHistogram(bucketAgg.ID, bucketAgg.Field, func(a *es.DateHistogramAgg, b es.AggBuilder) {
		a.Interval = bucketAgg.Settings.Get("interval").MustString("auto")
		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)
		a.ExtendedBounds = &es.ExtendedBounds{Min: timeFrom, Max: timeTo}
		a.Format = bucketAgg.Settings.Get("format").MustString(es.DateFormatEpochMS)

		if a.Interval == "auto" {
			a.Interval = "$__interval"
		}

		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
			a.Missing = &missing
		}

		aggBuilder = b
	})

	return aggBuilder
}

func addHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	aggBuilder.Histogram(bucketAgg.ID, bucketAgg.Field, func(a *es.HistogramAgg, b es.AggBuilder) {
		a.Interval = bucketAgg.Settings.Get("interval").MustInt(1000)
		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)

		if missing, err := bucketAgg.Settings.Get("missing").Int(); err == nil {
			a.Missing = &missing
		}

		aggBuilder = b
	})

	return aggBuilder
}
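
// Note (added): when the panel leaves the interval on "auto", addDateHistogramAgg above falls
// back to the "$__interval" placeholder; the tests in time_series_query_test.go assert the
// placeholder itself, and it is presumably substituted with the calculated interval when the
// search request is serialized by the client.
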
func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*MetricAgg) es.AggBuilder {
	aggBuilder.Terms(bucketAgg.ID, bucketAgg.Field, func(a *es.TermsAggregation, b es.AggBuilder) {
		if size, err := bucketAgg.Settings.Get("size").Int(); err == nil {
			a.Size = size
		} else if size, err := bucketAgg.Settings.Get("size").String(); err == nil {
			a.Size, err = strconv.Atoi(size)
			if err != nil {
				a.Size = 500
			}
		} else {
			a.Size = 500
		}
		if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil {
			a.MinDocCount = &minDocCount
		}
		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
			a.Missing = &missing
		}

		if orderBy, err := bucketAgg.Settings.Get("orderBy").String(); err == nil {
			a.Order[orderBy] = bucketAgg.Settings.Get("order").MustString("desc")

			if _, err := strconv.Atoi(orderBy); err == nil {
				for _, m := range metrics {
					if m.ID == orderBy {
						b.Metric(m.ID, m.Type, m.Field, nil)
						break
					}
				}
			}
		}

		aggBuilder = b
	})

	return aggBuilder
}

func addFiltersAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	filters := make(map[string]interface{})
	for _, filter := range bucketAgg.Settings.Get("filters").MustArray() {
		json := simplejson.NewFromAny(filter)
		query := json.Get("query").MustString()
		label := json.Get("label").MustString()
		if label == "" {
			label = query
		}
		filters[label] = &es.QueryStringFilter{Query: query, AnalyzeWildcard: true}
	}

	if len(filters) > 0 {
		aggBuilder.Filters(bucketAgg.ID, func(a *es.FiltersAggregation, b es.AggBuilder) {
			a.Filters = filters
			aggBuilder = b
		})
	}

	return aggBuilder
}

func addGeoHashGridAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	aggBuilder.GeoHashGrid(bucketAgg.ID, bucketAgg.Field, func(a *es.GeoHashGridAggregation, b es.AggBuilder) {
		a.Precision = bucketAgg.Settings.Get("precision").MustInt(3)
		aggBuilder = b
	})

	return aggBuilder
}

type timeSeriesQueryParser struct{}

func newTimeSeriesQueryParser() *timeSeriesQueryParser {
	return &timeSeriesQueryParser{}
}

func (p *timeSeriesQueryParser) parse(tsdbQuery *tsdb.TsdbQuery) ([]*Query, error) {
	queries := make([]*Query, 0)
	for _, q := range tsdbQuery.Queries {
		model := q.Model
		timeField, err := model.Get("timeField").String()
		if err != nil {
			return nil, err
		}
		rawQuery := model.Get("query").MustString()
		bucketAggs, err := p.parseBucketAggs(model)
		if err != nil {
			return nil, err
		}
		metrics, err := p.parseMetrics(model)
		if err != nil {
			return nil, err
		}
		alias := model.Get("alias").MustString("")
		interval := strconv.FormatInt(q.IntervalMs, 10) + "ms"

		queries = append(queries, &Query{
			TimeField:  timeField,
			RawQuery:   rawQuery,
			BucketAggs: bucketAggs,
			Metrics:    metrics,
			Alias:      alias,
			Interval:   interval,
			RefID:      q.RefId,
		})
	}

	return queries, nil
}

func (p *timeSeriesQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
	var err error
	var result []*BucketAgg
	for _, t := range model.Get("bucketAggs").MustArray() {
		aggJSON := simplejson.NewFromAny(t)
		agg := &BucketAgg{}

		agg.Type, err = aggJSON.Get("type").String()
		if err != nil {
			return nil, err
		}

		agg.ID, err = aggJSON.Get("id").String()
		if err != nil {
			return nil, err
		}

		agg.Field = aggJSON.Get("field").MustString()
		agg.Settings = simplejson.NewFromAny(aggJSON.Get("settings").MustMap())

		result = append(result, agg)
	}
	return result, nil
}

func (p *timeSeriesQueryParser) parseMetrics(model *simplejson.Json) ([]*MetricAgg, error) {
	var err error
	var result []*MetricAgg
	for _, t := range model.Get("metrics").MustArray() {
		metricJSON := simplejson.NewFromAny(t)
		metric := &MetricAgg{}

		metric.Field = metricJSON.Get("field").MustString()
		metric.Hide = metricJSON.Get("hide").MustBool(false)
		metric.ID = metricJSON.Get("id").MustString()
		metric.PipelineAggregate = metricJSON.Get("pipelineAgg").MustString()
		metric.Settings = simplejson.NewFromAny(metricJSON.Get("settings").MustMap())
		metric.Meta = simplejson.NewFromAny(metricJSON.Get("meta").MustMap())

		metric.Type, err = metricJSON.Get("type").String()
		if err != nil {
			return nil, err
		}

		result = append(result, metric)
	}
	return result, nil
}
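
// Usage sketch (added; mirrors how the executor is driven in time_series_query_test.go below):
//
//	query := newTimeSeriesQuery(client, tsdbQuery, tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: minInterval}))
//	resp, err := query.execute()
//
// execute() adds one search per query to a single multi-search request, sends it through the
// es.Client, and hands the responses to newResponseParser for conversion into series/tables.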
604 pkg/tsdb/elasticsearch/time_series_query_test.go Normal file
@ -0,0 +1,604 @@
package elasticsearch

import (
	"fmt"
	"testing"
	"time"

	"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/tsdb"
	. "github.com/smartystreets/goconvey/convey"
)

func TestExecuteTimeSeriesQuery(t *testing.T) {
	from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
	to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
	fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
	toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))

	Convey("Test execute time series query", t, func() {
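		// Reading note (added): these cases never reach a real cluster. The fakeClient defined
		// at the bottom of this file records every request passed to ExecuteMultisearch, and the
		// assertions below inspect that recorded multi-search request.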
Convey("With defaults on es 2", func() {
|
||||
c := newFakeClient(2)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
|
||||
"metrics": [{"type": "count", "id": "0" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter)
|
||||
So(rangeFilter.Key, ShouldEqual, c.timeField)
|
||||
So(rangeFilter.Lte, ShouldEqual, toStr)
|
||||
So(rangeFilter.Gte, ShouldEqual, fromStr)
|
||||
So(rangeFilter.Format, ShouldEqual, es.DateFormatEpochMS)
|
||||
So(sr.Aggs[0].Key, ShouldEqual, "2")
|
||||
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
|
||||
So(dateHistogramAgg.Field, ShouldEqual, "@timestamp")
|
||||
So(dateHistogramAgg.ExtendedBounds.Min, ShouldEqual, fromStr)
|
||||
So(dateHistogramAgg.ExtendedBounds.Max, ShouldEqual, toStr)
|
||||
})
|
||||
|
||||
Convey("With defaults on es 5", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
|
||||
"metrics": [{"type": "count", "id": "0" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
So(sr.Query.Bool.Filters[0].(*es.RangeFilter).Key, ShouldEqual, c.timeField)
|
||||
So(sr.Aggs[0].Key, ShouldEqual, "2")
|
||||
So(sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Min, ShouldEqual, fromStr)
|
||||
So(sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Max, ShouldEqual, toStr)
|
||||
})
|
||||
|
||||
Convey("With multiple bucket aggs", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{ "type": "terms", "field": "@host", "id": "2" },
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
],
|
||||
"metrics": [{"type": "count", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "2")
|
||||
So(firstLevel.Aggregation.Aggregation.(*es.TermsAggregation).Field, ShouldEqual, "@host")
|
||||
secondLevel := firstLevel.Aggregation.Aggs[0]
|
||||
So(secondLevel.Key, ShouldEqual, "3")
|
||||
So(secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp")
|
||||
})
|
||||
|
||||
Convey("With select field", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
|
||||
],
|
||||
"metrics": [{"type": "avg", "field": "@value", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "2")
|
||||
So(firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp")
|
||||
secondLevel := firstLevel.Aggregation.Aggs[0]
|
||||
So(secondLevel.Key, ShouldEqual, "1")
|
||||
So(secondLevel.Aggregation.Type, ShouldEqual, "avg")
|
||||
So(secondLevel.Aggregation.Aggregation.(*es.MetricAggregation).Field, ShouldEqual, "@value")
|
||||
})
|
||||
|
||||
Convey("With term agg and order by metric agg", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{
|
||||
"type": "terms",
|
||||
"field": "@host",
|
||||
"id": "2",
|
||||
"settings": { "size": "5", "order": "asc", "orderBy": "5" }
|
||||
},
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
],
|
||||
"metrics": [
|
||||
{"type": "count", "id": "1" },
|
||||
{"type": "avg", "field": "@value", "id": "5" }
|
||||
]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
avgAggOrderBy := sr.Aggs[0].Aggregation.Aggs[0]
|
||||
So(avgAggOrderBy.Key, ShouldEqual, "5")
|
||||
So(avgAggOrderBy.Aggregation.Type, ShouldEqual, "avg")
|
||||
|
||||
avgAgg := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggs[0]
|
||||
So(avgAgg.Key, ShouldEqual, "5")
|
||||
So(avgAgg.Aggregation.Type, ShouldEqual, "avg")
|
||||
})
|
||||
|
||||
Convey("With metric percentiles", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
||||
],
|
||||
"metrics": [
|
||||
{
|
||||
"id": "1",
|
||||
"type": "percentiles",
|
||||
"field": "@load_time",
|
||||
"settings": {
|
||||
"percents": [ "1", "2", "3", "4" ]
|
||||
}
|
||||
}
|
||||
]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
percentilesAgg := sr.Aggs[0].Aggregation.Aggs[0]
|
||||
So(percentilesAgg.Key, ShouldEqual, "1")
|
||||
So(percentilesAgg.Aggregation.Type, ShouldEqual, "percentiles")
|
||||
metricAgg := percentilesAgg.Aggregation.Aggregation.(*es.MetricAggregation)
|
||||
percents := metricAgg.Settings["percents"].([]interface{})
|
||||
So(percents, ShouldHaveLength, 4)
|
||||
So(percents[0], ShouldEqual, "1")
|
||||
So(percents[1], ShouldEqual, "2")
|
||||
So(percents[2], ShouldEqual, "3")
|
||||
So(percents[3], ShouldEqual, "4")
|
||||
})
|
||||
|
||||
Convey("With filters aggs on es 2", func() {
|
||||
c := newFakeClient(2)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{
|
||||
"id": "2",
|
||||
"type": "filters",
|
||||
"settings": {
|
||||
"filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ]
|
||||
}
|
||||
},
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
|
||||
],
|
||||
"metrics": [{"type": "count", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
filtersAgg := sr.Aggs[0]
|
||||
So(filtersAgg.Key, ShouldEqual, "2")
|
||||
So(filtersAgg.Aggregation.Type, ShouldEqual, "filters")
|
||||
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
|
||||
So(fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:cpu")
|
||||
So(fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:logins.count")
|
||||
|
||||
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
|
||||
So(dateHistogramAgg.Key, ShouldEqual, "4")
|
||||
So(dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp")
|
||||
})
|
||||
|
||||
Convey("With filters aggs on es 5", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{
|
||||
"id": "2",
|
||||
"type": "filters",
|
||||
"settings": {
|
||||
"filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ]
|
||||
}
|
||||
},
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
|
||||
],
|
||||
"metrics": [{"type": "count", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
filtersAgg := sr.Aggs[0]
|
||||
So(filtersAgg.Key, ShouldEqual, "2")
|
||||
So(filtersAgg.Aggregation.Type, ShouldEqual, "filters")
|
||||
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
|
||||
So(fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:cpu")
|
||||
So(fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:logins.count")
|
||||
|
||||
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
|
||||
So(dateHistogramAgg.Key, ShouldEqual, "4")
|
||||
So(dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp")
|
||||
})
|
||||
|
||||
Convey("With raw document metric", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [],
|
||||
"metrics": [{ "id": "1", "type": "raw_document", "settings": {} }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
So(sr.Size, ShouldEqual, 500)
|
||||
})
|
||||
|
||||
Convey("With raw document metric size set", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [],
|
||||
"metrics": [{ "id": "1", "type": "raw_document", "settings": { "size": 1337 } }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
So(sr.Size, ShouldEqual, 1337)
|
||||
})
|
||||
|
||||
Convey("With date histogram agg", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{
|
||||
"id": "2",
|
||||
"type": "date_histogram",
|
||||
"field": "@timestamp",
|
||||
"settings": { "interval": "auto", "min_doc_count": 2 }
|
||||
}
|
||||
],
|
||||
"metrics": [{"type": "count", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "2")
|
||||
So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
hAgg := firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg)
|
||||
So(hAgg.Field, ShouldEqual, "@timestamp")
|
||||
So(hAgg.Interval, ShouldEqual, "$__interval")
|
||||
So(hAgg.MinDocCount, ShouldEqual, 2)
|
||||
})
|
||||
|
||||
Convey("With histogram agg", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{
|
||||
"id": "3",
|
||||
"type": "histogram",
|
||||
"field": "bytes",
|
||||
"settings": { "interval": 10, "min_doc_count": 2, "missing": 5 }
|
||||
}
|
||||
],
|
||||
"metrics": [{"type": "count", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "3")
|
||||
So(firstLevel.Aggregation.Type, ShouldEqual, "histogram")
|
||||
hAgg := firstLevel.Aggregation.Aggregation.(*es.HistogramAgg)
|
||||
So(hAgg.Field, ShouldEqual, "bytes")
|
||||
So(hAgg.Interval, ShouldEqual, 10)
|
||||
So(hAgg.MinDocCount, ShouldEqual, 2)
|
||||
So(*hAgg.Missing, ShouldEqual, 5)
|
||||
})
|
||||
|
||||
Convey("With geo hash grid agg", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{
|
||||
"id": "3",
|
||||
"type": "geohash_grid",
|
||||
"field": "@location",
|
||||
"settings": { "precision": 3 }
|
||||
}
|
||||
],
|
||||
"metrics": [{"type": "count", "id": "1" }]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "3")
|
||||
So(firstLevel.Aggregation.Type, ShouldEqual, "geohash_grid")
|
||||
ghGridAgg := firstLevel.Aggregation.Aggregation.(*es.GeoHashGridAggregation)
|
||||
So(ghGridAgg.Field, ShouldEqual, "@location")
|
||||
So(ghGridAgg.Precision, ShouldEqual, 3)
|
||||
})
|
||||
|
||||
Convey("With moving average", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
|
||||
],
|
||||
"metrics": [
|
||||
{ "id": "3", "type": "sum", "field": "@value" },
|
||||
{
|
||||
"id": "2",
|
||||
"type": "moving_avg",
|
||||
"field": "3",
|
||||
"pipelineAgg": "3"
|
||||
}
|
||||
]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "4")
|
||||
So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
So(firstLevel.Aggregation.Aggs, ShouldHaveLength, 2)
|
||||
|
||||
sumAgg := firstLevel.Aggregation.Aggs[0]
|
||||
So(sumAgg.Key, ShouldEqual, "3")
|
||||
So(sumAgg.Aggregation.Type, ShouldEqual, "sum")
|
||||
mAgg := sumAgg.Aggregation.Aggregation.(*es.MetricAggregation)
|
||||
So(mAgg.Field, ShouldEqual, "@value")
|
||||
|
||||
movingAvgAgg := firstLevel.Aggregation.Aggs[1]
|
||||
So(movingAvgAgg.Key, ShouldEqual, "2")
|
||||
So(movingAvgAgg.Aggregation.Type, ShouldEqual, "moving_avg")
|
||||
pl := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
|
||||
So(pl.BucketPath, ShouldEqual, "3")
|
||||
})
|
||||
|
||||
Convey("With broken moving average", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "5" }
|
||||
],
|
||||
"metrics": [
|
||||
{ "id": "3", "type": "sum", "field": "@value" },
|
||||
{
|
||||
"id": "2",
|
||||
"type": "moving_avg",
|
||||
"pipelineAgg": "3"
|
||||
},
|
||||
{
|
||||
"id": "4",
|
||||
"type": "moving_avg",
|
||||
"pipelineAgg": "Metric to apply moving average"
|
||||
}
|
||||
]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "5")
|
||||
So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
|
||||
So(firstLevel.Aggregation.Aggs, ShouldHaveLength, 2)
|
||||
|
||||
movingAvgAgg := firstLevel.Aggregation.Aggs[1]
|
||||
So(movingAvgAgg.Key, ShouldEqual, "2")
|
||||
plAgg := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
|
||||
So(plAgg.BucketPath, ShouldEqual, "3")
|
||||
})
|
||||
|
||||
Convey("With derivative", func() {
|
||||
c := newFakeClient(5)
|
||||
_, err := executeTsdbQuery(c, `{
|
||||
"timeField": "@timestamp",
|
||||
"bucketAggs": [
|
||||
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
|
||||
],
|
||||
"metrics": [
|
||||
{ "id": "3", "type": "sum", "field": "@value" },
|
||||
{
|
||||
"id": "2",
|
||||
"type": "derivative",
|
||||
"pipelineAgg": "3"
|
||||
}
|
||||
]
|
||||
}`, from, to, 15*time.Second)
|
||||
So(err, ShouldBeNil)
|
||||
sr := c.multisearchRequests[0].Requests[0]
|
||||
|
||||
firstLevel := sr.Aggs[0]
|
||||
So(firstLevel.Key, ShouldEqual, "4")
|
||||
So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram")
|
||||
|
||||
derivativeAgg := firstLevel.Aggregation.Aggs[1]
|
||||
So(derivativeAgg.Key, ShouldEqual, "2")
|
||||
plAgg := derivativeAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
|
||||
So(plAgg.BucketPath, ShouldEqual, "3")
|
||||
})
|
||||
|
||||
})
|
||||
}
|
||||

type fakeClient struct {
	version             int
	timeField           string
	multiSearchResponse *es.MultiSearchResponse
	multiSearchError    error
	builder             *es.MultiSearchRequestBuilder
	multisearchRequests []*es.MultiSearchRequest
}

func newFakeClient(version int) *fakeClient {
	return &fakeClient{
		version:             version,
		timeField:           "@timestamp",
		multisearchRequests: make([]*es.MultiSearchRequest, 0),
		multiSearchResponse: &es.MultiSearchResponse{},
	}
}

func (c *fakeClient) GetVersion() int {
	return c.version
}

func (c *fakeClient) GetTimeField() string {
	return c.timeField
}

func (c *fakeClient) GetMinInterval(queryInterval string) (time.Duration, error) {
	return 15 * time.Second, nil
}

func (c *fakeClient) ExecuteMultisearch(r *es.MultiSearchRequest) (*es.MultiSearchResponse, error) {
	c.multisearchRequests = append(c.multisearchRequests, r)
	return c.multiSearchResponse, c.multiSearchError
}

func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder {
	c.builder = es.NewMultiSearchRequestBuilder(c.version)
	return c.builder
}

func newTsdbQuery(body string) (*tsdb.TsdbQuery, error) {
	json, err := simplejson.NewJson([]byte(body))
	if err != nil {
		return nil, err
	}
	return &tsdb.TsdbQuery{
		Queries: []*tsdb.Query{
			{
				Model: json,
			},
		},
	}, nil
}

func executeTsdbQuery(c es.Client, body string, from, to time.Time, minInterval time.Duration) (*tsdb.Response, error) {
	json, err := simplejson.NewJson([]byte(body))
	if err != nil {
		return nil, err
	}
	fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
	toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))
	tsdbQuery := &tsdb.TsdbQuery{
		Queries: []*tsdb.Query{
			{
				Model: json,
			},
		},
		TimeRange: tsdb.NewTimeRange(fromStr, toStr),
	}
	query := newTimeSeriesQuery(c, tsdbQuery, tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: minInterval}))
	return query.execute()
}
|
||||
|
||||
func TestTimeSeriesQueryParser(t *testing.T) {
|
||||
Convey("Test time series query parser", t, func() {
|
||||
p := newTimeSeriesQueryParser()
|
||||
|
||||
Convey("Should be able to parse query", func() {
|
||||
body := `{
|
||||
"timeField": "@timestamp",
|
||||
"query": "@metric:cpu",
|
||||
"alias": "{{@hostname}} {{metric}}",
|
||||
"metrics": [
|
||||
{
|
||||
"field": "@value",
|
||||
"id": "1",
|
||||
"meta": {},
|
||||
"settings": {
|
||||
"percents": [
|
||||
"90"
|
||||
]
|
||||
},
|
||||
"type": "percentiles"
|
||||
},
|
||||
{
|
||||
"type": "count",
|
||||
"field": "select field",
|
||||
"id": "4",
|
||||
"settings": {},
|
||||
"meta": {}
|
||||
}
|
||||
],
|
||||
"bucketAggs": [
|
||||
{
|
||||
"fake": true,
|
||||
"field": "@hostname",
|
||||
"id": "3",
|
||||
"settings": {
|
||||
"min_doc_count": 1,
|
||||
"order": "desc",
|
||||
"orderBy": "_term",
|
||||
"size": "10"
|
||||
},
|
||||
"type": "terms"
|
||||
},
|
||||
{
|
||||
"field": "@timestamp",
|
||||
"id": "2",
|
||||
"settings": {
|
||||
"interval": "5m",
|
||||
"min_doc_count": 0,
|
||||
"trimEdges": 0
|
||||
},
|
||||
"type": "date_histogram"
|
||||
}
|
||||
]
|
||||
}`
|
||||
tsdbQuery, err := newTsdbQuery(body)
|
||||
So(err, ShouldBeNil)
|
||||
queries, err := p.parse(tsdbQuery)
|
||||
So(err, ShouldBeNil)
|
||||
So(queries, ShouldHaveLength, 1)
|
||||
|
||||
q := queries[0]
|
||||
|
||||
So(q.TimeField, ShouldEqual, "@timestamp")
|
||||
So(q.RawQuery, ShouldEqual, "@metric:cpu")
|
||||
So(q.Alias, ShouldEqual, "{{@hostname}} {{metric}}")
|
||||
|
||||
So(q.Metrics, ShouldHaveLength, 2)
|
||||
So(q.Metrics[0].Field, ShouldEqual, "@value")
|
||||
So(q.Metrics[0].ID, ShouldEqual, "1")
|
||||
So(q.Metrics[0].Type, ShouldEqual, "percentiles")
|
||||
So(q.Metrics[0].Hide, ShouldBeFalse)
|
||||
So(q.Metrics[0].PipelineAggregate, ShouldEqual, "")
|
||||
So(q.Metrics[0].Settings.Get("percents").MustStringArray()[0], ShouldEqual, "90")
|
||||
|
||||
So(q.Metrics[1].Field, ShouldEqual, "select field")
|
||||
So(q.Metrics[1].ID, ShouldEqual, "4")
|
||||
So(q.Metrics[1].Type, ShouldEqual, "count")
|
||||
So(q.Metrics[1].Hide, ShouldBeFalse)
|
||||
So(q.Metrics[1].PipelineAggregate, ShouldEqual, "")
|
||||
So(q.Metrics[1].Settings.MustMap(), ShouldBeEmpty)
|
||||
|
||||
So(q.BucketAggs, ShouldHaveLength, 2)
|
||||
So(q.BucketAggs[0].Field, ShouldEqual, "@hostname")
|
||||
So(q.BucketAggs[0].ID, ShouldEqual, "3")
|
||||
So(q.BucketAggs[0].Type, ShouldEqual, "terms")
|
||||
So(q.BucketAggs[0].Settings.Get("min_doc_count").MustInt64(), ShouldEqual, 1)
|
||||
So(q.BucketAggs[0].Settings.Get("order").MustString(), ShouldEqual, "desc")
|
||||
So(q.BucketAggs[0].Settings.Get("orderBy").MustString(), ShouldEqual, "_term")
|
||||
So(q.BucketAggs[0].Settings.Get("size").MustString(), ShouldEqual, "10")
|
||||
|
||||
So(q.BucketAggs[1].Field, ShouldEqual, "@timestamp")
|
||||
So(q.BucketAggs[1].ID, ShouldEqual, "2")
|
||||
So(q.BucketAggs[1].Type, ShouldEqual, "date_histogram")
|
||||
So(q.BucketAggs[1].Settings.Get("interval").MustString(), ShouldEqual, "5m")
|
||||
So(q.BucketAggs[1].Settings.Get("min_doc_count").MustInt64(), ShouldEqual, 0)
|
||||
So(q.BucketAggs[1].Settings.Get("trimEdges").MustInt64(), ShouldEqual, 0)
|
||||
})
|
||||
})
|
||||
}
|
@ -76,5 +76,13 @@ func TestInfluxdbQueryPart(t *testing.T) {
			res := part.Render(query, queryContext, "mean(value)")
			So(res, ShouldEqual, `mean(value) AS "test"`)
		})

		Convey("render count distinct", func() {
			part, err := NewQueryPart("count", []string{})
			So(err, ShouldBeNil)

			res := part.Render(query, queryContext, "distinct(value)")
			So(res, ShouldEqual, `count(distinct(value))`)
		})
	})
}
@@ -601,7 +601,7 @@ func TestMySQL(t *testing.T) {
				Queries: []*tsdb.Query{
					{
						Model: simplejson.NewFromAny(map[string]interface{}{
							"rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1`,
							"rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1,2`,
							"format": "time_series",
						}),
						RefId: "A",
@@ -615,8 +615,8 @@ func TestMySQL(t *testing.T) {
				So(queryResult.Error, ShouldBeNil)

				So(len(queryResult.Series), ShouldEqual, 2)
				So(queryResult.Series[0].Name, ShouldEqual, "Metric B - value one")
				So(queryResult.Series[1].Name, ShouldEqual, "Metric A - value one")
				So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
				So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
			})

			Convey("When doing a metric query grouping by time should return correct series", func() {

@@ -144,10 +144,10 @@ func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
	if timeIndex >= 0 {
		switch value := values[timeIndex].(type) {
		case time.Time:
			values[timeIndex] = EpochPrecisionToMs(float64(value.UnixNano()))
			values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
		case *time.Time:
			if value != nil {
				values[timeIndex] = EpochPrecisionToMs(float64((*value).UnixNano()))
				values[timeIndex] = float64((*value).UnixNano()) / float64(time.Millisecond)
			}
		case int64:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))

@@ -12,14 +12,17 @@ import (
func TestSqlEngine(t *testing.T) {
	Convey("SqlEngine", t, func() {
		dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
		earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)

		Convey("Given row values with time.Time as time columns", func() {
			var nilPointer *time.Time

			fixtures := make([]interface{}, 3)
			fixtures := make([]interface{}, 5)
			fixtures[0] = dt
			fixtures[1] = &dt
			fixtures[2] = nilPointer
			fixtures[2] = earlyDt
			fixtures[3] = &earlyDt
			fixtures[4] = nilPointer

			for i := range fixtures {
				ConvertSqlTimeColumnToEpochMs(fixtures, i)
@@ -27,9 +30,13 @@ func TestSqlEngine(t *testing.T) {

			Convey("When converting them should return epoch time with millisecond precision ", func() {
				expected := float64(dt.UnixNano()) / float64(time.Millisecond)
				expectedEarly := float64(earlyDt.UnixNano()) / float64(time.Millisecond)

				So(fixtures[0].(float64), ShouldEqual, expected)
				So(fixtures[1].(float64), ShouldEqual, expected)
				So(fixtures[2], ShouldBeNil)
				So(fixtures[2].(float64), ShouldEqual, expectedEarly)
				So(fixtures[3].(float64), ShouldEqual, expectedEarly)
				So(fixtures[4], ShouldBeNil)
			})
		})

@@ -41,6 +41,6 @@ export default class ElapsedTime extends PureComponent<any, any> {
    const { elapsed } = this.state;
    const { className, time } = this.props;
    const value = (time || elapsed) / 1000;
    return <span className={className}>{value.toFixed(1)}s</span>;
    return <span className={`elapsed-time ${className}`}>{value.toFixed(1)}s</span>;
  }
}

@@ -4,10 +4,10 @@ import colors from 'app/core/utils/colors';
import TimeSeries from 'app/core/time_series2';

import ElapsedTime from './ElapsedTime';
import Legend from './Legend';
import QueryRows from './QueryRows';
import Graph from './Graph';
import Table from './Table';
import TimePicker, { DEFAULT_RANGE } from './TimePicker';
import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
import { buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from './utils/query';
import { decodePathComponent } from 'app/core/utils/location_util';
@@ -16,39 +16,30 @@ function makeTimeSeriesList(dataList, options) {
  return dataList.map((seriesData, index) => {
    const datapoints = seriesData.datapoints || [];
    const alias = seriesData.target;

    const colorIndex = index % colors.length;
    const color = colors[colorIndex];

    const series = new TimeSeries({
      datapoints: datapoints,
      alias: alias,
      color: color,
      datapoints,
      alias,
      color,
      unit: seriesData.unit,
    });

    if (datapoints && datapoints.length > 0) {
      const last = datapoints[datapoints.length - 1][1];
      const from = options.range.from;
      if (last - from < -10000) {
        series.isOutsideRange = true;
      }
    }

    return series;
  });
}

function parseInitialQueries(initial) {
  if (!initial) {
    return [];
  }
function parseInitialState(initial) {
  try {
    const parsed = JSON.parse(decodePathComponent(initial));
    return parsed.queries.map(q => q.query);
    return {
      queries: parsed.queries.map(q => q.query),
      range: parsed.range,
    };
  } catch (e) {
    console.error(e);
    return [];
    return { queries: [], range: DEFAULT_RANGE };
  }
}

@@ -60,6 +51,8 @@ interface IExploreState {
  latency: number;
  loading: any;
  queries: any;
  queryError: any;
  range: any;
  requestOptions: any;
  showingGraph: boolean;
  showingTable: boolean;
@@ -72,7 +65,7 @@ export class Explore extends React.Component<any, IExploreState> {

  constructor(props) {
    super(props);
    const initialQueries = parseInitialQueries(props.routeParams.initial);
    const { range, queries } = parseInitialState(props.routeParams.initial);
    this.state = {
      datasource: null,
      datasourceError: null,
@@ -80,11 +73,14 @@ export class Explore extends React.Component<any, IExploreState> {
      graphResult: null,
      latency: 0,
      loading: false,
      queries: ensureQueries(initialQueries),
      queries: ensureQueries(queries),
      queryError: null,
      range: range || { ...DEFAULT_RANGE },
      requestOptions: null,
      showingGraph: true,
      showingTable: true,
      tableResult: null,
      ...props.initialState,
    };
  }

@@ -98,6 +94,10 @@ export class Explore extends React.Component<any, IExploreState> {
    }
  }

  componentDidCatch(error) {
    console.error(error);
  }

  handleAddQueryRow = index => {
    const { queries } = this.state;
    const nextQueries = [
@@ -119,10 +119,32 @@ export class Explore extends React.Component<any, IExploreState> {
    this.setState({ queries: nextQueries });
  };

  handleChangeTime = nextRange => {
    const range = {
      from: nextRange.from,
      to: nextRange.to,
    };
    this.setState({ range }, () => this.handleSubmit());
  };

  handleClickCloseSplit = () => {
    const { onChangeSplit } = this.props;
    if (onChangeSplit) {
      onChangeSplit(false);
    }
  };

  handleClickGraphButton = () => {
    this.setState(state => ({ showingGraph: !state.showingGraph }));
  };

  handleClickSplit = () => {
    const { onChangeSplit } = this.props;
    if (onChangeSplit) {
      onChangeSplit(true, this.state);
    }
  };

  handleClickTableButton = () => {
    this.setState(state => ({ showingTable: !state.showingTable }));
  };
@@ -147,17 +169,17 @@ export class Explore extends React.Component<any, IExploreState> {
  };

  async runGraphQuery() {
    const { datasource, queries } = this.state;
    const { datasource, queries, range } = this.state;
    if (!hasQuery(queries)) {
      return;
    }
    this.setState({ latency: 0, loading: true, graphResult: null });
    this.setState({ latency: 0, loading: true, graphResult: null, queryError: null });
    const now = Date.now();
    const options = buildQueryOptions({
      format: 'time_series',
      interval: datasource.interval,
      instant: false,
      now,
      range,
      queries: queries.map(q => q.query),
    });
    try {
@@ -165,24 +187,25 @@ export class Explore extends React.Component<any, IExploreState> {
      const result = makeTimeSeriesList(res.data, options);
      const latency = Date.now() - now;
      this.setState({ latency, loading: false, graphResult: result, requestOptions: options });
    } catch (error) {
      console.error(error);
      this.setState({ loading: false, graphResult: error });
    } catch (response) {
      console.error(response);
      const queryError = response.data ? response.data.error : response;
      this.setState({ loading: false, queryError });
    }
  }

  async runTableQuery() {
    const { datasource, queries } = this.state;
    const { datasource, queries, range } = this.state;
    if (!hasQuery(queries)) {
      return;
    }
    this.setState({ latency: 0, loading: true, tableResult: null });
    this.setState({ latency: 0, loading: true, queryError: null, tableResult: null });
    const now = Date.now();
    const options = buildQueryOptions({
      format: 'table',
      interval: datasource.interval,
      instant: true,
      now,
      range,
      queries: queries.map(q => q.query),
    });
    try {
@@ -190,9 +213,10 @@ export class Explore extends React.Component<any, IExploreState> {
      const tableModel = res.data[0];
      const latency = Date.now() - now;
      this.setState({ latency, loading: false, tableResult: tableModel, requestOptions: options });
    } catch (error) {
      console.error(error);
      this.setState({ loading: false, tableResult: null });
    } catch (response) {
      console.error(response);
      const queryError = response.data ? response.data.error : response;
      this.setState({ loading: false, queryError });
    }
  }

@@ -202,6 +226,7 @@ export class Explore extends React.Component<any, IExploreState> {
  };

  render() {
    const { position, split } = this.props;
    const {
      datasource,
      datasourceError,
@@ -210,59 +235,93 @@ export class Explore extends React.Component<any, IExploreState> {
      latency,
      loading,
      queries,
      queryError,
      range,
      requestOptions,
      showingGraph,
      showingTable,
      tableResult,
    } = this.state;
    const showingBoth = showingGraph && showingTable;
    const graphHeight = showingBoth ? '200px' : null;
    const graphButtonClassName = showingBoth || showingGraph ? 'btn m-r-1' : 'btn btn-inverse m-r-1';
    const tableButtonClassName = showingBoth || showingTable ? 'btn m-r-1' : 'btn btn-inverse m-r-1';
    const graphHeight = showingBoth ? '200px' : '400px';
    const graphButtonActive = showingBoth || showingGraph ? 'active' : '';
    const tableButtonActive = showingBoth || showingTable ? 'active' : '';
    const exploreClass = split ? 'explore explore-split' : 'explore';
    return (
      <div className="explore">
        <div className="page-body page-full">
          <h2 className="page-sub-heading">Explore</h2>
          {datasourceLoading ? <div>Loading datasource...</div> : null}

          {datasourceError ? <div title={datasourceError}>Error connecting to datasource.</div> : null}

          {datasource ? (
            <div className="m-r-3">
              <div className="nav m-b-1">
                <div className="pull-right">
                  {loading || latency ? <ElapsedTime time={latency} className="" /> : null}
                  <button type="submit" className="m-l-1 btn btn-primary" onClick={this.handleSubmit}>
                    <i className="fa fa-return" /> Run Query
                  </button>
                </div>
                <div>
                  <button className={graphButtonClassName} onClick={this.handleClickGraphButton}>
                    Graph
                  </button>
                  <button className={tableButtonClassName} onClick={this.handleClickTableButton}>
                    Table
                  </button>
                </div>
              </div>
              <QueryRows
                queries={queries}
                request={this.request}
                onAddQueryRow={this.handleAddQueryRow}
                onChangeQuery={this.handleChangeQuery}
                onExecuteQuery={this.handleSubmit}
                onRemoveQueryRow={this.handleRemoveQueryRow}
              />
              <main className="m-t-2">
                {showingGraph ? (
                  <Graph data={graphResult} id="explore-1" options={requestOptions} height={graphHeight} />
                ) : null}
                {showingGraph ? <Legend data={graphResult} /> : null}
                {showingTable ? <Table data={tableResult} className="m-t-3" /> : null}
              </main>
      <div className={exploreClass}>
        <div className="navbar">
          {position === 'left' ? (
            <div>
              <a className="navbar-page-btn">
                <i className="fa fa-rocket" />
                Explore
              </a>
            </div>
          ) : (
            <div className="navbar-buttons explore-first-button">
              <button className="btn navbar-button" onClick={this.handleClickCloseSplit}>
                Close Split
              </button>
            </div>
          )}
          <div className="navbar__spacer" />
          {position === 'left' && !split ? (
            <div className="navbar-buttons">
              <button className="btn navbar-button" onClick={this.handleClickSplit}>
                Split
              </button>
            </div>
          ) : null}
          <div className="navbar-buttons">
            <button className={`btn navbar-button ${graphButtonActive}`} onClick={this.handleClickGraphButton}>
              Graph
            </button>
            <button className={`btn navbar-button ${tableButtonActive}`} onClick={this.handleClickTableButton}>
              Table
            </button>
          </div>
          <TimePicker range={range} onChangeTime={this.handleChangeTime} />
          <div className="navbar-buttons relative">
            <button className="btn navbar-button--primary" onClick={this.handleSubmit}>
              Run Query <i className="fa fa-level-down run-icon" />
            </button>
            {loading || latency ? <ElapsedTime time={latency} className="text-info" /> : null}
          </div>
        </div>

        {datasourceLoading ? <div className="explore-container">Loading datasource...</div> : null}

        {datasourceError ? (
          <div className="explore-container" title={datasourceError}>
            Error connecting to datasource.
          </div>
        ) : null}

        {datasource ? (
          <div className="explore-container">
            <QueryRows
              queries={queries}
              request={this.request}
              onAddQueryRow={this.handleAddQueryRow}
              onChangeQuery={this.handleChangeQuery}
              onExecuteQuery={this.handleSubmit}
              onRemoveQueryRow={this.handleRemoveQueryRow}
            />
            {queryError ? <div className="text-warning m-a-2">{queryError}</div> : null}
            <main className="m-t-2">
              {showingGraph ? (
                <Graph
                  data={graphResult}
                  id={`explore-graph-${position}`}
                  options={requestOptions}
                  height={graphHeight}
                  split={split}
                />
              ) : null}
              {showingTable ? <Table data={tableResult} className="m-t-3" /> : null}
            </main>
          </div>
        ) : null}
      </div>
    );
  }

@@ -1,10 +1,13 @@
import $ from 'jquery';
import React, { Component } from 'react';

import TimeSeries from 'app/core/time_series2';
import moment from 'moment';

import 'vendor/flot/jquery.flot';
import 'vendor/flot/jquery.flot.time';
import * as dateMath from 'app/core/utils/datemath';
import TimeSeries from 'app/core/time_series2';

import Legend from './Legend';

// Copied from graph.ts
function time_format(ticks, min, max) {
@@ -72,6 +75,7 @@ class Graph extends Component<any, any> {
    if (
      prevProps.data !== this.props.data ||
      prevProps.options !== this.props.options ||
      prevProps.split !== this.props.split ||
      prevProps.height !== this.props.height
    ) {
      this.draw();
@@ -84,14 +88,22 @@ class Graph extends Component<any, any> {
      return;
    }
    const series = data.map((ts: TimeSeries) => ({
      color: ts.color,
      label: ts.label,
      data: ts.getFlotPairs('null'),
    }));

    const $el = $(`#${this.props.id}`);
    const ticks = $el.width() / 100;
    const min = userOptions.range.from.valueOf();
    const max = userOptions.range.to.valueOf();
    let { from, to } = userOptions.range;
    if (!moment.isMoment(from)) {
      from = dateMath.parse(from, false);
    }
    if (!moment.isMoment(to)) {
      to = dateMath.parse(to, true);
    }
    const min = from.valueOf();
    const max = to.valueOf();
    const dynamicOptions = {
      xaxis: {
        mode: 'time',
@@ -111,12 +123,13 @@ class Graph extends Component<any, any> {
  }

  render() {
    const style = {
      height: this.props.height || '400px',
      width: this.props.width || '100%',
    };

    return <div id={this.props.id} style={style} />;
    const { data, height } = this.props;
    return (
      <div className="panel-container">
        <div id={this.props.id} className="explore-graph" style={{ height }} />
        <Legend data={data} />
      </div>
    );
  }
}

@@ -50,7 +50,7 @@ class Portal extends React.Component {
  constructor(props) {
    super(props);
    this.node = document.createElement('div');
    this.node.classList.add(`query-field-portal-${props.index}`);
    this.node.classList.add('explore-typeahead', `explore-typeahead-${props.index}`);
    document.body.appendChild(this.node);
  }

@@ -48,10 +48,10 @@ class QueryRow extends PureComponent<any, any> {
    return (
      <div className="query-row">
        <div className="query-row-tools">
          <button className="btn btn-small btn-inverse" onClick={this.handleClickAddButton}>
          <button className="btn navbar-button navbar-button--tight" onClick={this.handleClickAddButton}>
            <i className="fa fa-plus" />
          </button>
          <button className="btn btn-small btn-inverse" onClick={this.handleClickRemoveButton}>
          <button className="btn navbar-button navbar-button--tight" onClick={this.handleClickRemoveButton}>
            <i className="fa fa-minus" />
          </button>
        </div>
@@ -60,6 +60,7 @@ class QueryRow extends PureComponent<any, any> {
          initialQuery={edited ? null : query}
          onPressEnter={this.handlePressEnter}
          onQueryChange={this.handleChangeQuery}
          placeholder="Enter a PromQL query"
          request={request}
        />
      </div>

public/app/containers/Explore/TimePicker.jest.tsx (new file, 74 lines)
@@ -0,0 +1,74 @@
import React from 'react';
import { shallow } from 'enzyme';
import sinon from 'sinon';

import * as rangeUtil from 'app/core/utils/rangeutil';
import TimePicker, { DEFAULT_RANGE, parseTime } from './TimePicker';

describe('<TimePicker />', () => {
  it('renders closed with default values', () => {
    const rangeString = rangeUtil.describeTimeRange(DEFAULT_RANGE);
    const wrapper = shallow(<TimePicker />);
    expect(wrapper.find('.timepicker-rangestring').text()).toBe(rangeString);
    expect(wrapper.find('.gf-timepicker-dropdown').exists()).toBe(false);
  });

  it('renders with relative range', () => {
    const range = {
      from: 'now-7h',
      to: 'now',
    };
    const rangeString = rangeUtil.describeTimeRange(range);
    const wrapper = shallow(<TimePicker range={range} isOpen />);
    expect(wrapper.find('.timepicker-rangestring').text()).toBe(rangeString);
    expect(wrapper.state('fromRaw')).toBe(range.from);
    expect(wrapper.state('toRaw')).toBe(range.to);
    expect(wrapper.find('.timepicker-from').props().value).toBe(range.from);
    expect(wrapper.find('.timepicker-to').props().value).toBe(range.to);
  });

  it('renders with epoch (millies) range converted to ISO-ish', () => {
    const range = {
      from: '1',
      to: '1000',
    };
    const rangeString = rangeUtil.describeTimeRange({
      from: parseTime(range.from),
      to: parseTime(range.to),
    });
    const wrapper = shallow(<TimePicker range={range} isUtc isOpen />);
    expect(wrapper.state('fromRaw')).toBe('1970-01-01 00:00:00');
    expect(wrapper.state('toRaw')).toBe('1970-01-01 00:00:01');
    expect(wrapper.find('.timepicker-rangestring').text()).toBe(rangeString);
    expect(wrapper.find('.timepicker-from').props().value).toBe('1970-01-01 00:00:00');
    expect(wrapper.find('.timepicker-to').props().value).toBe('1970-01-01 00:00:01');
  });

  it('moves ranges forward and backward by half the range on arrow click', () => {
    const range = {
      from: '2000',
      to: '4000',
    };
    const rangeString = rangeUtil.describeTimeRange({
      from: parseTime(range.from),
      to: parseTime(range.to),
    });

    const onChangeTime = sinon.spy();
    const wrapper = shallow(<TimePicker range={range} isUtc isOpen onChangeTime={onChangeTime} />);
    expect(wrapper.state('fromRaw')).toBe('1970-01-01 00:00:02');
    expect(wrapper.state('toRaw')).toBe('1970-01-01 00:00:04');
    expect(wrapper.find('.timepicker-rangestring').text()).toBe(rangeString);
    expect(wrapper.find('.timepicker-from').props().value).toBe('1970-01-01 00:00:02');
    expect(wrapper.find('.timepicker-to').props().value).toBe('1970-01-01 00:00:04');

    wrapper.find('.timepicker-left').simulate('click');
    expect(onChangeTime.calledOnce).toBe(true);
    expect(wrapper.state('fromRaw')).toBe('1970-01-01 00:00:01');
    expect(wrapper.state('toRaw')).toBe('1970-01-01 00:00:03');

    wrapper.find('.timepicker-right').simulate('click');
    expect(wrapper.state('fromRaw')).toBe('1970-01-01 00:00:02');
    expect(wrapper.state('toRaw')).toBe('1970-01-01 00:00:04');
  });
});
public/app/containers/Explore/TimePicker.tsx (new file, 245 lines)
@@ -0,0 +1,245 @@
import React, { PureComponent } from 'react';
import moment from 'moment';

import * as dateMath from 'app/core/utils/datemath';
import * as rangeUtil from 'app/core/utils/rangeutil';

const DATE_FORMAT = 'YYYY-MM-DD HH:mm:ss';

export const DEFAULT_RANGE = {
  from: 'now-6h',
  to: 'now',
};

export function parseTime(value, isUtc = false, asString = false) {
  if (value.indexOf('now') !== -1) {
    return value;
  }
  if (!isNaN(value)) {
    const epoch = parseInt(value);
    const m = isUtc ? moment.utc(epoch) : moment(epoch);
    return asString ? m.format(DATE_FORMAT) : m;
  }
  return undefined;
}

export default class TimePicker extends PureComponent<any, any> {
  dropdownEl: any;
  constructor(props) {
    super(props);

    const fromRaw = props.range ? props.range.from : DEFAULT_RANGE.from;
    const toRaw = props.range ? props.range.to : DEFAULT_RANGE.to;
    const range = {
      from: parseTime(fromRaw),
      to: parseTime(toRaw),
    };
    this.state = {
      fromRaw: parseTime(fromRaw, props.isUtc, true),
      isOpen: props.isOpen,
      isUtc: props.isUtc,
      rangeString: rangeUtil.describeTimeRange(range),
      refreshInterval: '',
      toRaw: parseTime(toRaw, props.isUtc, true),
    };
  }

  move(direction) {
    const { onChangeTime } = this.props;
    const { fromRaw, toRaw } = this.state;
    const range = {
      from: dateMath.parse(fromRaw, false),
      to: dateMath.parse(toRaw, true),
    };

    const timespan = (range.to.valueOf() - range.from.valueOf()) / 2;
    let to, from;
    if (direction === -1) {
      to = range.to.valueOf() - timespan;
      from = range.from.valueOf() - timespan;
    } else if (direction === 1) {
      to = range.to.valueOf() + timespan;
      from = range.from.valueOf() + timespan;
      if (to > Date.now() && range.to < Date.now()) {
        to = Date.now();
        from = range.from.valueOf();
      }
    } else {
      to = range.to.valueOf();
      from = range.from.valueOf();
    }

    const rangeString = rangeUtil.describeTimeRange(range);
    // No need to convert to UTC again
    to = moment(to);
    from = moment(from);

    this.setState(
      {
        rangeString,
        fromRaw: from.format(DATE_FORMAT),
        toRaw: to.format(DATE_FORMAT),
      },
      () => {
        onChangeTime({ to, from });
      }
    );
  }

  handleChangeFrom = e => {
    this.setState({
      fromRaw: e.target.value,
    });
  };

  handleChangeTo = e => {
    this.setState({
      toRaw: e.target.value,
    });
  };

  handleClickApply = () => {
    const { onChangeTime } = this.props;
    const { toRaw, fromRaw } = this.state;
    const range = {
      from: dateMath.parse(fromRaw, false),
      to: dateMath.parse(toRaw, true),
    };
    const rangeString = rangeUtil.describeTimeRange(range);
    this.setState(
      {
        isOpen: false,
        rangeString,
      },
      () => {
        if (onChangeTime) {
          onChangeTime(range);
        }
      }
    );
  };

  handleClickLeft = () => this.move(-1);
  handleClickPicker = () => {
    this.setState(state => ({
      isOpen: !state.isOpen,
    }));
  };
  handleClickRight = () => this.move(1);
  handleClickRefresh = () => {};
  handleClickRelativeOption = range => {
    const { onChangeTime } = this.props;
    const rangeString = rangeUtil.describeTimeRange(range);
    this.setState(
      {
        toRaw: range.to,
        fromRaw: range.from,
        isOpen: false,
        rangeString,
      },
      () => {
        if (onChangeTime) {
          onChangeTime(range);
        }
      }
    );
  };

  getTimeOptions() {
    return rangeUtil.getRelativeTimesList({}, this.state.rangeString);
  }

  dropdownRef = el => {
    this.dropdownEl = el;
  };

  renderDropdown() {
    const { fromRaw, isOpen, toRaw } = this.state;
    if (!isOpen) {
      return null;
    }
    const timeOptions = this.getTimeOptions();
    return (
      <div ref={this.dropdownRef} className="gf-timepicker-dropdown">
        <div className="gf-timepicker-absolute-section">
          <h3 className="section-heading">Custom range</h3>

          <label className="small">From:</label>
          <div className="gf-form-inline">
            <div className="gf-form max-width-28">
              <input
                type="text"
                className="gf-form-input input-large timepicker-from"
                value={fromRaw}
                onChange={this.handleChangeFrom}
              />
            </div>
          </div>

          <label className="small">To:</label>
          <div className="gf-form-inline">
            <div className="gf-form max-width-28">
              <input
                type="text"
                className="gf-form-input input-large timepicker-to"
                value={toRaw}
                onChange={this.handleChangeTo}
              />
            </div>
          </div>

          {/* <label className="small">Refreshing every:</label>
          <div className="gf-form-inline">
            <div className="gf-form max-width-28">
              <select className="gf-form-input input-medium" ng-options="f.value as f.text for f in ctrl.refresh.options"></select>
            </div>
          </div> */}
          <div className="gf-form">
            <button className="btn gf-form-btn btn-secondary" onClick={this.handleClickApply}>
              Apply
            </button>
          </div>
        </div>

        <div className="gf-timepicker-relative-section">
          <h3 className="section-heading">Quick ranges</h3>
          {Object.keys(timeOptions).map(section => {
            const group = timeOptions[section];
            return (
              <ul key={section}>
                {group.map(option => (
                  <li className={option.active ? 'active' : ''} key={option.display}>
                    <a onClick={() => this.handleClickRelativeOption(option)}>{option.display}</a>
                  </li>
                ))}
              </ul>
            );
          })}
        </div>
      </div>
    );
  }

  render() {
    const { isUtc, rangeString, refreshInterval } = this.state;
    return (
      <div className="timepicker">
        <div className="navbar-buttons">
          <button className="btn navbar-button navbar-button--tight timepicker-left" onClick={this.handleClickLeft}>
            <i className="fa fa-chevron-left" />
          </button>
          <button className="btn navbar-button gf-timepicker-nav-btn" onClick={this.handleClickPicker}>
            <i className="fa fa-clock-o" />
            <span className="timepicker-rangestring">{rangeString}</span>
            {isUtc ? <span className="gf-timepicker-utc">UTC</span> : null}
            {refreshInterval ? <span className="text-warning"> Refresh every {refreshInterval}</span> : null}
          </button>
          <button className="btn navbar-button navbar-button--tight timepicker-right" onClick={this.handleClickRight}>
            <i className="fa fa-chevron-right" />
          </button>
        </div>
        {this.renderDropdown()}
      </div>
    );
  }
}
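
For orientation, a minimal sketch of how the parseTime helper above behaves, derived from the TimePicker tests earlier in this diff; the standalone calls below are illustrative only and are not part of the commit:

    // Relative expressions pass through untouched; epoch-millisecond strings become
    // moments, or formatted strings when asString is true.
    parseTime('now-6h');            // 'now-6h' (returned unchanged)
    parseTime('1000', true, true);  // '1970-01-01 00:00:01' (UTC string in DATE_FORMAT)
    parseTime('1000', true);        // moment.utc(1000), i.e. a moment instance
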
public/app/containers/Explore/Wrapper.tsx (new file, 33 lines)
@@ -0,0 +1,33 @@
import React, { PureComponent } from 'react';

import Explore from './Explore';

export default class Wrapper extends PureComponent<any, any> {
  state = {
    initialState: null,
    split: false,
  };

  handleChangeSplit = (split, initialState) => {
    this.setState({ split, initialState });
  };

  render() {
    // State overrides for props from first Explore
    const { initialState, split } = this.state;
    return (
      <div className="explore-wrapper">
        <Explore {...this.props} position="left" onChangeSplit={this.handleChangeSplit} split={split} />
        {split ? (
          <Explore
            {...this.props}
            initialState={initialState}
            onChangeSplit={this.handleChangeSplit}
            position="right"
            split={split}
          />
        ) : null}
      </div>
    );
  }
}
@@ -1,12 +1,7 @@
export function buildQueryOptions({ format, interval, instant, now, queries }) {
  const to = now;
  const from = to - 1000 * 60 * 60 * 3;
export function buildQueryOptions({ format, interval, instant, range, queries }) {
  return {
    interval,
    range: {
      from,
      to,
    },
    range,
    targets: queries.map(expr => ({
      expr,
      format,

@@ -5,7 +5,7 @@
    <i class="gf-form-input-icon fa fa-search"></i>
  </label>
  <div class="page-action-bar__spacer"></div>
  <a class="btn btn-success" ng-href="{{ctrl.createDashboardUrl()}}" ng-if="ctrl.isEditor || ctrl.canSave">
  <a class="btn btn-success" ng-href="{{ctrl.createDashboardUrl()}}" ng-if="ctrl.hasEditPermissionInFolders || ctrl.canSave">
    <i class="fa fa-plus"></i>
    Dashboard
  </a>

@@ -42,9 +42,12 @@ export class ManageDashboardsCtrl {
  // if user has editor role or higher
  isEditor: boolean;

  hasEditPermissionInFolders: boolean;

  /** @ngInject */
  constructor(private backendSrv, navModelSrv, private searchSrv: SearchSrv, private contextSrv) {
    this.isEditor = this.contextSrv.isEditor;
    this.hasEditPermissionInFolders = this.contextSrv.hasEditPermissionInFolders;

    this.query = {
      query: '',
@@ -80,6 +83,9 @@ export class ManageDashboardsCtrl {

    return this.backendSrv.getFolderByUid(this.folderUid).then(folder => {
      this.canSave = folder.canSave;
      if (!this.canSave) {
        this.hasEditPermissionInFolders = false;
      }
    });
  });
}

@@ -45,14 +45,14 @@
  </tag-filter>
</div>

<div class="search-filter-box" ng-if="ctrl.isEditor">
<div class="search-filter-box" ng-if="ctrl.isEditor || ctrl.hasEditPermissionInFolders">
  <a href="dashboard/new" class="search-filter-box-link">
    <i class="gicon gicon-dashboard-new"></i> New dashboard
  </a>
  <a href="dashboards/folder/new" class="search-filter-box-link">
  <a href="dashboards/folder/new" class="search-filter-box-link" ng-if="ctrl.isEditor">
    <i class="gicon gicon-folder-new"></i> New folder
  </a>
  <a href="dashboard/import" class="search-filter-box-link">
  <a href="dashboard/import" class="search-filter-box-link" ng-if="ctrl.isEditor">
    <i class="gicon gicon-dashboard-import"></i> Import dashboard
  </a>
  <a class="search-filter-box-link" target="_blank" href="https://grafana.com/dashboards?utm_source=grafana_search">

@@ -17,6 +17,7 @@ export class SearchCtrl {
  isLoading: boolean;
  initialFolderFilterTitle: string;
  isEditor: string;
  hasEditPermissionInFolders: boolean;

  /** @ngInject */
  constructor($scope, private $location, private $timeout, private searchSrv: SearchSrv) {
@@ -27,6 +28,7 @@ export class SearchCtrl {
    this.getTags = this.getTags.bind(this);
    this.onTagSelect = this.onTagSelect.bind(this);
    this.isEditor = contextSrv.isEditor;
    this.hasEditPermissionInFolders = contextSrv.hasEditPermissionInFolders;
  }

  closeSearch() {

@@ -14,7 +14,7 @@ export class KeybindingSrv {
  timepickerOpen = false;

  /** @ngInject */
  constructor(private $rootScope, private $location, private datasourceSrv) {
  constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv) {
    // clear out all shortcuts on route change
    $rootScope.$on('$routeChangeSuccess', () => {
      Mousetrap.reset();
@@ -182,7 +182,12 @@ export class KeybindingSrv {
    const panel = dashboard.getPanelById(dashboard.meta.focusPanelId);
    const datasource = await this.datasourceSrv.get(panel.datasource);
    if (datasource && datasource.supportsExplore) {
      const exploreState = encodePathComponent(JSON.stringify(datasource.getExploreState(panel)));
      const range = this.timeSrv.timeRangeForUrl();
      const state = {
        ...datasource.getExploreState(panel),
        range,
      };
      const exploreState = encodePathComponent(JSON.stringify(state));
      this.$location.url(`/explore/${exploreState}`);
    }
  }

@@ -448,6 +448,7 @@ kbn.valueFormats.currencyISK = kbn.formatBuilders.currency('kr');
kbn.valueFormats.currencyNOK = kbn.formatBuilders.currency('kr');
kbn.valueFormats.currencySEK = kbn.formatBuilders.currency('kr');
kbn.valueFormats.currencyCZK = kbn.formatBuilders.currency('czk');
kbn.valueFormats.currencyCHF = kbn.formatBuilders.currency('CHF');

// Data (Binary)
kbn.valueFormats.bits = kbn.formatBuilders.binarySIPrefix('b');
@@ -873,6 +874,7 @@ kbn.getUnitFormats = function() {
      { text: 'Norwegian Krone (kr)', value: 'currencyNOK' },
      { text: 'Swedish Krona (kr)', value: 'currencySEK' },
      { text: 'Czech koruna (czk)', value: 'currencyCZK' },
      { text: 'Swiss franc (CHF)', value: 'currencyCHF' },
    ],
  },
  {

@@ -70,6 +70,10 @@ export class AnnotationsEditorCtrl {
    this.mode = 'list';
  }

  move(index, dir) {
    _.move(this.annotations, index, index + dir);
  }

  add() {
    this.annotations.push(this.currentAnnotation);
    this.reset();

@@ -21,7 +21,7 @@
    </tr>
  </thead>
  <tbody>
    <tr ng-repeat="annotation in ctrl.annotations">
    <tr ng-repeat="annotation in ctrl.annotations track by annotation.name">
      <td style="width:90%" ng-hide="annotation.builtIn" class="pointer" ng-click="ctrl.edit(annotation)">
        <i class="fa fa-comment" style="color:{{annotation.iconColor}}"></i>
        {{annotation.name}}
@@ -33,8 +33,8 @@
      <td class="pointer" ng-click="ctrl.edit(annotation)">
        {{annotation.datasource || 'Default'}}
      </td>
      <td style="width: 1%"><i ng-click="_.move(ctrl.annotations,$index,$index-1)" ng-hide="$first" class="pointer fa fa-arrow-up"></i></td>
      <td style="width: 1%"><i ng-click="_.move(ctrl.annotations,$index,$index+1)" ng-hide="$last" class="pointer fa fa-arrow-down"></i></td>
      <td style="width: 1%"><i ng-click="ctrl.move($index,-1)" ng-hide="$first" class="pointer fa fa-arrow-up"></i></td>
      <td style="width: 1%"><i ng-click="ctrl.move($index,1)" ng-hide="$last" class="pointer fa fa-arrow-down"></i></td>
      <td style="width: 1%">
        <a ng-click="ctrl.removeAnnotation(annotation)" class="btn btn-danger btn-mini" ng-hide="annotation.builtIn">
          <i class="fa fa-remove"></i>

@@ -7,6 +7,7 @@ export class DashboardImportCtrl {
  jsonText: string;
  parseError: string;
  nameExists: boolean;
  uidExists: boolean;
  dash: any;
  inputs: any[];
  inputsValid: boolean;
@@ -16,6 +17,10 @@ export class DashboardImportCtrl {
  titleTouched: boolean;
  hasNameValidationError: boolean;
  nameValidationError: any;
  hasUidValidationError: boolean;
  uidValidationError: any;
  autoGenerateUid: boolean;
  autoGenerateUidValue: string;

  /** @ngInject */
  constructor(private backendSrv, private validationSrv, navModelSrv, private $location, $routeParams) {
@@ -23,6 +28,9 @@ export class DashboardImportCtrl {

    this.step = 1;
    this.nameExists = false;
    this.uidExists = false;
    this.autoGenerateUid = true;
    this.autoGenerateUidValue = 'auto-generated';

    // check gnetId in url
    if ($routeParams.gnetId) {
@@ -61,6 +69,7 @@ export class DashboardImportCtrl {

    this.inputsValid = this.inputs.length === 0;
    this.titleChanged();
    this.uidChanged(true);
  }

  setDatasourceOptions(input, inputModel) {
@@ -107,6 +116,28 @@ export class DashboardImportCtrl {
    });
  }

  uidChanged(initial) {
    this.uidExists = false;
    this.hasUidValidationError = false;

    if (initial === true && this.dash.uid) {
      this.autoGenerateUidValue = 'value set';
    }

    this.backendSrv
      .getDashboardByUid(this.dash.uid)
      .then(res => {
        this.uidExists = true;
        this.hasUidValidationError = true;
        this.uidValidationError = `Dashboard named '${res.dashboard.title}' in folder '${
          res.meta.folderTitle
        }' has the same uid`;
      })
      .catch(err => {
        err.isHandled = true;
      });
  }

  saveDashboard() {
    var inputs = this.inputs.map(input => {
      return {

@@ -22,8 +22,10 @@ export class DashboardModel {
  editable: any;
  graphTooltip: any;
  time: any;
  originalTime: any;
  timepicker: any;
  templating: any;
  originalTemplating: any;
  annotations: any;
  refresh: any;
  snapshot: any;
@@ -68,8 +70,12 @@ export class DashboardModel {
    this.editable = data.editable !== false;
    this.graphTooltip = data.graphTooltip || 0;
    this.time = data.time || { from: 'now-6h', to: 'now' };
    this.originalTime = _.cloneDeep(this.time);
    this.timepicker = data.timepicker || {};
    this.templating = this.ensureListExist(data.templating);
    this.originalTemplating = _.map(this.templating.list, variable => {
      return { name: variable.name, current: _.clone(variable.current) };
    });
    this.annotations = this.ensureListExist(data.annotations);
    this.refresh = data.refresh;
    this.snapshot = data.snapshot;
@@ -130,7 +136,12 @@ export class DashboardModel {
  }

  // cleans meta data and other non persistent state
  getSaveModelClone() {
  getSaveModelClone(options?) {
    let defaults = _.defaults(options || {}, {
      saveVariables: false,
      saveTimerange: false,
    });

    // make clone
    var copy: any = {};
    for (var property in this) {
@@ -142,10 +153,23 @@ export class DashboardModel {
    }

    // get variable save models
    //console.log(this.templating.list);
    copy.templating = {
      list: _.map(this.templating.list, variable => (variable.getSaveModel ? variable.getSaveModel() : variable)),
    };

    if (!defaults.saveVariables && copy.templating.list.length === this.originalTemplating.length) {
      for (let i = 0; i < copy.templating.list.length; i++) {
        if (copy.templating.list[i].name === this.originalTemplating[i].name) {
          copy.templating.list[i].current = this.originalTemplating[i].current;
        }
      }
    }

    if (!defaults.saveTimerange) {
      copy.time = this.originalTime;
    }

    // get panel save models
    copy.panels = _.chain(this.panels)
      .filter(panel => panel.type !== 'add-panel')

@@ -1,5 +1,5 @@
import React from 'react';
import ReactGridLayout from 'react-grid-layout-grafana';
import ReactGridLayout from 'react-grid-layout';
import { GRID_CELL_HEIGHT, GRID_CELL_VMARGIN, GRID_COLUMN_COUNT } from 'app/core/constants';
import { DashboardPanel } from './DashboardPanel';
import { DashboardModel } from '../dashboard_model';

@@ -12,6 +12,7 @@ export class FolderPickerCtrl {
  enterFolderCreation: any;
  exitFolderCreation: any;
  enableCreateNew: boolean;
  enableReset: boolean;
  rootName = 'General';
  folder: any;
  createNewFolder: boolean;
@@ -58,6 +59,10 @@ export class FolderPickerCtrl {
        result.unshift({ title: '-- New Folder --', id: -1 });
      }

      if (this.enableReset && query === '' && this.initialTitle !== '') {
        result.unshift({ title: this.initialTitle, id: null });
      }

      return _.map(result, item => {
        return { text: item.title, value: item.id };
      });
@@ -65,7 +70,9 @@ export class FolderPickerCtrl {
  }

  onFolderChange(option) {
    if (option.value === -1) {
    if (!option) {
      option = { value: 0, text: this.rootName };
    } else if (option.value === -1) {
      this.createNewFolder = true;
      this.enterFolderCreation();
      return;
@@ -134,7 +141,7 @@ export class FolderPickerCtrl {
        this.onFolderLoad();
      });
    } else {
      if (this.initialTitle) {
      if (this.initialTitle && this.initialFolderId === null) {
        this.folder = { text: this.initialTitle, value: null };
      } else {
        this.folder = { text: this.rootName, value: 0 };
@@ -171,6 +178,7 @@ export function folderPicker() {
      enterFolderCreation: '&',
      exitFolderCreation: '&',
      enableCreateNew: '@',
      enableReset: '@',
    },
  };
}

@@ -80,6 +80,34 @@
  </div>
</div>

<div class="gf-form-inline">
  <div class="gf-form gf-form--grow">
    <span class="gf-form-label width-15">
      Unique identifier (uid)
      <info-popover mode="right-normal">
        The unique identifier (uid) of a dashboard can be used to uniquely identify a dashboard between multiple Grafana installs.
        The uid allows having consistent URLs for accessing dashboards, so changing the title of a dashboard will not break any
        bookmarked links to that dashboard.
      </info-popover>
    </span>
    <input type="text" class="gf-form-input" disabled="disabled" ng-model="ctrl.autoGenerateUidValue" ng-if="ctrl.autoGenerateUid">
    <a class="btn btn-secondary gf-form-btn" href="#" ng-click="ctrl.autoGenerateUid = false" ng-if="ctrl.autoGenerateUid">change</a>
    <input type="text" class="gf-form-input" maxlength="40" placeholder="optional, will be auto-generated if empty" ng-model="ctrl.dash.uid" ng-change="ctrl.uidChanged()" ng-if="!ctrl.autoGenerateUid">
    <label class="gf-form-label text-success" ng-if="!ctrl.autoGenerateUid && !ctrl.hasUidValidationError">
      <i class="fa fa-check"></i>
    </label>
  </div>
</div>

<div class="gf-form-inline" ng-if="ctrl.hasUidValidationError">
  <div class="gf-form offset-width-15 gf-form--grow">
    <label class="gf-form-label text-warning gf-form-label--grow">
      <i class="fa fa-warning"></i>
      {{ctrl.uidValidationError}}
    </label>
  </div>
</div>

<div ng-repeat="input in ctrl.inputs">
  <div class="gf-form">
    <label class="gf-form-label width-15">
@@ -104,10 +132,10 @@
  </div>

  <div class="gf-form-button-row">
    <button type="button" class="btn btn-success width-12" ng-click="ctrl.saveDashboard()" ng-hide="ctrl.nameExists" ng-disabled="!ctrl.inputsValid">
    <button type="button" class="btn btn-success width-12" ng-click="ctrl.saveDashboard()" ng-hide="ctrl.nameExists || ctrl.uidExists" ng-disabled="!ctrl.inputsValid">
      <i class="fa fa-save"></i> Import
    </button>
    <button type="button" class="btn btn-danger width-12" ng-click="ctrl.saveDashboard()" ng-show="ctrl.nameExists" ng-disabled="!ctrl.inputsValid">
    <button type="button" class="btn btn-danger width-12" ng-click="ctrl.saveDashboard()" ng-show="ctrl.nameExists || ctrl.uidExists" ng-disabled="!ctrl.inputsValid">
      <i class="fa fa-save"></i> Import (Overwrite)
    </button>
    <a class="btn btn-link" ng-click="ctrl.back()">Cancel</a>

@@ -1,4 +1,5 @@
import coreModule from 'app/core/core_module';
import _ from 'lodash';

const template = `
<div class="modal-body">
@@ -14,19 +15,29 @@ const template = `
  </div>

  <form name="ctrl.saveForm" ng-submit="ctrl.save()" class="modal-content" novalidate>
    <h6 class="text-center">Add a note to describe your changes</h6>
    <div class="p-t-2">
    <div class="p-t-1">
      <div class="gf-form-group" ng-if="ctrl.timeChange || ctrl.variableChange">
        <gf-form-switch class="gf-form"
          label="Save current time range" ng-if="ctrl.timeChange" label-class="width-12" switch-class="max-width-6"
          checked="ctrl.saveTimerange" on-change="buildUrl()">
        </gf-form-switch>
        <gf-form-switch class="gf-form"
          label="Save current variables" ng-if="ctrl.variableChange" label-class="width-12" switch-class="max-width-6"
          checked="ctrl.saveVariables" on-change="buildUrl()">
        </gf-form-switch>
      </div>
      <div class="gf-form">
        <label class="gf-form-hint">
          <input
            type="text"
            name="message"
            class="gf-form-input"
            placeholder="Updates to …"
            placeholder="Add a note to describe your changes …"
            give-focus="true"
            ng-model="ctrl.message"
            ng-model-options="{allowInvalid: true}"
            ng-maxlength="this.max"
            maxlength="64"
            autocomplete="off" />
          <small class="gf-form-hint-text muted" ng-cloak>
            <span ng-class="{'text-error': ctrl.saveForm.message.$invalid && ctrl.saveForm.message.$dirty }">
@@ -40,7 +51,7 @@ const template = `

      <div class="gf-form-button-row text-center">
        <button type="submit" class="btn btn-success" ng-disabled="ctrl.saveForm.$invalid">Save</button>
        <button class="btn btn-inverse" ng-click="ctrl.dismiss();">Cancel</button>
        <a class="btn btn-link" ng-click="ctrl.dismiss();">Cancel</a>
      </div>
  </form>
</div>
@@ -48,14 +59,51 @@

export class SaveDashboardModalCtrl {
  message: string;
  saveVariables = false;
  saveTimerange = false;
  templating: any;
  time: any;
  originalTime: any;
  current = [];
  originalCurrent = [];
  max: number;
  saveForm: any;
  dismiss: () => void;
  timeChange = false;
  variableChange = false;

  /** @ngInject */
  constructor(private dashboardSrv) {
    this.message = '';
    this.max = 64;
    this.templating = dashboardSrv.dash.templating.list;

    this.compareTemplating();
    this.compareTime();
  }

  compareTime() {
    if (_.isEqual(this.dashboardSrv.dash.time, this.dashboardSrv.dash.originalTime)) {
      this.timeChange = false;
    } else {
      this.timeChange = true;
    }
  }

  compareTemplating() {
    if (this.dashboardSrv.dash.templating.list.length > 0) {
      for (let i = 0; i < this.dashboardSrv.dash.templating.list.length; i++) {
        if (
          this.dashboardSrv.dash.templating.list[i].current.text !==
          this.dashboardSrv.dash.originalTemplating[i].current.text
        ) {
          return (this.variableChange = true);
        }
      }
      return (this.variableChange = false);
    } else {
      return (this.variableChange = false);
    }
  }

  save() {
@@ -63,9 +111,14 @@ export class SaveDashboardModalCtrl {
      return;
    }

    var options = {
      saveVariables: this.saveVariables,
      saveTimerange: this.saveTimerange,
      message: this.message,
    };

    var dashboard = this.dashboardSrv.getCurrent();
    var saveModel = dashboard.getSaveModelClone();
    var options = { message: this.message };
    var saveModel = dashboard.getSaveModelClone(options);

    return this.dashboardSrv.save(saveModel, options).then(this.dismiss);
  }

@@ -48,7 +48,7 @@ export class SaveProvisionedDashboardModalCtrl {
  constructor(dashboardSrv) {
    this.dash = dashboardSrv.getCurrent().getSaveModelClone();
    delete this.dash.id;
    this.dashboardJson = JSON.stringify(this.dash, null, 2);
    this.dashboardJson = angular.toJson(this.dash, true);
  }

  save() {

@@ -2,6 +2,7 @@ import { coreModule, appEvents, contextSrv } from 'app/core/core';
import { DashboardModel } from '../dashboard_model';
import $ from 'jquery';
import _ from 'lodash';
import angular from 'angular';
import config from 'app/core/config';

export class SettingsCtrl {
@@ -118,7 +119,7 @@ export class SettingsCtrl {
    this.viewId = this.$location.search().editview;

    if (this.viewId) {
      this.json = JSON.stringify(this.dashboard.getSaveModelClone(), null, 2);
      this.json = angular.toJson(this.dashboard.getSaveModelClone(), true);
    }

    if (this.viewId === 'settings' && this.dashboard.meta.canMakeEditable) {
Some files were not shown because too many files have changed in this diff.