diff --git a/.circleci/config.yml b/.circleci/config.yml
index 1e046aec34d..eb8724bed3c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -19,7 +19,7 @@ version: 2
jobs:
mysql-integration-test:
docker:
- - image: circleci/golang:1.10
+ - image: circleci/golang:1.11
- image: circleci/mysql:5.6-ram
environment:
MYSQL_ROOT_PASSWORD: rootpass
@@ -39,7 +39,7 @@ jobs:
postgres-integration-test:
docker:
- - image: circleci/golang:1.10
+ - image: circleci/golang:1.11
- image: circleci/postgres:9.3-ram
environment:
POSTGRES_USER: grafanatest
@@ -74,7 +74,7 @@ jobs:
gometalinter:
docker:
- - image: circleci/golang:1.10
+ - image: circleci/golang:1.11
environment:
# we need CGO because of go-sqlite3
CGO_ENABLED: 1
@@ -115,7 +115,7 @@ jobs:
test-backend:
docker:
- - image: circleci/golang:1.10
+ - image: circleci/golang:1.11
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -125,7 +125,7 @@ jobs:
build-all:
docker:
- - image: grafana/build-container:1.0.0
+ - image: grafana/build-container:1.1.0
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -168,7 +168,7 @@ jobs:
build:
docker:
- - image: grafana/build-container:1.0.0
+ - image: grafana/build-container:1.1.0
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
diff --git a/CHANGELOG.md b/CHANGELOG.md
index aed25afb02e..b89e925e826 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,77 +1,98 @@
# 5.3.0 (unreleased)
-* **OAuth**: Gitlab OAuth with support for filter by groups [#5623](https://github.com/grafana/grafana/issues/5623), thx [@BenoitKnecht](https://github.com/BenoitKnecht)
-* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
-* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
-* **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622)
-* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
-* **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
-* **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
-* **LDAP**: Client certificates support [#12805](https://github.com/grafana/grafana/issues/12805), thx [@nyxi](https://github.com/nyxi)
-* **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
-
### Minor
-* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
-* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
-* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705)
-* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
+* **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira)
+
+# 5.3.0-beta1 (2018-09-06)
+
+### New Major Features
+
+* **Alerting**: Notification reminders [#7330](https://github.com/grafana/grafana/issues/7330), thx [@jbaublitz](https://github.com/jbaublitz)
+* **Dashboard**: TV & Kiosk mode changes, new cycle view mode button in dashboard toolbar [#13025](https://github.com/grafana/grafana/pull/13025)
+* **OAuth**: Gitlab OAuth with support for filter by groups [#5623](https://github.com/grafana/grafana/issues/5623), thx [@BenoitKnecht](https://github.com/BenoitKnecht)
+* **Postgres**: Graphical query builder [#10095](https://github.com/grafana/grafana/issues/10095), thx [svenklemm](https://github.com/svenklemm)
+
+### New Features
+
+* **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622)
+* **LDAP**: Client certificates support [#12805](https://github.com/grafana/grafana/issues/12805), thx [@nyxi](https://github.com/nyxi)
+* **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
+* **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
+* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
+* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
+* **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
+* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
+
+### Minor
+
+* **Alerting**: Its now possible to configure the default value for how to handle errors and no data in alerting. [#10424](https://github.com/grafana/grafana/issues/10424)
+* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
+* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
+* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
+* **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error. Fixes [#13079](https://github.com/grafana/grafana/issues/13079)
+* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229)
+* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
+* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
* **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
* **Prometheus**: Add $__interval, $__interval_ms, $__range, $__range_s & $__range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie)
-* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
-* **Variables**: Limit amount of queries executed when updating variable that other variable(s) are dependent on [#11890](https://github.com/grafana/grafana/issues/11890)
-* **Variables**: Support query variable refresh when another variable referenced in `Regex` field change its value [#12952](https://github.com/grafana/grafana/issues/12952), thx [@franciscocpg](https://github.com/franciscocpg)
-* **Variables**: Support variables in query variable `Custom all value` field [#12965](https://github.com/grafana/grafana/issues/12965), thx [@franciscocpg](https://github.com/franciscocpg)
+* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
+* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927)
+* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen)
* **Postgres/MySQL/MSSQL**: New $__unixEpochGroup and $__unixEpochGroupAlias macros [#12892](https://github.com/grafana/grafana/issues/12892), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: New $__timeGroupAlias macro. Postgres $__timeGroup no longer automatically adds time column alias [#12749](https://github.com/grafana/grafana/issues/12749), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Escape single quotes in variables [#12785](https://github.com/grafana/grafana/issues/12785), thx [@eMerzh](https://github.com/eMerzh)
+* **Postgres/MySQL/MSSQL**: Min time interval support [#13157](https://github.com/grafana/grafana/issues/13157), thx [@svenklemm](https://github.com/svenklemm)
* **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
-* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
-* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
-* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
* **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
* **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
* **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
* **Cloudwatch**: Added BurstBalance metric to list of AWS RDS metrics [#12561](https://github.com/grafana/grafana/pulls/12561), thx [@activeshadow](https://github.com/activeshadow)
* **Cloudwatch**: Add new Redshift metrics and dimensions [#12063](https://github.com/grafana/grafana/pulls/12063), thx [@A21z](https://github.com/A21z)
+* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
+* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705)
+* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
+* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
* **Table**: Fix link color when using light theme and thresholds in use [#12766](https://github.com/grafana/grafana/issues/12766)
-om/grafana/grafana/issues/12668)
* **Table**: Fix for useless horizontal scrollbar for table panel [#9964](https://github.com/grafana/grafana/issues/9964)
* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
-* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
-* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
+* **Heatmap**: Fix broken tooltip and crosshair on Firefox [#12486](https://github.com/grafana/grafana/issues/12486)
+* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
+* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
+* **Variables**: Limit amount of queries executed when updating variable that other variable(s) are dependent on [#11890](https://github.com/grafana/grafana/issues/11890)
+* **Variables**: Support query variable refresh when another variable referenced in `Regex` field change its value [#12952](https://github.com/grafana/grafana/issues/12952), thx [@franciscocpg](https://github.com/franciscocpg)
+* **Variables**: Support variables in query variable `Custom all value` field [#12965](https://github.com/grafana/grafana/issues/12965), thx [@franciscocpg](https://github.com/franciscocpg)
* **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
-* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
-* **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
-* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
-* **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
-* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
-* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927)
-* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen)
-* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229)
-* **Heatmap**: Fix broken tooltip and crosshair on Firefox [#12486](https://github.com/grafana/grafana/issues/12486)
+* **Units**: Adds bitcoin axes unit. [#13125](https://github.com/grafana/grafana/pull/13125)
+* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
+* **Logging**: Reopen log files after receiving a SIGHUP signal [#13112](https://github.com/grafana/grafana/pull/13112), thx [@filewalkwithme](https://github.com/filewalkwithme)
* **Login**: Show loading animation while waiting for authentication response on login [#12865](https://github.com/grafana/grafana/issues/12865)
+* **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
+* **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
### Breaking changes
* Postgres datasource no longer automatically adds time column alias when using the $__timeGroup alias. However, there's code in place which should make this change backward compatible and shouldn't create any issues.
+* Kiosk mode now also hides submenu (variables)
+* ?inactive url parameter no longer supported, replaced with kiosk=tv url parameter
### New experimental features
-These are new features that's still being worked on and are in an experimental phase. We incourage users to try these out and provide any feedback in related issue.
+These are new features that's still being worked on and are in an experimental phase. We encourage users to try these out and provide any feedback in related issue.
* **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768)
### Tech
* **Frontend**: Convert all Frontend Karma tests to Jest tests [#12224](https://github.com/grafana/grafana/issues/12224)
+* **Backend**: Upgrade to golang 1.11 [#13030](https://github.com/grafana/grafana/issues/13030)
# 5.2.3 (2018-08-29)
diff --git a/Dockerfile b/Dockerfile
index f7e45893c38..28dd71952af 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
# Golang build container
-FROM golang:1.10
+FROM golang:1.11
WORKDIR $GOPATH/src/github.com/grafana/grafana
diff --git a/Gopkg.lock b/Gopkg.lock
index 6f08e208ecd..bd247d691dd 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -427,12 +427,6 @@
revision = "1744e2970ca51c86172c8190fadad617561ed6e7"
version = "v1.0.0"
-[[projects]]
- branch = "master"
- name = "github.com/shurcooL/sanitized_anchor_name"
- packages = ["."]
- revision = "86672fcb3f950f35f2e675df2240550f2a50762f"
-
[[projects]]
name = "github.com/smartystreets/assertions"
packages = [
@@ -679,6 +673,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
- inputs-digest = "cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb"
+ inputs-digest = "81a37e747b875cf870c1b9486fa3147e704dea7db8ba86f7cb942d3ddc01d3e3"
solver-name = "gps-cdcl"
solver-version = 1
diff --git a/README.md b/README.md
index 74fb10c8066..133d9e50d07 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ the latest master builds [here](https://grafana.com/grafana/download)
### Dependencies
-- Go 1.10
+- Go 1.11
- NodeJS LTS
### Building the backend
diff --git a/appveyor.yml b/appveyor.yml
index 5cdec1b8bf5..52f23162033 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -7,7 +7,7 @@ clone_folder: c:\gopath\src\github.com\grafana\grafana
environment:
nodejs_version: "6"
GOPATH: C:\gopath
- GOVERSION: 1.10
+ GOVERSION: 1.11
install:
- rmdir c:\go /s /q
diff --git a/conf/defaults.ini b/conf/defaults.ini
index 90fc144c6e0..85d0953c6af 100644
--- a/conf/defaults.ini
+++ b/conf/defaults.ini
@@ -467,6 +467,12 @@ enabled = true
# Makes it possible to turn off alert rule execution but alerting UI is visible
execute_alerts = true
+# Default setting for new alert rules. Defaults to categorize error and timeouts as alerting. (alerting, keep_state)
+error_or_timeout = alerting
+
+# Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
+nodata_or_nullvalues = no_data
+
#################################### Explore #############################
[explore]
# Enable the Explore section
@@ -538,3 +544,8 @@ container_name =
[external_image_storage.local]
# does not require any configuration
+
+[rendering]
+# Options to configure external image rendering server like https://github.com/grafana/grafana-image-renderer
+server_url =
+callback_url =
diff --git a/conf/sample.ini b/conf/sample.ini
index 4291071e026..2ef254f79b9 100644
--- a/conf/sample.ini
+++ b/conf/sample.ini
@@ -387,6 +387,12 @@ log_queries =
# Makes it possible to turn off alert rule execution but alerting UI is visible
;execute_alerts = true
+# Default setting for new alert rules. Defaults to categorize error and timeouts as alerting. (alerting, keep_state)
+;error_or_timeout = alerting
+
+# Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
+;nodata_or_nullvalues = no_data
+
#################################### Explore #############################
[explore]
# Enable the Explore section
@@ -460,3 +466,8 @@ log_queries =
[external_image_storage.local]
# does not require any configuration
+
+[rendering]
+# Options to configure external image rendering server like https://github.com/grafana/grafana-image-renderer
+;server_url =
+;callback_url =
diff --git a/docs/sources/administration/permissions.md b/docs/sources/administration/permissions.md
index e7b84a417c0..1d1a70607c8 100644
--- a/docs/sources/administration/permissions.md
+++ b/docs/sources/administration/permissions.md
@@ -52,8 +52,6 @@ This admin flag makes a user a `Super Admin`. This means they can access the `Se
### Dashboard & Folder Permissions
-> Introduced in Grafana v5.0
-
{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}}
For dashboards and dashboard folders there is a **Permissions** page that make it possible to
diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md
index c57fb1683f0..c8d83ea1c54 100644
--- a/docs/sources/administration/provisioning.md
+++ b/docs/sources/administration/provisioning.md
@@ -154,8 +154,8 @@ Since not all datasources have the same configuration settings we only have the
| tlsAuthWithCACert | boolean | *All* | Enable TLS authentication using CA cert |
| tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. |
| graphiteVersion | string | Graphite | Graphite version |
-| timeInterval | string | Elastic, InfluxDB & Prometheus | Lowest interval/step value that should be used for this data source |
-| esVersion | number | Elastic | Elasticsearch version as an number (2/5/56) |
+| timeInterval | string | Prometheus, Elasticsearch, InfluxDB, MySQL, PostgreSQL & MSSQL | Lowest interval/step value that should be used for this data source |
+| esVersion | number | Elastic | Elasticsearch version as a number (2/5/56) |
| timeField | string | Elastic | Which field that should be used as timestamp |
| interval | string | Elastic | Index date time format |
| authType | string | Cloudwatch | Auth provider. keys/credentials/arn |
@@ -165,6 +165,8 @@ Since not all datasources have the same configuration settings we only have the
| tsdbVersion | string | OpenTSDB | Version |
| tsdbResolution | string | OpenTSDB | Resolution |
| sslmode | string | PostgreSQL | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |
+| postgresVersion | number | PostgreSQL | Postgres version as a number (903/904/905/906/1000) meaning v9.3, v9.4, ..., v10 |
+| timescaledb | boolean | PostgreSQL | Enable usage of TimescaleDB extension |
#### Secure Json Data
diff --git a/docs/sources/alerting/notifications.md b/docs/sources/alerting/notifications.md
index 58046cafae4..a5b7f4264e0 100644
--- a/docs/sources/alerting/notifications.md
+++ b/docs/sources/alerting/notifications.md
@@ -16,12 +16,11 @@ weight = 2
When an alert changes state, it sends out notifications. Each alert rule can have
multiple notifications. In order to add a notification to an alert rule you first need
-to add and configure a `notification` channel (can be email, PagerDuty or other integration). This is done from the Notification Channels page.
+to add and configure a `notification` channel (can be email, PagerDuty or other integration).
+This is done from the Notification Channels page.
## Notification Channel Setup
-{{< imgbox max-width="30%" img="/img/docs/v50/alerts_notifications_menu.png" caption="Alerting Notification Channels" >}}
-
On the Notification Channels page hit the `New Channel` button to go the page where you
can configure and setup a new Notification Channel.
@@ -30,7 +29,31 @@ sure it's setup correctly.
### Send on all alerts
-When checked, this option will nofity for all alert rules - existing and new.
+When checked, this option will notify for all alert rules - existing and new.
+
+### Send reminders
+
+> Only available in Grafana v5.3 and above.
+
+{{< docs-imagebox max-width="600px" img="/img/docs/v53/alerting_notification_reminders.png" class="docs-image--right" caption="Alerting notification reminders setup" >}}
+
+When this option is checked additional notifications (reminders) will be sent for triggered alerts. You can specify how often reminders
+should be sent using number of seconds (s), minutes (m) or hours (h), for example `30s`, `3m`, `5m` or `1h` etc.
+
+**Important:** Alert reminders are sent after rules are evaluated. Therefore a reminder can never be sent more frequently than a configured [alert rule evaluation interval](/alerting/rules/#name-evaluation-interval).
+
+These examples show how often and when reminders are sent for a triggered alert.
+
+Alert rule evaluation interval | Send reminders every | Reminder sent every (after last alert notification)
+---------- | ----------- | -----------
+`30s` | `15s` | ~30 seconds
+`1m` | `5m` | ~5 minutes
+`5m` | `15m` | ~15 minutes
+`6m` | `20m` | ~24 minutes
+`1h` | `15m` | ~1 hour
+`1h` | `2h` | ~2 hours
+
+
## Supported Notification Types
@@ -132,23 +155,23 @@ Once these two properties are set, you can send the alerts to Kafka for further
### All supported notifiers
-Name | Type |Support images
------|------------ | ------
-Slack | `slack` | yes
-Pagerduty | `pagerduty` | yes
-Email | `email` | yes
-Webhook | `webhook` | link
-Kafka | `kafka` | no
-Hipchat | `hipchat` | yes
-VictorOps | `victorops` | yes
-Sensu | `sensu` | yes
-OpsGenie | `opsgenie` | yes
-Threema | `threema` | yes
-Pushover | `pushover` | no
-Telegram | `telegram` | no
-Line | `line` | no
-Prometheus Alertmanager | `prometheus-alertmanager` | no
-Microsoft Teams | `teams` | yes
+Name | Type | Support images | Support reminders
+-----|------------ | ------ | ------ |
+Slack | `slack` | yes | yes
+Pagerduty | `pagerduty` | yes | yes
+Email | `email` | yes | yes
+Webhook | `webhook` | link | yes
+Kafka | `kafka` | no | yes
+Hipchat | `hipchat` | yes | yes
+VictorOps | `victorops` | yes | yes
+Sensu | `sensu` | yes | yes
+OpsGenie | `opsgenie` | yes | yes
+Threema | `threema` | yes | yes
+Pushover | `pushover` | no | yes
+Telegram | `telegram` | no | yes
+Line | `line` | no | yes
+Microsoft Teams | `teams` | yes | yes
+Prometheus Alertmanager | `prometheus-alertmanager` | no | no
diff --git a/docs/sources/alerting/rules.md b/docs/sources/alerting/rules.md
index fa7332e7145..488619055e2 100644
--- a/docs/sources/alerting/rules.md
+++ b/docs/sources/alerting/rules.md
@@ -88,6 +88,11 @@ So as you can see from the above scenario Grafana will not send out notification
to fire if the rule already is in state `Alerting`. To improve support for queries that return multiple series
we plan to track state **per series** in a future release.
+> Starting with Grafana v5.3 you can configure reminders to be sent for triggered alerts. This will send additional notifications
+> when an alert continues to fire. If other series (like server2 in the example above) also cause the alert rule to fire they will
+> be included in the reminder notification. Depending on what notification channel you're using you may be able to take advantage
+> of this feature for identifying new/existing series causing alert to fire. [Read more about notification reminders here](/alerting/notifications/#send-reminders).
+
### No Data / Null values
Below your conditions you can configure how the rule evaluation engine should handle queries that return no data or only null values.
diff --git a/docs/sources/tutorials/authproxy.md b/docs/sources/auth/auth-proxy.md
similarity index 67%
rename from docs/sources/tutorials/authproxy.md
rename to docs/sources/auth/auth-proxy.md
index 6f13de85c18..e066eed9190 100644
--- a/docs/sources/tutorials/authproxy.md
+++ b/docs/sources/auth/auth-proxy.md
@@ -1,42 +1,43 @@
+++
-title = "Grafana Authproxy"
+title = "Auth Proxy"
+description = "Grafana Auth Proxy Guide"
+keywords = ["grafana", "configuration", "documentation", "proxy"]
type = "docs"
-keywords = ["grafana", "tutorials", "authproxy"]
+aliases = ["/tutorials/authproxy/"]
[menu.docs]
-parent = "tutorials"
-weight = 10
+name = "Auth Proxy"
+identifier = "auth-proxy"
+parent = "authentication"
+weight = 2
+++
-# Grafana Authproxy
+# Auth Proxy Authentication
-AuthProxy allows you to offload the authentication of users to a web server (there are many reasons why you’d want to run a web server in front of a production version of Grafana, especially if it’s exposed to the Internet).
+You can configure Grafana to let an HTTP reverse proxy handle authentication. Popular web servers have a very
+extensive list of pluggable authentication modules, and any of them can be used with the AuthProxy feature.
+Below we detail the configuration options for auth proxy.
-Popular web servers have a very extensive list of pluggable authentication modules, and any of them can be used with the AuthProxy feature.
-
-The Grafana AuthProxy feature is very simple in design, but it is this simplicity that makes it so powerful.
-
-## Interacting with Grafana’s AuthProxy via curl
-
-The AuthProxy feature can be configured through the Grafana configuration file with the following options:
-
-```js
+```bash
[auth.proxy]
+# Defaults to false, but set to true to enable this feature
enabled = true
+# HTTP Header name that will contain the username or email
header_name = X-WEBAUTH-USER
+# HTTP Header property, defaults to `username` but can also be `email`
header_property = username
+# Set to `true` to enable auto sign up of users who do not exist in Grafana DB. Defaults to `true`.
auto_sign_up = true
+# If combined with Grafana LDAP integration define sync interval
ldap_sync_ttl = 60
+# Limit where auth proxy requests come from by configuring a list of IP addresses.
+# This can be used to prevent users spoofing the X-WEBAUTH-USER header.
whitelist =
+# Optionally define more headers to sync other user attributes
+# Example `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL`
+headers =
```
-* **enabled**: this is to toggle the feature on or off
-* **header_name**: this is the HTTP header name that passes the username or email address of the authenticated user to Grafana. Grafana will trust what ever username is contained in this header and automatically log the user in.
-* **header_property**: this tells Grafana whether the value in the header_name is a username or an email address. (In Grafana you can log in using your account username or account email)
-* **auto_sign_up**: If set to true, Grafana will automatically create user accounts in the Grafana DB if one does not exist. If set to false, users who do not exist in the GrafanaDB won’t be able to log in, even though their username and password are valid.
-* **ldap_sync_ttl**: When both auth.proxy and auth.ldap are enabled, user's organisation and role are synchronised from ldap after the http proxy authentication. You can force ldap re-synchronisation after `ldap_sync_ttl` minutes.
-* **whitelist**: Comma separated list of trusted authentication proxies IP.
-
-With a fresh install of Grafana, using the above configuration for the authProxy feature, we can send a simple API call to list all users. The only user that will be present is the default “Admin” user that is added the first time Grafana starts up. As you can see all we need to do to authenticate the request is to provide the “X-WEBAUTH-USER” header.
+## Interacting with Grafana’s AuthProxy via curl
```bash
curl -H "X-WEBAUTH-USER: admin" http://localhost:3000/api/users
@@ -71,7 +72,8 @@ I’ll demonstrate how to use Apache for authenticating users. In this example w
### Apache BasicAuth
-In this example we use Apache as a reverseProxy in front of Grafana. Apache handles the Authentication of users before forwarding requests to the Grafana backend service.
+In this example we use Apache as a reverse proxy in front of Grafana. Apache handles the Authentication of users before forwarding requests to the Grafana backend service.
+
#### Apache configuration
@@ -116,38 +118,7 @@ In this example we use Apache as a reverseProxy in front of Grafana. Apache hand
* The last 3 lines are then just standard reverse proxy configuration to direct all authenticated requests to our Grafana server running on port 3000.
-#### Grafana configuration
-
-```bash
-############# Users ################
-[users]
- # disable user signup / registration
-allow_sign_up = false
-
-# Set to true to automatically assign new users to the default organization (id 1)
-auto_assign_org = true
-
-# Default role new users will be automatically assigned (if auto_assign_org above is set to true)
- auto_assign_org_role = Editor
-
-
-############ Auth Proxy ########
-[auth.proxy]
-enabled = true
-
-# the Header name that contains the authenticated user.
-header_name = X-WEBAUTH-USER
-
-# does the user authenticate against the proxy using a 'username' or an 'email'
-header_property = username
-
-# automatically add the user to the system if they don't already exist.
-auto_sign_up = true
-```
-
-#### Full walk through using Docker.
-
-##### Grafana Container
+## Full walk through using Docker.
For this example, we use the official Grafana docker image available at [Docker Hub](https://hub.docker.com/r/grafana/grafana/)
@@ -166,7 +137,8 @@ header_property = username
auto_sign_up = true
```
-* Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We don't expose any ports for this container as it will only be connected to by our Apache container.
+Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We don't expose
+any ports for this container as it will only be connected to by our Apache container.
```bash
docker run -i -v $(pwd)/grafana.ini:/etc/grafana/grafana.ini --name grafana grafana/grafana
diff --git a/docs/sources/auth/generic-oauth.md b/docs/sources/auth/generic-oauth.md
new file mode 100644
index 00000000000..bec5a98e04a
--- /dev/null
+++ b/docs/sources/auth/generic-oauth.md
@@ -0,0 +1,172 @@
++++
+title = "OAuth authentication"
+description = "Grafana OAuth Authentication Guide"
+keywords = ["grafana", "configuration", "documentation", "oauth"]
+type = "docs"
+[menu.docs]
+name = "Generic OAuth"
+identifier = "generic_oauth"
+parent = "authentication"
+weight = 3
++++
+
+# Generic OAuth Authentication
+
+You can configure many different oauth2 authentication services with Grafana using the generic oauth2 feature. Below you
+can find examples using Okta, BitBucket, OneLogin, Auth0 and Azure Active Directory.
+
+This callback URL must match the full HTTP address that you use in your browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
+
+Example config:
+
+```bash
+[auth.generic_oauth]
+enabled = true
+client_id = YOUR_APP_CLIENT_ID
+client_secret = YOUR_APP_CLIENT_SECRET
+scopes =
+auth_url =
+token_url =
+api_url =
+allowed_domains = mycompany.com mycompany.org
+allow_sign_up = true
+```
+
+Set api_url to the resource that returns [OpenID UserInfo](https://connect2id.com/products/server/docs/api/userinfo) compatible information.
+
+## Set up OAuth2 with Okta
+
+First set up Grafana as an OpenId client "webapplication" in Okta. Then set the Base URIs to `https://<grafana domain>/` and set the Login redirect URIs to `https://<grafana domain>/login/generic_oauth`.
+
+Finally set up the generic oauth module like this:
+```bash
+[auth.generic_oauth]
+name = Okta
+enabled = true
+scopes = openid profile email
+client_id =
+client_secret =
+auth_url = https://<okta-tenant-id>.okta.com/oauth2/v1/authorize
+token_url = https://<okta-tenant-id>.okta.com/oauth2/v1/token
+api_url = https://<okta-tenant-id>.okta.com/oauth2/v1/userinfo
+```
+
+## Set up OAuth2 with Bitbucket
+
+```bash
+[auth.generic_oauth]
+name = BitBucket
+enabled = true
+allow_sign_up = true
+client_id =
+client_secret =
+scopes = account email
+auth_url = https://bitbucket.org/site/oauth2/authorize
+token_url = https://bitbucket.org/site/oauth2/access_token
+api_url = https://api.bitbucket.org/2.0/user
+team_ids =
+allowed_organizations =
+```
+
+## Set up OAuth2 with OneLogin
+
+1. Create a new Custom Connector with the following settings:
+ - Name: Grafana
+ - Sign On Method: OpenID Connect
+   - Redirect URI: `https://<grafana domain>/login/generic_oauth`
+ - Signing Algorithm: RS256
+   - Login URL: `https://<grafana domain>/login/generic_oauth`
+
+ then:
+2. Add an App to the Grafana Connector:
+ - Display Name: Grafana
+
+ then:
+3. Under the SSO tab on the Grafana App details page you'll find the Client ID and Client Secret.
+
+ Your OneLogin Domain will match the url you use to access OneLogin.
+
+ Configure Grafana as follows:
+
+ ```bash
+ [auth.generic_oauth]
+ name = OneLogin
+ enabled = true
+ allow_sign_up = true
+ client_id =
+ client_secret =
+ scopes = openid email name
+   auth_url = https://<onelogin-domain>.onelogin.com/oidc/auth
+   token_url = https://<onelogin-domain>.onelogin.com/oidc/token
+   api_url = https://<onelogin-domain>.onelogin.com/oidc/me
+ team_ids =
+ allowed_organizations =
+ ```
+
+### Set up OAuth2 with Auth0
+
+1. Create a new Client in Auth0
+ - Name: Grafana
+ - Type: Regular Web Application
+
+2. Go to the Settings tab and set:
+   - Allowed Callback URLs: `https://<grafana domain>/login/generic_oauth`
+
+3. Click Save Changes, then use the values at the top of the page to configure Grafana:
+
+ ```bash
+ [auth.generic_oauth]
+ enabled = true
+ allow_sign_up = true
+ team_ids =
+ allowed_organizations =
+ name = Auth0
+ client_id =
+ client_secret =
+ scopes = openid profile email
+   auth_url = https://<your-auth0-domain>/authorize
+   token_url = https://<your-auth0-domain>/oauth/token
+   api_url = https://<your-auth0-domain>/userinfo
+ ```
+
+### Set up OAuth2 with Azure Active Directory
+
+1. Log in to portal.azure.com and click "Azure Active Directory" in the side menu, then click the "Properties" sub-menu item.
+
+2. Copy the "Directory ID", this is needed for setting URLs later
+
+3. Click "App Registrations" and add a new application registration:
+ - Name: Grafana
+ - Application type: Web app / API
+   - Sign-on URL: `https://<grafana domain>/login/generic_oauth`
+
+4. Click the name of the new application to open the application details page.
+
+5. Note down the "Application ID", this will be the OAuth client id.
+
+6. Click "Settings", then click "Keys" and add a new entry under Passwords
+ - Key Description: Grafana OAuth
+ - Duration: Never Expires
+
+7. Click Save then copy the key value, this will be the OAuth client secret.
+
+8. Configure Grafana as follows:
+
+ ```bash
+ [auth.generic_oauth]
+ name = Azure AD
+ enabled = true
+ allow_sign_up = true
+ client_id =
+ client_secret =
+ scopes = openid email name
+   auth_url = https://login.microsoftonline.com/<directory-id>/oauth2/authorize
+   token_url = https://login.microsoftonline.com/<directory-id>/oauth2/token
+ api_url =
+ team_ids =
+ allowed_organizations =
+ ```
+
+
+
+
diff --git a/docs/sources/auth/github.md b/docs/sources/auth/github.md
new file mode 100644
index 00000000000..263b3cc5d4d
--- /dev/null
+++ b/docs/sources/auth/github.md
@@ -0,0 +1,98 @@
++++
+title = "GitHub OAuth2 Authentication"
+description = "Grafana OAuth Authentication Guide"
+keywords = ["grafana", "configuration", "documentation", "oauth"]
+type = "docs"
+[menu.docs]
+name = "GitHub"
+identifier = "github_oauth2"
+parent = "authentication"
+weight = 4
++++
+
+# GitHub OAuth2 Authentication
+
+To enable the GitHub OAuth2 you must register your application with GitHub. GitHub will generate a client ID and secret key for you to use.
+
+## Configure GitHub OAuth application
+
+You need to create a GitHub OAuth application (you find this under the GitHub
+settings page). When you create the application you will need to specify
+a callback URL. Specify this as callback:
+
+```bash
+http://<my_grafana_server_name_or_ip>:<grafana_server_port>/login/github
+```
+
+This callback URL must match the full HTTP address that you use in your
+browser to access Grafana, but with the prefix path of `/login/github`.
+When the GitHub OAuth application is created you will get a Client ID and a
+Client Secret. Specify these in the Grafana configuration file. For
+example:
+
+## Enable GitHub in Grafana
+
+```bash
+[auth.github]
+enabled = true
+allow_sign_up = true
+client_id = YOUR_GITHUB_APP_CLIENT_ID
+client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
+scopes = user:email,read:org
+auth_url = https://github.com/login/oauth/authorize
+token_url = https://github.com/login/oauth/access_token
+api_url = https://api.github.com/user
+team_ids =
+allowed_organizations =
+```
+
+Restart the Grafana back-end. You should now see a GitHub login button
+on the login page. You can now login or sign up with your GitHub
+accounts.
+
+You may allow users to sign-up via GitHub authentication by setting the
+`allow_sign_up` option to `true`. When this option is set to `true`, any
+user successfully authenticating via GitHub authentication will be
+automatically signed up.
+
+### team_ids
+
+Require an active team membership for at least one of the given teams on
+GitHub. If the authenticated user isn't a member of at least one of the
+teams they will not be able to register or authenticate with your
+Grafana instance. For example:
+
+```bash
+[auth.github]
+enabled = true
+client_id = YOUR_GITHUB_APP_CLIENT_ID
+client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
+scopes = user:email,read:org
+team_ids = 150,300
+auth_url = https://github.com/login/oauth/authorize
+token_url = https://github.com/login/oauth/access_token
+api_url = https://api.github.com/user
+allow_sign_up = true
+```
+
+### allowed_organizations
+
+Require an active organization membership for at least one of the given
+organizations on GitHub. If the authenticated user isn't a member of at least
+one of the organizations they will not be able to register or authenticate with
+your Grafana instance. For example
+
+```bash
+[auth.github]
+enabled = true
+client_id = YOUR_GITHUB_APP_CLIENT_ID
+client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
+scopes = user:email,read:org
+auth_url = https://github.com/login/oauth/authorize
+token_url = https://github.com/login/oauth/access_token
+api_url = https://api.github.com/user
+allow_sign_up = true
+# space-delimited organization names
+allowed_organizations = github google
+```
+
diff --git a/docs/sources/auth/gitlab.md b/docs/sources/auth/gitlab.md
new file mode 100644
index 00000000000..32910167f16
--- /dev/null
+++ b/docs/sources/auth/gitlab.md
@@ -0,0 +1,115 @@
++++
+title = "GitLab OAuth2 Authentication"
+description = "Grafana OAuth Authentication Guide"
+keywords = ["grafana", "configuration", "documentation", "oauth"]
+type = "docs"
+[menu.docs]
+name = "GitLab"
+identifier = "gitlab_oauth"
+parent = "authentication"
+weight = 5
++++
+
+# GitLab OAuth2 Authentication
+
+To enable the GitLab OAuth2 you must register an application in GitLab. GitLab will generate a client ID and secret key for you to use.
+
+## Create GitLab OAuth keys
+
+You need to [create a GitLab OAuth application](https://docs.gitlab.com/ce/integration/oauth_provider.html).
+Choose a descriptive *Name*, and use the following *Redirect URI*:
+
+```
+https://grafana.example.com/login/gitlab
+```
+
+where `https://grafana.example.com` is the URL you use to connect to Grafana.
+Adjust it as needed if you don't use HTTPS or if you use a different port; for
+instance, if you access Grafana at `http://203.0.113.31:3000`, you should use
+
+```
+http://203.0.113.31:3000/login/gitlab
+```
+
+Finally, select *api* as the *Scope* and submit the form. Note that if you're
+not going to use GitLab groups for authorization (i.e. not setting
+`allowed_groups`, see below), you can select *read_user* instead of *api* as
+the *Scope*, thus giving a more restricted access to your GitLab API.
+
+You'll get an *Application Id* and a *Secret* in return; we'll call them
+`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this
+section.
+
+## Enable GitLab in Grafana
+
+Add the following to your Grafana configuration file to enable GitLab
+authentication:
+
+```bash
+[auth.gitlab]
+enabled = true
+allow_sign_up = false
+client_id = GITLAB_APPLICATION_ID
+client_secret = GITLAB_SECRET
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups =
+```
+
+Restart the Grafana backend for your changes to take effect.
+
+If you use your own instance of GitLab instead of `gitlab.com`, adjust
+`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com`
+hostname with your own.
+
+With `allow_sign_up` set to `false`, only existing users will be able to login
+using their GitLab account, but with `allow_sign_up` set to `true`, *any* user
+who can authenticate on GitLab will be able to login on your Grafana instance;
+if you use the public `gitlab.com`, it means anyone in the world would be able
+to login on your Grafana instance.
+
+You can however limit access to only members of a given group or list of
+groups by setting the `allowed_groups` option.
+
+### allowed_groups
+
+To limit access to authenticated users that are members of one or more [GitLab
+groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups`
+to a comma- or space-separated list of groups. For instance, if you want to
+only give access to members of the `example` group, set
+
+
+```ini
+allowed_groups = example
+```
+
+If you want to also give access to members of the subgroup `bar`, which is in
+the group `foo`, set
+
+```ini
+allowed_groups = example, foo/bar
+```
+
+Note that in GitLab, the group or subgroup name doesn't always match its
+display name, especially if the display name contains spaces or special
+characters. Make sure you always use the group or subgroup name as it appears
+in the URL of the group or subgroup.
+
+Here's a complete example with `allow_sign_up` enabled, and access limited to
+the `example` and `foo/bar` groups:
+
+```ini
+[auth.gitlab]
+enabled = true
+allow_sign_up = true
+client_id = GITLAB_APPLICATION_ID
+client_secret = GITLAB_SECRET
+scopes = api
+auth_url = https://gitlab.com/oauth/authorize
+token_url = https://gitlab.com/oauth/token
+api_url = https://gitlab.com/api/v4
+allowed_groups = example, foo/bar
+```
+
diff --git a/docs/sources/auth/google.md b/docs/sources/auth/google.md
new file mode 100644
index 00000000000..eeb78044d3e
--- /dev/null
+++ b/docs/sources/auth/google.md
@@ -0,0 +1,55 @@
++++
+title = "Google OAuth2 Authentication"
+description = "Grafana OAuth Authentication Guide"
+keywords = ["grafana", "configuration", "documentation", "oauth"]
+type = "docs"
+[menu.docs]
+name = "Google"
+identifier = "google_oauth2"
+parent = "authentication"
+weight = 3
++++
+
+# Google OAuth2 Authentication
+
+To enable the Google OAuth2 you must register your application with Google. Google will generate a client ID and secret key for you to use.
+
+## Create Google OAuth keys
+
+First, you need to create a Google OAuth Client:
+
+1. Go to https://console.developers.google.com/apis/credentials
+2. Click the 'Create Credentials' button, then click 'OAuth Client ID' in the menu that drops down
+3. Enter the following:
+ - Application Type: Web Application
+ - Name: Grafana
+ - Authorized Javascript Origins: https://grafana.mycompany.com
+ - Authorized Redirect URLs: https://grafana.mycompany.com/login/google
+ - Replace https://grafana.mycompany.com with the URL of your Grafana instance.
+4. Click Create
+5. Copy the Client ID and Client Secret from the 'OAuth Client' modal
+
+## Enable Google OAuth in Grafana
+
+Specify the Client ID and Secret in the [Grafana configuration file]({{< relref "installation/configuration.md#config-file-locations" >}}). For example:
+
+```bash
+[auth.google]
+enabled = true
+client_id = CLIENT_ID
+client_secret = CLIENT_SECRET
+scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
+auth_url = https://accounts.google.com/o/oauth2/auth
+token_url = https://accounts.google.com/o/oauth2/token
+allowed_domains = mycompany.com mycompany.org
+allow_sign_up = true
+```
+
+Restart the Grafana back-end. You should now see a Google login button
+on the login page. You can now login or sign up with your Google
+accounts. The `allowed_domains` option is optional, and domains are separated by spaces.
+
+You may allow users to sign-up via Google authentication by setting the
+`allow_sign_up` option to `true`. When this option is set to `true`, any
+user successfully authenticating via Google authentication will be
+automatically signed up.
diff --git a/docs/sources/auth/index.md b/docs/sources/auth/index.md
new file mode 100644
index 00000000000..7fdcc082319
--- /dev/null
+++ b/docs/sources/auth/index.md
@@ -0,0 +1,12 @@
++++
+title = "Authentication"
+description = "Authentication"
+type = "docs"
+[menu.docs]
+name = "Authentication"
+identifier = "authentication"
+parent = "admin"
+weight = 3
++++
+
+
diff --git a/docs/sources/installation/ldap.md b/docs/sources/auth/ldap.md
similarity index 84%
rename from docs/sources/installation/ldap.md
rename to docs/sources/auth/ldap.md
index 88cf40632db..f63a44e1750 100644
--- a/docs/sources/installation/ldap.md
+++ b/docs/sources/auth/ldap.md
@@ -4,25 +4,37 @@ description = "Grafana LDAP Authentication Guide "
keywords = ["grafana", "configuration", "documentation", "ldap"]
type = "docs"
[menu.docs]
-name = "LDAP Authentication"
+name = "LDAP"
identifier = "ldap"
-parent = "admin"
+parent = "authentication"
weight = 2
+++
-# LDAP Authentication
+# LDAP
-Grafana (2.1 and newer) ships with a strong LDAP integration feature. The LDAP integration in Grafana allows your
-Grafana users to login with their LDAP credentials. You can also specify mappings between LDAP
-group memberships and Grafana Organization user roles.
+The LDAP integration in Grafana allows your Grafana users to login with their LDAP credentials. You can also specify mappings between LDAP
+group memberships and Grafana Organization user roles. Below we detail grafana.ini config file
+settings and ldap.toml config file options.
-## Configuration
-You turn on LDAP in the [main config file]({{< relref "configuration.md#auth-ldap" >}}) as well as specify the path to the LDAP
+## Enable LDAP
+
+You turn on LDAP in the [main config file]({{< relref "installation/configuration.md" >}}) as well as specify the path to the LDAP
specific configuration file (default: `/etc/grafana/ldap.toml`).
-### Example config
+```bash
+[auth.ldap]
+# Set to `true` to enable LDAP integration (default: `false`)
+enabled = true
+# Path to the LDAP specific configuration file (default: `/etc/grafana/ldap.toml`)
+config_file = /etc/grafana/ldap.toml
+# Allow sign up should almost always be true (default) to allow new Grafana users to be created (if ldap authentication is ok). If set to
+# false only pre-existing Grafana users will be able to login (if ldap authentication is ok).
+allow_sign_up = true
+```
-```toml
+## LDAP Configuration
+
+```bash
# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
# [log]
# filters = ldap:debug
@@ -119,7 +131,7 @@ The search filter and search bases settings are still needed to perform the LDAP
## POSIX schema (no memberOf attribute)
If your ldap server does not support the memberOf attribute add these options:
-```toml
+```bash
## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
## An array of the base DNs to search through for groups. Typically uses ou=groups
diff --git a/docs/sources/auth/overview.md b/docs/sources/auth/overview.md
new file mode 100644
index 00000000000..3a38ed83988
--- /dev/null
+++ b/docs/sources/auth/overview.md
@@ -0,0 +1,87 @@
++++
+title = "Overview"
+description = "Overview for auth"
+type = "docs"
+[menu.docs]
+name = "Overview"
+identifier = "overview-auth"
+parent = "authentication"
+weight = 1
++++
+
+# User Authentication Overview
+
+Grafana provides many ways to authenticate users. Some authentication integrations also enable syncing user
+permissions and org memberships.
+
+## OAuth Integrations
+
+- [Google OAuth]({{< relref "auth/google.md" >}})
+- [GitHub OAuth]({{< relref "auth/github.md" >}})
+- [Gitlab OAuth]({{< relref "auth/gitlab.md" >}})
+- [Generic OAuth]({{< relref "auth/generic-oauth.md" >}}) (Okta, BitBucket, Azure, OneLogin, Auth0)
+
+## LDAP integrations
+
+- [LDAP Authentication]({{< relref "auth/ldap.md" >}}) (OpenLDAP, ActiveDirectory, etc)
+
+## Auth proxy
+
+- [Auth Proxy]({{< relref "auth/auth-proxy.md" >}}) If you want to handle authentication outside Grafana using a reverse
+ proxy.
+
+## Grafana Auth
+
+Grafana of course has a built-in user authentication system with password authentication enabled by default. You can
+disable authentication by enabling anonymous access. You can also hide the login form and only allow login through an
+auth provider (listed above). There are also options for allowing self sign up.
+
+### Anonymous authentication
+
+You can make Grafana accessible without any login required by enabling anonymous access in the configuration file.
+
+Example:
+
+```bash
+[auth.anonymous]
+enabled = true
+
+# Organization name that should be used for unauthenticated users
+org_name = Main Org.
+
+# Role for unauthenticated users, other valid values are `Editor` and `Admin`
+org_role = Viewer
+```
+
+If you change your organization name in the Grafana UI this setting needs to be updated to match the new name.
+
+### Basic authentication
+
+Basic auth is enabled by default and works with the built-in Grafana user password authentication system and LDAP
+authentication integration.
+
+To disable basic auth:
+
+```bash
+[auth.basic]
+enabled = false
+```
+
+### Disable login form
+
+You can hide the Grafana login form using the below configuration settings.
+
+```bash
+[auth]
+disable_login_form = true
+```
+
+### Hide sign-out menu
+
+Set the option detailed below to true to hide the sign-out menu link. Useful if you use an auth proxy.
+
+```bash
+[auth]
+disable_signout_menu = true
+```
+
diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
index da0c9581e99..6bfcfd807f1 100644
--- a/docs/sources/features/datasources/mssql.md
+++ b/docs/sources/features/datasources/mssql.md
@@ -6,7 +6,7 @@ type = "docs"
[menu.docs]
name = "Microsoft SQL Server"
parent = "datasources"
-weight = 7
+weight = 8
+++
# Using Microsoft SQL Server in Grafana
@@ -33,6 +33,24 @@ Name | Description
*User* | Database user's login/username
*Password* | Database user's password
+### Min time interval
+
+A lower limit for the [$__interval](/reference/templating/#the-interval-variable) and [$__interval_ms](/reference/templating/#the-interval-ms-variable) variables.
+Recommended to be set to write frequency, for example `1m` if your data is written every minute.
+This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a
+number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported:
+
+Identifier | Description
+------------ | -------------
+`y` | year
+`M` | month
+`w` | week
+`d` | day
+`h` | hour
+`m` | minute
+`s` | second
+`ms` | millisecond
+
### Database User Permissions (Important!)
The database user you specify when you add the data source should only be granted SELECT permissions on
diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
index afac746b050..e13abcf80a2 100644
--- a/docs/sources/features/datasources/mysql.md
+++ b/docs/sources/features/datasources/mysql.md
@@ -36,6 +36,24 @@ Name | Description
*User* | Database user's login/username
*Password* | Database user's password
+### Min time interval
+
+A lower limit for the [$__interval](/reference/templating/#the-interval-variable) and [$__interval_ms](/reference/templating/#the-interval-ms-variable) variables.
+Recommended to be set to write frequency, for example `1m` if your data is written every minute.
+This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a
+number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported:
+
+Identifier | Description
+------------ | -------------
+`y` | year
+`M` | month
+`w` | week
+`d` | day
+`h` | hour
+`m` | minute
+`s` | second
+`ms` | millisecond
+
### Database User Permissions (Important!)
The database user you specify when you add the data source should only be granted SELECT permissions on
diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md
index 1d195a01349..013d6342634 100644
--- a/docs/sources/features/datasources/postgres.md
+++ b/docs/sources/features/datasources/postgres.md
@@ -31,7 +31,26 @@ Name | Description
*User* | Database user's login/username
*Password* | Database user's password
*SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
-*TimescaleDB* | With this option enabled Grafana will use TimescaleDB features, e.g. use ```time_bucket``` for grouping by time (only available in Grafana 5.3+).
+*Version* | This option determines which functions are available in the query builder (only available in Grafana 5.3+).
+*TimescaleDB* | TimescaleDB is a time-series database built as a PostgreSQL extension. If enabled, Grafana will use `time_bucket` in the `$__timeGroup` macro and display TimescaleDB specific aggregate functions in the query builder (only available in Grafana 5.3+).
+
+### Min time interval
+
+A lower limit for the [$__interval](/reference/templating/#the-interval-variable) and [$__interval_ms](/reference/templating/#the-interval-ms-variable) variables.
+Recommended to be set to write frequency, for example `1m` if your data is written every minute.
+This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a
+number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported:
+
+Identifier | Description
+------------ | -------------
+`y` | year
+`M` | month
+`w` | week
+`d` | day
+`h` | hour
+`m` | minute
+`s` | second
+`ms` | millisecond
### Database User Permissions (Important!)
@@ -292,5 +311,6 @@ datasources:
password: "Password!"
jsonData:
sslmode: "disable" # disable/require/verify-ca/verify-full
+ postgresVersion: 903 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10
timescaledb: false
```
diff --git a/docs/sources/guides/whats-new-in-v5-3.md b/docs/sources/guides/whats-new-in-v5-3.md
new file mode 100644
index 00000000000..4a2674c9b39
--- /dev/null
+++ b/docs/sources/guides/whats-new-in-v5-3.md
@@ -0,0 +1,18 @@
++++
+title = "What's New in Grafana v5.3"
+description = "Feature & improvement highlights for Grafana v5.3"
+keywords = ["grafana", "new", "documentation", "5.3"]
+type = "docs"
+[menu.docs]
+name = "Version 5.3"
+identifier = "v5.3"
+parent = "whatsnew"
+weight = -9
++++
+
+# What's New in Grafana v5.3
+
+## Changelog
+
+Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
+of new features, changes, and bug fixes.
diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md
index 80b6e283be3..032fd508dd0 100644
--- a/docs/sources/http_api/alerting.md
+++ b/docs/sources/http_api/alerting.md
@@ -50,6 +50,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```http
HTTP/1.1 200
Content-Type: application/json
+
[
{
"id": 1,
@@ -86,6 +87,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```http
HTTP/1.1 200
Content-Type: application/json
+
{
"id": 1,
"dashboardId": 1,
@@ -146,6 +148,7 @@ JSON Body Schema:
```http
HTTP/1.1 200
Content-Type: application/json
+
{
"alertId": 1,
"state": "Paused",
@@ -177,6 +180,7 @@ JSON Body Schema:
```http
HTTP/1.1 200
Content-Type: application/json
+
{
"state": "Paused",
"message": "alert paused",
@@ -204,14 +208,21 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
HTTP/1.1 200
Content-Type: application/json
-{
- "id": 1,
- "name": "Team A",
- "type": "email",
- "isDefault": true,
- "created": "2017-01-01 12:45",
- "updated": "2017-01-01 12:45"
-}
+[
+ {
+ "id": 1,
+ "name": "Team A",
+ "type": "email",
+ "isDefault": false,
+ "sendReminder": false,
+ "settings": {
+ "addresses": "carl@grafana.com;dev@grafana.com"
+ },
+ "created": "2018-04-23T14:44:09+02:00",
+ "updated": "2018-08-20T15:47:49+02:00"
+ }
+]
+
```
## Create alert notification
@@ -232,6 +243,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
"name": "new alert notification", //Required
"type": "email", //Required
"isDefault": false,
+ "sendReminder": false,
"settings": {
"addresses": "carl@grafana.com;dev@grafana.com"
}
@@ -243,14 +255,18 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```http
HTTP/1.1 200
Content-Type: application/json
+
{
"id": 1,
"name": "new alert notification",
"type": "email",
"isDefault": false,
- "settings": { addresses: "carl@grafana.com;dev@grafana.com"} }
- "created": "2017-01-01 12:34",
- "updated": "2017-01-01 12:34"
+ "sendReminder": false,
+ "settings": {
+ "addresses": "carl@grafana.com;dev@grafana.com"
+ },
+ "created": "2018-04-23T14:44:09+02:00",
+ "updated": "2018-08-20T15:47:49+02:00"
}
```
@@ -271,6 +287,8 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
"name": "new alert notification", //Required
"type": "email", //Required
"isDefault": false,
+ "sendReminder": true,
+ "frequency": "15m",
"settings": {
"addresses: "carl@grafana.com;dev@grafana.com"
}
@@ -282,12 +300,17 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```http
HTTP/1.1 200
Content-Type: application/json
+
{
"id": 1,
"name": "new alert notification",
"type": "email",
"isDefault": false,
- "settings": { addresses: "carl@grafana.com;dev@grafana.com"} }
+ "sendReminder": true,
+ "frequency": "15m",
+ "settings": {
+ "addresses": "carl@grafana.com;dev@grafana.com"
+ },
"created": "2017-01-01 12:34",
"updated": "2017-01-01 12:34"
}
@@ -311,6 +334,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```http
HTTP/1.1 200
Content-Type: application/json
+
{
"message": "Notification deleted"
}
diff --git a/docs/sources/http_api/auth.md b/docs/sources/http_api/auth.md
index 8ff40b5ef04..e87d3571322 100644
--- a/docs/sources/http_api/auth.md
+++ b/docs/sources/http_api/auth.md
@@ -5,7 +5,7 @@ keywords = ["grafana", "http", "documentation", "api", "authentication"]
aliases = ["/http_api/authentication/"]
type = "docs"
[menu.docs]
-name = "Authentication"
+name = "Authentication HTTP API"
parent = "http_api"
+++
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 3394dfe16bc..2bf4789257d 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -322,470 +322,17 @@ Defaults to `false`.
## [auth]
-### disable_login_form
-
-Set to true to disable (hide) the login form, useful if you use OAuth, defaults to false.
-
-### disable_signout_menu
-
-Set to true to disable the signout link in the side menu. useful if you use auth.proxy, defaults to false.
-
-
-
-## [auth.anonymous]
-
-### enabled
-
-Set to `true` to enable anonymous access. Defaults to `false`
-
-### org_name
-
-Set the organization name that should be used for anonymous users. If
-you change your organization name in the Grafana UI this setting needs
-to be updated to match the new name.
-
-### org_role
-
-Specify role for anonymous users. Defaults to `Viewer`, other valid
-options are `Editor` and `Admin`.
-
-## [auth.github]
-
-You need to create a GitHub OAuth application (you find this under the GitHub
-settings page). When you create the application you will need to specify
-a callback URL. Specify this as callback:
-
-```bash
-http://:/login/github
-```
-
-This callback URL must match the full HTTP address that you use in your
-browser to access Grafana, but with the prefix path of `/login/github`.
-When the GitHub OAuth application is created you will get a Client ID and a
-Client Secret. Specify these in the Grafana configuration file. For
-example:
-
-```bash
-[auth.github]
-enabled = true
-allow_sign_up = true
-client_id = YOUR_GITHUB_APP_CLIENT_ID
-client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
-scopes = user:email,read:org
-auth_url = https://github.com/login/oauth/authorize
-token_url = https://github.com/login/oauth/access_token
-api_url = https://api.github.com/user
-team_ids =
-allowed_organizations =
-```
-
-Restart the Grafana back-end. You should now see a GitHub login button
-on the login page. You can now login or sign up with your GitHub
-accounts.
-
-You may allow users to sign-up via GitHub authentication by setting the
-`allow_sign_up` option to `true`. When this option is set to `true`, any
-user successfully authenticating via GitHub authentication will be
-automatically signed up.
-
-### team_ids
-
-Require an active team membership for at least one of the given teams on
-GitHub. If the authenticated user isn't a member of at least one of the
-teams they will not be able to register or authenticate with your
-Grafana instance. For example:
-
-```bash
-[auth.github]
-enabled = true
-client_id = YOUR_GITHUB_APP_CLIENT_ID
-client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
-scopes = user:email,read:org
-team_ids = 150,300
-auth_url = https://github.com/login/oauth/authorize
-token_url = https://github.com/login/oauth/access_token
-api_url = https://api.github.com/user
-allow_sign_up = true
-```
-
-### allowed_organizations
-
-Require an active organization membership for at least one of the given
-organizations on GitHub. If the authenticated user isn't a member of at least
-one of the organizations they will not be able to register or authenticate with
-your Grafana instance. For example
-
-```bash
-[auth.github]
-enabled = true
-client_id = YOUR_GITHUB_APP_CLIENT_ID
-client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
-scopes = user:email,read:org
-auth_url = https://github.com/login/oauth/authorize
-token_url = https://github.com/login/oauth/access_token
-api_url = https://api.github.com/user
-allow_sign_up = true
-# space-delimited organization names
-allowed_organizations = github google
-```
-
-
-
-## [auth.gitlab]
-
-> Only available in Grafana v5.3+.
-
-You need to [create a GitLab OAuth
-application](https://docs.gitlab.com/ce/integration/oauth_provider.html).
-Choose a descriptive *Name*, and use the following *Redirect URI*:
-
-```
-https://grafana.example.com/login/gitlab
-```
-
-where `https://grafana.example.com` is the URL you use to connect to Grafana.
-Adjust it as needed if you don't use HTTPS or if you use a different port; for
-instance, if you access Grafana at `http://203.0.113.31:3000`, you should use
-
-```
-http://203.0.113.31:3000/login/gitlab
-```
-
-Finally, select *api* as the *Scope* and submit the form. Note that if you're
-not going to use GitLab groups for authorization (i.e. not setting
-`allowed_groups`, see below), you can select *read_user* instead of *api* as
-the *Scope*, thus giving a more restricted access to your GitLab API.
-
-You'll get an *Application Id* and a *Secret* in return; we'll call them
-`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this
-section.
-
-Add the following to your Grafana configuration file to enable GitLab
-authentication:
-
-```ini
-[auth.gitlab]
-enabled = false
-allow_sign_up = false
-client_id = GITLAB_APPLICATION_ID
-client_secret = GITLAB_SECRET
-scopes = api
-auth_url = https://gitlab.com/oauth/authorize
-token_url = https://gitlab.com/oauth/token
-api_url = https://gitlab.com/api/v4
-allowed_groups =
-```
-
-Restart the Grafana backend for your changes to take effect.
-
-If you use your own instance of GitLab instead of `gitlab.com`, adjust
-`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com`
-hostname with your own.
-
-With `allow_sign_up` set to `false`, only existing users will be able to login
-using their GitLab account, but with `allow_sign_up` set to `true`, *any* user
-who can authenticate on GitLab will be able to login on your Grafana instance;
-if you use the public `gitlab.com`, it means anyone in the world would be able
-to login on your Grafana instance.
-
-You can can however limit access to only members of a given group or list of
-groups by setting the `allowed_groups` option.
-
-### allowed_groups
-
-To limit access to authenticated users that are members of one or more [GitLab
-groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups`
-to a comma- or space-separated list of groups. For instance, if you want to
-only give access to members of the `example` group, set
-
-
-```ini
-allowed_groups = example
-```
-
-If you want to also give access to members of the subgroup `bar`, which is in
-the group `foo`, set
-
-```ini
-allowed_groups = example, foo/bar
-```
-
-Note that in GitLab, the group or subgroup name doesn't always match its
-display name, especially if the display name contains spaces or special
-characters. Make sure you always use the group or subgroup name as it appears
-in the URL of the group or subgroup.
-
-Here's a complete example with `alloed_sign_up` enabled, and access limited to
-the `example` and `foo/bar` groups:
-
-```ini
-[auth.gitlab]
-enabled = false
-allow_sign_up = true
-client_id = GITLAB_APPLICATION_ID
-client_secret = GITLAB_SECRET
-scopes = api
-auth_url = https://gitlab.com/oauth/authorize
-token_url = https://gitlab.com/oauth/token
-api_url = https://gitlab.com/api/v4
-allowed_groups = example, foo/bar
-```
-
-
-
-## [auth.google]
-
-First, you need to create a Google OAuth Client:
-
-1. Go to https://console.developers.google.com/apis/credentials
-
-2. Click the 'Create Credentials' button, then click 'OAuth Client ID' in the
-menu that drops down
-
-3. Enter the following:
-
- - Application Type: Web Application
- - Name: Grafana
- - Authorized Javascript Origins: https://grafana.mycompany.com
- - Authorized Redirect URLs: https://grafana.mycompany.com/login/google
-
- Replace https://grafana.mycompany.com with the URL of your Grafana instance.
-
-4. Click Create
-
-5. Copy the Client ID and Client Secret from the 'OAuth Client' modal
-
-Specify the Client ID and Secret in the Grafana configuration file. For example:
-
-```bash
-[auth.google]
-enabled = true
-client_id = CLIENT_ID
-client_secret = CLIENT_SECRET
-scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
-auth_url = https://accounts.google.com/o/oauth2/auth
-token_url = https://accounts.google.com/o/oauth2/token
-allowed_domains = mycompany.com mycompany.org
-allow_sign_up = true
-```
-
-Restart the Grafana back-end. You should now see a Google login button
-on the login page. You can now login or sign up with your Google
-accounts. The `allowed_domains` option is optional, and domains were separated by space.
-
-You may allow users to sign-up via Google authentication by setting the
-`allow_sign_up` option to `true`. When this option is set to `true`, any
-user successfully authenticating via Google authentication will be
-automatically signed up.
-
-## [auth.generic_oauth]
-
-This option could be used if have your own oauth service.
-
-This callback URL must match the full HTTP address that you use in your
-browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
-
-```bash
-[auth.generic_oauth]
-enabled = true
-client_id = YOUR_APP_CLIENT_ID
-client_secret = YOUR_APP_CLIENT_SECRET
-scopes =
-auth_url =
-token_url =
-api_url =
-allowed_domains = mycompany.com mycompany.org
-allow_sign_up = true
-```
-
-Set api_url to the resource that returns [OpenID UserInfo](https://connect2id.com/products/server/docs/api/userinfo) compatible information.
-
-### Set up oauth2 with Okta
-
-First set up Grafana as an OpenId client "webapplication" in Okta. Then set the Base URIs to `https:///` and set the Login redirect URIs to `https:///login/generic_oauth`.
-
-Finally set up the generic oauth module like this:
-```bash
-[auth.generic_oauth]
-name = Okta
-enabled = true
-scopes = openid profile email
-client_id =
-client_secret =
-auth_url = https:///oauth2/v1/authorize
-token_url = https:///oauth2/v1/token
-api_url = https:///oauth2/v1/userinfo
-```
-
-### Set up oauth2 with Bitbucket
-
-```bash
-[auth.generic_oauth]
-name = BitBucket
-enabled = true
-allow_sign_up = true
-client_id =
-client_secret =
-scopes = account email
-auth_url = https://bitbucket.org/site/oauth2/authorize
-token_url = https://bitbucket.org/site/oauth2/access_token
-api_url = https://api.bitbucket.org/2.0/user
-team_ids =
-allowed_organizations =
-```
-
-### Set up oauth2 with OneLogin
-
-1. Create a new Custom Connector with the following settings:
- - Name: Grafana
- - Sign On Method: OpenID Connect
- - Redirect URI: `https:///login/generic_oauth`
- - Signing Algorithm: RS256
- - Login URL: `https:///login/generic_oauth`
-
- then:
-2. Add an App to the Grafana Connector:
- - Display Name: Grafana
-
- then:
-3. Under the SSO tab on the Grafana App details page you'll find the Client ID and Client Secret.
-
- Your OneLogin Domain will match the url you use to access OneLogin.
-
- Configure Grafana as follows:
-
- ```bash
- [auth.generic_oauth]
- name = OneLogin
- enabled = true
- allow_sign_up = true
- client_id =
- client_secret =
- scopes = openid email name
- auth_url = https://.onelogin.com/oidc/auth
- token_url = https://.onelogin.com/oidc/token
- api_url = https://.onelogin.com/oidc/me
- team_ids =
- allowed_organizations =
- ```
-
-### Set up oauth2 with Auth0
-
-1. Create a new Client in Auth0
- - Name: Grafana
- - Type: Regular Web Application
-
-2. Go to the Settings tab and set:
- - Allowed Callback URLs: `https:///login/generic_oauth`
-
-3. Click Save Changes, then use the values at the top of the page to configure Grafana:
-
- ```bash
- [auth.generic_oauth]
- enabled = true
- allow_sign_up = true
- team_ids =
- allowed_organizations =
- name = Auth0
- client_id =
- client_secret =
- scopes = openid profile email
- auth_url = https:///authorize
- token_url = https:///oauth/token
- api_url = https:///userinfo
- ```
-
-### Set up oauth2 with Azure Active Directory
-
-1. Log in to portal.azure.com and click "Azure Active Directory" in the side menu, then click the "Properties" sub-menu item.
-
-2. Copy the "Directory ID", this is needed for setting URLs later
-
-3. Click "App Registrations" and add a new application registration:
- - Name: Grafana
- - Application type: Web app / API
- - Sign-on URL: `https:///login/generic_oauth`
-
-4. Click the name of the new application to open the application details page.
-
-5. Note down the "Application ID", this will be the OAuth client id.
-
-6. Click "Settings", then click "Keys" and add a new entry under Passwords
- - Key Description: Grafana OAuth
- - Duration: Never Expires
-
-7. Click Save then copy the key value, this will be the OAuth client secret.
-
-8. Configure Grafana as follows:
-
- ```bash
- [auth.generic_oauth]
- name = Azure AD
- enabled = true
- allow_sign_up = true
- client_id =
- client_secret =
- scopes = openid email name
- auth_url = https://login.microsoftonline.com//oauth2/authorize
- token_url = https://login.microsoftonline.com//oauth2/token
- api_url =
- team_ids =
- allowed_organizations =
- ```
-
-
-
-## [auth.basic]
-### enabled
-When enabled is `true` (default) the http api will accept basic authentication.
-
-
-
-## [auth.ldap]
-### enabled
-Set to `true` to enable LDAP integration (default: `false`)
-
-### config_file
-Path to the LDAP specific configuration file (default: `/etc/grafana/ldap.toml`)
-
-### allow_sign_up
-
-Allow sign up should almost always be true (default) to allow new Grafana users to be created (if ldap authentication is ok). If set to
-false only pre-existing Grafana users will be able to login (if ldap authentication is ok).
-
-> For details on LDAP Configuration, go to the [LDAP Integration]({{< relref "ldap.md" >}}) page.
-
-
-
-## [auth.proxy]
-
-This feature allows you to handle authentication in a http reverse proxy.
-
-### enabled
-
-Defaults to `false`
-
-### header_name
-
-Defaults to X-WEBAUTH-USER
-
-#### header_property
-
-Defaults to username but can also be set to email
-
-### auto_sign_up
-
-Set to `true` to enable auto sign up of users who do not exist in Grafana DB. Defaults to `true`.
-
-### whitelist
-
-Limit where auth proxy requests come from by configuring a list of IP addresses. This can be used to prevent users spoofing the X-WEBAUTH-USER header.
-
-### headers
-
-Used to define additional headers for `Name`, `Email` and/or `Login`, for example if the user's name is sent in the X-WEBAUTH-NAME header and their email address in the X-WEBAUTH-EMAIL header, set `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL`.
-
-
+Grafana provides many ways to authenticate users. The docs for authentication have been split into many different pages
+below.
+
+- [Authentication Overview]({{< relref "auth/overview.md" >}}) (anonymous access options, hide login and more)
+- [Google OAuth]({{< relref "auth/google.md" >}}) (auth.google)
+- [GitHub OAuth]({{< relref "auth/github.md" >}}) (auth.github)
+- [Gitlab OAuth]({{< relref "auth/gitlab.md" >}}) (auth.gitlab)
+- [Generic OAuth]({{< relref "auth/generic-oauth.md" >}}) (auth.generic_oauth, okta2, auth0, bitbucket, azure)
+- [Basic Authentication]({{< relref "auth/overview.md" >}}) (auth.basic)
+- [LDAP Authentication]({{< relref "auth/ldap.md" >}}) (auth.ldap)
+- [Auth Proxy]({{< relref "auth/auth-proxy.md" >}}) (auth.proxy)
## [session]
@@ -1009,3 +556,13 @@ Defaults to true. Set to false to disable alerting engine and hide Alerting from
### execute_alerts
Makes it possible to turn off alert rule execution.
+
+### error_or_timeout
+> Available in 5.3 and above
+
+Default setting for new alert rules. Defaults to categorize error and timeouts as alerting. (alerting, keep_state)
+
+### nodata_or_nullvalues
+> Available in 5.3 and above
+
+Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
diff --git a/docs/sources/installation/docker.md b/docs/sources/installation/docker.md
index c71dc105ad4..ba0d6199ba4 100644
--- a/docs/sources/installation/docker.md
+++ b/docs/sources/installation/docker.md
@@ -20,7 +20,7 @@ $ docker run -d -p 3000:3000 grafana/grafana
## Configuration
-All options defined in conf/grafana.ini can be overridden using environment
+All options defined in `conf/grafana.ini` can be overridden using environment
variables by using the syntax `GF_<SectionName>_<KeyName>`.
For example:
@@ -40,6 +40,19 @@ those options.
> For any changes to `conf/grafana.ini` (or corresponding environment variables) to take effect you need to restart Grafana by restarting the Docker container.
+### Default Paths
+
+The following settings are hard-coded when launching the Grafana Docker container and can only be overridden using environment variables, not in `conf/grafana.ini`.
+
+Setting | Default value
+----------------------|---------------------------
+GF_PATHS_CONFIG | /etc/grafana/grafana.ini
+GF_PATHS_DATA | /var/lib/grafana
+GF_PATHS_HOME | /usr/share/grafana
+GF_PATHS_LOGS | /var/log/grafana
+GF_PATHS_PLUGINS | /var/lib/grafana/plugins
+GF_PATHS_PROVISIONING | /etc/grafana/provisioning
+
## Running a Specific Version of Grafana
```bash
diff --git a/docs/sources/installation/upgrading.md b/docs/sources/installation/upgrading.md
index c72bb4c0921..a476a38c3c5 100644
--- a/docs/sources/installation/upgrading.md
+++ b/docs/sources/installation/upgrading.md
@@ -109,3 +109,11 @@ positioning system when you load them in v5. Dashboards saved in v5 will not wor
external panel plugins might need to be updated to work properly.
For more details on the new panel positioning system, [click here]({{< relref "reference/dashboard.md#panel-size-position" >}})
+
+## Upgrading to v5.2
+
+One of the database migrations included in this release will update all annotation timestamps from second to millisecond precision. If you have a large amount of annotations the database migration may take a long time to complete which may cause problems if you use systemd to run Grafana.
+
+We've got one report where using systemd, PostgreSQL and a large amount of annotations (table size 1645mb) took 8-20 minutes for the database migration to complete. However, the grafana-server process was killed after 90 seconds by systemd. Any database migration queries in progress when systemd kills the grafana-server process continue to execute in the database until finished.
+
+If you're using systemd and have a large amount of annotations consider temporarily adjusting the systemd `TimeoutStartSec` setting to something high like `30m` before upgrading.
diff --git a/docs/sources/project/building_from_source.md b/docs/sources/project/building_from_source.md
index 64e67a22bae..ea75b9797e8 100644
--- a/docs/sources/project/building_from_source.md
+++ b/docs/sources/project/building_from_source.md
@@ -13,7 +13,7 @@ dev environment. Grafana ships with its own required backend server; also comple
## Dependencies
-- [Go 1.10](https://golang.org/dl/)
+- [Go 1.11](https://golang.org/dl/)
- [Git](https://git-scm.com/downloads)
- [NodeJS LTS](https://nodejs.org/download/)
- node-gyp is the Node.js native addon build tool and it requires extra dependencies: python 2.7, make and GCC. These are already installed for most Linux distros and MacOS. See the Building On Windows section or the [node-gyp installation instructions](https://github.com/nodejs/node-gyp#installation) for more details.
diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md
index 7f86465312c..31251fd6389 100644
--- a/docs/sources/reference/templating.md
+++ b/docs/sources/reference/templating.md
@@ -245,7 +245,7 @@ Grafana has global built-in variables that can be used in expressions in the que
### The $__interval Variable
-This $__interval variable is similar to the `auto` interval variable that is described above. It can be used as a parameter to group by time (for InfluxDB), Date histogram interval (for Elasticsearch) or as a *summarize* function parameter (for Graphite).
+This $__interval variable is similar to the `auto` interval variable that is described above. It can be used as a parameter to group by time (for InfluxDB, MySQL, Postgres, MSSQL), Date histogram interval (for Elasticsearch) or as a *summarize* function parameter (for Graphite).
Grafana automatically calculates an interval that can be used to group by time in queries. When there are more data points than can be shown on a graph then queries can be made more efficient by grouping by a larger interval. It is more efficient to group by 1 day than by 10s when looking at 3 months of data and the graph will look the same and the query will be faster. The `$__interval` is calculated using the time range and the width of the graph (the number of pixels).
diff --git a/docs/sources/tutorials/ha_setup.md b/docs/sources/tutorials/ha_setup.md
index 9ae2989f6e6..0f138b20a17 100644
--- a/docs/sources/tutorials/ha_setup.md
+++ b/docs/sources/tutorials/ha_setup.md
@@ -27,7 +27,7 @@ Grafana will now persist all long term data in the database. How to configure th
## User sessions
The second thing to consider is how to deal with user sessions and how to configure your load balancer infront of Grafana.
-Grafana support two says of storing session data locally on disk or in a database/cache-server.
+Grafana supports two ways of storing session data: locally on disk or in a database/cache-server.
If you want to store sessions on disk you can use `sticky sessions` in your load balanacer. If you prefer to store session data in a database/cache-server
you can use any stateless routing strategy in your load balancer (ex round robin or least connections).
diff --git a/package.json b/package.json
index d7f136cb1b2..9ee81d7f8ac 100644
--- a/package.json
+++ b/package.json
@@ -102,7 +102,7 @@
"build": "grunt build",
"test": "grunt test",
"test:coverage": "grunt test --coverage=true",
- "lint": "tslint -c tslint.json --project tsconfig.json --type-check",
+ "lint": "tslint -c tslint.json --project tsconfig.json",
"jest": "jest --notify --watch",
"api-tests": "jest --notify --watch --config=tests/api/jest.js",
"precommit": "lint-staged && grunt precommit"
diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go
index 60013fe2b10..a936d696207 100644
--- a/pkg/api/alerting.go
+++ b/pkg/api/alerting.go
@@ -192,14 +192,7 @@ func GetAlertNotifications(c *m.ReqContext) Response {
result := make([]*dtos.AlertNotification, 0)
for _, notification := range query.Result {
- result = append(result, &dtos.AlertNotification{
- Id: notification.Id,
- Name: notification.Name,
- Type: notification.Type,
- IsDefault: notification.IsDefault,
- Created: notification.Created,
- Updated: notification.Updated,
- })
+ result = append(result, dtos.NewAlertNotification(notification))
}
return JSON(200, result)
@@ -215,7 +208,7 @@ func GetAlertNotificationByID(c *m.ReqContext) Response {
return Error(500, "Failed to get alert notifications", err)
}
- return JSON(200, query.Result)
+ return JSON(200, dtos.NewAlertNotification(query.Result))
}
func CreateAlertNotification(c *m.ReqContext, cmd m.CreateAlertNotificationCommand) Response {
@@ -225,7 +218,7 @@ func CreateAlertNotification(c *m.ReqContext, cmd m.CreateAlertNotificationComma
return Error(500, "Failed to create alert notification", err)
}
- return JSON(200, cmd.Result)
+ return JSON(200, dtos.NewAlertNotification(cmd.Result))
}
func UpdateAlertNotification(c *m.ReqContext, cmd m.UpdateAlertNotificationCommand) Response {
@@ -235,7 +228,7 @@ func UpdateAlertNotification(c *m.ReqContext, cmd m.UpdateAlertNotificationComma
return Error(500, "Failed to update alert notification", err)
}
- return JSON(200, cmd.Result)
+ return JSON(200, dtos.NewAlertNotification(cmd.Result))
}
func DeleteAlertNotification(c *m.ReqContext) Response {
diff --git a/pkg/api/dtos/alerting.go b/pkg/api/dtos/alerting.go
index d30f2697f3f..697d0a35a08 100644
--- a/pkg/api/dtos/alerting.go
+++ b/pkg/api/dtos/alerting.go
@@ -1,35 +1,76 @@
package dtos
import (
+ "fmt"
"time"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
- m "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/models"
)
type AlertRule struct {
- Id int64 `json:"id"`
- DashboardId int64 `json:"dashboardId"`
- PanelId int64 `json:"panelId"`
- Name string `json:"name"`
- Message string `json:"message"`
- State m.AlertStateType `json:"state"`
- NewStateDate time.Time `json:"newStateDate"`
- EvalDate time.Time `json:"evalDate"`
- EvalData *simplejson.Json `json:"evalData"`
- ExecutionError string `json:"executionError"`
- Url string `json:"url"`
- CanEdit bool `json:"canEdit"`
+ Id int64 `json:"id"`
+ DashboardId int64 `json:"dashboardId"`
+ PanelId int64 `json:"panelId"`
+ Name string `json:"name"`
+ Message string `json:"message"`
+ State models.AlertStateType `json:"state"`
+ NewStateDate time.Time `json:"newStateDate"`
+ EvalDate time.Time `json:"evalDate"`
+ EvalData *simplejson.Json `json:"evalData"`
+ ExecutionError string `json:"executionError"`
+ Url string `json:"url"`
+ CanEdit bool `json:"canEdit"`
+}
+
+func formatShort(interval time.Duration) string {
+ var result string
+
+ hours := interval / time.Hour
+ if hours > 0 {
+ result += fmt.Sprintf("%dh", hours)
+ }
+
+ remaining := interval - (hours * time.Hour)
+ mins := remaining / time.Minute
+ if mins > 0 {
+ result += fmt.Sprintf("%dm", mins)
+ }
+
+ remaining = remaining - (mins * time.Minute)
+ seconds := remaining / time.Second
+ if seconds > 0 {
+ result += fmt.Sprintf("%ds", seconds)
+ }
+
+ return result
+}
+
+func NewAlertNotification(notification *models.AlertNotification) *AlertNotification {
+ return &AlertNotification{
+ Id: notification.Id,
+ Name: notification.Name,
+ Type: notification.Type,
+ IsDefault: notification.IsDefault,
+ Created: notification.Created,
+ Updated: notification.Updated,
+ Frequency: formatShort(notification.Frequency),
+ SendReminder: notification.SendReminder,
+ Settings: notification.Settings,
+ }
}
type AlertNotification struct {
- Id int64 `json:"id"`
- Name string `json:"name"`
- Type string `json:"type"`
- IsDefault bool `json:"isDefault"`
- Created time.Time `json:"created"`
- Updated time.Time `json:"updated"`
+ Id int64 `json:"id"`
+ Name string `json:"name"`
+ Type string `json:"type"`
+ IsDefault bool `json:"isDefault"`
+ SendReminder bool `json:"sendReminder"`
+ Frequency string `json:"frequency"`
+ Created time.Time `json:"created"`
+ Updated time.Time `json:"updated"`
+ Settings *simplejson.Json `json:"settings"`
}
type AlertTestCommand struct {
@@ -39,7 +80,7 @@ type AlertTestCommand struct {
type AlertTestResult struct {
Firing bool `json:"firing"`
- State m.AlertStateType `json:"state"`
+ State models.AlertStateType `json:"state"`
ConditionEvals string `json:"conditionEvals"`
TimeMs string `json:"timeMs"`
Error string `json:"error,omitempty"`
@@ -59,9 +100,11 @@ type EvalMatch struct {
}
type NotificationTestCommand struct {
- Name string `json:"name"`
- Type string `json:"type"`
- Settings *simplejson.Json `json:"settings"`
+ Name string `json:"name"`
+ Type string `json:"type"`
+ SendReminder bool `json:"sendReminder"`
+ Frequency string `json:"frequency"`
+ Settings *simplejson.Json `json:"settings"`
}
type PauseAlertCommand struct {
diff --git a/pkg/api/dtos/alerting_test.go b/pkg/api/dtos/alerting_test.go
new file mode 100644
index 00000000000..c38f281be9c
--- /dev/null
+++ b/pkg/api/dtos/alerting_test.go
@@ -0,0 +1,35 @@
+package dtos
+
+import (
+ "testing"
+ "time"
+)
+
+func TestFormatShort(t *testing.T) {
+ tcs := []struct {
+ interval time.Duration
+ expected string
+ }{
+ {interval: time.Hour, expected: "1h"},
+ {interval: time.Hour + time.Minute, expected: "1h1m"},
+ {interval: (time.Hour * 10) + time.Minute, expected: "10h1m"},
+ {interval: (time.Hour * 10) + (time.Minute * 10) + time.Second, expected: "10h10m1s"},
+ {interval: time.Minute * 10, expected: "10m"},
+ }
+
+ for _, tc := range tcs {
+ got := formatShort(tc.interval)
+ if got != tc.expected {
+ t.Errorf("expected %s got %s interval: %v", tc.expected, got, tc.interval)
+ }
+
+ parsed, err := time.ParseDuration(tc.expected)
+ if err != nil {
+ t.Fatalf("could not parse expected duration")
+ }
+
+ if parsed != tc.interval {
+ t.Errorf("expectes the parsed duration to equal the interval. Got %v expected: %v", parsed, tc.interval)
+ }
+ }
+}
diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go
index da3c88566c1..a58be38781e 100644
--- a/pkg/api/frontendsettings.go
+++ b/pkg/api/frontendsettings.go
@@ -132,20 +132,22 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) {
}
jsonObj := map[string]interface{}{
- "defaultDatasource": defaultDatasource,
- "datasources": datasources,
- "panels": panels,
- "appSubUrl": setting.AppSubUrl,
- "allowOrgCreate": (setting.AllowUserOrgCreate && c.IsSignedIn) || c.IsGrafanaAdmin,
- "authProxyEnabled": setting.AuthProxyEnabled,
- "ldapEnabled": setting.LdapEnabled,
- "alertingEnabled": setting.AlertingEnabled,
- "exploreEnabled": setting.ExploreEnabled,
- "googleAnalyticsId": setting.GoogleAnalyticsId,
- "disableLoginForm": setting.DisableLoginForm,
- "externalUserMngInfo": setting.ExternalUserMngInfo,
- "externalUserMngLinkUrl": setting.ExternalUserMngLinkUrl,
- "externalUserMngLinkName": setting.ExternalUserMngLinkName,
+ "defaultDatasource": defaultDatasource,
+ "datasources": datasources,
+ "panels": panels,
+ "appSubUrl": setting.AppSubUrl,
+ "allowOrgCreate": (setting.AllowUserOrgCreate && c.IsSignedIn) || c.IsGrafanaAdmin,
+ "authProxyEnabled": setting.AuthProxyEnabled,
+ "ldapEnabled": setting.LdapEnabled,
+ "alertingEnabled": setting.AlertingEnabled,
+ "alertingErrorOrTimeout": setting.AlertingErrorOrTimeout,
+ "alertingNoDataOrNullValues": setting.AlertingNoDataOrNullValues,
+ "exploreEnabled": setting.ExploreEnabled,
+ "googleAnalyticsId": setting.GoogleAnalyticsId,
+ "disableLoginForm": setting.DisableLoginForm,
+ "externalUserMngInfo": setting.ExternalUserMngInfo,
+ "externalUserMngLinkUrl": setting.ExternalUserMngLinkUrl,
+ "externalUserMngLinkName": setting.ExternalUserMngLinkName,
"buildInfo": map[string]interface{}{
"version": setting.BuildVersion,
"commit": setting.BuildCommit,
diff --git a/pkg/api/live/conn.go b/pkg/api/live/conn.go
index f2a041d7631..0fae7f75b73 100644
--- a/pkg/api/live/conn.go
+++ b/pkg/api/live/conn.go
@@ -70,7 +70,7 @@ func (c *connection) readPump() {
func (c *connection) handleMessage(message []byte) {
json, err := simplejson.NewJson(message)
if err != nil {
- log.Error(3, "Unreadable message on websocket channel:", err)
+ log.Error(3, "Unreadable message on websocket channel. error: %v", err)
}
msgType := json.Get("action").MustString()
diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go
index 9bdb73a5858..5d4969e06af 100644
--- a/pkg/cmd/grafana-cli/commands/install_command.go
+++ b/pkg/cmd/grafana-cli/commands/install_command.go
@@ -152,7 +152,7 @@ func downloadFile(pluginName, filePath, url string) (err error) {
return err
}
- r, err := zip.NewReader(bytes.NewReader(body), resp.ContentLength)
+ r, err := zip.NewReader(bytes.NewReader(body), int64(len(body)))
if err != nil {
return err
}
diff --git a/pkg/cmd/grafana-cli/services/services.go b/pkg/cmd/grafana-cli/services/services.go
index e743d42022c..b4e50ac84df 100644
--- a/pkg/cmd/grafana-cli/services/services.go
+++ b/pkg/cmd/grafana-cli/services/services.go
@@ -63,7 +63,7 @@ func ListAllPlugins(repoUrl string) (m.PluginRepo, error) {
var data m.PluginRepo
err = json.Unmarshal(body, &data)
if err != nil {
- logger.Info("Failed to unmarshal graphite response error: %v", err)
+ logger.Info("Failed to unmarshal graphite response error:", err)
return m.PluginRepo{}, err
}
@@ -140,7 +140,7 @@ func GetPlugin(pluginId, repoUrl string) (m.Plugin, error) {
var data m.Plugin
err = json.Unmarshal(body, &data)
if err != nil {
- logger.Info("Failed to unmarshal graphite response error: %v", err)
+ logger.Info("Failed to unmarshal graphite response error:", err)
return m.Plugin{}, err
}
diff --git a/pkg/cmd/grafana-cli/utils/grafana_path.go b/pkg/cmd/grafana-cli/utils/grafana_path.go
index afb622bbb93..5f5c944f52b 100644
--- a/pkg/cmd/grafana-cli/utils/grafana_path.go
+++ b/pkg/cmd/grafana-cli/utils/grafana_path.go
@@ -42,6 +42,8 @@ func returnOsDefault(currentOs string) string {
return "/usr/local/var/lib/grafana/plugins"
case "freebsd":
return "/var/db/grafana/plugins"
+ case "openbsd":
+ return "/var/grafana/plugins"
default: //"linux"
return "/var/lib/grafana/plugins"
}
diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go
index f00e6bba0fd..f1e298671d7 100644
--- a/pkg/cmd/grafana-server/main.go
+++ b/pkg/cmd/grafana-server/main.go
@@ -96,13 +96,17 @@ func main() {
func listenToSystemSignals(server *GrafanaServerImpl) {
signalChan := make(chan os.Signal, 1)
- ignoreChan := make(chan os.Signal, 1)
+ sighupChan := make(chan os.Signal, 1)
- signal.Notify(ignoreChan, syscall.SIGHUP)
+ signal.Notify(sighupChan, syscall.SIGHUP)
signal.Notify(signalChan, os.Interrupt, os.Kill, syscall.SIGTERM)
- select {
- case sig := <-signalChan:
- server.Shutdown(fmt.Sprintf("System signal: %s", sig))
+ for {
+ select {
+ case _ = <-sighupChan:
+ log.Reload()
+ case sig := <-signalChan:
+ server.Shutdown(fmt.Sprintf("System signal: %s", sig))
+ }
}
}
diff --git a/pkg/components/imguploader/s3uploader.go b/pkg/components/imguploader/s3uploader.go
index 62196357c61..a1e4aed0f47 100644
--- a/pkg/components/imguploader/s3uploader.go
+++ b/pkg/components/imguploader/s3uploader.go
@@ -60,7 +60,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string,
s3_endpoint, _ := endpoints.DefaultResolver().EndpointFor("s3", u.region)
key := u.path + util.GetRandomString(20) + ".png"
image_url := s3_endpoint.URL + "/" + u.bucket + "/" + key
- log.Debug("Uploading image to s3", "url = ", image_url)
+ log.Debug("Uploading image to s3. url = %s", image_url)
file, err := os.Open(imageDiskPath)
if err != nil {
diff --git a/pkg/log/file.go b/pkg/log/file.go
index d137adbf3de..b8430dc6086 100644
--- a/pkg/log/file.go
+++ b/pkg/log/file.go
@@ -236,3 +236,20 @@ func (w *FileLogWriter) Close() {
func (w *FileLogWriter) Flush() {
w.mw.fd.Sync()
}
+
+// Reload file logger
+func (w *FileLogWriter) Reload() {
+ // block Logger's io.Writer
+ w.mw.Lock()
+ defer w.mw.Unlock()
+
+ // Close
+ fd := w.mw.fd
+ fd.Close()
+
+ // Open again
+ err := w.StartLogger()
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Reload StartLogger: %s\n", err)
+ }
+}
diff --git a/pkg/log/handlers.go b/pkg/log/handlers.go
index 14a96fdcdb4..804d8fcbd70 100644
--- a/pkg/log/handlers.go
+++ b/pkg/log/handlers.go
@@ -3,3 +3,7 @@ package log
type DisposableHandler interface {
Close()
}
+
+type ReloadableHandler interface {
+ Reload()
+}
diff --git a/pkg/log/log.go b/pkg/log/log.go
index 0e6874e1b4b..8f0522748ef 100644
--- a/pkg/log/log.go
+++ b/pkg/log/log.go
@@ -21,10 +21,12 @@ import (
var Root log15.Logger
var loggersToClose []DisposableHandler
+var loggersToReload []ReloadableHandler
var filters map[string]log15.Lvl
func init() {
loggersToClose = make([]DisposableHandler, 0)
+ loggersToReload = make([]ReloadableHandler, 0)
Root = log15.Root()
Root.SetHandler(log15.DiscardHandler())
}
@@ -103,7 +105,7 @@ func Critical(skip int, format string, v ...interface{}) {
}
func Fatal(skip int, format string, v ...interface{}) {
- Root.Crit(fmt.Sprintf(format, v))
+ Root.Crit(fmt.Sprintf(format, v...))
Close()
os.Exit(1)
}
@@ -115,6 +117,12 @@ func Close() {
loggersToClose = make([]DisposableHandler, 0)
}
+func Reload() {
+ for _, logger := range loggersToReload {
+ logger.Reload()
+ }
+}
+
func GetLogLevelFor(name string) Lvl {
if level, ok := filters[name]; ok {
switch level {
@@ -230,6 +238,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) {
fileHandler.Init()
loggersToClose = append(loggersToClose, fileHandler)
+ loggersToReload = append(loggersToReload, fileHandler)
handler = fileHandler
case "syslog":
sysLogHandler := NewSyslog(sec, format)
diff --git a/pkg/login/auth.go b/pkg/login/auth.go
index 215a22cde33..991fa72fd54 100644
--- a/pkg/login/auth.go
+++ b/pkg/login/auth.go
@@ -2,7 +2,6 @@ package login
import (
"errors"
-
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
)
@@ -14,6 +13,7 @@ var (
ErrProviderDeniedRequest = errors.New("Login provider denied login request")
ErrSignUpNotAllowed = errors.New("Signup is not allowed for this adapter")
ErrTooManyLoginAttempts = errors.New("Too many consecutive incorrect login attempts for user. Login for user temporarily blocked")
+ ErrPasswordEmpty = errors.New("No password provided.")
ErrUsersQuotaReached = errors.New("Users quota reached")
ErrGettingUserQuota = errors.New("Error getting user quota")
)
@@ -28,6 +28,10 @@ func AuthenticateUser(query *m.LoginUserQuery) error {
return err
}
+ if err := validatePasswordSet(query.Password); err != nil {
+ return err
+ }
+
err := loginUsingGrafanaDB(query)
if err == nil || (err != m.ErrUserNotFound && err != ErrInvalidCredentials) {
return err
@@ -52,3 +56,10 @@ func AuthenticateUser(query *m.LoginUserQuery) error {
return err
}
+func validatePasswordSet(password string) error {
+ if len(password) == 0 {
+ return ErrPasswordEmpty
+ }
+
+ return nil
+}
diff --git a/pkg/login/auth_test.go b/pkg/login/auth_test.go
index 932125c410e..a4cd8284cdd 100644
--- a/pkg/login/auth_test.go
+++ b/pkg/login/auth_test.go
@@ -10,6 +10,24 @@ import (
func TestAuthenticateUser(t *testing.T) {
Convey("Authenticate user", t, func() {
+ authScenario("When a user authenticates without setting a password", func(sc *authScenarioContext) {
+ mockLoginAttemptValidation(nil, sc)
+ mockLoginUsingGrafanaDB(nil, sc)
+ mockLoginUsingLdap(false, nil, sc)
+
+ loginQuery := m.LoginUserQuery{
+ Username: "user",
+ Password: "",
+ }
+ err := AuthenticateUser(&loginQuery)
+
+ Convey("login should fail", func() {
+ So(sc.grafanaLoginWasCalled, ShouldBeFalse)
+ So(sc.ldapLoginWasCalled, ShouldBeFalse)
+ So(err, ShouldEqual, ErrPasswordEmpty)
+ })
+ })
+
authScenario("When a user authenticates having too many login attempts", func(sc *authScenarioContext) {
mockLoginAttemptValidation(ErrTooManyLoginAttempts, sc)
mockLoginUsingGrafanaDB(nil, sc)
diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go
index a421e3ebe0a..1262c1cc44f 100644
--- a/pkg/login/ext_user.go
+++ b/pkg/login/ext_user.go
@@ -35,7 +35,7 @@ func UpsertUser(cmd *m.UpsertUserCommand) error {
limitReached, err := quota.QuotaReached(cmd.ReqContext, "user")
if err != nil {
- log.Warn("Error getting user quota", "err", err)
+ log.Warn("Error getting user quota. error: %v", err)
return ErrGettingUserQuota
}
if limitReached {
@@ -135,7 +135,7 @@ func updateUser(user *m.User, extUser *m.ExternalUserInfo) error {
return nil
}
- log.Debug("Syncing user info", "id", user.Id, "update", updateCmd)
+ log.Debug2("Syncing user info", "id", user.Id, "update", updateCmd)
return bus.Dispatch(updateCmd)
}
diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go
index a8d9f7308fa..dcdfbf124e1 100644
--- a/pkg/metrics/metrics.go
+++ b/pkg/metrics/metrics.go
@@ -440,6 +440,16 @@ func sendUsageStats() {
metrics["stats.ds_access.other."+access+".count"] = count
}
+ anStats := models.GetAlertNotifierUsageStatsQuery{}
+ if err := bus.Dispatch(&anStats); err != nil {
+ metricsLogger.Error("Failed to get alert notification stats", "error", err)
+ return
+ }
+
+ for _, stats := range anStats.Result {
+ metrics["stats.alert_notifiers."+stats.Type+".count"] = stats.Count
+ }
+
out, _ := json.MarshalIndent(report, "", " ")
data := bytes.NewBuffer(out)
diff --git a/pkg/metrics/metrics_test.go b/pkg/metrics/metrics_test.go
index 8d88e03d106..9fbfd0c26a2 100644
--- a/pkg/metrics/metrics_test.go
+++ b/pkg/metrics/metrics_test.go
@@ -115,6 +115,24 @@ func TestMetrics(t *testing.T) {
return nil
})
+ var getAlertNotifierUsageStatsQuery *models.GetAlertNotifierUsageStatsQuery
+ bus.AddHandler("test", func(query *models.GetAlertNotifierUsageStatsQuery) error {
+ query.Result = []*models.NotifierUsageStats{
+ {
+ Type: "slack",
+ Count: 1,
+ },
+ {
+ Type: "webhook",
+ Count: 2,
+ },
+ }
+
+ getAlertNotifierUsageStatsQuery = query
+
+ return nil
+ })
+
var wg sync.WaitGroup
var responseBuffer *bytes.Buffer
var req *http.Request
@@ -157,6 +175,7 @@ func TestMetrics(t *testing.T) {
So(getSystemStatsQuery, ShouldNotBeNil)
So(getDataSourceStatsQuery, ShouldNotBeNil)
So(getDataSourceAccessStatsQuery, ShouldNotBeNil)
+ So(getAlertNotifierUsageStatsQuery, ShouldNotBeNil)
So(req, ShouldNotBeNil)
So(req.Method, ShouldEqual, http.MethodPost)
So(req.Header.Get("Content-Type"), ShouldEqual, "application/json")
@@ -198,6 +217,9 @@ func TestMetrics(t *testing.T) {
So(metrics.Get("stats.ds_access."+models.DS_PROMETHEUS+".proxy.count").MustInt(), ShouldEqual, 3)
So(metrics.Get("stats.ds_access.other.direct.count").MustInt(), ShouldEqual, 6+7)
So(metrics.Get("stats.ds_access.other.proxy.count").MustInt(), ShouldEqual, 4+8)
+
+ So(metrics.Get("stats.alert_notifiers.slack.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.alert_notifiers.webhook.count").MustInt(), ShouldEqual, 2)
})
})
diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go
index 144a0ae3a69..29bd305b336 100644
--- a/pkg/middleware/auth_proxy.go
+++ b/pkg/middleware/auth_proxy.go
@@ -36,7 +36,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool {
// initialize session
if err := ctx.Session.Start(ctx.Context); err != nil {
- log.Error(3, "Failed to start session", err)
+ log.Error(3, "Failed to start session. error %v", err)
return false
}
@@ -146,12 +146,12 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool {
if getRequestUserId(ctx) > 0 && getRequestUserId(ctx) != query.Result.UserId {
// remove session
if err := ctx.Session.Destory(ctx.Context); err != nil {
- log.Error(3, "Failed to destroy session, err")
+ log.Error(3, "Failed to destroy session. error: %v", err)
}
// initialize a new session
if err := ctx.Session.Start(ctx.Context); err != nil {
- log.Error(3, "Failed to start session", err)
+ log.Error(3, "Failed to start session. error: %v", err)
}
}
diff --git a/pkg/models/alert_notifications.go b/pkg/models/alert_notifications.go
index 87b515f370c..42d33d5ed22 100644
--- a/pkg/models/alert_notifications.go
+++ b/pkg/models/alert_notifications.go
@@ -1,38 +1,50 @@
package models
import (
+ "errors"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
)
+var (
+ ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified")
+ ErrJournalingNotFound = errors.New("alert notification journaling not found")
+)
+
type AlertNotification struct {
- Id int64 `json:"id"`
- OrgId int64 `json:"-"`
- Name string `json:"name"`
- Type string `json:"type"`
- IsDefault bool `json:"isDefault"`
- Settings *simplejson.Json `json:"settings"`
- Created time.Time `json:"created"`
- Updated time.Time `json:"updated"`
+ Id int64 `json:"id"`
+ OrgId int64 `json:"-"`
+ Name string `json:"name"`
+ Type string `json:"type"`
+ SendReminder bool `json:"sendReminder"`
+ Frequency time.Duration `json:"frequency"`
+ IsDefault bool `json:"isDefault"`
+ Settings *simplejson.Json `json:"settings"`
+ Created time.Time `json:"created"`
+ Updated time.Time `json:"updated"`
}
type CreateAlertNotificationCommand struct {
- Name string `json:"name" binding:"Required"`
- Type string `json:"type" binding:"Required"`
- IsDefault bool `json:"isDefault"`
- Settings *simplejson.Json `json:"settings"`
+ Name string `json:"name" binding:"Required"`
+ Type string `json:"type" binding:"Required"`
+ SendReminder bool `json:"sendReminder"`
+ Frequency string `json:"frequency"`
+ IsDefault bool `json:"isDefault"`
+ Settings *simplejson.Json `json:"settings"`
OrgId int64 `json:"-"`
Result *AlertNotification
}
type UpdateAlertNotificationCommand struct {
- Id int64 `json:"id" binding:"Required"`
- Name string `json:"name" binding:"Required"`
- Type string `json:"type" binding:"Required"`
- IsDefault bool `json:"isDefault"`
- Settings *simplejson.Json `json:"settings" binding:"Required"`
+ Id int64 `json:"id" binding:"Required"`
+ Name string `json:"name" binding:"Required"`
+ Type string `json:"type" binding:"Required"`
+ SendReminder bool `json:"sendReminder"`
+ Frequency string `json:"frequency"`
+ IsDefault bool `json:"isDefault"`
+ Settings *simplejson.Json `json:"settings" binding:"Required"`
OrgId int64 `json:"-"`
Result *AlertNotification
@@ -63,3 +75,34 @@ type GetAllAlertNotificationsQuery struct {
Result []*AlertNotification
}
+
+type AlertNotificationJournal struct {
+ Id int64
+ OrgId int64
+ AlertId int64
+ NotifierId int64
+ SentAt int64
+ Success bool
+}
+
+type RecordNotificationJournalCommand struct {
+ OrgId int64
+ AlertId int64
+ NotifierId int64
+ SentAt int64
+ Success bool
+}
+
+type GetLatestNotificationQuery struct {
+ OrgId int64
+ AlertId int64
+ NotifierId int64
+
+ Result *AlertNotificationJournal
+}
+
+type CleanNotificationJournalCommand struct {
+ OrgId int64
+ AlertId int64
+ NotifierId int64
+}
diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go
index b7e3e3eaa17..cbdd0136f4d 100644
--- a/pkg/models/datasource.go
+++ b/pkg/models/datasource.go
@@ -59,22 +59,22 @@ type DataSource struct {
}
var knownDatasourcePlugins = map[string]bool{
- DS_ES: true,
- DS_GRAPHITE: true,
- DS_INFLUXDB: true,
- DS_INFLUXDB_08: true,
- DS_KAIROSDB: true,
- DS_CLOUDWATCH: true,
- DS_PROMETHEUS: true,
- DS_OPENTSDB: true,
- DS_POSTGRES: true,
- DS_MYSQL: true,
- DS_MSSQL: true,
- "opennms": true,
- "abhisant-druid-datasource": true,
- "dalmatinerdb-datasource": true,
- "gnocci": true,
- "zabbix": true,
+ DS_ES: true,
+ DS_GRAPHITE: true,
+ DS_INFLUXDB: true,
+ DS_INFLUXDB_08: true,
+ DS_KAIROSDB: true,
+ DS_CLOUDWATCH: true,
+ DS_PROMETHEUS: true,
+ DS_OPENTSDB: true,
+ DS_POSTGRES: true,
+ DS_MYSQL: true,
+ DS_MSSQL: true,
+ "opennms": true,
+ "abhisant-druid-datasource": true,
+ "dalmatinerdb-datasource": true,
+ "gnocci": true,
+ "zabbix": true,
"alexanderzobnin-zabbix-datasource": true,
"newrelic-app": true,
"grafana-datadog-datasource": true,
diff --git a/pkg/models/stats.go b/pkg/models/stats.go
index 4cd50d37463..d3e145dedf4 100644
--- a/pkg/models/stats.go
+++ b/pkg/models/stats.go
@@ -40,6 +40,15 @@ type GetDataSourceAccessStatsQuery struct {
Result []*DataSourceAccessStats
}
+type NotifierUsageStats struct {
+ Type string
+ Count int64
+}
+
+type GetAlertNotifierUsageStatsQuery struct {
+ Result []*NotifierUsageStats
+}
+
type AdminStats struct {
Users int `json:"users"`
Orgs int `json:"orgs"`
diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go
index 18f969ba1b9..46f8b3c769c 100644
--- a/pkg/services/alerting/interfaces.go
+++ b/pkg/services/alerting/interfaces.go
@@ -1,6 +1,9 @@
package alerting
-import "time"
+import (
+ "context"
+ "time"
+)
type EvalHandler interface {
Eval(evalContext *EvalContext)
@@ -15,10 +18,14 @@ type Notifier interface {
Notify(evalContext *EvalContext) error
GetType() string
NeedsImage() bool
- ShouldNotify(evalContext *EvalContext) bool
+
+ // ShouldNotify checks whether this evaluation should send an alert notification
+ ShouldNotify(ctx context.Context, evalContext *EvalContext) bool
GetNotifierId() int64
GetIsDefault() bool
+ GetSendReminder() bool
+ GetFrequency() time.Duration
}
type NotifierSlice []Notifier
diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go
index f4e0a0f434f..7fbd956f4f9 100644
--- a/pkg/services/alerting/notifier.go
+++ b/pkg/services/alerting/notifier.go
@@ -1,10 +1,10 @@
package alerting
import (
+ "context"
"errors"
"fmt"
-
- "golang.org/x/sync/errgroup"
+ "time"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/imguploader"
@@ -58,17 +58,47 @@ func (n *notificationService) SendIfNeeded(context *EvalContext) error {
return n.sendNotifications(context, notifiers)
}
-func (n *notificationService) sendNotifications(context *EvalContext, notifiers []Notifier) error {
- g, _ := errgroup.WithContext(context.Ctx)
-
+func (n *notificationService) sendNotifications(evalContext *EvalContext, notifiers []Notifier) error {
for _, notifier := range notifiers {
- not := notifier //avoid updating scope variable in go routine
- n.log.Debug("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault())
- metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc()
- g.Go(func() error { return not.Notify(context) })
+ not := notifier
+
+ err := bus.InTransaction(evalContext.Ctx, func(ctx context.Context) error {
+ n.log.Debug("trying to send notification", "id", not.GetNotifierId())
+
+ // Verify that we can send the notification again
+ // but this time within the same transaction.
+ if !evalContext.IsTestRun && !not.ShouldNotify(context.Background(), evalContext) {
+ return nil
+ }
+
+ n.log.Debug("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault())
+ metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc()
+
+ //send notification
+ success := not.Notify(evalContext) == nil
+
+ if evalContext.IsTestRun {
+ return nil
+ }
+
+ //write result to db.
+ cmd := &m.RecordNotificationJournalCommand{
+ OrgId: evalContext.Rule.OrgId,
+ AlertId: evalContext.Rule.Id,
+ NotifierId: not.GetNotifierId(),
+ SentAt: time.Now().Unix(),
+ Success: success,
+ }
+
+ return bus.DispatchCtx(ctx, cmd)
+ })
+
+ if err != nil {
+ n.log.Error("failed to send notification", "id", not.GetNotifierId())
+ }
}
- return g.Wait()
+ return nil
}
func (n *notificationService) uploadImage(context *EvalContext) (err error) {
@@ -110,7 +140,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
return nil
}
-func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, context *EvalContext) (NotifierSlice, error) {
+func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (NotifierSlice, error) {
query := &m.GetAlertNotificationsToSendQuery{OrgId: orgId, Ids: notificationIds}
if err := bus.Dispatch(query); err != nil {
@@ -123,7 +153,8 @@ func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []
if err != nil {
return nil, err
}
- if not.ShouldNotify(context) {
+
+ if not.ShouldNotify(evalContext.Ctx, evalContext) {
result = append(result, not)
}
}
diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go
index d449167de13..9826dd1dffb 100644
--- a/pkg/services/alerting/notifiers/alertmanager.go
+++ b/pkg/services/alerting/notifiers/alertmanager.go
@@ -1,6 +1,7 @@
package notifiers
import (
+ "context"
"time"
"github.com/grafana/grafana/pkg/bus"
@@ -33,7 +34,7 @@ func NewAlertmanagerNotifier(model *m.AlertNotification) (alerting.Notifier, err
}
return &AlertmanagerNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
log: log.New("alerting.notifier.prometheus-alertmanager"),
}, nil
@@ -45,7 +46,7 @@ type AlertmanagerNotifier struct {
log log.Logger
}
-func (this *AlertmanagerNotifier) ShouldNotify(evalContext *alerting.EvalContext) bool {
+func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext) bool {
this.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState)
// Do not notify when we become OK for the first time.
diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go
index 868db3aec79..ca011356247 100644
--- a/pkg/services/alerting/notifiers/base.go
+++ b/pkg/services/alerting/notifiers/base.go
@@ -1,50 +1,94 @@
package notifiers
import (
- "github.com/grafana/grafana/pkg/components/simplejson"
- m "github.com/grafana/grafana/pkg/models"
+ "context"
+ "time"
+
+ "github.com/grafana/grafana/pkg/bus"
+ "github.com/grafana/grafana/pkg/log"
+ "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
)
type NotifierBase struct {
- Name string
- Type string
- Id int64
- IsDeault bool
- UploadImage bool
+ Name string
+ Type string
+ Id int64
+ IsDeault bool
+ UploadImage bool
+ SendReminder bool
+ Frequency time.Duration
+
+ log log.Logger
}
-func NewNotifierBase(id int64, isDefault bool, name, notifierType string, model *simplejson.Json) NotifierBase {
+func NewNotifierBase(model *models.AlertNotification) NotifierBase {
uploadImage := true
- value, exist := model.CheckGet("uploadImage")
+ value, exist := model.Settings.CheckGet("uploadImage")
if exist {
uploadImage = value.MustBool()
}
return NotifierBase{
- Id: id,
- Name: name,
- IsDeault: isDefault,
- Type: notifierType,
- UploadImage: uploadImage,
+ Id: model.Id,
+ Name: model.Name,
+ IsDeault: model.IsDefault,
+ Type: model.Type,
+ UploadImage: uploadImage,
+ SendReminder: model.SendReminder,
+ Frequency: model.Frequency,
+ log: log.New("alerting.notifier." + model.Name),
}
}
-func defaultShouldNotify(context *alerting.EvalContext) bool {
+func defaultShouldNotify(context *alerting.EvalContext, sendReminder bool, frequency time.Duration, lastNotify time.Time) bool {
// Only notify on state change.
- if context.PrevAlertState == context.Rule.State {
+ if context.PrevAlertState == context.Rule.State && !sendReminder {
return false
}
+
+ // Do not notify if interval has not elapsed
+ if sendReminder && !lastNotify.IsZero() && lastNotify.Add(frequency).After(time.Now()) {
+ return false
+ }
+
+ // Do not notify if alert state is OK or pending even on repeated notify
+ if sendReminder && (context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending) {
+ return false
+ }
+
// Do not notify when we become OK for the first time.
- if (context.PrevAlertState == m.AlertStatePending) && (context.Rule.State == m.AlertStateOK) {
+ if (context.PrevAlertState == models.AlertStatePending) && (context.Rule.State == models.AlertStateOK) {
return false
}
+
return true
}
-func (n *NotifierBase) ShouldNotify(context *alerting.EvalContext) bool {
- return defaultShouldNotify(context)
+// ShouldNotify checks whether this evaluation should send an alert notification
+func (n *NotifierBase) ShouldNotify(ctx context.Context, c *alerting.EvalContext) bool {
+ cmd := &models.GetLatestNotificationQuery{
+ OrgId: c.Rule.OrgId,
+ AlertId: c.Rule.Id,
+ NotifierId: n.Id,
+ }
+
+ err := bus.DispatchCtx(ctx, cmd)
+ if err == models.ErrJournalingNotFound {
+ return true
+ }
+
+ if err != nil {
+ n.log.Error("Could not determine last time alert notifier fired", "Alert name", c.Rule.Name, "Error", err)
+ return false
+ }
+
+ if !cmd.Result.Success {
+ return true
+ }
+
+ return defaultShouldNotify(c, n.SendReminder, n.Frequency, time.Unix(cmd.Result.SentAt, 0))
}
func (n *NotifierBase) GetType() string {
@@ -62,3 +106,11 @@ func (n *NotifierBase) GetNotifierId() int64 {
func (n *NotifierBase) GetIsDefault() bool {
return n.IsDeault
}
+
+func (n *NotifierBase) GetSendReminder() bool {
+ return n.SendReminder
+}
+
+func (n *NotifierBase) GetFrequency() time.Duration {
+ return n.Frequency
+}
diff --git a/pkg/services/alerting/notifiers/base_test.go b/pkg/services/alerting/notifiers/base_test.go
index b7142d144cc..57b82f32466 100644
--- a/pkg/services/alerting/notifiers/base_test.go
+++ b/pkg/services/alerting/notifiers/base_test.go
@@ -2,7 +2,11 @@ package notifiers
import (
"context"
+ "errors"
"testing"
+ "time"
+
+ "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
@@ -10,47 +14,129 @@ import (
. "github.com/smartystreets/goconvey/convey"
)
-func TestBaseNotifier(t *testing.T) {
- Convey("Base notifier tests", t, func() {
- Convey("default constructor for notifiers", func() {
- bJson := simplejson.New()
+func TestShouldSendAlertNotification(t *testing.T) {
+ tcs := []struct {
+ name string
+ prevState m.AlertStateType
+ newState m.AlertStateType
+ expected bool
+ sendReminder bool
+ }{
+ {
+ name: "pending -> ok should not trigger an notification",
+ newState: m.AlertStatePending,
+ prevState: m.AlertStateOK,
+ expected: false,
+ },
+ {
+ name: "ok -> alerting should trigger an notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStateAlerting,
+ expected: true,
+ },
+ {
+ name: "ok -> pending should not trigger an notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStatePending,
+ expected: false,
+ },
+ {
+ name: "ok -> ok should not trigger an notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStateOK,
+ expected: false,
+ sendReminder: false,
+ },
+ {
+ name: "ok -> alerting should not trigger an notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStateAlerting,
+ expected: true,
+ sendReminder: true,
+ },
+ {
+ name: "ok -> ok with reminder should not trigger an notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStateOK,
+ expected: false,
+ sendReminder: true,
+ },
+ }
- Convey("can parse false value", func() {
- bJson.Set("uploadImage", false)
-
- base := NewNotifierBase(1, false, "name", "email", bJson)
- So(base.UploadImage, ShouldBeFalse)
- })
-
- Convey("can parse true value", func() {
- bJson.Set("uploadImage", true)
-
- base := NewNotifierBase(1, false, "name", "email", bJson)
- So(base.UploadImage, ShouldBeTrue)
- })
-
- Convey("default value should be true for backwards compatibility", func() {
- base := NewNotifierBase(1, false, "name", "email", bJson)
- So(base.UploadImage, ShouldBeTrue)
- })
+ for _, tc := range tcs {
+ evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{
+ State: tc.newState,
})
- Convey("should notify", func() {
- Convey("pending -> ok", func() {
- context := alerting.NewEvalContext(context.TODO(), &alerting.Rule{
- State: m.AlertStatePending,
- })
- context.Rule.State = m.AlertStateOK
- So(defaultShouldNotify(context), ShouldBeFalse)
+ evalContext.Rule.State = tc.prevState
+ if defaultShouldNotify(evalContext, true, 0, time.Now()) != tc.expected {
+ t.Errorf("failed %s. expected %+v to return %v", tc.name, tc, tc.expected)
+ }
+ }
+}
+
+func TestShouldNotifyWhenNoJournalingIsFound(t *testing.T) {
+ Convey("base notifier", t, func() {
+ bus.ClearBusHandlers()
+
+ notifier := NewNotifierBase(&m.AlertNotification{
+ Id: 1,
+ Name: "name",
+ Type: "email",
+ Settings: simplejson.New(),
+ })
+ evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{})
+
+ Convey("should notify if no journaling is found", func() {
+ bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error {
+ return m.ErrJournalingNotFound
})
- Convey("ok -> alerting", func() {
- context := alerting.NewEvalContext(context.TODO(), &alerting.Rule{
- State: m.AlertStateOK,
- })
- context.Rule.State = m.AlertStateAlerting
- So(defaultShouldNotify(context), ShouldBeTrue)
+ if !notifier.ShouldNotify(context.Background(), evalContext) {
+ t.Errorf("should send notifications when ErrJournalingNotFound is returned")
+ }
+ })
+
+ Convey("should not notify query returns error", func() {
+ bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error {
+ return errors.New("some kind of error unknown error")
})
+
+ if notifier.ShouldNotify(context.Background(), evalContext) {
+ t.Errorf("should not send notifications when query returns error")
+ }
+ })
+ })
+}
+
+func TestBaseNotifier(t *testing.T) {
+ Convey("default constructor for notifiers", t, func() {
+ bJson := simplejson.New()
+
+ model := &m.AlertNotification{
+ Id: 1,
+ Name: "name",
+ Type: "email",
+ Settings: bJson,
+ }
+
+ Convey("can parse false value", func() {
+ bJson.Set("uploadImage", false)
+
+ base := NewNotifierBase(model)
+ So(base.UploadImage, ShouldBeFalse)
+ })
+
+ Convey("can parse true value", func() {
+ bJson.Set("uploadImage", true)
+
+ base := NewNotifierBase(model)
+ So(base.UploadImage, ShouldBeTrue)
+ })
+
+ Convey("default value should be true for backwards compatibility", func() {
+ base := NewNotifierBase(model)
+ So(base.UploadImage, ShouldBeTrue)
})
})
}
diff --git a/pkg/services/alerting/notifiers/dingding.go b/pkg/services/alerting/notifiers/dingding.go
index 14eacef5831..738e43af2d2 100644
--- a/pkg/services/alerting/notifiers/dingding.go
+++ b/pkg/services/alerting/notifiers/dingding.go
@@ -32,7 +32,7 @@ func NewDingDingNotifier(model *m.AlertNotification) (alerting.Notifier, error)
}
return &DingDingNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
log: log.New("alerting.notifier.dingding"),
}, nil
diff --git a/pkg/services/alerting/notifiers/discord.go b/pkg/services/alerting/notifiers/discord.go
index 3ffa7484870..57d9d438fa2 100644
--- a/pkg/services/alerting/notifiers/discord.go
+++ b/pkg/services/alerting/notifiers/discord.go
@@ -39,7 +39,7 @@ func NewDiscordNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &DiscordNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
WebhookURL: url,
log: log.New("alerting.notifier.discord"),
}, nil
diff --git a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go
index 562ffbe1269..17b88f7d97f 100644
--- a/pkg/services/alerting/notifiers/email.go
+++ b/pkg/services/alerting/notifiers/email.go
@@ -52,7 +52,7 @@ func NewEmailNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
})
return &EmailNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Addresses: addresses,
log: log.New("alerting.notifier.email"),
}, nil
diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go
index 58e1b7bd71e..1c284ec3d2b 100644
--- a/pkg/services/alerting/notifiers/hipchat.go
+++ b/pkg/services/alerting/notifiers/hipchat.go
@@ -59,7 +59,7 @@ func NewHipChatNotifier(model *models.AlertNotification) (alerting.Notifier, err
roomId := model.Settings.Get("roomid").MustString()
return &HipChatNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
ApiKey: apikey,
RoomId: roomId,
diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go
index 92f6489106b..d8d19fc5dae 100644
--- a/pkg/services/alerting/notifiers/kafka.go
+++ b/pkg/services/alerting/notifiers/kafka.go
@@ -43,7 +43,7 @@ func NewKafkaNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &KafkaNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Endpoint: endpoint,
Topic: topic,
log: log.New("alerting.notifier.kafka"),
diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go
index 4814662f3a9..9e3888b8f95 100644
--- a/pkg/services/alerting/notifiers/line.go
+++ b/pkg/services/alerting/notifiers/line.go
@@ -39,7 +39,7 @@ func NewLINENotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &LineNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Token: token,
log: log.New("alerting.notifier.line"),
}, nil
diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go
index f0f5142cf05..84148a0d99c 100644
--- a/pkg/services/alerting/notifiers/opsgenie.go
+++ b/pkg/services/alerting/notifiers/opsgenie.go
@@ -56,7 +56,7 @@ func NewOpsGenieNotifier(model *m.AlertNotification) (alerting.Notifier, error)
}
return &OpsGenieNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
ApiKey: apiKey,
ApiUrl: apiUrl,
AutoClose: autoClose,
diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go
index 02219b2203d..bf85466388f 100644
--- a/pkg/services/alerting/notifiers/pagerduty.go
+++ b/pkg/services/alerting/notifiers/pagerduty.go
@@ -51,7 +51,7 @@ func NewPagerdutyNotifier(model *m.AlertNotification) (alerting.Notifier, error)
}
return &PagerdutyNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Key: key,
AutoResolve: autoResolve,
log: log.New("alerting.notifier.pagerduty"),
diff --git a/pkg/services/alerting/notifiers/pushover.go b/pkg/services/alerting/notifiers/pushover.go
index cbe9e16801a..55dc02c5f4a 100644
--- a/pkg/services/alerting/notifiers/pushover.go
+++ b/pkg/services/alerting/notifiers/pushover.go
@@ -99,7 +99,7 @@ func NewPushoverNotifier(model *m.AlertNotification) (alerting.Notifier, error)
return nil, alerting.ValidationError{Reason: "API token not given"}
}
return &PushoverNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
UserKey: userKey,
ApiToken: apiToken,
Priority: priority,
diff --git a/pkg/services/alerting/notifiers/sensu.go b/pkg/services/alerting/notifiers/sensu.go
index 9f77801d458..21d5d3d9d9e 100644
--- a/pkg/services/alerting/notifiers/sensu.go
+++ b/pkg/services/alerting/notifiers/sensu.go
@@ -51,7 +51,7 @@ func NewSensuNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &SensuNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
User: model.Settings.Get("username").MustString(),
Source: model.Settings.Get("source").MustString(),
diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go
index c1dadba414d..374b49ea957 100644
--- a/pkg/services/alerting/notifiers/slack.go
+++ b/pkg/services/alerting/notifiers/slack.go
@@ -78,7 +78,7 @@ func NewSlackNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
uploadImage := model.Settings.Get("uploadImage").MustBool(true)
return &SlackNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
Recipient: recipient,
Mention: mention,
diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go
index 4e34e16ab51..7beb71e5c65 100644
--- a/pkg/services/alerting/notifiers/teams.go
+++ b/pkg/services/alerting/notifiers/teams.go
@@ -33,7 +33,7 @@ func NewTeamsNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &TeamsNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
log: log.New("alerting.notifier.teams"),
}, nil
@@ -96,14 +96,26 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error {
},
},
"text": message,
- "potentialAction": []map[string]interface{}{
+ },
+ },
+ "potentialAction": []map[string]interface{}{
+ {
+ "@context": "http://schema.org",
+ "@type": "OpenUri",
+ "name": "View Rule",
+ "targets": []map[string]interface{}{
{
- "@context": "http://schema.org",
- "@type": "ViewAction",
- "name": "View Rule",
- "target": []string{
- ruleUrl,
- },
+ "os": "default", "uri": ruleUrl,
+ },
+ },
+ },
+ {
+ "@context": "http://schema.org",
+ "@type": "OpenUri",
+ "name": "View Graph",
+ "targets": []map[string]interface{}{
+ {
+ "os": "default", "uri": evalContext.ImagePublicUrl,
},
},
},
diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go
index ca24c996914..5492de45d39 100644
--- a/pkg/services/alerting/notifiers/telegram.go
+++ b/pkg/services/alerting/notifiers/telegram.go
@@ -78,7 +78,7 @@ func NewTelegramNotifier(model *m.AlertNotification) (alerting.Notifier, error)
}
return &TelegramNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
BotToken: botToken,
ChatID: chatId,
UploadImage: uploadImage,
@@ -216,7 +216,7 @@ func appendIfPossible(message string, extra string, sizeLimit int) string {
if len(extra)+len(message) <= sizeLimit {
return message + extra
}
- log.Debug("Line too long for image caption.", "value", extra)
+ log.Debug("Line too long for image caption. value: %s", extra)
return message
}
diff --git a/pkg/services/alerting/notifiers/threema.go b/pkg/services/alerting/notifiers/threema.go
index e4ffffc9108..28a62fade17 100644
--- a/pkg/services/alerting/notifiers/threema.go
+++ b/pkg/services/alerting/notifiers/threema.go
@@ -106,7 +106,7 @@ func NewThreemaNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &ThreemaNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
GatewayID: gatewayID,
RecipientID: recipientID,
APISecret: apiSecret,
diff --git a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go
index a753ca3cbf6..3093aec9957 100644
--- a/pkg/services/alerting/notifiers/victorops.go
+++ b/pkg/services/alerting/notifiers/victorops.go
@@ -51,7 +51,7 @@ func NewVictoropsNotifier(model *models.AlertNotification) (alerting.Notifier, e
}
return &VictoropsNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
URL: url,
AutoResolve: autoResolve,
log: log.New("alerting.notifier.victorops"),
diff --git a/pkg/services/alerting/notifiers/webhook.go b/pkg/services/alerting/notifiers/webhook.go
index 4c97ed2b75e..4045e496af9 100644
--- a/pkg/services/alerting/notifiers/webhook.go
+++ b/pkg/services/alerting/notifiers/webhook.go
@@ -47,7 +47,7 @@ func NewWebHookNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
}
return &WebhookNotifier{
- NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
+ NotifierBase: NewNotifierBase(model),
Url: url,
User: model.Settings.Get("username").MustString(),
Password: model.Settings.Get("password").MustString(),
diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go
index c57b28c7c3e..363d06d1132 100644
--- a/pkg/services/alerting/result_handler.go
+++ b/pkg/services/alerting/result_handler.go
@@ -88,6 +88,18 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error {
}
}
+ if evalContext.Rule.State == m.AlertStateOK && evalContext.PrevAlertState != m.AlertStateOK {
+ for _, notifierId := range evalContext.Rule.Notifications {
+ cmd := &m.CleanNotificationJournalCommand{
+ AlertId: evalContext.Rule.Id,
+ NotifierId: notifierId,
+ OrgId: evalContext.Rule.OrgId,
+ }
+ if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
+ handler.log.Error("Failed to clean up old notification records", "notifier", notifierId, "alert", evalContext.Rule.Id, "Error", err)
+ }
+ }
+ }
handler.notifier.SendIfNeeded(evalContext)
return nil
diff --git a/pkg/services/rendering/http_mode.go b/pkg/services/rendering/http_mode.go
index 9084ca27353..d47dfaeaae1 100644
--- a/pkg/services/rendering/http_mode.go
+++ b/pkg/services/rendering/http_mode.go
@@ -2,6 +2,7 @@ package rendering
import (
"context"
+ "fmt"
"io"
"net"
"net/http"
@@ -20,14 +21,13 @@ var netTransport = &http.Transport{
TLSHandshakeTimeout: 5 * time.Second,
}
+var netClient = &http.Client{
+ Transport: netTransport,
+}
+
func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*RenderResult, error) {
filePath := rs.getFilePathForNewImage()
- var netClient = &http.Client{
- Timeout: opts.Timeout,
- Transport: netTransport,
- }
-
rendererUrl, err := url.Parse(rs.Cfg.RendererUrl)
if err != nil {
return nil, err
@@ -35,10 +35,10 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend
queryParams := rendererUrl.Query()
queryParams.Add("url", rs.getURL(opts.Path))
- queryParams.Add("renderKey", rs.getRenderKey(opts.UserId, opts.OrgId, opts.OrgRole))
+ queryParams.Add("renderKey", rs.getRenderKey(opts.OrgId, opts.UserId, opts.OrgRole))
queryParams.Add("width", strconv.Itoa(opts.Width))
queryParams.Add("height", strconv.Itoa(opts.Height))
- queryParams.Add("domain", rs.getLocalDomain())
+ queryParams.Add("domain", rs.domain)
queryParams.Add("timezone", isoTimeOffsetToPosixTz(opts.Timezone))
queryParams.Add("encoding", opts.Encoding)
queryParams.Add("timeout", strconv.Itoa(int(opts.Timeout.Seconds())))
@@ -49,20 +49,48 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend
return nil, err
}
+ reqContext, cancel := context.WithTimeout(ctx, opts.Timeout+time.Second*2)
+ defer cancel()
+
+ req = req.WithContext(reqContext)
+
// make request to renderer server
resp, err := netClient.Do(req)
if err != nil {
- return nil, err
+ rs.log.Error("Failed to send request to remote rendering service.", "error", err)
+ return nil, fmt.Errorf("Failed to send request to remote rendering service. %s", err)
}
// save response to file
defer resp.Body.Close()
+
+ // check for timeout first
+ if reqContext.Err() == context.DeadlineExceeded {
+ rs.log.Info("Rendering timed out")
+ return nil, ErrTimeout
+ }
+
+ // if we didnt get a 200 response, something went wrong.
+ if resp.StatusCode != http.StatusOK {
+ rs.log.Error("Remote rendering request failed", "error", resp.Status)
+ return nil, fmt.Errorf("Remote rendering request failed. %d: %s", resp.StatusCode, resp.Status)
+ }
+
out, err := os.Create(filePath)
if err != nil {
return nil, err
}
defer out.Close()
- io.Copy(out, resp.Body)
+ _, err = io.Copy(out, resp.Body)
+ if err != nil {
+ // check that we didnt timeout while receiving the response.
+ if reqContext.Err() == context.DeadlineExceeded {
+ rs.log.Info("Rendering timed out")
+ return nil, ErrTimeout
+ }
+ rs.log.Error("Remote rendering request failed", "error", err)
+ return nil, fmt.Errorf("Remote rendering request failed. %s", err)
+ }
return &RenderResult{FilePath: filePath}, err
}
diff --git a/pkg/services/rendering/phantomjs.go b/pkg/services/rendering/phantomjs.go
index 87ccaf6b5d2..1bd7489c153 100644
--- a/pkg/services/rendering/phantomjs.go
+++ b/pkg/services/rendering/phantomjs.go
@@ -49,7 +49,7 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (
fmt.Sprintf("width=%v", opts.Width),
fmt.Sprintf("height=%v", opts.Height),
fmt.Sprintf("png=%v", pngPath),
- fmt.Sprintf("domain=%v", rs.getLocalDomain()),
+ fmt.Sprintf("domain=%v", rs.domain),
fmt.Sprintf("timeout=%v", opts.Timeout.Seconds()),
fmt.Sprintf("renderKey=%v", renderKey),
}
diff --git a/pkg/services/rendering/plugin_mode.go b/pkg/services/rendering/plugin_mode.go
index 550779ad7c3..58fef2b095f 100644
--- a/pkg/services/rendering/plugin_mode.go
+++ b/pkg/services/rendering/plugin_mode.go
@@ -77,10 +77,10 @@ func (rs *RenderingService) renderViaPlugin(ctx context.Context, opts Opts) (*Re
Height: int32(opts.Height),
FilePath: pngPath,
Timeout: int32(opts.Timeout.Seconds()),
- RenderKey: rs.getRenderKey(opts.UserId, opts.OrgId, opts.OrgRole),
+ RenderKey: rs.getRenderKey(opts.OrgId, opts.UserId, opts.OrgRole),
Encoding: opts.Encoding,
Timezone: isoTimeOffsetToPosixTz(opts.Timezone),
- Domain: rs.getLocalDomain(),
+ Domain: rs.domain,
})
if err != nil {
diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go
index 799aecc3e88..ff4a67cc9b6 100644
--- a/pkg/services/rendering/rendering.go
+++ b/pkg/services/rendering/rendering.go
@@ -3,6 +3,8 @@ package rendering
import (
"context"
"fmt"
+ "net/url"
+ "os"
"path/filepath"
plugin "github.com/hashicorp/go-plugin"
@@ -27,12 +29,31 @@ type RenderingService struct {
grpcPlugin pluginModel.RendererPlugin
pluginInfo *plugins.RendererPlugin
renderAction renderFunc
+ domain string
Cfg *setting.Cfg `inject:""`
}
func (rs *RenderingService) Init() error {
rs.log = log.New("rendering")
+
+ // ensure ImagesDir exists
+ err := os.MkdirAll(rs.Cfg.ImagesDir, 0700)
+ if err != nil {
+ return err
+ }
+
+ // set value used for domain attribute of renderKey cookie
+ if rs.Cfg.RendererUrl != "" {
+ // RendererCallbackUrl has already been passed, it wont generate an error.
+ u, _ := url.Parse(rs.Cfg.RendererCallbackUrl)
+ rs.domain = u.Hostname()
+ } else if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR {
+ rs.domain = setting.HttpAddr
+ } else {
+ rs.domain = "localhost"
+ }
+
return nil
}
@@ -82,16 +103,17 @@ func (rs *RenderingService) getFilePathForNewImage() string {
}
func (rs *RenderingService) getURL(path string) string {
- // &render=1 signals to the legacy redirect layer to
- return fmt.Sprintf("%s://%s:%s/%s&render=1", setting.Protocol, rs.getLocalDomain(), setting.HttpPort, path)
-}
+ if rs.Cfg.RendererUrl != "" {
+ // The backend rendering service can potentially be remote.
+ // So we need to use the root_url to ensure the rendering service
+ // can reach this Grafana instance.
+
+ // &render=1 signals to the legacy redirect layer to
+ return fmt.Sprintf("%s%s&render=1", rs.Cfg.RendererCallbackUrl, path)
-func (rs *RenderingService) getLocalDomain() string {
- if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR {
- return setting.HttpAddr
}
-
- return "localhost"
+ // &render=1 signals to the legacy redirect layer to
+ return fmt.Sprintf("%s://%s:%s/%s&render=1", setting.Protocol, rs.domain, setting.HttpPort, path)
}
func (rs *RenderingService) getRenderKey(orgId, userId int64, orgRole models.RoleType) string {
diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go
index 651241f7714..8fb1e2212a9 100644
--- a/pkg/services/sqlstore/alert_notification.go
+++ b/pkg/services/sqlstore/alert_notification.go
@@ -2,6 +2,7 @@ package sqlstore
import (
"bytes"
+ "context"
"fmt"
"strings"
"time"
@@ -17,6 +18,9 @@ func init() {
bus.AddHandler("sql", DeleteAlertNotification)
bus.AddHandler("sql", GetAlertNotificationsToSend)
bus.AddHandler("sql", GetAllAlertNotifications)
+ bus.AddHandlerCtx("sql", RecordNotificationJournal)
+ bus.AddHandlerCtx("sql", GetLatestNotification)
+ bus.AddHandlerCtx("sql", CleanNotificationJournal)
}
func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error {
@@ -53,7 +57,9 @@ func GetAlertNotificationsToSend(query *m.GetAlertNotificationsToSendQuery) erro
alert_notification.created,
alert_notification.updated,
alert_notification.settings,
- alert_notification.is_default
+ alert_notification.is_default,
+ alert_notification.send_reminder,
+ alert_notification.frequency
FROM alert_notification
`)
@@ -91,7 +97,9 @@ func getAlertNotificationInternal(query *m.GetAlertNotificationsQuery, sess *DBS
alert_notification.created,
alert_notification.updated,
alert_notification.settings,
- alert_notification.is_default
+ alert_notification.is_default,
+ alert_notification.send_reminder,
+ alert_notification.frequency
FROM alert_notification
`)
@@ -137,17 +145,31 @@ func CreateAlertNotificationCommand(cmd *m.CreateAlertNotificationCommand) error
return fmt.Errorf("Alert notification name %s already exists", cmd.Name)
}
- alertNotification := &m.AlertNotification{
- OrgId: cmd.OrgId,
- Name: cmd.Name,
- Type: cmd.Type,
- Settings: cmd.Settings,
- Created: time.Now(),
- Updated: time.Now(),
- IsDefault: cmd.IsDefault,
+ var frequency time.Duration
+ if cmd.SendReminder {
+ if cmd.Frequency == "" {
+ return m.ErrNotificationFrequencyNotFound
+ }
+
+ frequency, err = time.ParseDuration(cmd.Frequency)
+ if err != nil {
+ return err
+ }
}
- if _, err = sess.Insert(alertNotification); err != nil {
+ alertNotification := &m.AlertNotification{
+ OrgId: cmd.OrgId,
+ Name: cmd.Name,
+ Type: cmd.Type,
+ Settings: cmd.Settings,
+ SendReminder: cmd.SendReminder,
+ Frequency: frequency,
+ Created: time.Now(),
+ Updated: time.Now(),
+ IsDefault: cmd.IsDefault,
+ }
+
+ if _, err = sess.MustCols("send_reminder").Insert(alertNotification); err != nil {
return err
}
@@ -179,16 +201,77 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error {
current.Name = cmd.Name
current.Type = cmd.Type
current.IsDefault = cmd.IsDefault
+ current.SendReminder = cmd.SendReminder
- sess.UseBool("is_default")
+ if current.SendReminder {
+ if cmd.Frequency == "" {
+ return m.ErrNotificationFrequencyNotFound
+ }
+
+ frequency, err := time.ParseDuration(cmd.Frequency)
+ if err != nil {
+ return err
+ }
+
+ current.Frequency = frequency
+ }
+
+ sess.UseBool("is_default", "send_reminder")
if affected, err := sess.ID(cmd.Id).Update(current); err != nil {
return err
} else if affected == 0 {
- return fmt.Errorf("Could not find alert notification")
+ return fmt.Errorf("Could not update alert notification")
}
cmd.Result = ¤t
return nil
})
}
+
+func RecordNotificationJournal(ctx context.Context, cmd *m.RecordNotificationJournalCommand) error {
+ return inTransactionCtx(ctx, func(sess *DBSession) error {
+ journalEntry := &m.AlertNotificationJournal{
+ OrgId: cmd.OrgId,
+ AlertId: cmd.AlertId,
+ NotifierId: cmd.NotifierId,
+ SentAt: cmd.SentAt,
+ Success: cmd.Success,
+ }
+
+ if _, err := sess.Insert(journalEntry); err != nil {
+ return err
+ }
+
+ return nil
+ })
+}
+
+func GetLatestNotification(ctx context.Context, cmd *m.GetLatestNotificationQuery) error {
+ return inTransactionCtx(ctx, func(sess *DBSession) error {
+ nj := &m.AlertNotificationJournal{}
+
+ _, err := sess.Desc("alert_notification_journal.sent_at").
+ Limit(1).
+ Where("alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?", cmd.OrgId, cmd.AlertId, cmd.NotifierId).Get(nj)
+
+ if err != nil {
+ return err
+ }
+
+ if nj.AlertId == 0 && nj.Id == 0 && nj.NotifierId == 0 && nj.OrgId == 0 {
+ return m.ErrJournalingNotFound
+ }
+
+ cmd.Result = nj
+ return nil
+ })
+}
+
+func CleanNotificationJournal(ctx context.Context, cmd *m.CleanNotificationJournalCommand) error {
+ return inTransactionCtx(ctx, func(sess *DBSession) error {
+ sql := "DELETE FROM alert_notification_journal WHERE alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?"
+ _, err := sess.Exec(sql, cmd.OrgId, cmd.AlertId, cmd.NotifierId)
+ return err
+ })
+}
diff --git a/pkg/services/sqlstore/alert_notification_test.go b/pkg/services/sqlstore/alert_notification_test.go
index 2dbf9de5ca8..83fb42db9bb 100644
--- a/pkg/services/sqlstore/alert_notification_test.go
+++ b/pkg/services/sqlstore/alert_notification_test.go
@@ -1,7 +1,9 @@
package sqlstore
import (
+ "context"
"testing"
+ "time"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
@@ -11,7 +13,48 @@ import (
func TestAlertNotificationSQLAccess(t *testing.T) {
Convey("Testing Alert notification sql access", t, func() {
InitTestDB(t)
- var err error
+
+ Convey("Alert notification journal", func() {
+ var alertId int64 = 5
+ var orgId int64 = 5
+ var notifierId int64 = 5
+
+ Convey("Getting last journal should raise error if no one exists", func() {
+ query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId}
+ err := GetLatestNotification(context.Background(), query)
+ So(err, ShouldEqual, m.ErrJournalingNotFound)
+
+ Convey("shoulbe be able to record two journaling events", func() {
+ createCmd := &m.RecordNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId, Success: true, SentAt: 1}
+
+ err := RecordNotificationJournal(context.Background(), createCmd)
+ So(err, ShouldBeNil)
+
+ createCmd.SentAt += 1000 //increase epoch
+
+ err = RecordNotificationJournal(context.Background(), createCmd)
+ So(err, ShouldBeNil)
+
+ Convey("get last journaling event", func() {
+ err := GetLatestNotification(context.Background(), query)
+ So(err, ShouldBeNil)
+ So(query.Result.SentAt, ShouldEqual, 1001)
+
+ Convey("be able to clear all journaling for an notifier", func() {
+ cmd := &m.CleanNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId}
+ err := CleanNotificationJournal(context.Background(), cmd)
+ So(err, ShouldBeNil)
+
+ Convey("querying for last junaling should raise error", func() {
+ query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId}
+ err := GetLatestNotification(context.Background(), query)
+ So(err, ShouldEqual, m.ErrJournalingNotFound)
+ })
+ })
+ })
+ })
+ })
+ })
Convey("Alert notifications should be empty", func() {
cmd := &m.GetAlertNotificationsQuery{
@@ -24,19 +67,75 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
So(cmd.Result, ShouldBeNil)
})
- Convey("Can save Alert Notification", func() {
+ Convey("Cannot save alert notifier with send reminder = true", func() {
cmd := &m.CreateAlertNotificationCommand{
- Name: "ops",
- Type: "email",
- OrgId: 1,
- Settings: simplejson.New(),
+ Name: "ops",
+ Type: "email",
+ OrgId: 1,
+ SendReminder: true,
+ Settings: simplejson.New(),
}
- err = CreateAlertNotificationCommand(cmd)
+ Convey("and missing frequency", func() {
+ err := CreateAlertNotificationCommand(cmd)
+ So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound)
+ })
+
+ Convey("invalid frequency", func() {
+ cmd.Frequency = "invalid duration"
+
+ err := CreateAlertNotificationCommand(cmd)
+ So(err.Error(), ShouldEqual, "time: invalid duration invalid duration")
+ })
+ })
+
+ Convey("Cannot update alert notifier with send reminder = false", func() {
+ cmd := &m.CreateAlertNotificationCommand{
+ Name: "ops update",
+ Type: "email",
+ OrgId: 1,
+ SendReminder: false,
+ Settings: simplejson.New(),
+ }
+
+ err := CreateAlertNotificationCommand(cmd)
+ So(err, ShouldBeNil)
+
+ updateCmd := &m.UpdateAlertNotificationCommand{
+ Id: cmd.Result.Id,
+ SendReminder: true,
+ }
+
+ Convey("and missing frequency", func() {
+ err := UpdateAlertNotification(updateCmd)
+ So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound)
+ })
+
+ Convey("invalid frequency", func() {
+ updateCmd.Frequency = "invalid duration"
+
+ err := UpdateAlertNotification(updateCmd)
+ So(err, ShouldNotBeNil)
+ So(err.Error(), ShouldEqual, "time: invalid duration invalid duration")
+ })
+ })
+
+ Convey("Can save Alert Notification", func() {
+ cmd := &m.CreateAlertNotificationCommand{
+ Name: "ops",
+ Type: "email",
+ OrgId: 1,
+ SendReminder: true,
+ Frequency: "10s",
+ Settings: simplejson.New(),
+ }
+
+ err := CreateAlertNotificationCommand(cmd)
So(err, ShouldBeNil)
So(cmd.Result.Id, ShouldNotEqual, 0)
So(cmd.Result.OrgId, ShouldNotEqual, 0)
So(cmd.Result.Type, ShouldEqual, "email")
+ So(cmd.Result.Frequency, ShouldEqual, 10*time.Second)
Convey("Cannot save Alert Notification with the same name", func() {
err = CreateAlertNotificationCommand(cmd)
@@ -45,25 +144,42 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
Convey("Can update alert notification", func() {
newCmd := &m.UpdateAlertNotificationCommand{
- Name: "NewName",
- Type: "webhook",
- OrgId: cmd.Result.OrgId,
- Settings: simplejson.New(),
- Id: cmd.Result.Id,
+ Name: "NewName",
+ Type: "webhook",
+ OrgId: cmd.Result.OrgId,
+ SendReminder: true,
+ Frequency: "60s",
+ Settings: simplejson.New(),
+ Id: cmd.Result.Id,
}
err := UpdateAlertNotification(newCmd)
So(err, ShouldBeNil)
So(newCmd.Result.Name, ShouldEqual, "NewName")
+ So(newCmd.Result.Frequency, ShouldEqual, 60*time.Second)
+ })
+
+ Convey("Can update alert notification to disable sending of reminders", func() {
+ newCmd := &m.UpdateAlertNotificationCommand{
+ Name: "NewName",
+ Type: "webhook",
+ OrgId: cmd.Result.OrgId,
+ SendReminder: false,
+ Settings: simplejson.New(),
+ Id: cmd.Result.Id,
+ }
+ err := UpdateAlertNotification(newCmd)
+ So(err, ShouldBeNil)
+ So(newCmd.Result.SendReminder, ShouldBeFalse)
})
})
Convey("Can search using an array of ids", func() {
- cmd1 := m.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, Settings: simplejson.New()}
- cmd2 := m.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, Settings: simplejson.New()}
- cmd3 := m.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, Settings: simplejson.New()}
- cmd4 := m.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, Settings: simplejson.New()}
+ cmd1 := m.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd2 := m.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd3 := m.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd4 := m.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
- otherOrg := m.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, Settings: simplejson.New()}
+ otherOrg := m.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
So(CreateAlertNotificationCommand(&cmd1), ShouldBeNil)
So(CreateAlertNotificationCommand(&cmd2), ShouldBeNil)
diff --git a/pkg/services/sqlstore/migrations/alert_mig.go b/pkg/services/sqlstore/migrations/alert_mig.go
index 2a364d5f464..e27e64c6124 100644
--- a/pkg/services/sqlstore/migrations/alert_mig.go
+++ b/pkg/services/sqlstore/migrations/alert_mig.go
@@ -65,6 +65,13 @@ func addAlertMigrations(mg *Migrator) {
mg.AddMigration("Add column is_default", NewAddColumnMigration(alert_notification, &Column{
Name: "is_default", Type: DB_Bool, Nullable: false, Default: "0",
}))
+ mg.AddMigration("Add column frequency", NewAddColumnMigration(alert_notification, &Column{
+ Name: "frequency", Type: DB_BigInt, Nullable: true,
+ }))
+ mg.AddMigration("Add column send_reminder", NewAddColumnMigration(alert_notification, &Column{
+ Name: "send_reminder", Type: DB_Bool, Nullable: true, Default: "0",
+ }))
+
mg.AddMigration("add index alert_notification org_id & name", NewAddIndexMigration(alert_notification, alert_notification.Indices[0]))
mg.AddMigration("Update alert table charset", NewTableCharsetMigration("alert", []*Column{
@@ -82,4 +89,22 @@ func addAlertMigrations(mg *Migrator) {
{Name: "type", Type: DB_NVarchar, Length: 255, Nullable: false},
{Name: "settings", Type: DB_Text, Nullable: false},
}))
+
+ notification_journal := Table{
+ Name: "alert_notification_journal",
+ Columns: []*Column{
+ {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
+ {Name: "org_id", Type: DB_BigInt, Nullable: false},
+ {Name: "alert_id", Type: DB_BigInt, Nullable: false},
+ {Name: "notifier_id", Type: DB_BigInt, Nullable: false},
+ {Name: "sent_at", Type: DB_BigInt, Nullable: false},
+ {Name: "success", Type: DB_Bool, Nullable: false},
+ },
+ Indices: []*Index{
+ {Cols: []string{"org_id", "alert_id", "notifier_id"}, Type: IndexType},
+ },
+ }
+
+ mg.AddMigration("create notification_journal table v1", NewAddTableMigration(notification_journal))
+ mg.AddMigration("add index notification_journal org_id & alert_id & notifier_id", NewAddIndexMigration(notification_journal, notification_journal.Indices[0]))
}
diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go
index 13d706b6198..5477bc7b2d1 100644
--- a/pkg/services/sqlstore/sqlstore.go
+++ b/pkg/services/sqlstore/sqlstore.go
@@ -106,7 +106,7 @@ func (ss *SqlStore) inTransactionWithRetryCtx(ctx context.Context, callback dbTr
if len(sess.events) > 0 {
for _, e := range sess.events {
if err = bus.Publish(e); err != nil {
- log.Error(3, "Failed to publish event after commit", err)
+ log.Error(3, "Failed to publish event after commit. error: %v", err)
}
}
}
diff --git a/pkg/services/sqlstore/stats.go b/pkg/services/sqlstore/stats.go
index 6db481bf06b..2cec86e7239 100644
--- a/pkg/services/sqlstore/stats.go
+++ b/pkg/services/sqlstore/stats.go
@@ -13,11 +13,19 @@ func init() {
bus.AddHandler("sql", GetDataSourceStats)
bus.AddHandler("sql", GetDataSourceAccessStats)
bus.AddHandler("sql", GetAdminStats)
+ bus.AddHandlerCtx("sql", GetAlertNotifiersUsageStats)
bus.AddHandlerCtx("sql", GetSystemUserCountStats)
}
var activeUserTimeLimit = time.Hour * 24 * 30
+func GetAlertNotifiersUsageStats(ctx context.Context, query *m.GetAlertNotifierUsageStatsQuery) error {
+ var rawSql = `SELECT COUNT(*) as count, type FROM alert_notification GROUP BY type`
+ query.Result = make([]*m.NotifierUsageStats, 0)
+ err := x.SQL(rawSql).Find(&query.Result)
+ return err
+}
+
func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error {
var rawSql = `SELECT COUNT(*) as count, type FROM data_source GROUP BY type`
query.Result = make([]*m.DataSourceStats, 0)
diff --git a/pkg/services/sqlstore/stats_test.go b/pkg/services/sqlstore/stats_test.go
index dae24952d17..6949a0dbda2 100644
--- a/pkg/services/sqlstore/stats_test.go
+++ b/pkg/services/sqlstore/stats_test.go
@@ -36,5 +36,11 @@ func TestStatsDataAccess(t *testing.T) {
err := GetDataSourceAccessStats(&query)
So(err, ShouldBeNil)
})
+
+ Convey("Get alert notifier stats should not results in error", func() {
+ query := m.GetAlertNotifierUsageStatsQuery{}
+ err := GetAlertNotifiersUsageStats(context.Background(), &query)
+ So(err, ShouldBeNil)
+ })
})
}
diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go
index eccd37f9a43..edf29fffb8f 100644
--- a/pkg/services/sqlstore/transactions.go
+++ b/pkg/services/sqlstore/transactions.go
@@ -89,7 +89,7 @@ func inTransactionWithRetryCtx(ctx context.Context, callback dbTransactionFunc,
if len(sess.events) > 0 {
for _, e := range sess.events {
if err = bus.Publish(e); err != nil {
- log.Error(3, "Failed to publish event after commit", err)
+ log.Error(3, "Failed to publish event after commit. error: %v", err)
}
}
}
diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go
index eb61568261d..fb23a192a85 100644
--- a/pkg/setting/setting.go
+++ b/pkg/setting/setting.go
@@ -164,8 +164,10 @@ var (
Quota QuotaSettings
// Alerting
- AlertingEnabled bool
- ExecuteAlerts bool
+ AlertingEnabled bool
+ ExecuteAlerts bool
+ AlertingErrorOrTimeout string
+ AlertingNoDataOrNullValues string
// Explore UI
ExploreEnabled bool
@@ -197,6 +199,7 @@ type Cfg struct {
ImagesDir string
PhantomDir string
RendererUrl string
+ RendererCallbackUrl string
DisableBruteForceLoginProtection bool
TempDataLifetime time.Duration
@@ -324,7 +327,7 @@ func getCommandLineProperties(args []string) map[string]string {
trimmed := strings.TrimPrefix(arg, "cfg:")
parts := strings.Split(trimmed, "=")
if len(parts) != 2 {
- log.Fatal(3, "Invalid command line argument", arg)
+ log.Fatal(3, "Invalid command line argument. argument: %v", arg)
return nil
}
@@ -641,6 +644,18 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
// Rendering
renderSec := iniFile.Section("rendering")
cfg.RendererUrl = renderSec.Key("server_url").String()
+ cfg.RendererCallbackUrl = renderSec.Key("callback_url").String()
+ if cfg.RendererCallbackUrl == "" {
+ cfg.RendererCallbackUrl = AppUrl
+ } else {
+ if cfg.RendererCallbackUrl[len(cfg.RendererCallbackUrl)-1] != '/' {
+ cfg.RendererCallbackUrl += "/"
+ }
+ _, err := url.Parse(cfg.RendererCallbackUrl)
+ if err != nil {
+ log.Fatal(4, "Invalid callback_url(%s): %s", cfg.RendererCallbackUrl, err)
+ }
+ }
cfg.ImagesDir = filepath.Join(DataPath, "png")
cfg.PhantomDir = filepath.Join(HomePath, "tools/phantomjs")
cfg.TempDataLifetime = iniFile.Section("paths").Key("temp_data_lifetime").MustDuration(time.Second * 3600 * 24)
@@ -659,6 +674,8 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
alerting := iniFile.Section("alerting")
AlertingEnabled = alerting.Key("enabled").MustBool(true)
ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true)
+ AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting")
+ AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data")
explore := iniFile.Section("explore")
ExploreEnabled = explore.Key("enabled").MustBool(false)
diff --git a/pkg/setting/setting_test.go b/pkg/setting/setting_test.go
index 9de22c86811..affb3c3e7ca 100644
--- a/pkg/setting/setting_test.go
+++ b/pkg/setting/setting_test.go
@@ -20,6 +20,7 @@ func TestLoadingSettings(t *testing.T) {
So(err, ShouldBeNil)
So(AdminUser, ShouldEqual, "admin")
+ So(cfg.RendererCallbackUrl, ShouldEqual, "http://localhost:3000/")
})
Convey("Should be able to override via environment variables", func() {
@@ -178,5 +179,15 @@ func TestLoadingSettings(t *testing.T) {
So(InstanceName, ShouldEqual, hostname)
})
+ Convey("Reading callback_url should add trailing slash", func() {
+ cfg := NewCfg()
+ cfg.Load(&CommandLineArgs{
+ HomePath: "../../",
+ Args: []string{"cfg:rendering.callback_url=http://myserver/renderer"},
+ })
+
+ So(cfg.RendererCallbackUrl, ShouldEqual, "http://myserver/renderer/")
+ })
+
})
}
diff --git a/public/app/app.ts b/public/app/app.ts
index d9e31018af9..8e30747072e 100644
--- a/public/app/app.ts
+++ b/public/app/app.ts
@@ -21,7 +21,7 @@ import _ from 'lodash';
import moment from 'moment';
// add move to lodash for backward compatabiltiy
-_.move = function(array, fromIndex, toIndex) {
+_.move = (array, fromIndex, toIndex) => {
array.splice(toIndex, 0, array.splice(fromIndex, 1)[0]);
return array;
};
@@ -76,9 +76,9 @@ export class GrafanaApp {
$provide.decorator('$http', [
'$delegate',
'$templateCache',
- function($delegate, $templateCache) {
+ ($delegate, $templateCache) => {
const get = $delegate.get;
- $delegate.get = function(url, config) {
+ $delegate.get = (url, config) => {
if (url.match(/\.html$/)) {
// some template's already exist in the cache
if (!$templateCache.get(url)) {
@@ -105,9 +105,9 @@ export class GrafanaApp {
'react',
];
- const module_types = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes'];
+ const moduleTypes = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes'];
- _.each(module_types, type => {
+ _.each(moduleTypes, type => {
const moduleName = 'grafana.' + type;
this.useModule(angular.module(moduleName, []));
});
@@ -135,7 +135,7 @@ export class GrafanaApp {
this.preBootModules = null;
});
})
- .catch(function(err) {
+ .catch(err => {
console.log('Application boot failed:', err);
});
}
diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx
index 92712709858..16175747a06 100644
--- a/public/app/containers/Explore/Explore.tsx
+++ b/public/app/containers/Explore/Explore.tsx
@@ -173,6 +173,12 @@ export class Explore extends React.Component {
datasource.init();
}
+ // Keep queries but reset edit state
+ const nextQueries = this.state.queries.map(q => ({
+ ...q,
+ edited: false,
+ }));
+
this.setState(
{
datasource,
@@ -182,6 +188,7 @@ export class Explore extends React.Component {
supportsLogs,
supportsTable,
datasourceLoading: false,
+ queries: nextQueries,
},
() => datasourceError === null && this.onSubmit()
);
diff --git a/public/app/containers/Explore/TimePicker.tsx b/public/app/containers/Explore/TimePicker.tsx
index 3ae4ea4a83c..08867f8d0fc 100644
--- a/public/app/containers/Explore/TimePicker.tsx
+++ b/public/app/containers/Explore/TimePicker.tsx
@@ -16,7 +16,7 @@ export function parseTime(value, isUtc = false, asString = false) {
return value;
}
if (!isNaN(value)) {
- const epoch = parseInt(value);
+ const epoch = parseInt(value, 10);
const m = isUtc ? moment.utc(epoch) : moment(epoch);
return asString ? m.format(DATE_FORMAT) : m;
}
diff --git a/public/app/containers/Explore/utils/debounce.ts b/public/app/containers/Explore/utils/debounce.ts
index 9f2bd35e116..a7c9450a6c1 100644
--- a/public/app/containers/Explore/utils/debounce.ts
+++ b/public/app/containers/Explore/utils/debounce.ts
@@ -1,10 +1,10 @@
// Based on underscore.js debounce()
export default function debounce(func, wait) {
let timeout;
- return function() {
+ return function(this: any) {
const context = this;
const args = arguments;
- const later = function() {
+ const later = () => {
timeout = null;
func.apply(context, args);
};
diff --git a/public/app/containers/Explore/utils/dom.ts b/public/app/containers/Explore/utils/dom.ts
index 6ba21b54c83..381c150e3f4 100644
--- a/public/app/containers/Explore/utils/dom.ts
+++ b/public/app/containers/Explore/utils/dom.ts
@@ -1,6 +1,6 @@
// Node.closest() polyfill
if ('Element' in window && !Element.prototype.closest) {
- Element.prototype.closest = function(s) {
+ Element.prototype.closest = function(this: any, s) {
const matches = (this.document || this.ownerDocument).querySelectorAll(s);
let el = this;
let i;
@@ -9,7 +9,8 @@ if ('Element' in window && !Element.prototype.closest) {
i = matches.length;
// eslint-disable-next-line
while (--i >= 0 && matches.item(i) !== el) {}
- } while (i < 0 && (el = el.parentElement));
+ el = el.parentElement;
+ } while (i < 0 && el);
return el;
};
}
diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx
index d0feee75184..2a5743bea96 100644
--- a/public/app/containers/Teams/TeamList.tsx
+++ b/public/app/containers/Teams/TeamList.tsx
@@ -6,6 +6,7 @@ import { NavStore } from 'app/stores/NavStore/NavStore';
import { TeamsStore, Team } from 'app/stores/TeamsStore/TeamsStore';
import { BackendSrv } from 'app/core/services/backend_srv';
import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
+import EmptyListCTA from 'app/core/components/EmptyListCTA/EmptyListCTA';
interface Props {
nav: typeof NavStore.Type;
@@ -61,48 +62,81 @@ export class TeamList extends React.Component {
);
}
+ renderTeamList(teams) {
+ return (
+
+ Send reminder every
+
+ Specify how often reminders should be sent, e.g. every 30s, 1m, 10m, 30m or 1h etc.
+
+
+
+
+
+
+
+ Alert reminders are sent after rules are evaluated. Therefore a reminder can never be sent more frequently than a configured alert rule evaluation interval.
+
+
diff --git a/public/app/features/alerting/threshold_mapper.ts b/public/app/features/alerting/threshold_mapper.ts
index cbc7dbf6b43..995add42ef0 100644
--- a/public/app/features/alerting/threshold_mapper.ts
+++ b/public/app/features/alerting/threshold_mapper.ts
@@ -1,6 +1,6 @@
export class ThresholdMapper {
static alertToGraphThresholds(panel) {
- for (var i = 0; i < panel.alert.conditions.length; i++) {
+ for (let i = 0; i < panel.alert.conditions.length; i++) {
const condition = panel.alert.conditions[i];
if (condition.type !== 'query') {
continue;
diff --git a/public/app/features/annotations/annotation_tooltip.ts b/public/app/features/annotations/annotation_tooltip.ts
index cadbac1bd89..7e626bc5860 100644
--- a/public/app/features/annotations/annotation_tooltip.ts
+++ b/public/app/features/annotations/annotation_tooltip.ts
@@ -3,7 +3,7 @@ import $ from 'jquery';
import coreModule from 'app/core/core_module';
import alertDef from '../alerting/alert_def';
-/** @ngInject **/
+/** @ngInject */
export function annotationTooltipDirective($sanitize, dashboardSrv, contextSrv, $compile) {
function sanitizeString(str) {
try {
@@ -20,14 +20,14 @@ export function annotationTooltipDirective($sanitize, dashboardSrv, contextSrv,
event: '=',
onEdit: '&',
},
- link: function(scope, element) {
+ link: (scope, element) => {
const event = scope.event;
- var title = event.title;
- var text = event.text;
+ let title = event.title;
+ let text = event.text;
const dashboard = dashboardSrv.getCurrent();
- var tooltip = '
';
- var titleStateClass = '';
+ let tooltip = '
';
+ let titleStateClass = '';
if (event.alertId) {
const stateModel = alertDef.getStateDisplayModel(event.newState);
@@ -42,7 +42,7 @@ export function annotationTooltipDirective($sanitize, dashboardSrv, contextSrv,
title = '';
}
- var header = `
-
diff --git a/public/app/features/playlist/playlist_edit_ctrl.ts b/public/app/features/playlist/playlist_edit_ctrl.ts
index 037fa9e6f6b..16da9a0a209 100644
--- a/public/app/features/playlist/playlist_edit_ctrl.ts
+++ b/public/app/features/playlist/playlist_edit_ctrl.ts
@@ -19,36 +19,25 @@ export class PlaylistEditCtrl {
/** @ngInject */
constructor(private $scope, private backendSrv, private $location, $route, navModelSrv) {
this.navModel = navModelSrv.getNav('dashboards', 'playlists', 0);
- this.isNew = $route.current.params.id;
+ this.isNew = !$route.current.params.id;
if ($route.current.params.id) {
const playlistId = $route.current.params.id;
backendSrv.get('/api/playlists/' + playlistId).then(result => {
this.playlist = result;
- this.navModel.node = {
- text: result.name,
- icon: this.navModel.node.icon,
- };
- this.navModel.breadcrumbs.push(this.navModel.node);
});
backendSrv.get('/api/playlists/' + playlistId + '/items').then(result => {
this.playlistItems = result;
});
- } else {
- this.navModel.node = {
- text: 'New playlist',
- icon: this.navModel.node.icon,
- };
- this.navModel.breadcrumbs.push(this.navModel.node);
}
}
filterFoundPlaylistItems() {
this.filteredDashboards = _.reject(this.dashboardresult, playlistItem => {
return _.find(this.playlistItems, listPlaylistItem => {
- return parseInt(listPlaylistItem.value) === playlistItem.id;
+ return parseInt(listPlaylistItem.value, 10) === playlistItem.id;
});
});
@@ -88,7 +77,7 @@ export class PlaylistEditCtrl {
}
savePlaylist(playlist, playlistItems) {
- var savePromise;
+ let savePromise;
playlist.items = playlistItems;
diff --git a/public/app/features/playlist/playlist_routes.ts b/public/app/features/playlist/playlist_routes.ts
index 3cb9aceaefb..6e907ea0858 100644
--- a/public/app/features/playlist/playlist_routes.ts
+++ b/public/app/features/playlist/playlist_routes.ts
@@ -19,11 +19,9 @@ function grafanaRoutes($routeProvider) {
controller: 'PlaylistEditCtrl',
})
.when('/playlists/play/:id', {
- templateUrl: 'public/app/features/playlist/partials/playlists.html',
- controllerAs: 'ctrl',
- controller: 'PlaylistsCtrl',
+ template: '',
resolve: {
- init: function(playlistSrv, $route) {
+ init: (playlistSrv, $route) => {
const playlistId = $route.current.params.id;
playlistSrv.start(playlistId);
},
diff --git a/public/app/features/playlist/playlist_search.ts b/public/app/features/playlist/playlist_search.ts
index ee9a7ab6121..790cd393180 100644
--- a/public/app/features/playlist/playlist_search.ts
+++ b/public/app/features/playlist/playlist_search.ts
@@ -8,7 +8,7 @@ export class PlaylistSearchCtrl {
/** @ngInject */
constructor($timeout, private backendSrv) {
- this.query = { query: '', tag: [], starred: false, limit: 30 };
+ this.query = { query: '', tag: [], starred: false, limit: 20 };
$timeout(() => {
this.query.query = '';
diff --git a/public/app/features/playlist/playlist_srv.ts b/public/app/features/playlist/playlist_srv.ts
index e95f2d59884..9d3b635a1e5 100644
--- a/public/app/features/playlist/playlist_srv.ts
+++ b/public/app/features/playlist/playlist_srv.ts
@@ -1,6 +1,8 @@
import coreModule from '../../core/core_module';
import kbn from 'app/core/utils/kbn';
import appEvents from 'app/core/app_events';
+import _ from 'lodash';
+import { toUrlParams } from 'app/core/utils/url';
class PlaylistSrv {
private cancelPromise: any;
@@ -8,45 +10,30 @@ class PlaylistSrv {
private index: number;
private interval: any;
private startUrl: string;
- public isPlaying: boolean;
+ isPlaying: boolean;
/** @ngInject */
- constructor(private $location: any, private $timeout: any, private backendSrv: any, private $routeParams: any) {}
+ constructor(private $location: any, private $timeout: any, private backendSrv: any) {}
next() {
this.$timeout.cancel(this.cancelPromise);
const playedAllDashboards = this.index > this.dashboards.length - 1;
-
if (playedAllDashboards) {
- window.location.href = this.getUrlWithKioskMode();
+ window.location.href = this.startUrl;
return;
}
const dash = this.dashboards[this.index];
- this.$location.url('dashboard/' + dash.uri);
+ const queryParams = this.$location.search();
+ const filteredParams = _.pickBy(queryParams, value => value !== null);
+
+ this.$location.url('dashboard/' + dash.uri + '?' + toUrlParams(filteredParams));
this.index++;
this.cancelPromise = this.$timeout(() => this.next(), this.interval);
}
- getUrlWithKioskMode() {
- const inKioskMode = document.body.classList.contains('page-kiosk-mode');
-
- // check if should add kiosk query param
- if (inKioskMode && this.startUrl.indexOf('kiosk') === -1) {
- return this.startUrl + '?kiosk=true';
- }
-
- // check if should remove kiosk query param
- if (!inKioskMode) {
- return this.startUrl.split('?')[0];
- }
-
- // already has kiosk query param, just return startUrl
- return this.startUrl;
- }
-
prev() {
this.index = Math.max(this.index - 2, 0);
this.next();
@@ -59,10 +46,6 @@ class PlaylistSrv {
this.index = 0;
this.isPlaying = true;
- if (this.$routeParams.kiosk) {
- appEvents.emit('toggle-kiosk-mode');
- }
-
this.backendSrv.get(`/api/playlists/${playlistId}`).then(playlist => {
this.backendSrv.get(`/api/playlists/${playlistId}/dashboards`).then(dashboards => {
this.dashboards = dashboards;
@@ -73,6 +56,13 @@ class PlaylistSrv {
}
stop() {
+ if (this.isPlaying) {
+ const queryParams = this.$location.search();
+ if (queryParams.kiosk) {
+ appEvents.emit('toggle-kiosk-mode', { exit: true });
+ }
+ }
+
this.index = 0;
this.isPlaying = false;
diff --git a/public/app/features/playlist/playlists_ctrl.ts b/public/app/features/playlist/playlists_ctrl.ts
index bc7e2bb1440..b481ac4ee7f 100644
--- a/public/app/features/playlist/playlists_ctrl.ts
+++ b/public/app/features/playlist/playlists_ctrl.ts
@@ -10,7 +10,10 @@ export class PlaylistsCtrl {
this.navModel = navModelSrv.getNav('dashboards', 'playlists', 0);
backendSrv.get('/api/playlists').then(result => {
- this.playlists = result;
+ this.playlists = result.map(item => {
+ item.startUrl = `playlists/play/${item.id}`;
+ return item;
+ });
});
}
diff --git a/public/app/features/playlist/specs/playlist_edit_ctrl.test.ts b/public/app/features/playlist/specs/playlist_edit_ctrl.test.ts
index 183947f5072..bc2e0229c6e 100644
--- a/public/app/features/playlist/specs/playlist_edit_ctrl.test.ts
+++ b/public/app/features/playlist/specs/playlist_edit_ctrl.test.ts
@@ -2,7 +2,7 @@ import '../playlist_edit_ctrl';
import { PlaylistEditCtrl } from '../playlist_edit_ctrl';
describe('PlaylistEditCtrl', () => {
- var ctx: any;
+ let ctx: any;
beforeEach(() => {
const navModelSrv = {
getNav: () => {
diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts
index 2b8b9476b94..7ef82519668 100644
--- a/public/app/features/plugins/datasource_srv.ts
+++ b/public/app/features/plugins/datasource_srv.ts
@@ -77,7 +77,7 @@ export class DatasourceSrv {
this.addDataSourceVariables(sources);
- _.each(config.datasources, function(value) {
+ _.each(config.datasources, value => {
if (value.meta && value.meta.annotations) {
sources.push(value);
}
@@ -97,7 +97,7 @@ export class DatasourceSrv {
getMetricSources(options) {
const metricSources = [];
- _.each(config.datasources, function(value, key) {
+ _.each(config.datasources, (value, key) => {
if (value.meta && value.meta.metrics) {
let metricSource = { value: key, name: key, meta: value.meta, sort: key };
@@ -121,7 +121,7 @@ export class DatasourceSrv {
this.addDataSourceVariables(metricSources);
}
- metricSources.sort(function(a, b) {
+ metricSources.sort((a, b) => {
if (a.sort.toLowerCase() > b.sort.toLowerCase()) {
return 1;
}
@@ -136,13 +136,13 @@ export class DatasourceSrv {
addDataSourceVariables(list) {
// look for data source variables
- for (var i = 0; i < this.templateSrv.variables.length; i++) {
+ for (let i = 0; i < this.templateSrv.variables.length; i++) {
const variable = this.templateSrv.variables[i];
if (variable.type !== 'datasource') {
continue;
}
- var first = variable.current.value;
+ let first = variable.current.value;
if (first === 'default') {
first = config.defaultDatasource;
}
diff --git a/public/app/features/plugins/ds_edit_ctrl.ts b/public/app/features/plugins/ds_edit_ctrl.ts
index 280622cb29a..19889d3e26e 100644
--- a/public/app/features/plugins/ds_edit_ctrl.ts
+++ b/public/app/features/plugins/ds_edit_ctrl.ts
@@ -4,7 +4,7 @@ import config from 'app/core/config';
import { coreModule, appEvents } from 'app/core/core';
import { store } from 'app/stores/store';
-var datasourceTypes = [];
+let datasourceTypes = [];
const defaults = {
name: '',
@@ -16,7 +16,7 @@ const defaults = {
secureJsonData: {},
};
-var datasourceCreated = false;
+let datasourceCreated = false;
export class DataSourceEditCtrl {
isNew: boolean;
@@ -200,7 +200,7 @@ export class DataSourceEditCtrl {
coreModule.controller('DataSourceEditCtrl', DataSourceEditCtrl);
-coreModule.directive('datasourceHttpSettings', function() {
+coreModule.directive('datasourceHttpSettings', () => {
return {
scope: {
current: '=',
@@ -209,15 +209,15 @@ coreModule.directive('datasourceHttpSettings', function() {
},
templateUrl: 'public/app/features/plugins/partials/ds_http_settings.html',
link: {
- pre: function($scope, elem, attrs) {
+ pre: ($scope, elem, attrs) => {
// do not show access option if direct access is disabled
$scope.showAccessOption = $scope.noDirectAccess !== 'true';
$scope.showAccessHelp = false;
- $scope.toggleAccessHelp = function() {
+ $scope.toggleAccessHelp = () => {
$scope.showAccessHelp = !$scope.showAccessHelp;
};
- $scope.getSuggestUrls = function() {
+ $scope.getSuggestUrls = () => {
return [$scope.suggestUrl];
};
},
diff --git a/public/app/features/plugins/plugin_component.ts b/public/app/features/plugins/plugin_component.ts
index fe895693032..41d1b6f1deb 100644
--- a/public/app/features/plugins/plugin_component.ts
+++ b/public/app/features/plugins/plugin_component.ts
@@ -7,7 +7,7 @@ import { importPluginModule } from './plugin_loader';
import { UnknownPanelCtrl } from 'app/plugins/panel/unknown/module';
-/** @ngInject **/
+/** @ngInject */
function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $templateCache) {
function getTemplate(component) {
if (component.template) {
@@ -36,7 +36,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
// handle relative template urls for plugin templates
options.Component.templateUrl = relativeTemplateUrlToAbs(options.Component.templateUrl, options.baseUrl);
- return function() {
+ return () => {
return {
templateUrl: options.Component.templateUrl,
template: options.Component.template,
@@ -69,14 +69,14 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
};
const panelInfo = config.panels[scope.panel.type];
- var panelCtrlPromise = Promise.resolve(UnknownPanelCtrl);
+ let panelCtrlPromise = Promise.resolve(UnknownPanelCtrl);
if (panelInfo) {
- panelCtrlPromise = importPluginModule(panelInfo.module).then(function(panelModule) {
+ panelCtrlPromise = importPluginModule(panelInfo.module).then(panelModule => {
return panelModule.PanelCtrl;
});
}
- return panelCtrlPromise.then(function(PanelCtrl: any) {
+ return panelCtrlPromise.then((PanelCtrl: any) => {
componentInfo.Component = PanelCtrl;
if (!PanelCtrl || PanelCtrl.registered) {
@@ -128,7 +128,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
}
// Annotations
case 'annotations-query-ctrl': {
- return importPluginModule(scope.ctrl.currentDatasource.meta.module).then(function(dsModule) {
+ return importPluginModule(scope.ctrl.currentDatasource.meta.module).then(dsModule => {
return {
baseUrl: scope.ctrl.currentDatasource.meta.baseUrl,
name: 'annotations-query-ctrl-' + scope.ctrl.currentDatasource.meta.id,
@@ -144,7 +144,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
// Datasource ConfigCtrl
case 'datasource-config-ctrl': {
const dsMeta = scope.ctrl.datasourceMeta;
- return importPluginModule(dsMeta.module).then(function(dsModule): any {
+ return importPluginModule(dsMeta.module).then((dsModule): any => {
if (!dsModule.ConfigCtrl) {
return { notFound: true };
}
@@ -161,7 +161,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
// AppConfigCtrl
case 'app-config-ctrl': {
const model = scope.ctrl.model;
- return importPluginModule(model.module).then(function(appModule) {
+ return importPluginModule(model.module).then(appModule => {
return {
baseUrl: model.baseUrl,
name: 'app-config-' + model.id,
@@ -174,7 +174,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
// App Page
case 'app-page': {
const appModel = scope.ctrl.appModel;
- return importPluginModule(appModel.module).then(function(appModule) {
+ return importPluginModule(appModel.module).then(appModule => {
return {
baseUrl: appModel.baseUrl,
name: 'app-page-' + appModel.id + '-' + scope.ctrl.page.slug,
@@ -206,9 +206,9 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
elem.empty();
// let a binding digest cycle complete before adding to dom
- setTimeout(function() {
+ setTimeout(() => {
elem.append(child);
- scope.$applyAsync(function() {
+ scope.$applyAsync(() => {
scope.$broadcast('component-did-mount');
scope.$broadcast('refresh');
});
@@ -239,9 +239,9 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
return {
restrict: 'E',
- link: function(scope, elem, attrs) {
+ link: (scope, elem, attrs) => {
getModule(scope, attrs)
- .then(function(componentInfo) {
+ .then(componentInfo => {
registerPluginComponent(scope, elem, attrs, componentInfo);
})
.catch(err => {
diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts
index e227dbb910c..bc3c719917c 100644
--- a/public/app/features/plugins/plugin_loader.ts
+++ b/public/app/features/plugins/plugin_loader.ts
@@ -65,7 +65,7 @@ System.config({
});
function exposeToPlugin(name: string, component: any) {
- System.registerDynamic(name, [], true, function(require, exports, module) {
+ System.registerDynamic(name, [], true, (require, exports, module) => {
module.exports = component;
});
}
diff --git a/public/app/features/plugins/specs/datasource_srv.test.ts b/public/app/features/plugins/specs/datasource_srv.test.ts
index 653e431cb9f..a8d0807c765 100644
--- a/public/app/features/plugins/specs/datasource_srv.test.ts
+++ b/public/app/features/plugins/specs/datasource_srv.test.ts
@@ -15,7 +15,7 @@ const templateSrv = {
],
};
-describe('datasource_srv', function() {
+describe('datasource_srv', () => {
const _datasourceSrv = new DatasourceSrv({}, {}, {}, templateSrv);
describe('when loading explore sources', () => {
diff --git a/public/app/features/styleguide/styleguide.ts b/public/app/features/styleguide/styleguide.ts
index 26f14bf9388..4aac194d950 100644
--- a/public/app/features/styleguide/styleguide.ts
+++ b/public/app/features/styleguide/styleguide.ts
@@ -8,7 +8,7 @@ class StyleGuideCtrl {
buttonVariants = ['-'];
navModel: any;
- /** @ngInject **/
+ /** @ngInject */
constructor(private $routeParams, private backendSrv, navModelSrv) {
this.navModel = navModelSrv.getNav('cfg', 'admin', 'styleguide', 1);
this.theme = config.bootData.user.lightTheme ? 'light' : 'dark';
diff --git a/public/app/features/templating/adhoc_variable.ts b/public/app/features/templating/adhoc_variable.ts
index 3e5b2af8b6b..bc157bfd697 100644
--- a/public/app/features/templating/adhoc_variable.ts
+++ b/public/app/features/templating/adhoc_variable.ts
@@ -15,7 +15,7 @@ export class AdhocVariable implements Variable {
skipUrlSync: false,
};
- /** @ngInject **/
+ /** @ngInject */
constructor(private model) {
assignModelProperties(this, model, this.defaults);
}
diff --git a/public/app/features/templating/all.ts b/public/app/features/templating/all.ts
index c970b73fe59..16465740642 100644
--- a/public/app/features/templating/all.ts
+++ b/public/app/features/templating/all.ts
@@ -10,7 +10,7 @@ import { CustomVariable } from './custom_variable';
import { ConstantVariable } from './constant_variable';
import { AdhocVariable } from './adhoc_variable';
-coreModule.factory('templateSrv', function() {
+coreModule.factory('templateSrv', () => {
return templateSrv;
});
diff --git a/public/app/features/templating/constant_variable.ts b/public/app/features/templating/constant_variable.ts
index e727c6e98af..dcda31f43a3 100644
--- a/public/app/features/templating/constant_variable.ts
+++ b/public/app/features/templating/constant_variable.ts
@@ -17,7 +17,7 @@ export class ConstantVariable implements Variable {
skipUrlSync: false,
};
- /** @ngInject **/
+ /** @ngInject */
constructor(private model, private variableSrv) {
assignModelProperties(this, model, this.defaults);
}
diff --git a/public/app/features/templating/custom_variable.ts b/public/app/features/templating/custom_variable.ts
index 4490a41a38f..bc946458705 100644
--- a/public/app/features/templating/custom_variable.ts
+++ b/public/app/features/templating/custom_variable.ts
@@ -23,7 +23,7 @@ export class CustomVariable implements Variable {
skipUrlSync: false,
};
- /** @ngInject **/
+ /** @ngInject */
constructor(private model, private variableSrv) {
assignModelProperties(this, model, this.defaults);
}
@@ -39,7 +39,7 @@ export class CustomVariable implements Variable {
updateOptions() {
// extract options in comma separated string
- this.options = _.map(this.query.split(/[,]+/), function(text) {
+ this.options = _.map(this.query.split(/[,]+/), text => {
return { text: text.trim(), value: text.trim() };
});
diff --git a/public/app/features/templating/datasource_variable.ts b/public/app/features/templating/datasource_variable.ts
index 366d2f89b68..4424720c7f8 100644
--- a/public/app/features/templating/datasource_variable.ts
+++ b/public/app/features/templating/datasource_variable.ts
@@ -22,7 +22,7 @@ export class DatasourceVariable implements Variable {
skipUrlSync: false,
};
- /** @ngInject **/
+ /** @ngInject */
constructor(private model, private datasourceSrv, private variableSrv, private templateSrv) {
assignModelProperties(this, model, this.defaults);
this.refresh = 1;
@@ -43,14 +43,14 @@ export class DatasourceVariable implements Variable {
updateOptions() {
const options = [];
const sources = this.datasourceSrv.getMetricSources({ skipVariables: true });
- var regex;
+ let regex;
if (this.regex) {
regex = this.templateSrv.replace(this.regex, null, 'regex');
regex = kbn.stringToJsRegex(regex);
}
- for (var i = 0; i < sources.length; i++) {
+ for (let i = 0; i < sources.length; i++) {
const source = sources[i];
// must match on type
if (source.meta.id !== this.query) {
diff --git a/public/app/features/templating/editor_ctrl.ts b/public/app/features/templating/editor_ctrl.ts
index 1222af7f93c..cef7c9cc912 100644
--- a/public/app/features/templating/editor_ctrl.ts
+++ b/public/app/features/templating/editor_ctrl.ts
@@ -4,7 +4,7 @@ import { variableTypes } from './variable';
import appEvents from 'app/core/app_events';
export class VariableEditorCtrl {
- /** @ngInject **/
+ /** @ngInject */
constructor($scope, datasourceSrv, variableSrv, templateSrv) {
$scope.variableTypes = variableTypes;
$scope.ctrl = {};
@@ -30,31 +30,31 @@ export class VariableEditorCtrl {
$scope.hideOptions = [{ value: 0, text: '' }, { value: 1, text: 'Label' }, { value: 2, text: 'Variable' }];
- $scope.init = function() {
+ $scope.init = () => {
$scope.mode = 'list';
$scope.variables = variableSrv.variables;
$scope.reset();
- $scope.$watch('mode', function(val) {
+ $scope.$watch('mode', val => {
if (val === 'new') {
$scope.reset();
}
});
};
- $scope.setMode = function(mode) {
+ $scope.setMode = mode => {
$scope.mode = mode;
};
- $scope.add = function() {
+ $scope.add = () => {
if ($scope.isValid()) {
variableSrv.addVariable($scope.current);
$scope.update();
}
};
- $scope.isValid = function() {
+ $scope.isValid = () => {
if (!$scope.ctrl.form.$valid) {
return false;
}
@@ -84,7 +84,7 @@ export class VariableEditorCtrl {
return true;
};
- $scope.validate = function() {
+ $scope.validate = () => {
$scope.infoText = '';
if ($scope.current.type === 'adhoc' && $scope.current.datasource !== null) {
$scope.infoText = 'Adhoc filters are applied automatically to all queries that target this datasource';
@@ -96,7 +96,7 @@ export class VariableEditorCtrl {
}
};
- $scope.runQuery = function() {
+ $scope.runQuery = () => {
$scope.optionsLimit = 20;
return variableSrv.updateOptions($scope.current).catch(err => {
if (err.data && err.data.message) {
@@ -106,23 +106,23 @@ export class VariableEditorCtrl {
});
};
- $scope.edit = function(variable) {
+ $scope.edit = variable => {
$scope.current = variable;
$scope.currentIsNew = false;
$scope.mode = 'edit';
$scope.validate();
};
- $scope.duplicate = function(variable) {
+ $scope.duplicate = variable => {
const clone = _.cloneDeep(variable.getSaveModel());
$scope.current = variableSrv.createVariableFromModel(clone);
$scope.current.name = 'copy_of_' + variable.name;
variableSrv.addVariable($scope.current);
};
- $scope.update = function() {
+ $scope.update = () => {
if ($scope.isValid()) {
- $scope.runQuery().then(function() {
+ $scope.runQuery().then(() => {
$scope.reset();
$scope.mode = 'list';
templateSrv.updateTemplateData();
@@ -130,18 +130,18 @@ export class VariableEditorCtrl {
}
};
- $scope.reset = function() {
+ $scope.reset = () => {
$scope.currentIsNew = true;
$scope.current = variableSrv.createVariableFromModel({ type: 'query' });
// this is done here in case a new data source type variable was added
- $scope.datasources = _.filter(datasourceSrv.getMetricSources(), function(ds) {
+ $scope.datasources = _.filter(datasourceSrv.getMetricSources(), ds => {
return !ds.meta.mixed && ds.value !== null;
});
$scope.datasourceTypes = _($scope.datasources)
.uniqBy('meta.id')
- .map(function(ds) {
+ .map(ds => {
return { text: ds.meta.name, value: ds.meta.id };
})
.value();
@@ -164,11 +164,11 @@ export class VariableEditorCtrl {
$scope.validate();
};
- $scope.removeVariable = function(variable) {
+ $scope.removeVariable = variable => {
variableSrv.removeVariable(variable);
};
- $scope.showMoreOptions = function() {
+ $scope.showMoreOptions = () => {
$scope.optionsLimit += 20;
};
}
diff --git a/public/app/features/templating/interval_variable.ts b/public/app/features/templating/interval_variable.ts
index e6ee861f828..57e5ae8eec3 100644
--- a/public/app/features/templating/interval_variable.ts
+++ b/public/app/features/templating/interval_variable.ts
@@ -4,8 +4,8 @@ import { Variable, assignModelProperties, variableTypes } from './variable';
export class IntervalVariable implements Variable {
name: string;
- auto_count: number;
- auto_min: number;
+ auto_count: number; // tslint:disable-line variable-name
+ auto_min: number; // tslint:disable-line variable-name
options: any;
auto: boolean;
query: string;
@@ -28,7 +28,7 @@ export class IntervalVariable implements Variable {
skipUrlSync: false,
};
- /** @ngInject **/
+ /** @ngInject */
constructor(private model, private timeSrv, private templateSrv, private variableSrv) {
assignModelProperties(this, model, this.defaults);
this.refresh = 2;
@@ -65,7 +65,7 @@ export class IntervalVariable implements Variable {
updateOptions() {
// extract options between quotes and/or comma
- this.options = _.map(this.query.match(/(["'])(.*?)\1|\w+/g), function(text) {
+ this.options = _.map(this.query.match(/(["'])(.*?)\1|\w+/g), text => {
text = text.replace(/["']+/g, '');
return { text: text.trim(), value: text.trim() };
});
diff --git a/public/app/features/templating/query_variable.ts b/public/app/features/templating/query_variable.ts
index fc51b44ca04..d3f39023cfb 100644
--- a/public/app/features/templating/query_variable.ts
+++ b/public/app/features/templating/query_variable.ts
@@ -46,7 +46,7 @@ export class QueryVariable implements Variable {
skipUrlSync: false,
};
- /** @ngInject **/
+ /** @ngInject */
constructor(private model, private datasourceSrv, private templateSrv, private variableSrv, private timeSrv) {
// copy model properties to this instance
assignModelProperties(this, model, this.defaults);
@@ -91,7 +91,7 @@ export class QueryVariable implements Variable {
if (this.useTags) {
return this.metricFindQuery(datasource, this.tagsQuery).then(results => {
this.tags = [];
- for (var i = 0; i < results.length; i++) {
+ for (let i = 0; i < results.length; i++) {
this.tags.push(results[i].text);
}
return datasource;
@@ -106,8 +106,8 @@ export class QueryVariable implements Variable {
getValuesForTag(tagKey) {
return this.datasourceSrv.get(this.datasource).then(datasource => {
const query = this.tagValuesQuery.replace('$tag', tagKey);
- return this.metricFindQuery(datasource, query).then(function(results) {
- return _.map(results, function(value) {
+ return this.metricFindQuery(datasource, query).then(results => {
+ return _.map(results, value => {
return value.text;
});
});
@@ -142,7 +142,7 @@ export class QueryVariable implements Variable {
}
metricNamesToVariableValues(metricNames) {
- var regex, options, i, matches;
+ let regex, options, i, matches;
options = [];
if (this.regex) {
@@ -150,9 +150,9 @@ export class QueryVariable implements Variable {
}
for (i = 0; i < metricNames.length; i++) {
const item = metricNames[i];
- var text = item.text === undefined || item.text === null ? item.value : item.text;
+ let text = item.text === undefined || item.text === null ? item.value : item.text;
- var value = item.value === undefined || item.value === null ? item.text : item.value;
+ let value = item.value === undefined || item.value === null ? item.text : item.value;
if (_.isNumber(value)) {
value = value.toString();
diff --git a/public/app/features/templating/specs/adhoc_variable.test.ts b/public/app/features/templating/specs/adhoc_variable.test.ts
index f85c49e73d5..6d15ce8362c 100644
--- a/public/app/features/templating/specs/adhoc_variable.test.ts
+++ b/public/app/features/templating/specs/adhoc_variable.test.ts
@@ -1,8 +1,8 @@
import { AdhocVariable } from '../adhoc_variable';
-describe('AdhocVariable', function() {
- describe('when serializing to url', function() {
- it('should set return key value and op separated by pipe', function() {
+describe('AdhocVariable', () => {
+ describe('when serializing to url', () => {
+ it('should set return key value and op separated by pipe', () => {
const variable = new AdhocVariable({
filters: [
{ key: 'key1', operator: '=', value: 'value1' },
@@ -15,8 +15,8 @@ describe('AdhocVariable', function() {
});
});
- describe('when deserializing from url', function() {
- it('should restore filters', function() {
+ describe('when deserializing from url', () => {
+ it('should restore filters', () => {
const variable = new AdhocVariable({});
variable.setValueFromUrl(['key1|=|value1', 'key2|!=|value2', 'key3|=|value3a__gfp__value3b__gfp__value3c']);
diff --git a/public/app/features/templating/specs/template_srv.test.ts b/public/app/features/templating/specs/template_srv.test.ts
index 984d62cb729..7f5ff959216 100644
--- a/public/app/features/templating/specs/template_srv.test.ts
+++ b/public/app/features/templating/specs/template_srv.test.ts
@@ -1,6 +1,6 @@
import { TemplateSrv } from '../template_srv';
-describe('templateSrv', function() {
+describe('templateSrv', () => {
let _templateSrv;
function initTemplateSrv(variables) {
@@ -8,58 +8,58 @@ describe('templateSrv', function() {
_templateSrv.init(variables);
}
- describe('init', function() {
- beforeEach(function() {
+ describe('init', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]);
});
- it('should initialize template data', function() {
+ it('should initialize template data', () => {
const target = _templateSrv.replace('this.[[test]].filters');
expect(target).toBe('this.oogle.filters');
});
});
- describe('replace can pass scoped vars', function() {
- beforeEach(function() {
+ describe('replace can pass scoped vars', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]);
});
- it('should replace $test with scoped value', function() {
+ it('should replace $test with scoped value', () => {
const target = _templateSrv.replace('this.$test.filters', {
test: { value: 'mupp', text: 'asd' },
});
expect(target).toBe('this.mupp.filters');
});
- it('should replace ${test} with scoped value', function() {
+ it('should replace ${test} with scoped value', () => {
const target = _templateSrv.replace('this.${test}.filters', {
test: { value: 'mupp', text: 'asd' },
});
expect(target).toBe('this.mupp.filters');
});
- it('should replace ${test:glob} with scoped value', function() {
+ it('should replace ${test:glob} with scoped value', () => {
const target = _templateSrv.replace('this.${test:glob}.filters', {
test: { value: 'mupp', text: 'asd' },
});
expect(target).toBe('this.mupp.filters');
});
- it('should replace $test with scoped text', function() {
+ it('should replace $test with scoped text', () => {
const target = _templateSrv.replaceWithText('this.$test.filters', {
test: { value: 'mupp', text: 'asd' },
});
expect(target).toBe('this.asd.filters');
});
- it('should replace ${test} with scoped text', function() {
+ it('should replace ${test} with scoped text', () => {
const target = _templateSrv.replaceWithText('this.${test}.filters', {
test: { value: 'mupp', text: 'asd' },
});
expect(target).toBe('this.asd.filters');
});
- it('should replace ${test:glob} with scoped text', function() {
+ it('should replace ${test:glob} with scoped text', () => {
const target = _templateSrv.replaceWithText('this.${test:glob}.filters', {
test: { value: 'mupp', text: 'asd' },
});
@@ -67,8 +67,8 @@ describe('templateSrv', function() {
});
});
- describe('getAdhocFilters', function() {
- beforeEach(function() {
+ describe('getAdhocFilters', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'datasource',
@@ -80,24 +80,24 @@ describe('templateSrv', function() {
]);
});
- it('should return filters if datasourceName match', function() {
+ it('should return filters if datasourceName match', () => {
const filters = _templateSrv.getAdhocFilters('oogle');
expect(filters).toMatchObject([1]);
});
- it('should return empty array if datasourceName does not match', function() {
+ it('should return empty array if datasourceName does not match', () => {
const filters = _templateSrv.getAdhocFilters('oogleasdasd');
expect(filters).toMatchObject([]);
});
- it('should return filters when datasourceName match via data source variable', function() {
+ it('should return filters when datasourceName match via data source variable', () => {
const filters = _templateSrv.getAdhocFilters('logstash');
expect(filters).toMatchObject([2]);
});
});
- describe('replace can pass multi / all format', function() {
- beforeEach(function() {
+ describe('replace can pass multi / all format', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'query',
@@ -107,44 +107,44 @@ describe('templateSrv', function() {
]);
});
- it('should replace $test with globbed value', function() {
+ it('should replace $test with globbed value', () => {
const target = _templateSrv.replace('this.$test.filters', {}, 'glob');
expect(target).toBe('this.{value1,value2}.filters');
});
- it('should replace ${test} with globbed value', function() {
+ it('should replace ${test} with globbed value', () => {
const target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
expect(target).toBe('this.{value1,value2}.filters');
});
- it('should replace ${test:glob} with globbed value', function() {
+ it('should replace ${test:glob} with globbed value', () => {
const target = _templateSrv.replace('this.${test:glob}.filters', {});
expect(target).toBe('this.{value1,value2}.filters');
});
- it('should replace $test with piped value', function() {
+ it('should replace $test with piped value', () => {
const target = _templateSrv.replace('this=$test', {}, 'pipe');
expect(target).toBe('this=value1|value2');
});
- it('should replace ${test} with piped value', function() {
+ it('should replace ${test} with piped value', () => {
const target = _templateSrv.replace('this=${test}', {}, 'pipe');
expect(target).toBe('this=value1|value2');
});
- it('should replace ${test:pipe} with piped value', function() {
+ it('should replace ${test:pipe} with piped value', () => {
const target = _templateSrv.replace('this=${test:pipe}', {});
expect(target).toBe('this=value1|value2');
});
- it('should replace ${test:pipe} with piped value and $test with globbed value', function() {
+ it('should replace ${test:pipe} with piped value and $test with globbed value', () => {
const target = _templateSrv.replace('${test:pipe},$test', {}, 'glob');
expect(target).toBe('value1|value2,{value1,value2}');
});
});
- describe('variable with all option', function() {
- beforeEach(function() {
+ describe('variable with all option', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'query',
@@ -155,29 +155,29 @@ describe('templateSrv', function() {
]);
});
- it('should replace $test with formatted all value', function() {
+ it('should replace $test with formatted all value', () => {
const target = _templateSrv.replace('this.$test.filters', {}, 'glob');
expect(target).toBe('this.{value1,value2}.filters');
});
- it('should replace ${test} with formatted all value', function() {
+ it('should replace ${test} with formatted all value', () => {
const target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
expect(target).toBe('this.{value1,value2}.filters');
});
- it('should replace ${test:glob} with formatted all value', function() {
+ it('should replace ${test:glob} with formatted all value', () => {
const target = _templateSrv.replace('this.${test:glob}.filters', {});
expect(target).toBe('this.{value1,value2}.filters');
});
- it('should replace ${test:pipe} with piped value and $test with globbed value', function() {
+ it('should replace ${test:pipe} with piped value and $test with globbed value', () => {
const target = _templateSrv.replace('${test:pipe},$test', {}, 'glob');
expect(target).toBe('value1|value2,{value1,value2}');
});
});
- describe('variable with all option and custom value', function() {
- beforeEach(function() {
+ describe('variable with all option and custom value', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'query',
@@ -189,143 +189,143 @@ describe('templateSrv', function() {
]);
});
- it('should replace $test with formatted all value', function() {
+ it('should replace $test with formatted all value', () => {
const target = _templateSrv.replace('this.$test.filters', {}, 'glob');
expect(target).toBe('this.*.filters');
});
- it('should replace ${test} with formatted all value', function() {
+ it('should replace ${test} with formatted all value', () => {
const target = _templateSrv.replace('this.${test}.filters', {}, 'glob');
expect(target).toBe('this.*.filters');
});
- it('should replace ${test:glob} with formatted all value', function() {
+ it('should replace ${test:glob} with formatted all value', () => {
const target = _templateSrv.replace('this.${test:glob}.filters', {});
expect(target).toBe('this.*.filters');
});
- it('should not escape custom all value', function() {
+ it('should not escape custom all value', () => {
const target = _templateSrv.replace('this.$test', {}, 'regex');
expect(target).toBe('this.*');
});
});
- describe('lucene format', function() {
- it('should properly escape $test with lucene escape sequences', function() {
+ describe('lucene format', () => {
+ it('should properly escape $test with lucene escape sequences', () => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'value/4' } }]);
const target = _templateSrv.replace('this:$test', {}, 'lucene');
expect(target).toBe('this:value\\/4');
});
- it('should properly escape ${test} with lucene escape sequences', function() {
+ it('should properly escape ${test} with lucene escape sequences', () => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'value/4' } }]);
const target = _templateSrv.replace('this:${test}', {}, 'lucene');
expect(target).toBe('this:value\\/4');
});
- it('should properly escape ${test:lucene} with lucene escape sequences', function() {
+ it('should properly escape ${test:lucene} with lucene escape sequences', () => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'value/4' } }]);
const target = _templateSrv.replace('this:${test:lucene}', {});
expect(target).toBe('this:value\\/4');
});
});
- describe('format variable to string values', function() {
- it('single value should return value', function() {
+ describe('format variable to string values', () => {
+ it('single value should return value', () => {
const result = _templateSrv.formatValue('test');
expect(result).toBe('test');
});
- it('multi value and glob format should render glob string', function() {
+ it('multi value and glob format should render glob string', () => {
const result = _templateSrv.formatValue(['test', 'test2'], 'glob');
expect(result).toBe('{test,test2}');
});
- it('multi value and lucene should render as lucene expr', function() {
+ it('multi value and lucene should render as lucene expr', () => {
const result = _templateSrv.formatValue(['test', 'test2'], 'lucene');
expect(result).toBe('("test" OR "test2")');
});
- it('multi value and regex format should render regex string', function() {
+ it('multi value and regex format should render regex string', () => {
const result = _templateSrv.formatValue(['test.', 'test2'], 'regex');
expect(result).toBe('(test\\.|test2)');
});
- it('multi value and pipe should render pipe string', function() {
+ it('multi value and pipe should render pipe string', () => {
const result = _templateSrv.formatValue(['test', 'test2'], 'pipe');
expect(result).toBe('test|test2');
});
- it('multi value and distributed should render distributed string', function() {
+ it('multi value and distributed should render distributed string', () => {
const result = _templateSrv.formatValue(['test', 'test2'], 'distributed', {
name: 'build',
});
expect(result).toBe('test,build=test2');
});
- it('multi value and distributed should render when not string', function() {
+ it('multi value and distributed should render when not string', () => {
const result = _templateSrv.formatValue(['test'], 'distributed', {
name: 'build',
});
expect(result).toBe('test');
});
- it('multi value and csv format should render csv string', function() {
+ it('multi value and csv format should render csv string', () => {
const result = _templateSrv.formatValue(['test', 'test2'], 'csv');
expect(result).toBe('test,test2');
});
- it('slash should be properly escaped in regex format', function() {
+ it('slash should be properly escaped in regex format', () => {
const result = _templateSrv.formatValue('Gi3/14', 'regex');
expect(result).toBe('Gi3\\/14');
});
});
- describe('can check if variable exists', function() {
- beforeEach(function() {
+ describe('can check if variable exists', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]);
});
- it('should return true if exists', function() {
+ it('should return true if exists', () => {
const result = _templateSrv.variableExists('$test');
expect(result).toBe(true);
});
});
- describe('can highlight variables in string', function() {
- beforeEach(function() {
+ describe('can highlight variables in string', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]);
});
- it('should insert html', function() {
+ it('should insert html', () => {
const result = _templateSrv.highlightVariablesAsHtml('$test');
expect(result).toBe('$test');
});
- it('should insert html anywhere in string', function() {
+ it('should insert html anywhere in string', () => {
const result = _templateSrv.highlightVariablesAsHtml('this $test ok');
expect(result).toBe('this $test ok');
});
- it('should ignore if variables does not exist', function() {
+ it('should ignore if variables does not exist', () => {
const result = _templateSrv.highlightVariablesAsHtml('this $google ok');
expect(result).toBe('this $google ok');
});
});
- describe('updateTemplateData with simple value', function() {
- beforeEach(function() {
+ describe('updateTemplateData with simple value', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'muuuu' } }]);
});
- it('should set current value and update template data', function() {
+ it('should set current value and update template data', () => {
const target = _templateSrv.replace('this.[[test]].filters');
expect(target).toBe('this.muuuu.filters');
});
});
- describe('fillVariableValuesForUrl with multi value', function() {
- beforeEach(function() {
+ describe('fillVariableValuesForUrl with multi value', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'query',
@@ -338,15 +338,15 @@ describe('templateSrv', function() {
]);
});
- it('should set multiple url params', function() {
+ it('should set multiple url params', () => {
const params = {};
_templateSrv.fillVariableValuesForUrl(params);
expect(params['var-test']).toMatchObject(['val1', 'val2']);
});
});
- describe('fillVariableValuesForUrl skip url sync', function() {
- beforeEach(function() {
+ describe('fillVariableValuesForUrl skip url sync', () => {
+ beforeEach(() => {
initTemplateSrv([
{
name: 'test',
@@ -359,15 +359,15 @@ describe('templateSrv', function() {
]);
});
- it('should not include template variable value in url', function() {
+ it('should not include template variable value in url', () => {
const params = {};
_templateSrv.fillVariableValuesForUrl(params);
expect(params['var-test']).toBe(undefined);
});
});
- describe('fillVariableValuesForUrl with multi value with skip url sync', function() {
- beforeEach(function() {
+ describe('fillVariableValuesForUrl with multi value with skip url sync', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'query',
@@ -381,19 +381,19 @@ describe('templateSrv', function() {
]);
});
- it('should not include template variable value in url', function() {
+ it('should not include template variable value in url', () => {
const params = {};
_templateSrv.fillVariableValuesForUrl(params);
expect(params['var-test']).toBe(undefined);
});
});
- describe('fillVariableValuesForUrl with multi value and scopedVars', function() {
- beforeEach(function() {
+ describe('fillVariableValuesForUrl with multi value and scopedVars', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]);
});
- it('should set scoped value as url params', function() {
+ it('should set scoped value as url params', () => {
const params = {};
_templateSrv.fillVariableValuesForUrl(params, {
test: { value: 'val1' },
@@ -402,12 +402,12 @@ describe('templateSrv', function() {
});
});
- describe('fillVariableValuesForUrl with multi value, scopedVars and skip url sync', function() {
- beforeEach(function() {
+ describe('fillVariableValuesForUrl with multi value, scopedVars and skip url sync', () => {
+ beforeEach(() => {
initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]);
});
- it('should not set scoped value as url params', function() {
+ it('should not set scoped value as url params', () => {
const params = {};
_templateSrv.fillVariableValuesForUrl(params, {
test: { name: 'test', value: 'val1', skipUrlSync: true },
@@ -416,8 +416,8 @@ describe('templateSrv', function() {
});
});
- describe('replaceWithText', function() {
- beforeEach(function() {
+ describe('replaceWithText', () => {
+ beforeEach(() => {
initTemplateSrv([
{
type: 'query',
@@ -434,18 +434,18 @@ describe('templateSrv', function() {
_templateSrv.updateTemplateData();
});
- it('should replace with text except for grafanaVariables', function() {
+ it('should replace with text except for grafanaVariables', () => {
const target = _templateSrv.replaceWithText('Server: $server, period: $period');
expect(target).toBe('Server: All, period: 13m');
});
});
- describe('built in interval variables', function() {
- beforeEach(function() {
+ describe('built in interval variables', () => {
+ beforeEach(() => {
initTemplateSrv([]);
});
- it('should replace $__interval_ms with interval milliseconds', function() {
+ it('should replace $__interval_ms with interval milliseconds', () => {
const target = _templateSrv.replace('10 * $__interval_ms', {
__interval_ms: { text: '100', value: '100' },
});
diff --git a/public/app/features/templating/specs/variable.test.ts b/public/app/features/templating/specs/variable.test.ts
index 814c5fbe003..6d5e88fa4bd 100644
--- a/public/app/features/templating/specs/variable.test.ts
+++ b/public/app/features/templating/specs/variable.test.ts
@@ -1,53 +1,53 @@
import { containsVariable, assignModelProperties } from '../variable';
-describe('containsVariable', function() {
- describe('when checking if a string contains a variable', function() {
- it('should find it with $const syntax', function() {
+describe('containsVariable', () => {
+ describe('when checking if a string contains a variable', () => {
+ it('should find it with $const syntax', () => {
const contains = containsVariable('this.$test.filters', 'test');
expect(contains).toBe(true);
});
- it('should not find it if only part matches with $const syntax', function() {
+ it('should not find it if only part matches with $const syntax', () => {
const contains = containsVariable('this.$serverDomain.filters', 'server');
expect(contains).toBe(false);
});
- it('should find it if it ends with variable and passing multiple test strings', function() {
+ it('should find it if it ends with variable and passing multiple test strings', () => {
const contains = containsVariable('show field keys from $pgmetric', 'test string2', 'pgmetric');
expect(contains).toBe(true);
});
- it('should find it with [[var]] syntax', function() {
+ it('should find it with [[var]] syntax', () => {
const contains = containsVariable('this.[[test]].filters', 'test');
expect(contains).toBe(true);
});
- it('should find it when part of segment', function() {
+ it('should find it when part of segment', () => {
const contains = containsVariable('metrics.$env.$group-*', 'group');
expect(contains).toBe(true);
});
- it('should find it its the only thing', function() {
+ it('should find it its the only thing', () => {
const contains = containsVariable('$env', 'env');
expect(contains).toBe(true);
});
- it('should be able to pass in multiple test strings', function() {
+ it('should be able to pass in multiple test strings', () => {
const contains = containsVariable('asd', 'asd2.$env', 'env');
expect(contains).toBe(true);
});
});
});
-describe('assignModelProperties', function() {
- it('only set properties defined in defaults', function() {
+describe('assignModelProperties', () => {
+ it('only set properties defined in defaults', () => {
const target: any = { test: 'asd' };
assignModelProperties(target, { propA: 1, propB: 2 }, { propB: 0 });
expect(target.propB).toBe(2);
expect(target.test).toBe('asd');
});
- it('use default value if not found on source', function() {
+ it('use default value if not found on source', () => {
const target: any = { test: 'asd' };
assignModelProperties(target, { propA: 1, propB: 2 }, { propC: 10 });
expect(target.propC).toBe(10);
diff --git a/public/app/features/templating/specs/variable_srv.test.ts b/public/app/features/templating/specs/variable_srv.test.ts
index 28fd3860ed3..359d5b79a38 100644
--- a/public/app/features/templating/specs/variable_srv.test.ts
+++ b/public/app/features/templating/specs/variable_srv.test.ts
@@ -3,8 +3,8 @@ import { VariableSrv } from '../variable_srv';
import moment from 'moment';
import $q from 'q';
-describe('VariableSrv', function() {
- const ctx = {
+describe('VariableSrv', function(this: any) {
+ const ctx = {
datasourceSrv: {},
timeSrv: {
timeRange: () => {},
@@ -29,12 +29,12 @@ describe('VariableSrv', function() {
$location: {
search: () => {},
},
- };
+ } as any;
function describeUpdateVariable(desc, fn) {
describe(desc, () => {
const scenario: any = {};
- scenario.setup = function(setupFn) {
+ scenario.setup = setupFn => {
scenario.setupFn = setupFn;
};
@@ -135,7 +135,7 @@ describe('VariableSrv', function() {
//
// Query variable update
//
- describeUpdateVariable('query variable with empty current object and refresh', function(scenario) {
+ describeUpdateVariable('query variable with empty current object and refresh', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -154,7 +154,7 @@ describe('VariableSrv', function() {
describeUpdateVariable(
'query variable with multi select and new options does not contain some selected values',
- function(scenario) {
+ scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -177,7 +177,7 @@ describe('VariableSrv', function() {
describeUpdateVariable(
'query variable with multi select and new options does not contain any selected values',
- function(scenario) {
+ scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -198,7 +198,7 @@ describe('VariableSrv', function() {
}
);
- describeUpdateVariable('query variable with multi select and $__all selected', function(scenario) {
+ describeUpdateVariable('query variable with multi select and $__all selected', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -219,7 +219,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('query variable with numeric results', function(scenario) {
+ describeUpdateVariable('query variable with numeric results', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -237,7 +237,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('basic query variable', function(scenario) {
+ describeUpdateVariable('basic query variable', scenario => {
scenario.setup(() => {
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
scenario.queryResult = [{ text: 'backend1' }, { text: 'backend2' }];
@@ -255,7 +255,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('and existing value still exists in options', function(scenario) {
+ describeUpdateVariable('and existing value still exists in options', scenario => {
scenario.setup(() => {
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
scenario.variableModel.current = { value: 'backend2', text: 'backend2' };
@@ -267,7 +267,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('and regex pattern exists', function(scenario) {
+ describeUpdateVariable('and regex pattern exists', scenario => {
scenario.setup(() => {
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
scenario.variableModel.regex = '/apps.*(backend_[0-9]+)/';
@@ -282,7 +282,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('and regex pattern exists and no match', function(scenario) {
+ describeUpdateVariable('and regex pattern exists and no match', scenario => {
scenario.setup(() => {
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
scenario.variableModel.regex = '/apps.*(backendasd[0-9]+)/';
@@ -298,7 +298,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('regex pattern without slashes', function(scenario) {
+ describeUpdateVariable('regex pattern without slashes', scenario => {
scenario.setup(() => {
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
scenario.variableModel.regex = 'backend_01';
@@ -313,7 +313,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('regex pattern remove duplicates', function(scenario) {
+ describeUpdateVariable('regex pattern remove duplicates', scenario => {
scenario.setup(() => {
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
scenario.variableModel.regex = '/backend_01/';
@@ -328,7 +328,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('with include All', function(scenario) {
+ describeUpdateVariable('with include All', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -345,7 +345,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('with include all and custom value', function(scenario) {
+ describeUpdateVariable('with include all and custom value', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -362,7 +362,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('without sort', function(scenario) {
+ describeUpdateVariable('without sort', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -380,7 +380,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('with alphabetical sort (asc)', function(scenario) {
+ describeUpdateVariable('with alphabetical sort (asc)', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -398,7 +398,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('with alphabetical sort (desc)', function(scenario) {
+ describeUpdateVariable('with alphabetical sort (desc)', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -416,7 +416,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('with numerical sort (asc)', function(scenario) {
+ describeUpdateVariable('with numerical sort (asc)', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -434,7 +434,7 @@ describe('VariableSrv', function() {
});
});
- describeUpdateVariable('with numerical sort (desc)', function(scenario) {
+ describeUpdateVariable('with numerical sort (desc)', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'query',
@@ -455,7 +455,7 @@ describe('VariableSrv', function() {
//
// datasource variable update
//
- describeUpdateVariable('datasource variable with regex filter', function(scenario) {
+ describeUpdateVariable('datasource variable with regex filter', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'datasource',
@@ -486,7 +486,7 @@ describe('VariableSrv', function() {
//
// Custom variable update
//
- describeUpdateVariable('update custom variable', function(scenario) {
+ describeUpdateVariable('update custom variable', scenario => {
scenario.setup(() => {
scenario.variableModel = {
type: 'custom',
diff --git a/public/app/features/templating/specs/variable_srv_init.test.ts b/public/app/features/templating/specs/variable_srv_init.test.ts
index f06f533e429..b5d00a5289e 100644
--- a/public/app/features/templating/specs/variable_srv_init.test.ts
+++ b/public/app/features/templating/specs/variable_srv_init.test.ts
@@ -4,7 +4,7 @@ import _ from 'lodash';
import { VariableSrv } from '../variable_srv';
import $q from 'q';
-describe('VariableSrv init', function() {
+describe('VariableSrv init', function(this: any) {
const templateSrv = {
init: vars => {
this.variables = vars;
@@ -17,12 +17,12 @@ describe('VariableSrv init', function() {
}),
};
- const $injector = {};
+ const $injector = {} as any;
const $rootscope = {
$on: () => {},
};
- let ctx = {};
+ let ctx = {} as any;
function describeInitScenario(desc, fn) {
describe(desc, () => {
diff --git a/public/app/features/templating/template_srv.ts b/public/app/features/templating/template_srv.ts
index 8e716a9f5d9..6eab51abbfa 100644
--- a/public/app/features/templating/template_srv.ts
+++ b/public/app/features/templating/template_srv.ts
@@ -32,7 +32,7 @@ export class TemplateSrv {
updateTemplateData() {
this.index = {};
- for (var i = 0; i < this.variables.length; i++) {
+ for (let i = 0; i < this.variables.length; i++) {
const variable = this.variables[i];
if (!variable.current || (!variable.current.isNone && !variable.current.value)) {
@@ -48,9 +48,9 @@ export class TemplateSrv {
}
getAdhocFilters(datasourceName) {
- var filters = [];
+ let filters = [];
- for (var i = 0; i < this.variables.length; i++) {
+ for (let i = 0; i < this.variables.length; i++) {
const variable = this.variables[i];
if (variable.type !== 'adhoc') {
continue;
@@ -77,7 +77,7 @@ export class TemplateSrv {
if (value instanceof Array && value.length === 0) {
return '__empty__';
}
- const quotedValues = _.map(value, function(val) {
+ const quotedValues = _.map(value, val => {
return '"' + luceneEscape(val) + '"';
});
return '(' + quotedValues.join(' OR ') + ')';
@@ -171,7 +171,7 @@ export class TemplateSrv {
return variable.allValue;
}
const values = [];
- for (var i = 1; i < variable.options.length; i++) {
+ for (let i = 1; i < variable.options.length; i++) {
values.push(variable.options[i].value);
}
return values;
@@ -182,7 +182,7 @@ export class TemplateSrv {
return target;
}
- var variable, systemValue, value, fmt;
+ let variable, systemValue, value, fmt;
this.regex.lastIndex = 0;
return target.replace(this.regex, (match, var1, var2, fmt2, var3, fmt3) => {
@@ -227,7 +227,7 @@ export class TemplateSrv {
return target;
}
- var variable;
+ let variable;
this.regex.lastIndex = 0;
return target.replace(this.regex, (match, var1, var2, fmt2, var3) => {
@@ -248,7 +248,7 @@ export class TemplateSrv {
}
fillVariableValuesForUrl(params, scopedVars) {
- _.each(this.variables, function(variable) {
+ _.each(this.variables, variable => {
if (scopedVars && scopedVars[variable.name] !== void 0) {
if (scopedVars[variable.name].skipUrlSync) {
return;
@@ -264,7 +264,7 @@ export class TemplateSrv {
}
distributeVariable(value, variable) {
- value = _.map(value, function(val, index) {
+ value = _.map(value, (val, index) => {
if (index !== 0) {
return variable + '=' + val;
} else {
diff --git a/public/app/features/templating/variable.ts b/public/app/features/templating/variable.ts
index 8751a64b47d..412426fb294 100644
--- a/public/app/features/templating/variable.ts
+++ b/public/app/features/templating/variable.ts
@@ -10,14 +10,14 @@ export interface Variable {
getSaveModel();
}
-export var variableTypes = {};
+export let variableTypes = {};
export { assignModelProperties };
export function containsVariable(...args: any[]) {
- var variableName = args[args.length - 1];
- var str = args[0] || '';
+ let variableName = args[args.length - 1];
+ let str = args[0] || '';
- for (var i = 1; i < args.length - 1; i++) {
+ for (let i = 1; i < args.length - 1; i++) {
str += ' ' + args[i] || '';
}
diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts
index 55294d6fea2..8c0f1f11f77 100644
--- a/public/app/features/templating/variable_srv.ts
+++ b/public/app/features/templating/variable_srv.ts
@@ -122,12 +122,13 @@ export class VariableSrv {
}
const g = this.createGraph();
- const promises = g
- .getNode(variable.name)
- .getOptimizedInputEdges()
- .map(e => {
+ const node = g.getNode(variable.name);
+ let promises = [];
+ if (node) {
+ promises = node.getOptimizedInputEdges().map(e => {
return this.updateOptions(this.variables.find(v => v.name === e.inputNode.name));
});
+ }
return this.$q.all(promises).then(() => {
if (emitChangeEvents) {
@@ -138,7 +139,7 @@ export class VariableSrv {
}
selectOptionsForCurrentValue(variable) {
- var i, y, value, option;
+ let i, y, value, option;
const selected: any = [];
for (i = 0; i < variable.options.length; i++) {
@@ -167,17 +168,17 @@ export class VariableSrv {
}
if (_.isArray(variable.current.value)) {
- var selected = this.selectOptionsForCurrentValue(variable);
+ let selected = this.selectOptionsForCurrentValue(variable);
// if none pick first
if (selected.length === 0) {
selected = variable.options[0];
} else {
selected = {
- value: _.map(selected, function(val) {
+ value: _.map(selected, val => {
return val.value;
}),
- text: _.map(selected, function(val) {
+ text: _.map(selected, val => {
return val.text;
}).join(' + '),
};
@@ -200,14 +201,14 @@ export class VariableSrv {
}
setOptionFromUrl(variable, urlValue) {
- var promise = this.$q.when();
+ let promise = this.$q.when();
if (variable.refresh) {
promise = variable.updateOptions();
}
return promise.then(() => {
- var option = _.find(variable.options, op => {
+ let option = _.find(variable.options, op => {
return op.text === urlValue || op.value === urlValue;
});
@@ -249,7 +250,7 @@ export class VariableSrv {
const params = this.$location.search();
// remove variable params
- _.each(params, function(value, key) {
+ _.each(params, (value, key) => {
if (key.indexOf('var-') === 0) {
delete params[key];
}
@@ -262,7 +263,7 @@ export class VariableSrv {
}
setAdhocFilter(options) {
- var variable = _.find(this.variables, {
+ let variable = _.find(this.variables, {
type: 'adhoc',
datasource: options.datasource,
});
diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts
index 8b0905ac80b..34771618095 100644
--- a/public/app/plugins/datasource/cloudwatch/datasource.ts
+++ b/public/app/plugins/datasource/cloudwatch/datasource.ts
@@ -85,12 +85,12 @@ export default class CloudWatchDatasource {
const end = this.convertToCloudWatchTime(options.range.to, true);
now = Math.round((now || Date.now()) / 1000);
- var period;
+ let period;
const range = end - start;
const hourSec = 60 * 60;
const daySec = hourSec * 24;
- var periodUnit = 60;
+ let periodUnit = 60;
if (!target.period) {
if (now - start <= daySec * 15) {
// until 15 days ago
@@ -222,10 +222,10 @@ export default class CloudWatchDatasource {
}
metricFindQuery(query) {
- var region;
- var namespace;
- var metricName;
- var filterJson;
+ let region;
+ let namespace;
+ let metricName;
+ let filterJson;
const regionQuery = query.match(/^regions\(\)/);
if (regionQuery) {
@@ -291,7 +291,7 @@ export default class CloudWatchDatasource {
return this.templateSrv.replace(s);
});
const defaultPeriod = annotation.prefixMatching ? '' : '300';
- var period = annotation.period || defaultPeriod;
+ let period = annotation.period || defaultPeriod;
period = parseInt(period, 10);
const parameters = {
prefixMatching: annotation.prefixMatching,
diff --git a/public/app/plugins/datasource/cloudwatch/query_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_ctrl.ts
index 0d250935445..55b7786e302 100644
--- a/public/app/plugins/datasource/cloudwatch/query_ctrl.ts
+++ b/public/app/plugins/datasource/cloudwatch/query_ctrl.ts
@@ -6,7 +6,7 @@ export class CloudWatchQueryCtrl extends QueryCtrl {
aliasSyntax: string;
- /** @ngInject **/
+ /** @ngInject */
constructor($scope, $injector) {
super($scope, $injector);
this.aliasSyntax = '{{metric}} {{stat}} {{namespace}} {{region}} {{}}';
diff --git a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts
index e69c99c8ad9..4f4b2961761 100644
--- a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts
+++ b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts
@@ -19,7 +19,7 @@ export class CloudWatchQueryParameter {
export class CloudWatchQueryParameterCtrl {
/** @ngInject */
constructor($scope, templateSrv, uiSegmentSrv, datasourceSrv, $q) {
- $scope.init = function() {
+ $scope.init = () => {
const target = $scope.target;
target.namespace = target.namespace || '';
target.metricName = target.metricName || '';
@@ -38,7 +38,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.dimSegments = _.reduce(
$scope.target.dimensions,
- function(memo, value, key) {
+ (memo, value, key) => {
memo.push(uiSegmentSrv.newKey(key));
memo.push(uiSegmentSrv.newOperator('='));
memo.push(uiSegmentSrv.newKeyValue(value));
@@ -47,7 +47,7 @@ export class CloudWatchQueryParameterCtrl {
[]
);
- $scope.statSegments = _.map($scope.target.statistics, function(stat) {
+ $scope.statSegments = _.map($scope.target.statistics, stat => {
return uiSegmentSrv.getSegmentForValue(stat);
});
@@ -67,15 +67,15 @@ export class CloudWatchQueryParameterCtrl {
}
if (!$scope.onChange) {
- $scope.onChange = function() {};
+ $scope.onChange = () => {};
}
};
- $scope.getStatSegments = function() {
+ $scope.getStatSegments = () => {
return $q.when(
_.flatten([
angular.copy($scope.removeStatSegment),
- _.map($scope.datasource.standardStatistics, function(s) {
+ _.map($scope.datasource.standardStatistics, s => {
return uiSegmentSrv.getSegmentForValue(s);
}),
uiSegmentSrv.getSegmentForValue('pNN.NN'),
@@ -83,7 +83,7 @@ export class CloudWatchQueryParameterCtrl {
);
};
- $scope.statSegmentChanged = function(segment, index) {
+ $scope.statSegmentChanged = (segment, index) => {
if (segment.value === $scope.removeStatSegment.value) {
$scope.statSegments.splice(index, 1);
} else {
@@ -92,7 +92,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.target.statistics = _.reduce(
$scope.statSegments,
- function(memo, seg) {
+ (memo, seg) => {
if (!seg.fake) {
memo.push(seg.value);
}
@@ -105,7 +105,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.onChange();
};
- $scope.ensurePlusButton = function(segments) {
+ $scope.ensurePlusButton = segments => {
const count = segments.length;
const lastSegment = segments[Math.max(count - 1, 0)];
@@ -114,13 +114,13 @@ export class CloudWatchQueryParameterCtrl {
}
};
- $scope.getDimSegments = function(segment, $index) {
+ $scope.getDimSegments = (segment, $index) => {
if (segment.type === 'operator') {
return $q.when([]);
}
const target = $scope.target;
- var query = $q.when([]);
+ let query = $q.when([]);
if (segment.type === 'key' || segment.type === 'plus-button') {
query = $scope.datasource.getDimensionKeys($scope.target.namespace, $scope.target.region);
@@ -135,7 +135,7 @@ export class CloudWatchQueryParameterCtrl {
);
}
- return query.then($scope.transformToSegments(true)).then(function(results) {
+ return query.then($scope.transformToSegments(true)).then(results => {
if (segment.type === 'key') {
results.splice(0, 0, angular.copy($scope.removeDimSegment));
}
@@ -143,7 +143,7 @@ export class CloudWatchQueryParameterCtrl {
});
};
- $scope.dimSegmentChanged = function(segment, index) {
+ $scope.dimSegmentChanged = (segment, index) => {
$scope.dimSegments[index] = segment;
if (segment.value === $scope.removeDimSegment.value) {
@@ -160,11 +160,11 @@ export class CloudWatchQueryParameterCtrl {
$scope.onChange();
};
- $scope.syncDimSegmentsWithModel = function() {
+ $scope.syncDimSegmentsWithModel = () => {
const dims = {};
const length = $scope.dimSegments.length;
- for (var i = 0; i < length - 2; i += 3) {
+ for (let i = 0; i < length - 2; i += 3) {
const keySegment = $scope.dimSegments[i];
const valueSegment = $scope.dimSegments[i + 2];
if (!valueSegment.fake) {
@@ -175,44 +175,44 @@ export class CloudWatchQueryParameterCtrl {
$scope.target.dimensions = dims;
};
- $scope.getRegions = function() {
+ $scope.getRegions = () => {
return $scope.datasource
.metricFindQuery('regions()')
- .then(function(results) {
+ .then(results => {
results.unshift({ text: 'default' });
return results;
})
.then($scope.transformToSegments(true));
};
- $scope.getNamespaces = function() {
+ $scope.getNamespaces = () => {
return $scope.datasource.metricFindQuery('namespaces()').then($scope.transformToSegments(true));
};
- $scope.getMetrics = function() {
+ $scope.getMetrics = () => {
return $scope.datasource
.metricFindQuery('metrics(' + $scope.target.namespace + ',' + $scope.target.region + ')')
.then($scope.transformToSegments(true));
};
- $scope.regionChanged = function() {
+ $scope.regionChanged = () => {
$scope.target.region = $scope.regionSegment.value;
$scope.onChange();
};
- $scope.namespaceChanged = function() {
+ $scope.namespaceChanged = () => {
$scope.target.namespace = $scope.namespaceSegment.value;
$scope.onChange();
};
- $scope.metricChanged = function() {
+ $scope.metricChanged = () => {
$scope.target.metricName = $scope.metricSegment.value;
$scope.onChange();
};
- $scope.transformToSegments = function(addTemplateVars) {
- return function(results) {
- const segments = _.map(results, function(segment) {
+ $scope.transformToSegments = addTemplateVars => {
+ return results => {
+ const segments = _.map(results, segment => {
return uiSegmentSrv.newSegment({
value: segment.text,
expandable: segment.expandable,
@@ -220,7 +220,7 @@ export class CloudWatchQueryParameterCtrl {
});
if (addTemplateVars) {
- _.each(templateSrv.variables, function(variable) {
+ _.each(templateSrv.variables, variable => {
segments.unshift(
uiSegmentSrv.newSegment({
type: 'template',
diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.test.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.test.ts
index 08329ba4e73..497c773687f 100644
--- a/public/app/plugins/datasource/cloudwatch/specs/datasource.test.ts
+++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.test.ts
@@ -3,7 +3,7 @@ import CloudWatchDatasource from '../datasource';
import * as dateMath from 'app/core/utils/datemath';
import _ from 'lodash';
-describe('CloudWatchDatasource', function() {
+describe('CloudWatchDatasource', () => {
const instanceSettings = {
jsonData: { defaultRegion: 'us-east-1', access: 'proxy' },
};
@@ -25,16 +25,16 @@ describe('CloudWatchDatasource', function() {
},
};
const backendSrv = {};
- const ctx = {
+ const ctx = {
backendSrv,
templateSrv,
- };
+ } as any;
beforeEach(() => {
ctx.ds = new CloudWatchDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
});
- describe('When performing CloudWatch query', function() {
+ describe('When performing CloudWatch query', () => {
let requestParams;
const query = {
@@ -80,8 +80,8 @@ describe('CloudWatchDatasource', function() {
});
});
- it('should generate the correct query', function(done) {
- ctx.ds.query(query).then(function() {
+ it('should generate the correct query', done => {
+ ctx.ds.query(query).then(() => {
const params = requestParams.queries[0];
expect(params.namespace).toBe(query.targets[0].namespace);
expect(params.metricName).toBe(query.targets[0].metricName);
@@ -92,7 +92,7 @@ describe('CloudWatchDatasource', function() {
});
});
- it('should generate the correct query with interval variable', function(done) {
+ it('should generate the correct query with interval variable', done => {
ctx.templateSrv.data = {
period: '10m',
};
@@ -114,14 +114,14 @@ describe('CloudWatchDatasource', function() {
],
};
- ctx.ds.query(query).then(function() {
+ ctx.ds.query(query).then(() => {
const params = requestParams.queries[0];
expect(params.period).toBe('600');
done();
});
});
- it('should cancel query for invalid extended statistics', function() {
+ it('should cancel query for invalid extended statistics', () => {
const query = {
range: { from: 'now-1h', to: 'now' },
rangeRaw: { from: 1483228800, to: 1483232400 },
@@ -141,8 +141,8 @@ describe('CloudWatchDatasource', function() {
expect(ctx.ds.query.bind(ctx.ds, query)).toThrow(/Invalid extended statistics/);
});
- it('should return series list', function(done) {
- ctx.ds.query(query).then(function(result) {
+ it('should return series list', done => {
+ ctx.ds.query(query).then(result => {
expect(result.data[0].target).toBe(response.results.A.series[0].name);
expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]);
done();
@@ -150,8 +150,8 @@ describe('CloudWatchDatasource', function() {
});
});
- describe('When query region is "default"', function() {
- it('should return the datasource region if empty or "default"', function() {
+ describe('When query region is "default"', () => {
+ it('should return the datasource region if empty or "default"', () => {
const defaultRegion = instanceSettings.jsonData.defaultRegion;
expect(ctx.ds.getActualRegion()).toBe(defaultRegion);
@@ -159,19 +159,19 @@ describe('CloudWatchDatasource', function() {
expect(ctx.ds.getActualRegion('default')).toBe(defaultRegion);
});
- it('should return the specified region if specified', function() {
+ it('should return the specified region if specified', () => {
expect(ctx.ds.getActualRegion('some-fake-region-1')).toBe('some-fake-region-1');
});
let requestParams;
- beforeEach(function() {
+ beforeEach(() => {
ctx.ds.performTimeSeriesQuery = jest.fn(request => {
requestParams = request;
return Promise.resolve({ data: {} });
});
});
- it('should query for the datasource region if empty or "default"', function(done) {
+ it('should query for the datasource region if empty or "default"', done => {
const query = {
range: { from: 'now-1h', to: 'now' },
rangeRaw: { from: 1483228800, to: 1483232400 },
@@ -189,14 +189,14 @@ describe('CloudWatchDatasource', function() {
],
};
- ctx.ds.query(query).then(function(result) {
+ ctx.ds.query(query).then(result => {
expect(requestParams.queries[0].region).toBe(instanceSettings.jsonData.defaultRegion);
done();
});
});
});
- describe('When performing CloudWatch query for extended statistics', function() {
+ describe('When performing CloudWatch query for extended statistics', () => {
const query = {
range: { from: 'now-1h', to: 'now' },
rangeRaw: { from: 1483228800, to: 1483232400 },
@@ -235,14 +235,14 @@ describe('CloudWatchDatasource', function() {
},
};
- beforeEach(function() {
+ beforeEach(() => {
ctx.backendSrv.datasourceRequest = jest.fn(params => {
return Promise.resolve({ data: response });
});
});
- it('should return series list', function(done) {
- ctx.ds.query(query).then(function(result) {
+ it('should return series list', done => {
+ ctx.ds.query(query).then(result => {
expect(result.data[0].target).toBe(response.results.A.series[0].name);
expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]);
done();
@@ -378,7 +378,7 @@ describe('CloudWatchDatasource', function() {
});
});
- it('should caclculate the correct period', function() {
+ it('should caclculate the correct period', () => {
const hourSec = 60 * 60;
const daySec = hourSec * 24;
const start = 1483196400 * 1000;
diff --git a/public/app/plugins/datasource/elasticsearch/bucket_agg.ts b/public/app/plugins/datasource/elasticsearch/bucket_agg.ts
index dba0596874d..8963f2c3f4b 100644
--- a/public/app/plugins/datasource/elasticsearch/bucket_agg.ts
+++ b/public/app/plugins/datasource/elasticsearch/bucket_agg.ts
@@ -23,36 +23,36 @@ export class ElasticBucketAggCtrl {
$scope.orderByOptions = [];
- $scope.getBucketAggTypes = function() {
+ $scope.getBucketAggTypes = () => {
return queryDef.bucketAggTypes;
};
- $scope.getOrderOptions = function() {
+ $scope.getOrderOptions = () => {
return queryDef.orderOptions;
};
- $scope.getSizeOptions = function() {
+ $scope.getSizeOptions = () => {
return queryDef.sizeOptions;
};
$rootScope.onAppEvent(
'elastic-query-updated',
- function() {
+ () => {
$scope.validateModel();
},
$scope
);
- $scope.init = function() {
+ $scope.init = () => {
$scope.agg = bucketAggs[$scope.index];
$scope.validateModel();
};
- $scope.onChangeInternal = function() {
+ $scope.onChangeInternal = () => {
$scope.onChange();
};
- $scope.onTypeChanged = function() {
+ $scope.onTypeChanged = () => {
$scope.agg.settings = {};
$scope.showOptions = false;
@@ -79,12 +79,12 @@ export class ElasticBucketAggCtrl {
$scope.onChange();
};
- $scope.validateModel = function() {
+ $scope.validateModel = () => {
$scope.index = _.indexOf(bucketAggs, $scope.agg);
$scope.isFirst = $scope.index === 0;
$scope.bucketAggCount = bucketAggs.length;
- var settingsLinkText = '';
+ let settingsLinkText = '';
const settings = $scope.agg.settings || {};
switch ($scope.agg.type) {
@@ -114,7 +114,7 @@ export class ElasticBucketAggCtrl {
settings.filters = settings.filters || [{ query: '*' }];
settingsLinkText = _.reduce(
settings.filters,
- function(memo, value, index) {
+ (memo, value, index) => {
memo += 'Q' + (index + 1) + ' = ' + value.query + ' ';
return memo;
},
@@ -168,23 +168,23 @@ export class ElasticBucketAggCtrl {
return true;
};
- $scope.addFiltersQuery = function() {
+ $scope.addFiltersQuery = () => {
$scope.agg.settings.filters.push({ query: '*' });
};
- $scope.removeFiltersQuery = function(filter) {
+ $scope.removeFiltersQuery = filter => {
$scope.agg.settings.filters = _.without($scope.agg.settings.filters, filter);
};
- $scope.toggleOptions = function() {
+ $scope.toggleOptions = () => {
$scope.showOptions = !$scope.showOptions;
};
- $scope.getOrderByOptions = function() {
+ $scope.getOrderByOptions = () => {
return queryDef.getOrderByOptions($scope.target);
};
- $scope.getFieldsInternal = function() {
+ $scope.getFieldsInternal = () => {
if ($scope.agg.type === 'date_histogram') {
return $scope.getFields({ $fieldType: 'date' });
} else {
@@ -192,14 +192,14 @@ export class ElasticBucketAggCtrl {
}
};
- $scope.getIntervalOptions = function() {
+ $scope.getIntervalOptions = () => {
return $q.when(uiSegmentSrv.transformToSegments(true, 'interval')(queryDef.intervalOptions));
};
- $scope.addBucketAgg = function() {
+ $scope.addBucketAgg = () => {
// if last is date histogram add it before
const lastBucket = bucketAggs[bucketAggs.length - 1];
- var addIndex = bucketAggs.length - 1;
+ let addIndex = bucketAggs.length - 1;
if (lastBucket && lastBucket.type === 'date_histogram') {
addIndex -= 1;
@@ -207,8 +207,8 @@ export class ElasticBucketAggCtrl {
const id = _.reduce(
$scope.target.bucketAggs.concat($scope.target.metrics),
- function(max, val) {
- return parseInt(val.id) > max ? parseInt(val.id) : max;
+ (max, val) => {
+ return parseInt(val.id, 10) > max ? parseInt(val.id, 10) : max;
},
0
);
@@ -217,7 +217,7 @@ export class ElasticBucketAggCtrl {
$scope.onChange();
};
- $scope.removeBucketAgg = function() {
+ $scope.removeBucketAgg = () => {
bucketAggs.splice($scope.index, 1);
$scope.onChange();
};
diff --git a/public/app/plugins/datasource/elasticsearch/datasource.ts b/public/app/plugins/datasource/elasticsearch/datasource.ts
index 632f8add61f..c2f2364d49d 100644
--- a/public/app/plugins/datasource/elasticsearch/datasource.ts
+++ b/public/app/plugins/datasource/elasticsearch/datasource.ts
@@ -57,14 +57,14 @@ export class ElasticDatasource {
private get(url) {
const range = this.timeSrv.timeRange();
- const index_list = this.indexPattern.getIndexList(range.from.valueOf(), range.to.valueOf());
- if (_.isArray(index_list) && index_list.length) {
- return this.request('GET', index_list[0] + url).then(function(results) {
+ const indexList = this.indexPattern.getIndexList(range.from.valueOf(), range.to.valueOf());
+ if (_.isArray(indexList) && indexList.length) {
+ return this.request('GET', indexList[0] + url).then(results => {
results.data.$$config = results.config;
return results.data;
});
} else {
- return this.request('GET', this.indexPattern.getIndexForToday() + url).then(function(results) {
+ return this.request('GET', this.indexPattern.getIndexForToday() + url).then(results => {
results.data.$$config = results.config;
return results.data;
});
@@ -73,7 +73,7 @@ export class ElasticDatasource {
private post(url, data) {
return this.request('POST', url, data)
- .then(function(results) {
+ .then(results => {
results.data.$$config = results.config;
return results.data;
})
@@ -145,15 +145,15 @@ export class ElasticDatasource {
const list = [];
const hits = res.responses[0].hits.hits;
- const getFieldFromSource = function(source, fieldName) {
+ const getFieldFromSource = (source, fieldName) => {
if (!fieldName) {
return;
}
const fieldNames = fieldName.split('.');
- var fieldValue = source;
+ let fieldValue = source;
- for (var i = 0; i < fieldNames.length; i++) {
+ for (let i = 0; i < fieldNames.length; i++) {
fieldValue = fieldValue[fieldNames[i]];
if (!fieldValue) {
console.log('could not find field in annotation: ', fieldName);
@@ -164,9 +164,9 @@ export class ElasticDatasource {
return fieldValue;
};
- for (var i = 0; i < hits.length; i++) {
+ for (let i = 0; i < hits.length; i++) {
const source = hits[i]._source;
- var time = getFieldFromSource(source, timeField);
+ let time = getFieldFromSource(source, timeField);
if (typeof hits[i].fields !== 'undefined') {
const fields = hits[i].fields;
if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) {
@@ -203,7 +203,7 @@ export class ElasticDatasource {
this.timeSrv.setTime({ from: 'now-1m', to: 'now' }, true);
// validate that the index exist and has date field
return this.getFields({ type: 'date' }).then(
- function(dateFields) {
+ dateFields => {
const timeField = _.find(dateFields, { text: this.timeField });
if (!timeField) {
return {
@@ -212,11 +212,11 @@ export class ElasticDatasource {
};
}
return { status: 'success', message: 'Index OK. Time field name OK.' };
- }.bind(this),
- function(err) {
+ },
+ err => {
console.log(err);
if (err.data && err.data.error) {
- var message = angular.toJson(err.data.error);
+ let message = angular.toJson(err.data.error);
if (err.data.error.reason) {
message = err.data.error.reason;
}
@@ -229,26 +229,26 @@ export class ElasticDatasource {
}
getQueryHeader(searchType, timeFrom, timeTo) {
- const query_header: any = {
+ const queryHeader: any = {
search_type: searchType,
ignore_unavailable: true,
index: this.indexPattern.getIndexList(timeFrom, timeTo),
};
if (this.esVersion >= 56) {
- query_header['max_concurrent_shard_requests'] = this.maxConcurrentShardRequests;
+ queryHeader['max_concurrent_shard_requests'] = this.maxConcurrentShardRequests;
}
- return angular.toJson(query_header);
+ return angular.toJson(queryHeader);
}
query(options) {
- var payload = '';
- var target;
+ let payload = '';
+ let target;
const sentTargets = [];
// add global adhoc filters to timeFilter
const adhocFilters = this.templateSrv.getAdhocFilters(this.name);
- for (var i = 0; i < options.targets.length; i++) {
+ for (let i = 0; i < options.targets.length; i++) {
target = options.targets[i];
if (target.hide) {
continue;
@@ -274,13 +274,13 @@ export class ElasticDatasource {
payload = payload.replace(/\$timeTo/g, options.range.to.valueOf());
payload = this.templateSrv.replace(payload, options.scopedVars);
- return this.post('_msearch', payload).then(function(res) {
+ return this.post('_msearch', payload).then(res => {
return new ElasticResponse(sentTargets, res).getTimeSeries();
});
}
getFields(query) {
- return this.get('/_mapping').then(function(result) {
+ return this.get('/_mapping').then(result => {
const typeMap = {
float: 'number',
double: 'number',
@@ -352,7 +352,7 @@ export class ElasticDatasource {
}
// transform to array
- return _.map(fields, function(value) {
+ return _.map(fields, value => {
return value;
});
});
@@ -362,19 +362,19 @@ export class ElasticDatasource {
const range = this.timeSrv.timeRange();
const searchType = this.esVersion >= 5 ? 'query_then_fetch' : 'count';
const header = this.getQueryHeader(searchType, range.from, range.to);
- var esQuery = angular.toJson(this.queryBuilder.getTermsQuery(queryDef));
+ let esQuery = angular.toJson(this.queryBuilder.getTermsQuery(queryDef));
esQuery = esQuery.replace(/\$timeFrom/g, range.from.valueOf());
esQuery = esQuery.replace(/\$timeTo/g, range.to.valueOf());
esQuery = header + '\n' + esQuery + '\n';
- return this.post('_msearch?search_type=' + searchType, esQuery).then(function(res) {
+ return this.post('_msearch?search_type=' + searchType, esQuery).then(res => {
if (!res.responses[0].aggregations) {
return [];
}
const buckets = res.responses[0].aggregations['1'].buckets;
- return _.map(buckets, function(bucket) {
+ return _.map(buckets, bucket => {
return {
text: bucket.key_as_string || bucket.key,
value: bucket.key,
diff --git a/public/app/plugins/datasource/elasticsearch/elastic_response.ts b/public/app/plugins/datasource/elasticsearch/elastic_response.ts
index 1341b7830e9..7adec22c545 100644
--- a/public/app/plugins/datasource/elasticsearch/elastic_response.ts
+++ b/public/app/plugins/datasource/elasticsearch/elastic_response.ts
@@ -9,7 +9,7 @@ export class ElasticResponse {
}
processMetrics(esAgg, target, seriesList, props) {
- var metric, y, i, newSeries, bucket, value;
+ let metric, y, i, newSeries, bucket, value;
for (y = 0; y < target.metrics.length; y++) {
metric = target.metrics[y];
@@ -177,7 +177,7 @@ export class ElasticResponse {
// This is quite complex
// need to recurise down the nested buckets to build series
processBuckets(aggs, target, seriesList, table, props, depth) {
- var bucket, aggDef, esAgg, aggId;
+ let bucket, aggDef, esAgg, aggId;
const maxDepth = target.bucketAggs.length - 1;
for (aggId in aggs) {
@@ -213,7 +213,7 @@ export class ElasticResponse {
}
private getMetricName(metric) {
- var metricDef = _.find(queryDef.metricAggTypes, { value: metric });
+ let metricDef = _.find(queryDef.metricAggTypes, { value: metric });
if (!metricDef) {
metricDef = _.find(queryDef.extendedStats, { value: metric });
}
@@ -222,12 +222,12 @@ export class ElasticResponse {
}
private getSeriesName(series, target, metricTypeCount) {
- var metricName = this.getMetricName(series.metric);
+ let metricName = this.getMetricName(series.metric);
if (target.alias) {
const regex = /\{\{([\s\S]+?)\}\}/g;
- return target.alias.replace(regex, function(match, g1, g2) {
+ return target.alias.replace(regex, (match, g1, g2) => {
const group = g1 || g2;
if (group.indexOf('term ') === 0) {
@@ -263,7 +263,7 @@ export class ElasticResponse {
return metricName;
}
- var name = '';
+ let name = '';
for (const propName in series.props) {
name += series.props[propName] + ' ';
}
@@ -278,7 +278,7 @@ export class ElasticResponse {
nameSeries(seriesList, target) {
const metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
- for (var i = 0; i < seriesList.length; i++) {
+ for (let i = 0; i < seriesList.length; i++) {
const series = seriesList[i];
series.target = this.getSeriesName(series, target, metricTypeCount);
}
@@ -292,7 +292,7 @@ export class ElasticResponse {
total: hits.total,
filterable: true,
};
- var propName, hit, doc, i;
+ let propName, hit, doc, i;
for (i = 0; i < hits.hits.length; i++) {
hit = hits.hits[i];
@@ -351,7 +351,7 @@ export class ElasticResponse {
getTimeSeries() {
const seriesList = [];
- for (var i = 0; i < this.response.responses.length; i++) {
+ for (let i = 0; i < this.response.responses.length; i++) {
const response = this.response.responses[i];
if (response.error) {
throw this.getErrorFromElasticResponse(this.response, response.error);
@@ -371,7 +371,7 @@ export class ElasticResponse {
this.trimDatapoints(tmpSeriesList, target);
this.nameSeries(tmpSeriesList, target);
- for (var y = 0; y < tmpSeriesList.length; y++) {
+ for (let y = 0; y < tmpSeriesList.length; y++) {
seriesList.push(tmpSeriesList[y]);
}
diff --git a/public/app/plugins/datasource/elasticsearch/metric_agg.ts b/public/app/plugins/datasource/elasticsearch/metric_agg.ts
index e0e1215b00f..623eed68914 100644
--- a/public/app/plugins/datasource/elasticsearch/metric_agg.ts
+++ b/public/app/plugins/datasource/elasticsearch/metric_agg.ts
@@ -25,19 +25,19 @@ export class ElasticMetricAggCtrl {
$scope.pipelineAggOptions = [];
$scope.modelSettingsValues = {};
- $scope.init = function() {
+ $scope.init = () => {
$scope.agg = metricAggs[$scope.index];
$scope.validateModel();
$scope.updatePipelineAggOptions();
};
- $scope.updatePipelineAggOptions = function() {
+ $scope.updatePipelineAggOptions = () => {
$scope.pipelineAggOptions = queryDef.getPipelineAggOptions($scope.target);
};
$rootScope.onAppEvent(
'elastic-query-updated',
- function() {
+ () => {
$scope.index = _.indexOf(metricAggs, $scope.agg);
$scope.updatePipelineAggOptions();
$scope.validateModel();
@@ -45,7 +45,7 @@ export class ElasticMetricAggCtrl {
$scope
);
- $scope.validateModel = function() {
+ $scope.validateModel = () => {
$scope.isFirst = $scope.index === 0;
$scope.isSingle = metricAggs.length === 1;
$scope.settingsLinkText = '';
@@ -57,7 +57,7 @@ export class ElasticMetricAggCtrl {
const pipelineOptions = queryDef.getPipelineOptions($scope.agg);
if (pipelineOptions.length > 0) {
- _.each(pipelineOptions, function(opt) {
+ _.each(pipelineOptions, opt => {
$scope.agg.settings[opt.text] = $scope.agg.settings[opt.text] || opt.default;
});
$scope.settingsLinkText = 'Options';
@@ -67,8 +67,8 @@ export class ElasticMetricAggCtrl {
}
switch ($scope.agg.type) {
case 'cardinality': {
- const precision_threshold = $scope.agg.settings.precision_threshold || '';
- $scope.settingsLinkText = 'Precision threshold: ' + precision_threshold;
+ const precisionThreshold = $scope.agg.settings.precision_threshold || '';
+ $scope.settingsLinkText = 'Precision threshold: ' + precisionThreshold;
break;
}
case 'percentiles': {
@@ -84,7 +84,7 @@ export class ElasticMetricAggCtrl {
const stats = _.reduce(
$scope.agg.meta,
- function(memo, val, key) {
+ (memo, val, key) => {
if (val) {
const def = _.find($scope.extendedStats, { value: key });
memo.push(def.text);
@@ -128,19 +128,19 @@ export class ElasticMetricAggCtrl {
}
};
- $scope.toggleOptions = function() {
+ $scope.toggleOptions = () => {
$scope.showOptions = !$scope.showOptions;
$scope.updatePipelineAggOptions();
};
- $scope.onChangeInternal = function() {
+ $scope.onChangeInternal = () => {
$scope.onChange();
};
- $scope.updateMovingAvgModelSettings = function() {
+ $scope.updateMovingAvgModelSettings = () => {
const modelSettingsKeys = [];
const modelSettings = queryDef.getMovingAvgSettings($scope.agg.settings.model, false);
- for (var i = 0; i < modelSettings.length; i++) {
+ for (let i = 0; i < modelSettings.length; i++) {
modelSettingsKeys.push(modelSettings[i].value);
}
@@ -151,12 +151,12 @@ export class ElasticMetricAggCtrl {
}
};
- $scope.onChangeClearInternal = function() {
+ $scope.onChangeClearInternal = () => {
delete $scope.agg.settings.minimize;
$scope.onChange();
};
- $scope.onTypeChange = function() {
+ $scope.onTypeChange = () => {
$scope.agg.settings = {};
$scope.agg.meta = {};
$scope.showOptions = false;
@@ -164,20 +164,20 @@ export class ElasticMetricAggCtrl {
$scope.onChange();
};
- $scope.getFieldsInternal = function() {
+ $scope.getFieldsInternal = () => {
if ($scope.agg.type === 'cardinality') {
return $scope.getFields();
}
return $scope.getFields({ $fieldType: 'number' });
};
- $scope.addMetricAgg = function() {
+ $scope.addMetricAgg = () => {
const addIndex = metricAggs.length;
const id = _.reduce(
$scope.target.bucketAggs.concat($scope.target.metrics),
- function(max, val) {
- return parseInt(val.id) > max ? parseInt(val.id) : max;
+ (max, val) => {
+ return parseInt(val.id, 10) > max ? parseInt(val.id, 10) : max;
},
0
);
@@ -186,12 +186,12 @@ export class ElasticMetricAggCtrl {
$scope.onChange();
};
- $scope.removeMetricAgg = function() {
+ $scope.removeMetricAgg = () => {
metricAggs.splice($scope.index, 1);
$scope.onChange();
};
- $scope.toggleShowMetric = function() {
+ $scope.toggleShowMetric = () => {
$scope.agg.hide = !$scope.agg.hide;
if (!$scope.agg.hide) {
delete $scope.agg.hide;
diff --git a/public/app/plugins/datasource/elasticsearch/query_builder.ts b/public/app/plugins/datasource/elasticsearch/query_builder.ts
index 0ef1c4ceab5..a4d92397d80 100644
--- a/public/app/plugins/datasource/elasticsearch/query_builder.ts
+++ b/public/app/plugins/datasource/elasticsearch/query_builder.ts
@@ -21,7 +21,7 @@ export class ElasticQueryBuilder {
}
buildTermsAgg(aggDef, queryNode, target) {
- var metricRef, metric, y;
+ let metricRef, metric, y;
queryNode.terms = { field: aggDef.field };
if (!aggDef.settings) {
@@ -94,9 +94,9 @@ export class ElasticQueryBuilder {
getFiltersAgg(aggDef) {
const filterObj = {};
- for (var i = 0; i < aggDef.settings.filters.length; i++) {
+ for (let i = 0; i < aggDef.settings.filters.length; i++) {
const query = aggDef.settings.filters[i].query;
- var label = aggDef.settings.filters[i].label;
+ let label = aggDef.settings.filters[i].label;
label = label === '' || label === undefined ? query : label;
filterObj[label] = {
query_string: {
@@ -133,7 +133,7 @@ export class ElasticQueryBuilder {
return;
}
- var i, filter, condition, queryCondition;
+ let i, filter, condition, queryCondition;
for (i = 0; i < adhocFilters.length; i++) {
filter = adhocFilters[i];
@@ -181,7 +181,7 @@ export class ElasticQueryBuilder {
target.bucketAggs = target.bucketAggs || [{ type: 'date_histogram', id: '2', settings: { interval: 'auto' } }];
target.timeField = this.timeField;
- var i, nestedAggs, metric;
+ let i, nestedAggs, metric;
const query = {
size: 0,
query: {
@@ -258,7 +258,7 @@ export class ElasticQueryBuilder {
}
const aggField = {};
- var metricAgg = null;
+ let metricAgg = null;
if (queryDef.isPipelineAgg(metric.type)) {
if (metric.pipelineAgg && /^\d*$/.test(metric.pipelineAgg)) {
@@ -302,7 +302,7 @@ export class ElasticQueryBuilder {
});
}
- var size = 500;
+ let size = 500;
if (queryDef.size) {
size = queryDef.size;
}
diff --git a/public/app/plugins/datasource/elasticsearch/query_ctrl.ts b/public/app/plugins/datasource/elasticsearch/query_ctrl.ts
index 1aea8fe4717..422d35dd277 100644
--- a/public/app/plugins/datasource/elasticsearch/query_ctrl.ts
+++ b/public/app/plugins/datasource/elasticsearch/query_ctrl.ts
@@ -12,7 +12,7 @@ export class ElasticQueryCtrl extends QueryCtrl {
esVersion: any;
rawQueryOld: string;
- /** @ngInject **/
+ /** @ngInject */
constructor($scope, $injector, private $rootScope, private uiSegmentSrv) {
super($scope, $injector);
@@ -43,7 +43,7 @@ export class ElasticQueryCtrl extends QueryCtrl {
const bucketAggs = this.target.bucketAggs;
const metricAggTypes = queryDef.getMetricAggTypes(this.esVersion);
const bucketAggTypes = queryDef.bucketAggTypes;
- var text = '';
+ let text = '';
if (this.target.query) {
text += 'Query: ' + this.target.query + ', ';
diff --git a/public/app/plugins/datasource/elasticsearch/query_def.ts b/public/app/plugins/datasource/elasticsearch/query_def.ts
index eec219d0065..dd65a8b373e 100644
--- a/public/app/plugins/datasource/elasticsearch/query_def.ts
+++ b/public/app/plugins/datasource/elasticsearch/query_def.ts
@@ -145,7 +145,7 @@ export const movingAvgModelSettings = {
};
export function getMetricAggTypes(esVersion) {
- return _.filter(metricAggTypes, function(f) {
+ return _.filter(metricAggTypes, f => {
if (f.minVersion) {
return f.minVersion <= esVersion;
} else {
@@ -173,7 +173,7 @@ export function isPipelineAgg(metricType) {
export function getPipelineAggOptions(targets) {
const result = [];
- _.each(targets.metrics, function(metric) {
+ _.each(targets.metrics, metric => {
if (!isPipelineAgg(metric.type)) {
result.push({ text: describeMetric(metric), value: metric.id });
}
@@ -185,7 +185,7 @@ export function getPipelineAggOptions(targets) {
export function getMovingAvgSettings(model, filtered) {
const filteredResult = [];
if (filtered) {
- _.each(movingAvgModelSettings[model], function(setting) {
+ _.each(movingAvgModelSettings[model], setting => {
if (!setting.isCheckbox) {
filteredResult.push(setting);
}
@@ -197,7 +197,7 @@ export function getMovingAvgSettings(model, filtered) {
export function getOrderByOptions(target) {
const metricRefs = [];
- _.each(target.metrics, function(metric) {
+ _.each(target.metrics, metric => {
if (metric.type !== 'count') {
metricRefs.push({ text: describeMetric(metric), value: metric.id });
}
diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.test.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.test.ts
index d37d1d86d54..4be0c35852c 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/datasource.test.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.test.ts
@@ -5,7 +5,7 @@ import { ElasticDatasource } from '../datasource';
import * as dateMath from 'app/core/utils/datemath';
-describe('ElasticDatasource', function() {
+describe('ElasticDatasource', function(this: any) {
const backendSrv = {
datasourceRequest: jest.fn(),
};
@@ -33,18 +33,18 @@ describe('ElasticDatasource', function() {
}),
};
- const ctx = {
+ const ctx = {
$rootScope,
backendSrv,
- };
+ } as any;
function createDatasource(instanceSettings) {
instanceSettings.jsonData = instanceSettings.jsonData || {};
ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
}
- describe('When testing datasource with index pattern', function() {
- beforeEach(function() {
+ describe('When testing datasource with index pattern', () => {
+ beforeEach(() => {
createDatasource({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
@@ -52,7 +52,7 @@ describe('ElasticDatasource', function() {
});
});
- it('should translate index pattern to current day', function() {
+ it('should translate index pattern to current day', () => {
let requestOptions;
ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
@@ -66,7 +66,7 @@ describe('ElasticDatasource', function() {
});
});
- describe('When issuing metric query with interval pattern', function() {
+ describe('When issuing metric query with interval pattern', () => {
let requestOptions, parts, header;
beforeEach(() => {
@@ -99,20 +99,20 @@ describe('ElasticDatasource', function() {
header = angular.fromJson(parts[0]);
});
- it('should translate index pattern to current day', function() {
+ it('should translate index pattern to current day', () => {
expect(header.index).toEqual(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
});
- it('should json escape lucene query', function() {
+ it('should json escape lucene query', () => {
const body = angular.fromJson(parts[1]);
expect(body.query.bool.filter[1].query_string.query).toBe('escape\\:test');
});
});
- describe('When issuing document query', function() {
+ describe('When issuing document query', () => {
let requestOptions, parts, header;
- beforeEach(function() {
+ beforeEach(() => {
createDatasource({
url: 'http://es.com',
index: 'test',
@@ -142,17 +142,17 @@ describe('ElasticDatasource', function() {
header = angular.fromJson(parts[0]);
});
- it('should set search type to query_then_fetch', function() {
+ it('should set search type to query_then_fetch', () => {
expect(header.search_type).toEqual('query_then_fetch');
});
- it('should set size', function() {
+ it('should set size', () => {
const body = angular.fromJson(parts[1]);
expect(body.size).toBe(500);
});
});
- describe('When getting fields', function() {
+ describe('When getting fields', () => {
beforeEach(() => {
createDatasource({ url: 'http://es.com', index: 'metricbeat' });
@@ -203,7 +203,7 @@ describe('ElasticDatasource', function() {
});
});
- it('should return nested fields', function() {
+ it('should return nested fields', () => {
ctx.ds
.getFields({
find: 'fields',
@@ -224,7 +224,7 @@ describe('ElasticDatasource', function() {
});
});
- it('should return fields related to query type', function() {
+ it('should return fields related to query type', () => {
ctx.ds
.getFields({
find: 'fields',
@@ -249,10 +249,10 @@ describe('ElasticDatasource', function() {
});
});
- describe('When issuing aggregation query on es5.x', function() {
+ describe('When issuing aggregation query on es5.x', () => {
let requestOptions, parts, header;
- beforeEach(function() {
+ beforeEach(() => {
createDatasource({
url: 'http://es.com',
index: 'test',
@@ -282,17 +282,17 @@ describe('ElasticDatasource', function() {
header = angular.fromJson(parts[0]);
});
- it('should not set search type to count', function() {
+ it('should not set search type to count', () => {
expect(header.search_type).not.toEqual('count');
});
- it('should set size to 0', function() {
+ it('should set size to 0', () => {
const body = angular.fromJson(parts[1]);
expect(body.size).toBe(0);
});
});
- describe('When issuing metricFind query on es5.x', function() {
+ describe('When issuing metricFind query on es5.x', () => {
let requestOptions, parts, header, body, results;
beforeEach(() => {
diff --git a/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts b/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts
index ea9d10f989f..8b41e71145a 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/elastic_response.test.ts
@@ -1,9 +1,9 @@
import { ElasticResponse } from '../elastic_response';
describe('ElasticResponse', () => {
- var targets;
- var response;
- var result;
+ let targets;
+ let response;
+ let result;
describe('simple query and count', () => {
beforeEach(() => {
@@ -48,7 +48,7 @@ describe('ElasticResponse', () => {
});
describe('simple query count & avg aggregation', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -97,7 +97,7 @@ describe('ElasticResponse', () => {
});
describe('single group by query one metric', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -149,7 +149,7 @@ describe('ElasticResponse', () => {
});
describe('single group by query two metrics', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -209,7 +209,7 @@ describe('ElasticResponse', () => {
});
describe('with percentiles ', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -257,7 +257,7 @@ describe('ElasticResponse', () => {
});
describe('with extended_stats', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -333,7 +333,7 @@ describe('ElasticResponse', () => {
});
describe('single group by with alias pattern', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -394,7 +394,7 @@ describe('ElasticResponse', () => {
});
describe('histogram response', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
@@ -426,7 +426,7 @@ describe('ElasticResponse', () => {
});
describe('with two filters agg', () => {
- var result;
+ let result;
beforeEach(() => {
targets = [
diff --git a/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts b/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
index e4c9404e667..a9e570f366b 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
@@ -19,12 +19,12 @@ describe('ElasticQueryBuilder', () => {
});
it('with defaults on es5.x', () => {
- const builder_5x = new ElasticQueryBuilder({
+ const builder5x = new ElasticQueryBuilder({
timeField: '@timestamp',
esVersion: 5,
});
- const query = builder_5x.build({
+ const query = builder5x.build({
metrics: [{ type: 'Count', id: '0' }],
timeField: '@timestamp',
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
@@ -134,11 +134,11 @@ describe('ElasticQueryBuilder', () => {
});
it('with filters aggs on es5.x', () => {
- const builder_5x = new ElasticQueryBuilder({
+ const builder5x = new ElasticQueryBuilder({
timeField: '@timestamp',
esVersion: 5,
});
- const query = builder_5x.build({
+ const query = builder5x.build({
metrics: [{ type: 'count', id: '1' }],
timeField: '@timestamp',
bucketAggs: [
diff --git a/public/app/plugins/datasource/graphite/add_graphite_func.ts b/public/app/plugins/datasource/graphite/add_graphite_func.ts
index f025b4f1830..a5c1dc49959 100644
--- a/public/app/plugins/datasource/graphite/add_graphite_func.ts
+++ b/public/app/plugins/datasource/graphite/add_graphite_func.ts
@@ -24,7 +24,7 @@ export function graphiteAddFunc($compile) {
$input.appendTo(elem);
$button.appendTo(elem);
- ctrl.datasource.getFuncDefs().then(function(funcDefs) {
+ ctrl.datasource.getFuncDefs().then(funcDefs => {
const allFunctions = _.map(funcDefs, 'name').sort();
$scope.functionMenu = createFunctionDropDownMenu(funcDefs);
@@ -34,12 +34,12 @@ export function graphiteAddFunc($compile) {
source: allFunctions,
minLength: 1,
items: 10,
- updater: function(value) {
- var funcDef = ctrl.datasource.getFuncDef(value);
+ updater: value => {
+ let funcDef = ctrl.datasource.getFuncDef(value);
if (!funcDef) {
// try find close match
value = value.toLowerCase();
- funcDef = _.find(allFunctions, function(funcName) {
+ funcDef = _.find(allFunctions, funcName => {
return funcName.toLowerCase().indexOf(value) === 0;
});
@@ -48,7 +48,7 @@ export function graphiteAddFunc($compile) {
}
}
- $scope.$apply(function() {
+ $scope.$apply(() => {
ctrl.addFunction(funcDef);
});
@@ -57,20 +57,20 @@ export function graphiteAddFunc($compile) {
},
});
- $button.click(function() {
+ $button.click(() => {
$button.hide();
$input.show();
$input.focus();
});
- $input.keyup(function() {
+ $input.keyup(() => {
elem.toggleClass('open', $input.val() === '');
});
- $input.blur(function() {
+ $input.blur(() => {
// clicking the function dropdown menu won't
// work if you remove class at once
- setTimeout(function() {
+ setTimeout(() => {
$input.val('');
$input.hide();
$button.show();
@@ -81,8 +81,8 @@ export function graphiteAddFunc($compile) {
$compile(elem.contents())($scope);
});
- var drop;
- const cleanUpDrop = function() {
+ let drop;
+ const cleanUpDrop = () => {
if (drop) {
drop.destroy();
drop = null;
@@ -90,10 +90,10 @@ export function graphiteAddFunc($compile) {
};
$(elem)
- .on('mouseenter', 'ul.dropdown-menu li', function() {
+ .on('mouseenter', 'ul.dropdown-menu li', () => {
cleanUpDrop();
- var funcDef;
+ let funcDef;
try {
funcDef = ctrl.datasource.getFuncDef($('a', this).text());
} catch (e) {
@@ -101,7 +101,7 @@ export function graphiteAddFunc($compile) {
}
if (funcDef && funcDef.description) {
- var shortDesc = funcDef.description;
+ let shortDesc = funcDef.description;
if (shortDesc.length > 500) {
shortDesc = shortDesc.substring(0, 497) + '...';
}
@@ -121,7 +121,7 @@ export function graphiteAddFunc($compile) {
});
}
})
- .on('mouseout', 'ul.dropdown-menu li', function() {
+ .on('mouseout', 'ul.dropdown-menu li', () => {
cleanUpDrop();
});
@@ -135,7 +135,7 @@ angular.module('grafana.directives').directive('graphiteAddFunc', graphiteAddFun
function createFunctionDropDownMenu(funcDefs) {
const categories = {};
- _.forEach(funcDefs, function(funcDef) {
+ _.forEach(funcDefs, funcDef => {
if (!funcDef.category) {
return;
}
@@ -149,7 +149,7 @@ function createFunctionDropDownMenu(funcDefs) {
});
return _.sortBy(
- _.map(categories, function(submenu, category) {
+ _.map(categories, (submenu, category) => {
return {
text: category,
submenu: _.sortBy(submenu, 'text'),
diff --git a/public/app/plugins/datasource/graphite/datasource.ts b/public/app/plugins/datasource/graphite/datasource.ts
index e86bf828207..a89c0ea0034 100644
--- a/public/app/plugins/datasource/graphite/datasource.ts
+++ b/public/app/plugins/datasource/graphite/datasource.ts
@@ -4,7 +4,7 @@ import { isVersionGtOrEq, SemVersion } from 'app/core/utils/version';
import gfunc from './gfunc';
/** @ngInject */
-export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv) {
+export function GraphiteDatasource(this: any, instanceSettings, $q, backendSrv, templateSrv) {
this.basicAuth = instanceSettings.basicAuth;
this.url = instanceSettings.url;
this.name = instanceSettings.name;
@@ -16,7 +16,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
this.funcDefs = null;
this.funcDefsPromise = null;
- this.getQueryOptionsInfo = function() {
+ this.getQueryOptionsInfo = () => {
return {
maxDataPoints: true,
cacheTimeout: true,
@@ -70,20 +70,20 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
}
};
- this.convertDataPointsToMs = function(result) {
+ this.convertDataPointsToMs = result => {
if (!result || !result.data) {
return [];
}
- for (var i = 0; i < result.data.length; i++) {
+ for (let i = 0; i < result.data.length; i++) {
const series = result.data[i];
- for (var y = 0; y < series.datapoints.length; y++) {
+ for (let y = 0; y < series.datapoints.length; y++) {
series.datapoints[y][1] *= 1000;
}
}
return result;
};
- this.parseTags = function(tagString) {
+ this.parseTags = tagString => {
let tags = [];
tags = tagString.split(',');
if (tags.length === 1) {
@@ -106,13 +106,13 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
maxDataPoints: 100,
};
- return this.query(graphiteQuery).then(function(result) {
+ return this.query(graphiteQuery).then(result => {
const list = [];
- for (var i = 0; i < result.data.length; i++) {
+ for (let i = 0; i < result.data.length; i++) {
const target = result.data[i];
- for (var y = 0; y < target.datapoints.length; y++) {
+ for (let y = 0; y < target.datapoints.length; y++) {
const datapoint = target.datapoints[y];
if (!datapoint[0]) {
continue;
@@ -133,10 +133,10 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
const tags = templateSrv.replace(options.annotation.tags);
return this.events({ range: options.rangeRaw, tags: tags }).then(results => {
const list = [];
- for (var i = 0; i < results.data.length; i++) {
+ for (let i = 0; i < results.data.length; i++) {
const e = results.data[i];
- var tags = e.tags;
+ let tags = e.tags;
if (_.isString(e.tags)) {
tags = this.parseTags(e.tags);
}
@@ -157,7 +157,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
this.events = function(options) {
try {
- var tags = '';
+ let tags = '';
if (options.tags) {
tags = '&tags=' + options.tags;
}
@@ -175,11 +175,11 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
}
};
- this.targetContainsTemplate = function(target) {
+ this.targetContainsTemplate = target => {
return templateSrv.variableExists(target.target);
};
- this.translateTime = function(date, roundUp) {
+ this.translateTime = (date, roundUp) => {
if (_.isString(date)) {
if (date === 'now') {
return 'now';
@@ -218,9 +218,10 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
if (matches) {
const expressions = [];
const exprRegex = /, *([^,]+)/g;
- let match;
- while ((match = exprRegex.exec(matches[2])) !== null) {
+ let match = exprRegex.exec(matches[2]);
+ while (match !== null) {
expressions.push(match[1]);
+ match = exprRegex.exec(matches[2]);
}
options.limit = 10000;
return this.getTagValuesAutoComplete(expressions, matches[1], undefined, options);
@@ -233,9 +234,10 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
if (matches[1]) {
expressions.push(matches[1]);
const exprRegex = /, *([^,]+)/g;
- let match;
- while ((match = exprRegex.exec(matches[2])) !== null) {
+ let match = exprRegex.exec(matches[2]);
+ while (match !== null) {
expressions.push(match[1]);
+ match = exprRegex.exec(matches[2]);
}
}
options.limit = 10000;
@@ -467,7 +469,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
targets: [{ target: 'constantLine(100)' }],
maxDataPoints: 300,
};
- return this.query(query).then(function() {
+ return this.query(query).then(() => {
return { status: 'success', message: 'Data source is working' };
});
};
@@ -490,13 +492,13 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
this._seriesRefLetters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
this.buildGraphiteParams = function(options, scopedVars) {
- const graphite_options = ['from', 'until', 'rawData', 'format', 'maxDataPoints', 'cacheTimeout'];
- const clean_options = [],
+ const graphiteOptions = ['from', 'until', 'rawData', 'format', 'maxDataPoints', 'cacheTimeout'];
+ const cleanOptions = [],
targets = {};
- var target, targetValue, i;
+ let target, targetValue, i;
const regex = /\#([A-Z])/g;
const intervalFormatFixRegex = /'(\d+)m'/gi;
- var hasTargets = false;
+ let hasTargets = false;
options['format'] = 'json';
@@ -535,16 +537,16 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
if (!target.hide) {
hasTargets = true;
- clean_options.push('target=' + encodeURIComponent(targetValue));
+ cleanOptions.push('target=' + encodeURIComponent(targetValue));
}
}
- _.each(options, function(value, key) {
- if (_.indexOf(graphite_options, key) === -1) {
+ _.each(options, (value, key) => {
+ if (_.indexOf(graphiteOptions, key) === -1) {
return;
}
if (value) {
- clean_options.push(key + '=' + encodeURIComponent(value));
+ cleanOptions.push(key + '=' + encodeURIComponent(value));
}
});
@@ -552,7 +554,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv
return [];
}
- return clean_options;
+ return cleanOptions;
};
}
diff --git a/public/app/plugins/datasource/graphite/func_editor.ts b/public/app/plugins/datasource/graphite/func_editor.ts
index 2d68dbbfa8a..68cc6f1452e 100644
--- a/public/app/plugins/datasource/graphite/func_editor.ts
+++ b/public/app/plugins/datasource/graphite/func_editor.ts
@@ -24,11 +24,11 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
const $funcControls = $(funcControlsTemplate);
const ctrl = $scope.ctrl;
const func = $scope.func;
- var scheduledRelink = false;
- var paramCountAtLink = 0;
- var cancelBlur = null;
+ let scheduledRelink = false;
+ let paramCountAtLink = 0;
+ let cancelBlur = null;
- function clickFuncParam(paramIndex) {
+ function clickFuncParam(this: any, paramIndex) {
/*jshint validthis:true */
const $link = $(this);
@@ -57,7 +57,7 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
if (!scheduledRelink) {
scheduledRelink = true;
- setTimeout(function() {
+ setTimeout(() => {
relink();
scheduledRelink = false;
}, 200);
@@ -93,7 +93,7 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
scheduledRelinkIfNeeded();
- $scope.$apply(function() {
+ $scope.$apply(() => {
ctrl.targetChanged();
});
@@ -108,24 +108,24 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
}
// this = input element
- function inputBlur(paramIndex) {
+ function inputBlur(this: any, paramIndex) {
/*jshint validthis:true */
const inputElem = this;
// happens long before the click event on the typeahead options
// need to have long delay because the blur
- cancelBlur = setTimeout(function() {
+ cancelBlur = setTimeout(() => {
switchToLink(inputElem, paramIndex);
}, 200);
}
- function inputKeyPress(paramIndex, e) {
+ function inputKeyPress(this: any, paramIndex, e) {
/*jshint validthis:true */
if (e.which === 13) {
$(this).blur();
}
}
- function inputKeyDown() {
+ function inputKeyDown(this: any) {
/*jshint validthis:true */
this.style.width = (3 + this.value.length) * 8 + 'px';
}
@@ -133,9 +133,9 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
function addTypeahead($input, paramIndex) {
$input.attr('data-provide', 'typeahead');
- var options = paramDef(paramIndex).options;
+ let options = paramDef(paramIndex).options;
if (paramDef(paramIndex).type === 'int') {
- options = _.map(options, function(val) {
+ options = _.map(options, val => {
return val.toString();
});
}
@@ -144,7 +144,7 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
source: options,
minLength: 0,
items: 20,
- updater: function(value) {
+ updater: value => {
$input.val(value);
switchToLink($input[0], paramIndex);
return value;
@@ -185,12 +185,12 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
defParams.push(_.assign({}, lastParam, { optional: true }));
}
- _.each(defParams, function(param, index) {
+ _.each(defParams, (param, index) => {
if (param.optional && func.params.length < index) {
return false;
}
- var paramValue = templateSrv.highlightVariablesAsHtml(func.params[index]);
+ let paramValue = templateSrv.highlightVariablesAsHtml(func.params[index]);
const last = index >= func.params.length - 1 && param.optional && !paramValue;
if (last && param.multiple) {
@@ -236,7 +236,7 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
function ifJustAddedFocusFirstParam() {
if ($scope.func.added) {
$scope.func.added = false;
- setTimeout(function() {
+ setTimeout(() => {
elem
.find('.graphite-func-param-link')
.first()
@@ -250,18 +250,18 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
}
function registerFuncControlsActions() {
- $funcControls.click(function(e) {
+ $funcControls.click(e => {
const $target = $(e.target);
if ($target.hasClass('fa-remove')) {
toggleFuncControls();
- $scope.$apply(function() {
+ $scope.$apply(() => {
ctrl.removeFunction($scope.func);
});
return;
}
if ($target.hasClass('fa-arrow-left')) {
- $scope.$apply(function() {
+ $scope.$apply(() => {
_.move(ctrl.queryModel.functions, $scope.$index, $scope.$index - 1);
ctrl.targetChanged();
});
@@ -269,7 +269,7 @@ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) {
}
if ($target.hasClass('fa-arrow-right')) {
- $scope.$apply(function() {
+ $scope.$apply(() => {
_.move(ctrl.queryModel.functions, $scope.$index, $scope.$index + 1);
ctrl.targetChanged();
});
diff --git a/public/app/plugins/datasource/graphite/gfunc.ts b/public/app/plugins/datasource/graphite/gfunc.ts
index aee3e6ea19a..20cb884d617 100644
--- a/public/app/plugins/datasource/graphite/gfunc.ts
+++ b/public/app/plugins/datasource/graphite/gfunc.ts
@@ -964,26 +964,23 @@ export class FuncInstance {
render(metricExp) {
const str = this.def.name + '(';
- const parameters = _.map(
- this.params,
- function(value, index) {
- var paramType;
- if (index < this.def.params.length) {
- paramType = this.def.params[index].type;
- } else if (_.get(_.last(this.def.params), 'multiple')) {
- paramType = _.get(_.last(this.def.params), 'type');
- }
- // param types that should never be quoted
- if (_.includes(['value_or_series', 'boolean', 'int', 'float', 'node'], paramType)) {
- return value;
- }
- // param types that might be quoted
- if (_.includes(['int_or_interval', 'node_or_tag'], paramType) && _.isFinite(+value)) {
- return _.toString(+value);
- }
- return "'" + value + "'";
- }.bind(this)
- );
+ const parameters = _.map(this.params, (value, index) => {
+ let paramType;
+ if (index < this.def.params.length) {
+ paramType = this.def.params[index].type;
+ } else if (_.get(_.last(this.def.params), 'multiple')) {
+ paramType = _.get(_.last(this.def.params), 'type');
+ }
+ // param types that should never be quoted
+ if (_.includes(['value_or_series', 'boolean', 'int', 'float', 'node'], paramType)) {
+ return value;
+ }
+ // param types that might be quoted
+ if (_.includes(['int_or_interval', 'node_or_tag'], paramType) && _.isFinite(+value)) {
+ return _.toString(+value);
+ }
+ return "'" + value + "'";
+ });
// don't send any blank parameters to graphite
while (parameters[parameters.length - 1] === '') {
@@ -1017,12 +1014,9 @@ export class FuncInstance {
// handle optional parameters
// if string contains ',' and next param is optional, split and update both
if (this._hasMultipleParamsInString(strValue, index)) {
- _.each(
- strValue.split(','),
- function(partVal, idx) {
- this.updateParam(partVal.trim(), index + idx);
- }.bind(this)
- );
+ _.each(strValue.split(','), (partVal, idx) => {
+ this.updateParam(partVal.trim(), index + idx);
+ });
return;
}
@@ -1041,7 +1035,7 @@ export class FuncInstance {
return;
}
- var text = this.def.name + '(';
+ let text = this.def.name + '(';
text += this.params.join(', ');
text += ')';
this.text = text;
@@ -1064,10 +1058,10 @@ function getFuncDef(name, idx?) {
function getFuncDefs(graphiteVersion, idx?) {
const funcs = {};
- _.forEach(idx || index, function(funcDef) {
+ _.forEach(idx || index, funcDef => {
if (isVersionRelatedFunction(funcDef, graphiteVersion)) {
funcs[funcDef.name] = _.assign({}, funcDef, {
- params: _.filter(funcDef.params, function(param) {
+ params: _.filter(funcDef.params, param => {
return isVersionRelatedFunction(param, graphiteVersion);
}),
});
@@ -1086,7 +1080,7 @@ function parseFuncDefs(rawDefs) {
return;
}
- var description = funcDef.description;
+ let description = funcDef.description;
if (description) {
// tidy up some pydoc syntax that rst2html can't handle
description = description
diff --git a/public/app/plugins/datasource/graphite/graphite_query.ts b/public/app/plugins/datasource/graphite/graphite_query.ts
index 4cea22f13e2..ab137a6a299 100644
--- a/public/app/plugins/datasource/graphite/graphite_query.ts
+++ b/public/app/plugins/datasource/graphite/graphite_query.ts
@@ -73,7 +73,7 @@ export default class GraphiteQuery {
return _.reduce(
arr,
- function(result, segment) {
+ (result, segment) => {
return result ? result + '.' + segment.value : segment.value;
},
''
@@ -133,7 +133,7 @@ export default class GraphiteQuery {
}
moveAliasFuncLast() {
- const aliasFunc = _.find(this.functions, function(func) {
+ const aliasFunc = _.find(this.functions, func => {
return func.def.name.startsWith('alias');
});
@@ -179,7 +179,7 @@ export default class GraphiteQuery {
delete targetsByRefId[target.refId];
const nestedSeriesRefRegex = /\#([A-Z])/g;
- var targetWithNestedQueries = target.target;
+ let targetWithNestedQueries = target.target;
// Use ref count to track circular references
function countTargetRefs(targetsByRefId, refId) {
diff --git a/public/app/plugins/datasource/graphite/lexer.ts b/public/app/plugins/datasource/graphite/lexer.ts
index 2ed85026bca..ed4e3287aa2 100644
--- a/public/app/plugins/datasource/graphite/lexer.ts
+++ b/public/app/plugins/datasource/graphite/lexer.ts
@@ -900,7 +900,7 @@ const unicodeLetterTable = [
const identifierStartTable = [];
-for (var i = 0; i < 128; i++) {
+for (let i = 0; i < 128; i++) {
identifierStartTable[i] =
(i >= 48 && i <= 57) || // 0-9
i === 36 || // $
@@ -922,7 +922,7 @@ for (var i = 0; i < 128; i++) {
const identifierPartTable = identifierStartTable;
-export function Lexer(expression) {
+export function Lexer(this: any, expression) {
this.input = expression;
this.char = 1;
this.from = 1;
@@ -941,9 +941,10 @@ Lexer.prototype = {
tokenize: function() {
const list = [];
- var token;
- while ((token = this.next())) {
+ let token = this.next();
+ while (token) {
list.push(token);
+ token = this.next();
}
return list;
},
@@ -964,7 +965,7 @@ Lexer.prototype = {
}
}
- var match = this.scanStringLiteral();
+ let match = this.scanStringLiteral();
if (match) {
return match;
}
@@ -1007,9 +1008,9 @@ Lexer.prototype = {
* (true/false) and NullLiteral (null).
*/
scanIdentifier: function() {
- var id = '';
- var index = 0;
- var type, char;
+ let id = '';
+ let index = 0;
+ let type, char;
// Detects any character in the Unicode categories "Uppercase
// letter (Lu)", "Lowercase letter (Ll)", "Titlecase letter
@@ -1020,7 +1021,7 @@ Lexer.prototype = {
// Google's Traceur.
function isUnicodeLetter(code) {
- for (var i = 0; i < unicodeLetterTable.length; ) {
+ for (let i = 0; i < unicodeLetterTable.length; ) {
if (code < unicodeLetterTable[i++]) {
return false;
}
@@ -1037,7 +1038,7 @@ Lexer.prototype = {
return /^[0-9a-fA-F]$/.test(str);
}
- const readUnicodeEscapeSequence = _.bind(function() {
+ const readUnicodeEscapeSequence = _.bind(function(this: any) {
/*jshint validthis:true */
index += 1;
@@ -1049,7 +1050,7 @@ Lexer.prototype = {
const ch2 = this.peek(index + 2);
const ch3 = this.peek(index + 3);
const ch4 = this.peek(index + 4);
- var code;
+ let code;
if (isHexDigit(ch1) && isHexDigit(ch2) && isHexDigit(ch3) && isHexDigit(ch4)) {
code = parseInt(ch1 + ch2 + ch3 + ch4, 16);
@@ -1065,7 +1066,7 @@ Lexer.prototype = {
return null;
}, this);
- const getIdentifierStart = _.bind(function() {
+ const getIdentifierStart = _.bind(function(this: any) {
/*jshint validthis:true */
const chr = this.peek(index);
const code = chr.charCodeAt(0);
@@ -1096,7 +1097,7 @@ Lexer.prototype = {
return null;
}, this);
- const getIdentifierPart = _.bind(function() {
+ const getIdentifierPart = _.bind(function(this: any) {
/*jshint validthis:true */
const chr = this.peek(index);
const code = chr.charCodeAt(0);
@@ -1168,11 +1169,11 @@ Lexer.prototype = {
* scanNumericLiteral function in the Esprima parser's source code.
*/
scanNumericLiteral: function(): any {
- var index = 0;
- var value = '';
+ let index = 0;
+ let value = '';
const length = this.input.length;
- var char = this.peek(index);
- var bad;
+ let char = this.peek(index);
+ let bad;
function isDecimalDigit(str) {
return /^[0-9]$/.test(str);
@@ -1370,7 +1371,7 @@ Lexer.prototype = {
};
},
- isPunctuator: function(ch1) {
+ isPunctuator: ch1 => {
switch (ch1) {
case '.':
case '(':
@@ -1418,7 +1419,7 @@ Lexer.prototype = {
return null;
}
- var value = '';
+ let value = '';
this.skip();
diff --git a/public/app/plugins/datasource/graphite/parser.ts b/public/app/plugins/datasource/graphite/parser.ts
index 158fa94b6a6..cb394c05a15 100644
--- a/public/app/plugins/datasource/graphite/parser.ts
+++ b/public/app/plugins/datasource/graphite/parser.ts
@@ -1,6 +1,6 @@
import { Lexer } from './lexer';
-export function Parser(expression) {
+export function Parser(this: any, expression) {
this.expression = expression;
this.lexer = new Lexer(expression);
this.tokens = this.lexer.tokenize();
@@ -26,7 +26,7 @@ Parser.prototype = {
curlyBraceSegment: function() {
if (this.match('identifier', '{') || this.match('{')) {
- var curlySegment = '';
+ let curlySegment = '';
while (!this.match('') && !this.match('}')) {
curlySegment += this.consumeToken().value;
diff --git a/public/app/plugins/datasource/graphite/query_ctrl.ts b/public/app/plugins/datasource/graphite/query_ctrl.ts
index a83df9297de..fa908c5e955 100644
--- a/public/app/plugins/datasource/graphite/query_ctrl.ts
+++ b/public/app/plugins/datasource/graphite/query_ctrl.ts
@@ -19,7 +19,7 @@ export class GraphiteQueryCtrl extends QueryCtrl {
supportsTags: boolean;
paused: boolean;
- /** @ngInject **/
+ /** @ngInject */
constructor($scope, $injector, private uiSegmentSrv, private templateSrv, $timeout) {
super($scope, $injector);
this.supportsTags = this.datasource.supportsTags;
@@ -106,7 +106,7 @@ export class GraphiteQueryCtrl extends QueryCtrl {
}
getAltSegments(index, prefix) {
- var query = prefix && prefix.length > 0 ? '*' + prefix + '*' : '*';
+ let query = prefix && prefix.length > 0 ? '*' + prefix + '*' : '*';
if (index > 0) {
query = this.queryModel.getSegmentPathUpTo(index) + '.' + query;
}
@@ -291,7 +291,7 @@ export class GraphiteQueryCtrl extends QueryCtrl {
return;
}
- for (var i = 0; i < this.segments.length; i++) {
+ for (let i = 0; i < this.segments.length; i++) {
if (this.segments[i].value.indexOf('*') >= 0) {
func.params[0] = i;
func.added = false;
diff --git a/public/app/plugins/datasource/graphite/specs/datasource.test.ts b/public/app/plugins/datasource/graphite/specs/datasource.test.ts
index 563f1047cdb..cd60a059123 100644
--- a/public/app/plugins/datasource/graphite/specs/datasource.test.ts
+++ b/public/app/plugins/datasource/graphite/specs/datasource.test.ts
@@ -12,12 +12,12 @@ describe('graphiteDatasource', () => {
instanceSettings: { url: 'url', name: 'graphiteProd', jsonData: {} },
};
- beforeEach(function() {
+ beforeEach(() => {
ctx.instanceSettings.url = '/api/datasources/proxy/1';
ctx.ds = new GraphiteDatasource(ctx.instanceSettings, ctx.$q, ctx.backendSrv, ctx.templateSrv);
});
- describe('When querying graphite with one target using query editor target spec', function() {
+ describe('When querying graphite with one target using query editor target spec', () => {
const query = {
panelId: 3,
dashboardId: 5,
@@ -30,14 +30,14 @@ describe('graphiteDatasource', () => {
let requestOptions;
beforeEach(async () => {
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = options => {
requestOptions = options;
return ctx.$q.when({
data: [{ target: 'prod1.count', datapoints: [[10, 1], [12, 1]] }],
});
};
- await ctx.ds.query(query).then(function(data) {
+ await ctx.ds.query(query).then(data => {
results = data;
});
});
@@ -47,15 +47,15 @@ describe('graphiteDatasource', () => {
expect(requestOptions.headers['X-Panel-Id']).toBe(3);
});
- it('should generate the correct query', function() {
+ it('should generate the correct query', () => {
expect(requestOptions.url).toBe('/api/datasources/proxy/1/render');
});
- it('should set unique requestId', function() {
+ it('should set unique requestId', () => {
expect(requestOptions.requestId).toBe('graphiteProd.panelId.3');
});
- it('should query correctly', function() {
+ it('should query correctly', () => {
const params = requestOptions.data.split('&');
expect(params).toContain('target=prod1.count');
expect(params).toContain('target=prod2.count');
@@ -63,17 +63,17 @@ describe('graphiteDatasource', () => {
expect(params).toContain('until=now');
});
- it('should exclude undefined params', function() {
+ it('should exclude undefined params', () => {
const params = requestOptions.data.split('&');
expect(params).not.toContain('cacheTimeout=undefined');
});
- it('should return series list', function() {
+ it('should return series list', () => {
expect(results.data.length).toBe(1);
expect(results.data[0].target).toBe('prod1.count');
});
- it('should convert to millisecond resolution', function() {
+ it('should convert to millisecond resolution', () => {
expect(results.data[0].datapoints[0][0]).toBe(10);
});
});
@@ -106,11 +106,11 @@ describe('graphiteDatasource', () => {
};
beforeEach(async () => {
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = options => {
return ctx.$q.when(response);
};
- await ctx.ds.annotationQuery(options).then(function(data) {
+ await ctx.ds.annotationQuery(options).then(data => {
results = data;
});
});
@@ -136,11 +136,11 @@ describe('graphiteDatasource', () => {
],
};
beforeEach(() => {
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = options => {
return ctx.$q.when(response);
};
- ctx.ds.annotationQuery(options).then(function(data) {
+ ctx.ds.annotationQuery(options).then(data => {
results = data;
});
// ctx.$rootScope.$apply();
@@ -155,29 +155,29 @@ describe('graphiteDatasource', () => {
});
});
- describe('building graphite params', function() {
- it('should return empty array if no targets', function() {
+ describe('building graphite params', () => {
+ it('should return empty array if no targets', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{}],
});
expect(results.length).toBe(0);
});
- it('should uri escape targets', function() {
+ it('should uri escape targets', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{ target: 'prod1.{test,test2}' }, { target: 'prod2.count' }],
});
expect(results).toContain('target=prod1.%7Btest%2Ctest2%7D');
});
- it('should replace target placeholder', function() {
+ it('should replace target placeholder', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{ target: 'series1' }, { target: 'series2' }, { target: 'asPercent(#A,#B)' }],
});
expect(results[2]).toBe('target=asPercent(series1%2Cseries2)');
});
- it('should replace target placeholder for hidden series', function() {
+ it('should replace target placeholder for hidden series', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [
{ target: 'series1', hide: true },
@@ -188,28 +188,28 @@ describe('graphiteDatasource', () => {
expect(results[0]).toBe('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))'));
});
- it('should replace target placeholder when nesting query references', function() {
+ it('should replace target placeholder when nesting query references', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{ target: 'series1' }, { target: 'sumSeries(#A)' }, { target: 'asPercent(#A,#B)' }],
});
expect(results[2]).toBe('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))'));
});
- it('should fix wrong minute interval parameters', function() {
+ it('should fix wrong minute interval parameters', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{ target: "summarize(prod.25m.count, '25m', 'sum')" }],
});
expect(results[0]).toBe('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')"));
});
- it('should fix wrong month interval parameters', function() {
+ it('should fix wrong month interval parameters', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{ target: "summarize(prod.5M.count, '5M', 'sum')" }],
});
expect(results[0]).toBe('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')"));
});
- it('should ignore empty targets', function() {
+ it('should ignore empty targets', () => {
const results = ctx.ds.buildGraphiteParams({
targets: [{ target: 'series1' }, { target: '' }],
});
@@ -222,7 +222,7 @@ describe('graphiteDatasource', () => {
let requestOptions;
beforeEach(() => {
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = options => {
requestOptions = options;
return ctx.$q.when({
data: ['backend_01', 'backend_02'],
@@ -307,7 +307,7 @@ describe('graphiteDatasource', () => {
});
function accessScenario(name, url, fn) {
- describe('access scenario ' + name, function() {
+ describe('access scenario ' + name, () => {
const ctx: any = {
backendSrv: {},
$q: $q,
@@ -332,12 +332,12 @@ function accessScenario(name, url, fn) {
});
}
-accessScenario('with proxy access', '/api/datasources/proxy/1', function(httpOptions) {
+accessScenario('with proxy access', '/api/datasources/proxy/1', httpOptions => {
expect(httpOptions.headers['X-Dashboard-Id']).toBe(1);
expect(httpOptions.headers['X-Panel-Id']).toBe(2);
});
-accessScenario('with direct access', 'http://localhost:8080', function(httpOptions) {
+accessScenario('with direct access', 'http://localhost:8080', httpOptions => {
expect(httpOptions.headers['X-Dashboard-Id']).toBe(undefined);
expect(httpOptions.headers['X-Panel-Id']).toBe(undefined);
});
diff --git a/public/app/plugins/datasource/graphite/specs/gfunc.test.ts b/public/app/plugins/datasource/graphite/specs/gfunc.test.ts
index 61a0e896b0f..1809adc0940 100644
--- a/public/app/plugins/datasource/graphite/specs/gfunc.test.ts
+++ b/public/app/plugins/datasource/graphite/specs/gfunc.test.ts
@@ -1,7 +1,7 @@
import gfunc from '../gfunc';
-describe('when creating func instance from func names', function() {
- it('should return func instance', function() {
+describe('when creating func instance from func names', () => {
+ it('should return func instance', () => {
const func = gfunc.createFuncInstance('sumSeries');
expect(func).toBeTruthy();
expect(func.def.name).toEqual('sumSeries');
@@ -10,18 +10,18 @@ describe('when creating func instance from func names', function() {
expect(func.def.defaultParams.length).toEqual(1);
});
- it('should return func instance with shortName', function() {
+ it('should return func instance with shortName', () => {
const func = gfunc.createFuncInstance('sum');
expect(func).toBeTruthy();
});
- it('should return func instance from funcDef', function() {
+ it('should return func instance from funcDef', () => {
const func = gfunc.createFuncInstance('sum');
const func2 = gfunc.createFuncInstance(func.def);
expect(func2).toBeTruthy();
});
- it('func instance should have text representation', function() {
+ it('func instance should have text representation', () => {
const func = gfunc.createFuncInstance('groupByNode');
func.params[0] = 5;
func.params[1] = 'avg';
@@ -30,78 +30,78 @@ describe('when creating func instance from func names', function() {
});
});
-describe('when rendering func instance', function() {
- it('should handle single metric param', function() {
+describe('when rendering func instance', () => {
+ it('should handle single metric param', () => {
const func = gfunc.createFuncInstance('sumSeries');
expect(func.render('hello.metric')).toEqual('sumSeries(hello.metric)');
});
- it('should include default params if options enable it', function() {
+ it('should include default params if options enable it', () => {
const func = gfunc.createFuncInstance('scaleToSeconds', {
withDefaultParams: true,
});
expect(func.render('hello')).toEqual('scaleToSeconds(hello, 1)');
});
- it('should handle int or interval params with number', function() {
+ it('should handle int or interval params with number', () => {
const func = gfunc.createFuncInstance('movingMedian');
func.params[0] = '5';
expect(func.render('hello')).toEqual('movingMedian(hello, 5)');
});
- it('should handle int or interval params with interval string', function() {
+ it('should handle int or interval params with interval string', () => {
const func = gfunc.createFuncInstance('movingMedian');
func.params[0] = '5min';
expect(func.render('hello')).toEqual("movingMedian(hello, '5min')");
});
- it('should never quote boolean paramater', function() {
+ it('should never quote boolean paramater', () => {
const func = gfunc.createFuncInstance('sortByName');
func.params[0] = '$natural';
expect(func.render('hello')).toEqual('sortByName(hello, $natural)');
});
- it('should never quote int paramater', function() {
+ it('should never quote int paramater', () => {
const func = gfunc.createFuncInstance('maximumAbove');
func.params[0] = '$value';
expect(func.render('hello')).toEqual('maximumAbove(hello, $value)');
});
- it('should never quote node paramater', function() {
+ it('should never quote node paramater', () => {
const func = gfunc.createFuncInstance('aliasByNode');
func.params[0] = '$node';
expect(func.render('hello')).toEqual('aliasByNode(hello, $node)');
});
- it('should handle metric param and int param and string param', function() {
+ it('should handle metric param and int param and string param', () => {
const func = gfunc.createFuncInstance('groupByNode');
func.params[0] = 5;
func.params[1] = 'avg';
expect(func.render('hello.metric')).toEqual("groupByNode(hello.metric, 5, 'avg')");
});
- it('should handle function with no metric param', function() {
+ it('should handle function with no metric param', () => {
const func = gfunc.createFuncInstance('randomWalk');
func.params[0] = 'test';
expect(func.render(undefined)).toEqual("randomWalk('test')");
});
- it('should handle function multiple series params', function() {
+ it('should handle function multiple series params', () => {
const func = gfunc.createFuncInstance('asPercent');
func.params[0] = '#B';
expect(func.render('#A')).toEqual('asPercent(#A, #B)');
});
});
-describe('when requesting function definitions', function() {
- it('should return function definitions', function() {
+describe('when requesting function definitions', () => {
+ it('should return function definitions', () => {
const funcIndex = gfunc.getFuncDefs('1.0');
expect(Object.keys(funcIndex).length).toBeGreaterThan(8);
});
});
-describe('when updating func param', function() {
- it('should update param value and update text representation', function() {
+describe('when updating func param', () => {
+ it('should update param value and update text representation', () => {
const func = gfunc.createFuncInstance('summarize', {
withDefaultParams: true,
});
@@ -110,21 +110,21 @@ describe('when updating func param', function() {
expect(func.text).toBe('summarize(1h, sum, false)');
});
- it('should parse numbers as float', function() {
+ it('should parse numbers as float', () => {
const func = gfunc.createFuncInstance('scale');
func.updateParam('0.001', 0);
expect(func.params[0]).toBe('0.001');
});
});
-describe('when updating func param with optional second parameter', function() {
- it('should update value and text', function() {
+describe('when updating func param with optional second parameter', () => {
+ it('should update value and text', () => {
const func = gfunc.createFuncInstance('aliasByNode');
func.updateParam('1', 0);
expect(func.params[0]).toBe('1');
});
- it('should slit text and put value in second param', function() {
+ it('should slit text and put value in second param', () => {
const func = gfunc.createFuncInstance('aliasByNode');
func.updateParam('4,-5', 0);
expect(func.params[0]).toBe('4');
@@ -132,7 +132,7 @@ describe('when updating func param with optional second parameter', function() {
expect(func.text).toBe('aliasByNode(4, -5)');
});
- it('should remove second param when empty string is set', function() {
+ it('should remove second param when empty string is set', () => {
const func = gfunc.createFuncInstance('aliasByNode');
func.updateParam('4,-5', 0);
func.updateParam('', 1);
diff --git a/public/app/plugins/datasource/graphite/specs/lexer.test.ts b/public/app/plugins/datasource/graphite/specs/lexer.test.ts
index f00df17a725..4bfe7217bfa 100644
--- a/public/app/plugins/datasource/graphite/specs/lexer.test.ts
+++ b/public/app/plugins/datasource/graphite/specs/lexer.test.ts
@@ -1,7 +1,7 @@
import { Lexer } from '../lexer';
-describe('when lexing graphite expression', function() {
- it('should tokenize metric expression', function() {
+describe('when lexing graphite expression', () => {
+ it('should tokenize metric expression', () => {
const lexer = new Lexer('metric.test.*.asd.count');
const tokens = lexer.tokenize();
expect(tokens[0].value).toBe('metric');
@@ -11,27 +11,27 @@ describe('when lexing graphite expression', function() {
expect(tokens[4].pos).toBe(13);
});
- it('should tokenize metric expression with dash', function() {
+ it('should tokenize metric expression with dash', () => {
const lexer = new Lexer('metric.test.se1-server-*.asd.count');
const tokens = lexer.tokenize();
expect(tokens[4].type).toBe('identifier');
expect(tokens[4].value).toBe('se1-server-*');
});
- it('should tokenize metric expression with dash2', function() {
+ it('should tokenize metric expression with dash2', () => {
const lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
const tokens = lexer.tokenize();
expect(tokens[0].value).toBe('net');
expect(tokens[2].value).toBe('192-168-1-1');
});
- it('should tokenize metric expression with equal sign', function() {
+ it('should tokenize metric expression with equal sign', () => {
const lexer = new Lexer('apps=test');
const tokens = lexer.tokenize();
expect(tokens[0].value).toBe('apps=test');
});
- it('simple function2', function() {
+ it('simple function2', () => {
const lexer = new Lexer('offset(test.metric, -100)');
const tokens = lexer.tokenize();
expect(tokens[2].type).toBe('identifier');
@@ -39,7 +39,7 @@ describe('when lexing graphite expression', function() {
expect(tokens[6].type).toBe('number');
});
- it('should tokenize metric expression with curly braces', function() {
+ it('should tokenize metric expression with curly braces', () => {
const lexer = new Lexer('metric.se1-{first, second}.count');
const tokens = lexer.tokenize();
expect(tokens.length).toBe(10);
@@ -49,7 +49,7 @@ describe('when lexing graphite expression', function() {
expect(tokens[6].value).toBe('second');
});
- it('should tokenize metric expression with number segments', function() {
+ it('should tokenize metric expression with number segments', () => {
const lexer = new Lexer('metric.10.12_10.test');
const tokens = lexer.tokenize();
expect(tokens[0].type).toBe('identifier');
@@ -59,7 +59,7 @@ describe('when lexing graphite expression', function() {
expect(tokens[4].type).toBe('identifier');
});
- it('should tokenize metric expression with segment that start with number', function() {
+ it('should tokenize metric expression with segment that start with number', () => {
const lexer = new Lexer('metric.001-server');
const tokens = lexer.tokenize();
expect(tokens[0].type).toBe('identifier');
@@ -67,7 +67,7 @@ describe('when lexing graphite expression', function() {
expect(tokens.length).toBe(3);
});
- it('should tokenize func call with numbered metric and number arg', function() {
+ it('should tokenize func call with numbered metric and number arg', () => {
const lexer = new Lexer('scale(metric.10, 15)');
const tokens = lexer.tokenize();
expect(tokens[0].type).toBe('identifier');
@@ -78,7 +78,7 @@ describe('when lexing graphite expression', function() {
expect(tokens[6].type).toBe('number');
});
- it('should tokenize metric with template parameter', function() {
+ it('should tokenize metric with template parameter', () => {
const lexer = new Lexer('metric.[[server]].test');
const tokens = lexer.tokenize();
expect(tokens[2].type).toBe('identifier');
@@ -86,7 +86,7 @@ describe('when lexing graphite expression', function() {
expect(tokens[4].type).toBe('identifier');
});
- it('should tokenize metric with question mark', function() {
+ it('should tokenize metric with question mark', () => {
const lexer = new Lexer('metric.server_??.test');
const tokens = lexer.tokenize();
expect(tokens[2].type).toBe('identifier');
@@ -94,7 +94,7 @@ describe('when lexing graphite expression', function() {
expect(tokens[4].type).toBe('identifier');
});
- it('should handle error with unterminated string', function() {
+ it('should handle error with unterminated string', () => {
const lexer = new Lexer("alias(metric, 'asd)");
const tokens = lexer.tokenize();
expect(tokens[0].value).toBe('alias');
@@ -106,14 +106,14 @@ describe('when lexing graphite expression', function() {
expect(tokens[4].pos).toBe(20);
});
- it('should handle float parameters', function() {
+ it('should handle float parameters', () => {
const lexer = new Lexer('alias(metric, 0.002)');
const tokens = lexer.tokenize();
expect(tokens[4].type).toBe('number');
expect(tokens[4].value).toBe('0.002');
});
- it('should handle bool parameters', function() {
+ it('should handle bool parameters', () => {
const lexer = new Lexer('alias(metric, true, false)');
const tokens = lexer.tokenize();
expect(tokens[4].type).toBe('bool');
diff --git a/public/app/plugins/datasource/graphite/specs/parser.test.ts b/public/app/plugins/datasource/graphite/specs/parser.test.ts
index 966eb213d64..25cabd5d20c 100644
--- a/public/app/plugins/datasource/graphite/specs/parser.test.ts
+++ b/public/app/plugins/datasource/graphite/specs/parser.test.ts
@@ -1,7 +1,7 @@
import { Parser } from '../parser';
-describe('when parsing', function() {
- it('simple metric expression', function() {
+describe('when parsing', () => {
+ it('simple metric expression', () => {
const parser = new Parser('metric.test.*.asd.count');
const rootNode = parser.getAst();
@@ -10,7 +10,7 @@ describe('when parsing', function() {
expect(rootNode.segments[0].value).toBe('metric');
});
- it('simple metric expression with numbers in segments', function() {
+ it('simple metric expression with numbers in segments', () => {
const parser = new Parser('metric.10.15_20.5');
const rootNode = parser.getAst();
@@ -21,7 +21,7 @@ describe('when parsing', function() {
expect(rootNode.segments[3].value).toBe('5');
});
- it('simple metric expression with curly braces', function() {
+ it('simple metric expression with curly braces', () => {
const parser = new Parser('metric.se1-{count, max}');
const rootNode = parser.getAst();
@@ -30,7 +30,7 @@ describe('when parsing', function() {
expect(rootNode.segments[1].value).toBe('se1-{count,max}');
});
- it('simple metric expression with curly braces at start of segment and with post chars', function() {
+ it('simple metric expression with curly braces at start of segment and with post chars', () => {
const parser = new Parser('metric.{count, max}-something.count');
const rootNode = parser.getAst();
@@ -39,14 +39,14 @@ describe('when parsing', function() {
expect(rootNode.segments[1].value).toBe('{count,max}-something');
});
- it('simple function', function() {
+ it('simple function', () => {
const parser = new Parser('sum(test)');
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
expect(rootNode.params.length).toBe(1);
});
- it('simple function2', function() {
+ it('simple function2', () => {
const parser = new Parser('offset(test.metric, -100)');
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
@@ -54,7 +54,7 @@ describe('when parsing', function() {
expect(rootNode.params[1].type).toBe('number');
});
- it('simple function with string arg', function() {
+ it('simple function with string arg', () => {
const parser = new Parser("randomWalk('test')");
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
@@ -62,7 +62,7 @@ describe('when parsing', function() {
expect(rootNode.params[0].type).toBe('string');
});
- it('function with multiple args', function() {
+ it('function with multiple args', () => {
const parser = new Parser("sum(test, 1, 'test')");
const rootNode = parser.getAst();
@@ -73,7 +73,7 @@ describe('when parsing', function() {
expect(rootNode.params[2].type).toBe('string');
});
- it('function with nested function', function() {
+ it('function with nested function', () => {
const parser = new Parser('sum(scaleToSeconds(test, 1))');
const rootNode = parser.getAst();
@@ -86,7 +86,7 @@ describe('when parsing', function() {
expect(rootNode.params[0].params[1].type).toBe('number');
});
- it('function with multiple series', function() {
+ it('function with multiple series', () => {
const parser = new Parser('sum(test.test.*.count, test.timers.*.count)');
const rootNode = parser.getAst();
@@ -96,7 +96,7 @@ describe('when parsing', function() {
expect(rootNode.params[1].type).toBe('metric');
});
- it('function with templated series', function() {
+ it('function with templated series', () => {
const parser = new Parser('sum(test.[[server]].count)');
const rootNode = parser.getAst();
@@ -106,7 +106,7 @@ describe('when parsing', function() {
expect(rootNode.params[0].segments[1].value).toBe('[[server]]');
});
- it('invalid metric expression', function() {
+ it('invalid metric expression', () => {
const parser = new Parser('metric.test.*.asd.');
const rootNode = parser.getAst();
@@ -114,7 +114,7 @@ describe('when parsing', function() {
expect(rootNode.pos).toBe(19);
});
- it('invalid function expression missing closing parenthesis', function() {
+ it('invalid function expression missing closing parenthesis', () => {
const parser = new Parser('sum(test');
const rootNode = parser.getAst();
@@ -122,7 +122,7 @@ describe('when parsing', function() {
expect(rootNode.pos).toBe(9);
});
- it('unclosed string in function', function() {
+ it('unclosed string in function', () => {
const parser = new Parser("sum('test)");
const rootNode = parser.getAst();
@@ -130,13 +130,13 @@ describe('when parsing', function() {
expect(rootNode.pos).toBe(11);
});
- it('handle issue #69', function() {
+ it('handle issue #69', () => {
const parser = new Parser('cactiStyle(offset(scale(net.192-168-1-1.192-168-1-9.ping_value.*,0.001),-100))');
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
});
- it('handle float function arguments', function() {
+ it('handle float function arguments', () => {
const parser = new Parser('scale(test, 0.002)');
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
@@ -144,7 +144,7 @@ describe('when parsing', function() {
expect(rootNode.params[1].value).toBe(0.002);
});
- it('handle curly brace pattern at start', function() {
+ it('handle curly brace pattern at start', () => {
const parser = new Parser('{apps}.test');
const rootNode = parser.getAst();
expect(rootNode.type).toBe('metric');
@@ -152,7 +152,7 @@ describe('when parsing', function() {
expect(rootNode.segments[1].value).toBe('test');
});
- it('series parameters', function() {
+ it('series parameters', () => {
const parser = new Parser('asPercent(#A, #B)');
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
@@ -161,7 +161,7 @@ describe('when parsing', function() {
expect(rootNode.params[1].value).toBe('#B');
});
- it('series parameters, issue 2788', function() {
+ it('series parameters, issue 2788', () => {
const parser = new Parser("summarize(diffSeries(#A, #B), '10m', 'sum', false)");
const rootNode = parser.getAst();
expect(rootNode.type).toBe('function');
@@ -170,7 +170,7 @@ describe('when parsing', function() {
expect(rootNode.params[3].type).toBe('bool');
});
- it('should parse metric expression with ip number segments', function() {
+ it('should parse metric expression with ip number segments', () => {
const parser = new Parser('5.10.123.5');
const rootNode = parser.getAst();
expect(rootNode.segments[0].value).toBe('5');
diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.test.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.test.ts
index 7826a458968..13ac2a48223 100644
--- a/public/app/plugins/datasource/graphite/specs/query_ctrl.test.ts
+++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.test.ts
@@ -3,7 +3,7 @@ import gfunc from '../gfunc';
import { GraphiteQueryCtrl } from '../query_ctrl';
describe('GraphiteQueryCtrl', () => {
- const ctx = {
+ const ctx = {
datasource: {
metricFindQuery: jest.fn(() => Promise.resolve([])),
getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))),
@@ -15,7 +15,7 @@ describe('GraphiteQueryCtrl', () => {
panelCtrl: {
refresh: jest.fn(),
},
- };
+ } as any;
ctx.panelCtrl.panel = {
targets: [ctx.target],
@@ -137,7 +137,7 @@ describe('GraphiteQueryCtrl', () => {
ctx.ctrl.target.target = 'test.count';
ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]);
ctx.ctrl.parseTarget();
- ctx.ctrl.getAltSegments(1).then(function(results) {
+ ctx.ctrl.getAltSegments(1).then(results => {
ctx.altSegments = results;
});
});
diff --git a/public/app/plugins/datasource/influxdb/datasource.ts b/public/app/plugins/datasource/influxdb/datasource.ts
index 07a11343f55..5ffbf7cf418 100644
--- a/public/app/plugins/datasource/influxdb/datasource.ts
+++ b/public/app/plugins/datasource/influxdb/datasource.ts
@@ -23,7 +23,7 @@ export default class InfluxDatasource {
/** @ngInject */
constructor(instanceSettings, private $q, private backendSrv, private templateSrv) {
this.type = 'influxdb';
- this.urls = _.map(instanceSettings.url.split(','), function(url) {
+ this.urls = _.map(instanceSettings.url.split(','), url => {
return url.trim();
});
@@ -40,14 +40,14 @@ export default class InfluxDatasource {
}
query(options) {
- var timeFilter = this.getTimeFilter(options);
+ let timeFilter = this.getTimeFilter(options);
const scopedVars = options.scopedVars;
const targets = _.cloneDeep(options.targets);
const queryTargets = [];
- var queryModel;
- var i, y;
+ let queryModel;
+ let i, y;
- var allQueries = _.map(targets, target => {
+ let allQueries = _.map(targets, target => {
if (target.hide) {
return '';
}
@@ -95,7 +95,7 @@ export default class InfluxDatasource {
}
const target = queryTargets[i];
- var alias = target.alias;
+ let alias = target.alias;
if (alias) {
alias = this.templateSrv.replace(target.alias, options.scopedVars);
}
@@ -132,7 +132,7 @@ export default class InfluxDatasource {
}
const timeFilter = this.getTimeFilter({ rangeRaw: options.rangeRaw });
- var query = options.annotation.query.replace('$timeFilter', timeFilter);
+ let query = options.annotation.query.replace('$timeFilter', timeFilter);
query = this.templateSrv.replace(query, null, 'regex');
return this._seriesQuery(query, options).then(data => {
@@ -274,7 +274,7 @@ export default class InfluxDatasource {
result => {
return result.data;
},
- function(err) {
+ err => {
if (err.status !== 0 || err.status >= 300) {
if (err.data && err.data.error) {
throw {
@@ -314,7 +314,7 @@ export default class InfluxDatasource {
const parts = /^now-(\d+)([d|h|m|s])$/.exec(date);
if (parts) {
- const amount = parseInt(parts[1]);
+ const amount = parseInt(parts[1], 10);
const unit = parts[2];
return 'now() - ' + amount + unit;
}
diff --git a/public/app/plugins/datasource/influxdb/influx_query.ts b/public/app/plugins/datasource/influxdb/influx_query.ts
index 503b341aa5b..60eac1d3f2b 100644
--- a/public/app/plugins/datasource/influxdb/influx_query.ts
+++ b/public/app/plugins/datasource/influxdb/influx_query.ts
@@ -27,15 +27,15 @@ export default class InfluxQuery {
}
updateProjection() {
- this.selectModels = _.map(this.target.select, function(parts: any) {
+ this.selectModels = _.map(this.target.select, (parts: any) => {
return _.map(parts, queryPart.create);
});
this.groupByParts = _.map(this.target.groupBy, queryPart.create);
}
updatePersistedParts() {
- this.target.select = _.map(this.selectModels, function(selectParts) {
- return _.map(selectParts, function(part: any) {
+ this.target.select = _.map(this.selectModels, selectParts => {
+ return _.map(selectParts, (part: any) => {
return { type: part.def.type, params: part.params };
});
});
@@ -125,9 +125,9 @@ export default class InfluxQuery {
}
private renderTagCondition(tag, index, interpolate) {
- var str = '';
- var operator = tag.operator;
- var value = tag.value;
+ let str = '';
+ let operator = tag.operator;
+ let value = tag.value;
if (index > 0) {
str = (tag.condition || 'AND') + ' ';
}
@@ -156,8 +156,8 @@ export default class InfluxQuery {
}
getMeasurementAndPolicy(interpolate) {
- var policy = this.target.policy;
- var measurement = this.target.measurement || 'measurement';
+ let policy = this.target.policy;
+ let measurement = this.target.measurement || 'measurement';
if (!measurement.match('^/.*/$')) {
measurement = '"' + measurement + '"';
@@ -199,11 +199,11 @@ export default class InfluxQuery {
}
}
- var query = 'SELECT ';
- var i, y;
+ let query = 'SELECT ';
+ let i, y;
for (i = 0; i < this.selectModels.length; i++) {
const parts = this.selectModels[i];
- var selectText = '';
+ let selectText = '';
for (y = 0; y < parts.length; y++) {
const part = parts[y];
selectText = part.render(selectText);
@@ -226,7 +226,7 @@ export default class InfluxQuery {
query += '$timeFilter';
- var groupBySection = '';
+ let groupBySection = '';
for (i = 0; i < this.groupByParts.length; i++) {
const part = this.groupByParts[i];
if (i > 0) {
diff --git a/public/app/plugins/datasource/influxdb/influx_series.ts b/public/app/plugins/datasource/influxdb/influx_series.ts
index d68d0c26696..d2a8482eced 100644
--- a/public/app/plugins/datasource/influxdb/influx_series.ts
+++ b/public/app/plugins/datasource/influxdb/influx_series.ts
@@ -14,7 +14,7 @@ export default class InfluxSeries {
getTimeSeries() {
const output = [];
- var i, j;
+ let i, j;
if (this.series.length === 0) {
return output;
@@ -22,12 +22,12 @@ export default class InfluxSeries {
_.each(this.series, series => {
const columns = series.columns.length;
- const tags = _.map(series.tags, function(value, key) {
+ const tags = _.map(series.tags, (value, key) => {
return key + ': ' + value;
});
for (j = 1; j < columns; j++) {
- var seriesName = series.name;
+ let seriesName = series.name;
const columnName = series.columns[j];
if (columnName !== 'value') {
seriesName = seriesName + '.' + columnName;
@@ -57,7 +57,7 @@ export default class InfluxSeries {
const regex = /\$(\w+)|\[\[([\s\S]+?)\]\]/g;
const segments = series.name.split('.');
- return this.alias.replace(regex, function(match, g1, g2) {
+ return this.alias.replace(regex, (match, g1, g2) => {
const group = g1 || g2;
const segIndex = parseInt(group, 10);
@@ -86,10 +86,10 @@ export default class InfluxSeries {
const list = [];
_.each(this.series, series => {
- var titleCol = null;
- var timeCol = null;
+ let titleCol = null;
+ let timeCol = null;
const tagsCol = [];
- var textCol = null;
+ let textCol = null;
_.each(series.columns, (column, index) => {
if (column === 'time') {
@@ -124,10 +124,10 @@ export default class InfluxSeries {
// Remove empty values, then split in different tags for comma separated values
tags: _.flatten(
tagsCol
- .filter(function(t) {
+ .filter(t => {
return value[t];
})
- .map(function(t) {
+ .map(t => {
return value[t].split(',');
})
),
@@ -143,7 +143,7 @@ export default class InfluxSeries {
getTable() {
const table = new TableModel();
- var i, j;
+ let i, j;
if (this.series.length === 0) {
return table;
@@ -158,7 +158,7 @@ export default class InfluxSeries {
table.columns.push({ text: 'Time', type: 'time' });
j++;
}
- _.each(_.keys(series.tags), function(key) {
+ _.each(_.keys(series.tags), key => {
table.columns.push({ text: key });
});
for (; j < series.columns.length; j++) {
diff --git a/public/app/plugins/datasource/influxdb/query_builder.ts b/public/app/plugins/datasource/influxdb/query_builder.ts
index f4ac4373ab6..a61216787d3 100644
--- a/public/app/plugins/datasource/influxdb/query_builder.ts
+++ b/public/app/plugins/datasource/influxdb/query_builder.ts
@@ -1,9 +1,9 @@
import _ from 'lodash';
function renderTagCondition(tag, index) {
- var str = '';
- var operator = tag.operator;
- var value = tag.value;
+ let str = '';
+ let operator = tag.operator;
+ let value = tag.value;
if (index > 0) {
str = (tag.condition || 'AND') + ' ';
}
@@ -28,9 +28,9 @@ export class InfluxQueryBuilder {
constructor(private target, private database?) {}
buildExploreQuery(type: string, withKey?: string, withMeasurementFilter?: string) {
- var query;
- var measurement;
- var policy;
+ let query;
+ let measurement;
+ let policy;
if (type === 'TAG_KEYS') {
query = 'SHOW TAG KEYS';
@@ -84,7 +84,7 @@ export class InfluxQueryBuilder {
if (this.target.tags && this.target.tags.length > 0) {
const whereConditions = _.reduce(
this.target.tags,
- function(memo, tag) {
+ (memo, tag) => {
// do not add a condition for the key we want to explore for
if (tag.key === withKey) {
return memo;
diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts
index 5ef8f2c7d1d..f531fe6c4d9 100644
--- a/public/app/plugins/datasource/influxdb/query_ctrl.ts
+++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts
@@ -19,7 +19,7 @@ export class InfluxQueryCtrl extends QueryCtrl {
measurementSegment: any;
removeTagFilterSegment: any;
- /** @ngInject **/
+ /** @ngInject */
constructor($scope, $injector, private templateSrv, private $q, private uiSegmentSrv) {
super($scope, $injector);
this.target = this.target;
@@ -70,7 +70,7 @@ export class InfluxQueryCtrl extends QueryCtrl {
const categories = queryPart.getCategories();
this.selectMenu = _.reduce(
categories,
- function(memo, cat, key) {
+ (memo, cat, key) => {
const menu = {
text: key,
submenu: cat.map(item => {
@@ -279,7 +279,7 @@ export class InfluxQueryCtrl extends QueryCtrl {
}
}
- var query, addTemplateVars;
+ let query, addTemplateVars;
if (segment.type === 'key' || segment.type === 'plus-button') {
query = this.queryBuilder.buildExploreQuery('TAG_KEYS');
addTemplateVars = false;
@@ -343,8 +343,8 @@ export class InfluxQueryCtrl extends QueryCtrl {
rebuildTargetTagConditions() {
const tags = [];
- var tagIndex = 0;
- var tagOperator = '';
+ let tagIndex = 0;
+ let tagOperator = '';
_.each(this.tagSegments, (segment2, index) => {
if (segment2.type === 'key') {
diff --git a/public/app/plugins/datasource/influxdb/query_part.ts b/public/app/plugins/datasource/influxdb/query_part.ts
index e03ff82d5f0..4bc92bcfe72 100644
--- a/public/app/plugins/datasource/influxdb/query_part.ts
+++ b/public/app/plugins/datasource/influxdb/query_part.ts
@@ -41,7 +41,7 @@ function fieldRenderer(part, innerExpr) {
function replaceAggregationAddStrategy(selectParts, partModel) {
// look for existing aggregation
- for (var i = 0; i < selectParts.length; i++) {
+ for (let i = 0; i < selectParts.length; i++) {
const part = selectParts[i];
if (part.def.category === categories.Aggregations) {
if (part.def.type === partModel.def.type) {
@@ -79,7 +79,7 @@ function replaceAggregationAddStrategy(selectParts, partModel) {
}
function addTransformationStrategy(selectParts, partModel) {
- var i;
+ let i;
// look for index to add transformation
for (i = 0; i < selectParts.length; i++) {
const part = selectParts[i];
@@ -126,7 +126,7 @@ function addAliasStrategy(selectParts, partModel) {
function addFieldStrategy(selectParts, partModel, query) {
// copy all parts
- const parts = _.map(selectParts, function(part: any) {
+ const parts = _.map(selectParts, (part: any) => {
return createPart({ type: part.def.type, params: _.clone(part.params) });
});
@@ -453,7 +453,7 @@ register({
export default {
create: createPart,
- getCategories: function() {
+ getCategories: () => {
return categories;
},
replaceAggregationAdd: replaceAggregationAddStrategy,
diff --git a/public/app/plugins/datasource/influxdb/specs/datasource.test.ts b/public/app/plugins/datasource/influxdb/specs/datasource.test.ts
index 60f49bd4905..62049535e3e 100644
--- a/public/app/plugins/datasource/influxdb/specs/datasource.test.ts
+++ b/public/app/plugins/datasource/influxdb/specs/datasource.test.ts
@@ -10,7 +10,7 @@ describe('InfluxDataSource', () => {
instanceSettings: { url: 'url', name: 'influxDb', jsonData: {} },
};
- beforeEach(function() {
+ beforeEach(() => {
ctx.instanceSettings.url = '/api/datasources/proxy/1';
ctx.ds = new InfluxDatasource(ctx.instanceSettings, ctx.$q, ctx.backendSrv, ctx.templateSrv);
});
@@ -26,7 +26,7 @@ describe('InfluxDataSource', () => {
let requestQuery;
beforeEach(async () => {
- ctx.backendSrv.datasourceRequest = function(req) {
+ ctx.backendSrv.datasourceRequest = req => {
requestQuery = req.params.q;
return ctx.$q.when({
results: [
@@ -43,7 +43,7 @@ describe('InfluxDataSource', () => {
});
};
- await ctx.ds.metricFindQuery(query, queryOptions).then(function(_) {});
+ await ctx.ds.metricFindQuery(query, queryOptions).then(_ => {});
});
it('should replace $timefilter', () => {
diff --git a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts
index a62d5384ac6..f8e65c21f2d 100644
--- a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts
+++ b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts
@@ -1,10 +1,10 @@
import InfluxQuery from '../influx_query';
-describe('InfluxQuery', function() {
+describe('InfluxQuery', () => {
const templateSrv = { replace: val => val };
- describe('render series with mesurement only', function() {
- it('should generate correct query', function() {
+ describe('render series with measurement only', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -18,8 +18,8 @@ describe('InfluxQuery', function() {
});
});
- describe('render series with policy only', function() {
- it('should generate correct query', function() {
+ describe('render series with policy only', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -36,8 +36,8 @@ describe('InfluxQuery', function() {
});
});
- describe('render series with math and alias', function() {
- it('should generate correct query', function() {
+ describe('render series with math and alias', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -61,8 +61,8 @@ describe('InfluxQuery', function() {
});
});
- describe('series with single tag only', function() {
- it('should generate correct query', function() {
+ describe('series with single tag only', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -81,7 +81,7 @@ describe('InfluxQuery', function() {
);
});
- it('should switch regex operator with tag value is regex', function() {
+ it('should switch regex operator with tag value is regex', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -99,8 +99,8 @@ describe('InfluxQuery', function() {
});
});
- describe('series with multiple tags only', function() {
- it('should generate correct query', function() {
+ describe('series with multiple tags only', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -119,8 +119,8 @@ describe('InfluxQuery', function() {
});
});
- describe('series with tags OR condition', function() {
- it('should generate correct query', function() {
+ describe('series with tags OR condition', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -139,8 +139,8 @@ describe('InfluxQuery', function() {
});
});
- describe('query with value condition', function() {
- it('should not quote value', function() {
+ describe('query with value condition', () => {
+ it('should not quote value', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -156,8 +156,8 @@ describe('InfluxQuery', function() {
});
});
- describe('series with groupByTag', function() {
- it('should generate correct query', function() {
+ describe('series with groupByTag', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -173,8 +173,8 @@ describe('InfluxQuery', function() {
});
});
- describe('render series without group by', function() {
- it('should generate correct query', function() {
+ describe('render series without group by', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -189,8 +189,8 @@ describe('InfluxQuery', function() {
});
});
- describe('render series without group by and fill', function() {
- it('should generate correct query', function() {
+ describe('render series without group by and fill', () => {
+ it('should generate correct query', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -205,8 +205,8 @@ describe('InfluxQuery', function() {
});
});
- describe('when adding group by part', function() {
- it('should add tag before fill', function() {
+ describe('when adding group by part', () => {
+ it('should add tag before fill', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -223,7 +223,7 @@ describe('InfluxQuery', function() {
expect(query.target.groupBy[2].type).toBe('fill');
});
- it('should add tag last if no fill', function() {
+ it('should add tag last if no fill', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -239,8 +239,8 @@ describe('InfluxQuery', function() {
});
});
- describe('when adding select part', function() {
- it('should add mean after after field', function() {
+ describe('when adding select part', () => {
+ it('should add mean after field', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -255,7 +255,7 @@ describe('InfluxQuery', function() {
expect(query.target.select[0][1].type).toBe('mean');
});
- it('should replace sum by mean', function() {
+ it('should replace sum by mean', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -270,7 +270,7 @@ describe('InfluxQuery', function() {
expect(query.target.select[0][1].type).toBe('sum');
});
- it('should add math before alias', function() {
+ it('should add math before alias', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -285,7 +285,7 @@ describe('InfluxQuery', function() {
expect(query.target.select[0][2].type).toBe('math');
});
- it('should add math last', function() {
+ it('should add math last', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -300,7 +300,7 @@ describe('InfluxQuery', function() {
expect(query.target.select[0][2].type).toBe('math');
});
- it('should replace math', function() {
+ it('should replace math', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -315,7 +315,7 @@ describe('InfluxQuery', function() {
expect(query.target.select[0][2].type).toBe('math');
});
- it('should add math when one only query part', function() {
+ it('should add math when one only query part', () => {
const query = new InfluxQuery(
{
measurement: 'cpu',
@@ -330,8 +330,8 @@ describe('InfluxQuery', function() {
expect(query.target.select[0][1].type).toBe('math');
});
- describe('when render adhoc filters', function() {
- it('should generate correct query segment', function() {
+ describe('when render adhoc filters', () => {
+ it('should generate correct query segment', () => {
const query = new InfluxQuery({ measurement: 'cpu' }, templateSrv, {});
const queryText = query.renderAdhocFilters([
diff --git a/public/app/plugins/datasource/influxdb/specs/influx_series.test.ts b/public/app/plugins/datasource/influxdb/specs/influx_series.test.ts
index bb20db1ba76..44232173e27 100644
--- a/public/app/plugins/datasource/influxdb/specs/influx_series.test.ts
+++ b/public/app/plugins/datasource/influxdb/specs/influx_series.test.ts
@@ -1,7 +1,7 @@
import InfluxSeries from '../influx_series';
-describe('when generating timeseries from influxdb response', function() {
- describe('given multiple fields for series', function() {
+describe('when generating timeseries from influxdb response', () => {
+ describe('given multiple fields for series', () => {
const options = {
alias: '',
series: [
@@ -13,8 +13,8 @@ describe('when generating timeseries from influxdb response', function() {
},
],
};
- describe('and no alias', function() {
- it('should generate multiple datapoints for each column', function() {
+ describe('and no alias', () => {
+ it('should generate multiple datapoints for each column', () => {
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -39,8 +39,8 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('and simple alias', function() {
- it('should use alias', function() {
+ describe('and simple alias', () => {
+ it('should use alias', () => {
options.alias = 'new series';
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -51,8 +51,8 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('and alias patterns', function() {
- it('should replace patterns', function() {
+ describe('and alias patterns', () => {
+ it('should replace patterns', () => {
options.alias = 'alias: $m -> $tag_server ([[measurement]])';
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -64,7 +64,7 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given measurement with default fieldname', function() {
+ describe('given measurement with default fieldname', () => {
const options = {
series: [
{
@@ -82,8 +82,8 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- describe('and no alias', function() {
- it('should generate label with no field', function() {
+ describe('and no alias', () => {
+ it('should generate label with no field', () => {
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -93,7 +93,7 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given two series', function() {
+ describe('given two series', () => {
const options = {
alias: '',
series: [
@@ -112,8 +112,8 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- describe('and no alias', function() {
- it('should generate two time series', function() {
+ describe('and no alias', () => {
+ it('should generate two time series', () => {
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -132,8 +132,8 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('and simple alias', function() {
- it('should use alias', function() {
+ describe('and simple alias', () => {
+ it('should use alias', () => {
options.alias = 'new series';
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -142,8 +142,8 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('and alias patterns', function() {
- it('should replace patterns', function() {
+ describe('and alias patterns', () => {
+ it('should replace patterns', () => {
options.alias = 'alias: $m -> $tag_server ([[measurement]])';
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -154,7 +154,7 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given measurement with dots', function() {
+ describe('given measurement with dots', () => {
const options = {
alias: '',
series: [
@@ -167,7 +167,7 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- it('should replace patterns', function() {
+ it('should replace patterns', () => {
options.alias = 'alias: $1 -> [[3]]';
const series = new InfluxSeries(options);
const result = series.getTimeSeries();
@@ -176,7 +176,7 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given table response', function() {
+ describe('given table response', () => {
const options = {
alias: '',
series: [
@@ -189,7 +189,7 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- it('should return table', function() {
+ it('should return table', () => {
const series = new InfluxSeries(options);
const table = series.getTable();
@@ -200,7 +200,7 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given table response from SHOW CARDINALITY', function() {
+ describe('given table response from SHOW CARDINALITY', () => {
const options = {
alias: '',
series: [
@@ -212,7 +212,7 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- it('should return table', function() {
+ it('should return table', () => {
const series = new InfluxSeries(options);
const table = series.getTable();
@@ -223,8 +223,8 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given annotation response', function() {
- describe('with empty tagsColumn', function() {
+ describe('given annotation response', () => {
+ describe('with empty tagsColumn', () => {
const options = {
alias: '',
annotation: {},
@@ -238,7 +238,7 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- it('should multiple tags', function() {
+ it('should multiple tags', () => {
const series = new InfluxSeries(options);
const annotations = series.getAnnotations();
@@ -246,7 +246,7 @@ describe('when generating timeseries from influxdb response', function() {
});
});
- describe('given annotation response', function() {
+ describe('given annotation response', () => {
const options = {
alias: '',
annotation: {
@@ -262,7 +262,7 @@ describe('when generating timeseries from influxdb response', function() {
],
};
- it('should multiple tags', function() {
+ it('should multiple tags', () => {
const series = new InfluxSeries(options);
const annotations = series.getAnnotations();
diff --git a/public/app/plugins/datasource/influxdb/specs/query_builder.test.ts b/public/app/plugins/datasource/influxdb/specs/query_builder.test.ts
index d8b27f8b1bf..e21b95ac374 100644
--- a/public/app/plugins/datasource/influxdb/specs/query_builder.test.ts
+++ b/public/app/plugins/datasource/influxdb/specs/query_builder.test.ts
@@ -1,14 +1,14 @@
import { InfluxQueryBuilder } from '../query_builder';
-describe('InfluxQueryBuilder', function() {
- describe('when building explore queries', function() {
- it('should only have measurement condition in tag keys query given query with measurement', function() {
+describe('InfluxQueryBuilder', () => {
+ describe('when building explore queries', () => {
+ it('should only have measurement condition in tag keys query given query with measurement', () => {
const builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
const query = builder.buildExploreQuery('TAG_KEYS');
expect(query).toBe('SHOW TAG KEYS FROM "cpu"');
});
- it('should handle regex measurement in tag keys query', function() {
+ it('should handle regex measurement in tag keys query', () => {
const builder = new InfluxQueryBuilder({
measurement: '/.*/',
tags: [],
@@ -17,13 +17,13 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG KEYS FROM /.*/');
});
- it('should have no conditions in tags keys query given query with no measurement or tag', function() {
+ it('should have no conditions in tags keys query given query with no measurement or tag', () => {
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('TAG_KEYS');
expect(query).toBe('SHOW TAG KEYS');
});
- it('should have where condition in tag keys query with tags', function() {
+ it('should have where condition in tag keys query with tags', () => {
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'host', value: 'se1' }],
@@ -32,25 +32,25 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG KEYS WHERE "host" = \'se1\'');
});
- it('should have no conditions in measurement query for query with no tags', function() {
+ it('should have no conditions in measurement query for query with no tags', () => {
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('MEASUREMENTS');
expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
});
- it('should have no conditions in measurement query for query with no tags and empty query', function() {
+ it('should have no conditions in measurement query for query with no tags and empty query', () => {
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('MEASUREMENTS', undefined, '');
expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
});
- it('should have WITH MEASUREMENT in measurement query for non-empty query with no tags', function() {
+ it('should have WITH MEASUREMENT in measurement query for non-empty query with no tags', () => {
const builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
const query = builder.buildExploreQuery('MEASUREMENTS', undefined, 'something');
expect(query).toBe('SHOW MEASUREMENTS WITH MEASUREMENT =~ /something/ LIMIT 100');
});
- it('should have WITH MEASUREMENT WHERE in measurement query for non-empty query with tags', function() {
+ it('should have WITH MEASUREMENT WHERE in measurement query for non-empty query with tags', () => {
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'app', value: 'email' }],
@@ -59,7 +59,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW MEASUREMENTS WITH MEASUREMENT =~ /something/ WHERE "app" = \'email\' LIMIT 100');
});
- it('should have where condition in measurement query for query with tags', function() {
+ it('should have where condition in measurement query for query with tags', () => {
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'app', value: 'email' }],
@@ -68,7 +68,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW MEASUREMENTS WHERE "app" = \'email\' LIMIT 100');
});
- it('should have where tag name IN filter in tag values query for query with one tag', function() {
+ it('should have where tag name IN filter in tag values query for query with one tag', () => {
const builder = new InfluxQueryBuilder({
measurement: '',
tags: [{ key: 'app', value: 'asdsadsad' }],
@@ -77,7 +77,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG VALUES WITH KEY = "app"');
});
- it('should have measurement tag condition and tag name IN filter in tag values query', function() {
+ it('should have measurement tag condition and tag name IN filter in tag values query', () => {
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
tags: [{ key: 'app', value: 'email' }, { key: 'host', value: 'server1' }],
@@ -86,7 +86,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
});
- it('should select from policy correctly if policy is specified', function() {
+ it('should select from policy correctly if policy is specified', () => {
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
policy: 'one_week',
@@ -96,7 +96,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG VALUES FROM "one_week"."cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
});
- it('should not include policy when policy is default', function() {
+ it('should not include policy when policy is default', () => {
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
policy: 'default',
@@ -106,7 +106,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG VALUES FROM "cpu" WITH KEY = "app"');
});
- it('should switch to regex operator in tag condition', function() {
+ it('should switch to regex operator in tag condition', () => {
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
tags: [{ key: 'host', value: '/server.*/' }],
@@ -115,7 +115,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" =~ /server.*/');
});
- it('should build show field query', function() {
+ it('should build show field query', () => {
const builder = new InfluxQueryBuilder({
measurement: 'cpu',
tags: [{ key: 'app', value: 'email' }],
@@ -124,7 +124,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW FIELD KEYS FROM "cpu"');
});
- it('should build show field query with regexp', function() {
+ it('should build show field query with regexp', () => {
const builder = new InfluxQueryBuilder({
measurement: '/$var/',
tags: [{ key: 'app', value: 'email' }],
@@ -133,7 +133,7 @@ describe('InfluxQueryBuilder', function() {
expect(query).toBe('SHOW FIELD KEYS FROM /$var/');
});
- it('should build show retention policies query', function() {
+ it('should build show retention policies query', () => {
const builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] }, 'site');
const query = builder.buildExploreQuery('RETENTION POLICIES');
expect(query).toBe('SHOW RETENTION POLICIES on "site"');
diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.test.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.test.ts
index 88d4fb143cd..b50c7d8cde6 100644
--- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.test.ts
+++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.test.ts
@@ -3,7 +3,7 @@ import { uiSegmentSrv } from 'app/core/services/segment_srv';
import { InfluxQueryCtrl } from '../query_ctrl';
describe('InfluxDBQueryCtrl', () => {
- const ctx = {};
+ const ctx = {} as any;
beforeEach(() => {
InfluxQueryCtrl.prototype.datasource = {
diff --git a/public/app/plugins/datasource/logging/result_transformer.ts b/public/app/plugins/datasource/logging/result_transformer.ts
index e238778614c..891f9268068 100644
--- a/public/app/plugins/datasource/logging/result_transformer.ts
+++ b/public/app/plugins/datasource/logging/result_transformer.ts
@@ -26,13 +26,14 @@ export function getSearchMatches(line: string, search: string) {
}
const regexp = new RegExp(`(?:${search})`, 'g');
const matches = [];
- let match;
- while ((match = regexp.exec(line))) {
+ let match = regexp.exec(line);
+ while (match) {
matches.push({
text: match[0],
start: match.index,
length: match[0].length,
});
+ match = regexp.exec(line);
}
return matches;
}
diff --git a/public/app/plugins/datasource/mixed/datasource.ts b/public/app/plugins/datasource/mixed/datasource.ts
index bfdfcd61c77..6018329093e 100644
--- a/public/app/plugins/datasource/mixed/datasource.ts
+++ b/public/app/plugins/datasource/mixed/datasource.ts
@@ -13,14 +13,14 @@ class MixedDatasource {
return this.$q([]);
}
- return this.datasourceSrv.get(dsName).then(function(ds) {
+ return this.datasourceSrv.get(dsName).then(ds => {
const opt = angular.copy(options);
opt.targets = targets;
return ds.query(opt);
});
});
- return this.$q.all(promises).then(function(results) {
+ return this.$q.all(promises).then(results => {
return { data: _.flatten(_.map(results, 'data')) };
});
}
diff --git a/public/app/plugins/datasource/mssql/datasource.ts b/public/app/plugins/datasource/mssql/datasource.ts
index f30ea4c97fe..23aa5504d3e 100644
--- a/public/app/plugins/datasource/mssql/datasource.ts
+++ b/public/app/plugins/datasource/mssql/datasource.ts
@@ -5,12 +5,14 @@ export class MssqlDatasource {
id: any;
name: any;
responseParser: ResponseParser;
+ interval: string;
- /** @ngInject **/
+ /** @ngInject */
constructor(instanceSettings, private backendSrv, private $q, private templateSrv) {
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.responseParser = new ResponseParser(this.$q);
+ this.interval = (instanceSettings.jsonData || {}).timeInterval;
}
interpolateVariable(value, variable) {
@@ -26,7 +28,7 @@ export class MssqlDatasource {
return value;
}
- const quotedValues = _.map(value, function(val) {
+ const quotedValues = _.map(value, val => {
if (typeof value === 'number') {
return value;
}
diff --git a/public/app/plugins/datasource/mssql/module.ts b/public/app/plugins/datasource/mssql/module.ts
index a2e1e923bc6..478ecadcb3e 100644
--- a/public/app/plugins/datasource/mssql/module.ts
+++ b/public/app/plugins/datasource/mssql/module.ts
@@ -21,7 +21,7 @@ class MssqlAnnotationsQueryCtrl {
annotation: any;
- /** @ngInject **/
+ /** @ngInject */
constructor() {
this.annotation.rawQuery = this.annotation.rawQuery || defaultQuery;
}
diff --git a/public/app/plugins/datasource/mssql/partials/config.html b/public/app/plugins/datasource/mssql/partials/config.html
index 7f9dc03f286..f8a36502009 100644
--- a/public/app/plugins/datasource/mssql/partials/config.html
+++ b/public/app/plugins/datasource/mssql/partials/config.html
@@ -29,6 +29,21 @@
+
MSSQL details
+
+
+
+
+ Min time interval
+
+
+ A lower limit for the auto group by time interval. Recommended to be set to write frequency,
+ for example 1m if your data is written every minute.
+
+
+ Min time interval
+
+
+ A lower limit for the auto group by time interval. Recommended to be set to write frequency,
+ for example 1m if your data is written every minute.
+
+
-
+
+ Version
+
+ This option controls what functions are available in the PostgreSQL query builder.
+
+
+
+
+
+
+
+
+
+
+
+
+ Min time interval
+
+
+ A lower limit for the auto group by time interval. Recommended to be set to write frequency,
+ for example 1m if your data is written every minute.
+
+
+
+
+
+
+ TimescaleDB is a time-series database built as a PostgreSQL extension. If enabled, Grafana will use time_bucket in the $__timeGroup macro and display TimescaleDB specific aggregate functions in the query builder.
+
Time series:
- return column named time (UTC in seconds or timestamp)
- return column(s) with numeric datatype as values
Optional:
@@ -73,13 +171,13 @@ Or build your own conditionals using these macros which just return the values:
- $__timeTo() -> '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
-