diff --git a/.bra.toml b/.bra.toml
index 15961e1e3fd..aa7a1680adc 100644
--- a/.bra.toml
+++ b/.bra.toml
@@ -4,6 +4,7 @@ init_cmds = [
["./bin/grafana-server", "cfg:app_mode=development"]
]
watch_all = true
+follow_symlinks = true
watch_dirs = [
"$WORKDIR/pkg",
"$WORKDIR/public/views",
diff --git a/.circleci/config.yml b/.circleci/config.yml
index eb8724bed3c..a5497e6c7e8 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -32,7 +32,7 @@ jobs:
- run: sudo apt update
- run: sudo apt install -y mysql-client
- run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s
- - run: cat docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass
+ - run: cat devenv/docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass
- run:
name: mysql integration tests
command: 'GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/... ./pkg/tsdb/mysql/... '
@@ -51,7 +51,7 @@ jobs:
- run: sudo apt update
- run: sudo apt install -y postgresql-client
- run: dockerize -wait tcp://127.0.0.1:5432 -timeout 120s
- - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f docker/blocks/postgres_tests/setup.sql'
+ - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f devenv/docker/blocks/postgres_tests/setup.sql'
- run:
name: postgres integration tests
command: 'GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/... ./pkg/tsdb/postgres/...'
@@ -81,15 +81,16 @@ jobs:
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
- - run: 'go get -u gopkg.in/alecthomas/gometalinter.v2'
+ - run: 'go get -u github.com/alecthomas/gometalinter'
- run: 'go get -u github.com/tsenart/deadcode'
+ - run: 'go get -u github.com/jgautheron/goconst/cmd/goconst'
- run: 'go get -u github.com/gordonklaus/ineffassign'
- run: 'go get -u github.com/opennota/check/cmd/structcheck'
- run: 'go get -u github.com/mdempsky/unconvert'
- run: 'go get -u github.com/opennota/check/cmd/varcheck'
- run:
name: run linters
- command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...'
+ command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=goconst --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...'
- run:
name: run go vet
command: 'go vet ./pkg/...'
@@ -125,7 +126,7 @@ jobs:
build-all:
docker:
- - image: grafana/build-container:1.1.0
+ - image: grafana/build-container:1.2.0
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -157,18 +158,23 @@ jobs:
name: sha-sum packages
command: 'go run build.go sha-dist'
- run:
- name: Build Grafana.com publisher
+ name: Build Grafana.com master publisher
command: 'go build -o scripts/publish scripts/build/publish.go'
+ - run:
+ name: Build Grafana.com release publisher
+ command: 'cd scripts/build/release_publisher && go build -o release_publisher .'
- persist_to_workspace:
root: .
paths:
- dist/grafana*
- scripts/*.sh
- scripts/publish
+ - scripts/build/release_publisher/release_publisher
+ - scripts/build/publish.sh
build:
docker:
- - image: grafana/build-container:1.1.0
+ - image: grafana/build-container:1.2.0
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -227,7 +233,7 @@ jobs:
build-enterprise:
docker:
- - image: grafana/build-container:v0.1
+ - image: grafana/build-container:1.2.0
working_directory: /go/src/github.com/grafana/grafana
steps:
- checkout
@@ -298,8 +304,8 @@ jobs:
name: deploy to s3
command: 'aws s3 sync ./dist s3://$BUCKET_NAME/release'
- run:
- name: Trigger Windows build
- command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release'
+ name: Deploy to Grafana.com
+ command: './scripts/build/publish.sh'
workflows:
version: 2
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
deleted file mode 100644
index 769ba2a519b..00000000000
--- a/.github/CONTRIBUTING.md
+++ /dev/null
@@ -1,22 +0,0 @@
-Follow the setup guide in README.md
-
-### Rebuild frontend assets on source change
-```
-yarn watch
-```
-
-### Rerun tests on source change
-```
-yarn jest
-```
-
-### Run tests for backend assets before commit
-```
-test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"
-```
-
-### Run tests for frontend assets before commit
-```
-yarn test
-go test -v ./pkg/...
-```
diff --git a/.gitignore b/.gitignore
index bf97948d178..21083741e14 100644
--- a/.gitignore
+++ b/.gitignore
@@ -40,8 +40,8 @@ public/css/*.min.css
conf/custom.ini
fig.yml
-docker-compose.yml
-docker-compose.yaml
+devenv/docker-compose.yml
+devenv/docker-compose.yaml
/conf/provisioning/**/custom.yaml
/conf/provisioning/**/dev.yaml
/conf/ldap_dev.toml
@@ -54,6 +54,7 @@ profile.cov
/pkg/cmd/grafana-server/grafana-server
/pkg/cmd/grafana-server/debug
/pkg/extensions
+/public/app/extensions
debug.test
/examples/*/dist
/packaging/**/*.rpm
@@ -68,7 +69,9 @@ debug.test
/vendor/**/*.yml
/vendor/**/*_test.go
/vendor/**/.editorconfig
-/vendor/**/appengine*
*.orig
/devenv/bulk-dashboards/*.json
+/devenv/bulk_alerting_dashboards/*.json
+
+/scripts/build/release_publisher/release_publisher
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b89e925e826..ccafcf3af2e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,64 @@
-# 5.3.0 (unreleased)
+# 5.4.0 (unreleased)
+
+### New Features
+
+* **Postgres/MySQL/MSSQL**: Adds support for configuration of max open/idle connections and connection max lifetime. Also, panels with multiple SQL queries will now be executed concurrently [#11711](https://github.com/grafana/grafana/issues/11711), thx [@connection-reset](https://github.com/connection-reset)
### Minor
+* **Datasource Proxy**: Keep trailing slash for datasource proxy requests [#13326](https://github.com/grafana/grafana/pull/13326), thx [@ryantxu](https://github.com/ryantxu)
+* **Units**: New clock time format, to format ms or second values as for example `01h:59m`, [#13635](https://github.com/grafana/grafana/issues/13635), thx [@franciscocpg](https://github.com/franciscocpg)
+
+### Breaking changes
+
+* Postgres/MySQL/MSSQL datasources now by default use `max open connections` = `unlimited` (earlier 10), `max idle connections` = `2` (earlier 10) and `connection max lifetime` = `4` hours (earlier unlimited)
+
+# 5.3.1 (unreleased)
+
+* **Render**: Fix PhantomJS render of graph panel when legend displayed as table to the right [#13616](https://github.com/grafana/grafana/issues/13616)
+* **Stackdriver**: Filter option disappears after removing initial filter [#13607](https://github.com/grafana/grafana/issues/13607)
+* **Elasticsearch**: Fix no limit size in terms aggregation for alerting queries [#13172](https://github.com/grafana/grafana/issues/13172), thx [@Yukinoshita-Yukino](https://github.com/Yukinoshita-Yukino)
+* **InfluxDB**: Fix for annotation issue that caused text to be shown twice [#13553](https://github.com/grafana/grafana/issues/13553)
+* **Variables**: Fix nesting variables leads to exception and missing refresh [#13628](https://github.com/grafana/grafana/issues/13628)
+* **Variables**: Prometheus: Single letter labels are not supported [#13641](https://github.com/grafana/grafana/issues/13641)
+* **Graph**: Fix graph time formatting for Last 24h ranges [#13650](https://github.com/grafana/grafana/issues/13650)
+* **Playlist**: Fix cannot add dashboards with long names to playlist [#13464](https://github.com/grafana/grafana/issues/13464), thx [@neufeldtech](https://github.com/neufeldtech)
+* **HTTP API**: Fix /api/org/users so that query and limit querystrings works
+
+# 5.3.0 (2018-10-10)
+
+* **Stackdriver**: Filter wildcards and regex matching are not yet supported [#13495](https://github.com/grafana/grafana/issues/13495)
+* **Stackdriver**: Support the distribution metric type for heatmaps [#13559](https://github.com/grafana/grafana/issues/13559)
+* **Cloudwatch**: Automatically set graph yaxis unit [#13575](https://github.com/grafana/grafana/issues/13575), thx [@mtanda](https://github.com/mtanda)
+
+# 5.3.0-beta3 (2018-10-03)
+
+* **Stackdriver**: Fix for missing ngInject [#13511](https://github.com/grafana/grafana/pull/13511)
+* **Permissions**: Fix for broken permissions selector [#13507](https://github.com/grafana/grafana/issues/13507)
+* **Alerting**: Alert reminders deduping not working as expected when running multiple Grafana instances [#13492](https://github.com/grafana/grafana/issues/13492)
+
+# 5.3.0-beta2 (2018-10-01)
+
+### New Features
+
+* **Annotations**: Enable template variables in tagged annotations queries [#9735](https://github.com/grafana/grafana/issues/9735)
+* **Stackdriver**: Support for Google Stackdriver Datasource [#13289](https://github.com/grafana/grafana/pull/13289)
+
+### Minor
+
+* **Provisioning**: Dashboard Provisioning now support symlinks that changes target [#12534](https://github.com/grafana/grafana/issues/12534), thx [@auhlig](https://github.com/auhlig)
+* **OAuth**: Allow oauth email attribute name to be configurable [#12986](https://github.com/grafana/grafana/issues/12986), thx [@bobmshannon](https://github.com/bobmshannon)
+* **Tags**: Default sort order for GetDashboardTags [#11681](https://github.com/grafana/grafana/pull/11681), thx [@Jonnymcc](https://github.com/Jonnymcc)
+* **Prometheus**: Label completion queries respect dashboard time range [#12251](https://github.com/grafana/grafana/pull/12251), thx [@mtanda](https://github.com/mtanda)
+* **Prometheus**: Allow to display annotations based on Prometheus series value [#10159](https://github.com/grafana/grafana/issues/10159), thx [@mtanda](https://github.com/mtanda)
+* **Prometheus**: Adhoc-filtering for Prometheus dashboards [#13212](https://github.com/grafana/grafana/issues/13212)
+* **Singlestat**: Fix gauge display accuracy for percents [#13270](https://github.com/grafana/grafana/issues/13270), thx [@tianon](https://github.com/tianon)
+* **Dashboard**: Prevent auto refresh from starting when loading dashboard with absolute time range [#12030](https://github.com/grafana/grafana/issues/12030)
+* **Templating**: New templating variable type `Text box` that allows free text input [#3173](https://github.com/grafana/grafana/issues/3173)
* **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira)
+* **Alerting**: Fixes a bug where all alerts would send reminders after upgrade & restart [#13402](https://github.com/grafana/grafana/pull/13402)
+* **Alerting**: Concurrent render limit for graphs used in notifications [#13401](https://github.com/grafana/grafana/pull/13401)
+* **Postgres/MySQL/MSSQL**: Add support for replacing $__interval and $__interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx [@svenklemm](https://github.com/svenklemm)
# 5.3.0-beta1 (2018-09-06)
@@ -20,7 +76,7 @@
* **Profile**: List teams that the user is member of in current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
* **Configuration**: Allow auto-assigning users to specific organization (other than Main. Org) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
-* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
+* **CloudWatch**: GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
* **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
@@ -94,6 +150,10 @@ These are new features that's still being worked on and are in an experimental p
* **Frontend**: Convert all Frontend Karma tests to Jest tests [#12224](https://github.com/grafana/grafana/issues/12224)
* **Backend**: Upgrade to golang 1.11 [#13030](https://github.com/grafana/grafana/issues/13030)
+# 5.2.4 (2018-09-07)
+
+* **GrafanaCli**: Fixed issue with grafana-cli install plugin resulting in corrupt http response from source error. Fixes [#13079](https://github.com/grafana/grafana/issues/13079)
+
# 5.2.3 (2018-08-29)
### Important fix for LDAP & OAuth login vulnerability
@@ -298,7 +358,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
* **Dashboard**: Sizing and positioning of settings menu icons [#11572](https://github.com/grafana/grafana/pull/11572)
* **Dashboard**: Add search filter/tabs to new panel control [#10427](https://github.com/grafana/grafana/issues/10427)
* **Folders**: User with org viewer role should not be able to save/move dashboards in/to general folder [#11553](https://github.com/grafana/grafana/issues/11553)
-* **Influxdb**: Dont assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo)
+* **Influxdb**: Don't assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo)
### Tech
* Backend code simplification [#11613](https://github.com/grafana/grafana/pull/11613), thx [@knweiss](https://github.com/knweiss)
@@ -485,7 +545,7 @@ See [security announcement](https://community.grafana.com/t/grafana-5-2-3-and-4-
# 4.6.2 (2017-11-16)
## Important
-* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if your using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777)
+* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if you're using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777)
## Fixes
* **Color picker**: Bug after using textbox input field to change/paste color string [#9769](https://github.com/grafana/grafana/issues/9769)
@@ -1444,7 +1504,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated
**New features**
- [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site
-- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site
+- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embed a single graph on another web site
- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes
- [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views
- [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000000..8b2ba090fe1
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,56 @@
+
+# Contributing
+
+Grafana uses GitHub to manage contributions.
+Contributions take the form of pull requests that will be reviewed by the core team.
+
+* If you are a new contributor see: [Steps to Contribute](#steps-to-contribute)
+
+* If you have a trivial fix or improvement, go ahead and create a pull request.
+
+* If you plan to do something more involved, discuss your idea on the respective [issue](https://github.com/grafana/grafana/issues) or create a [new issue](https://github.com/grafana/grafana/issues/new) if it does not exist. This will avoid unnecessary work and surely give you and us a good deal of inspiration.
+
+
+## Steps to Contribute
+
+Should you wish to work on a GitHub issue, first check that it is not already assigned to someone. If it is free, claim it by commenting on the issue that you want to work on it. This is to prevent duplicated effort by contributors on the same issue.
+
+Please check the [`beginner friendly`](https://github.com/grafana/grafana/issues?q=is%3Aopen+is%3Aissue+label%3A%22beginner+friendly%22) label to find issues that are good for getting started. If you have questions about one of the issues, with or without the tag, please comment on them and one of the core team or the original poster will clarify it.
+
+
+
+## Setup
+
+Follow the setup guide in README.md
+
+### Rebuild frontend assets on source change
+```
+yarn watch
+```
+
+### Rerun tests on source change
+```
+yarn jest
+```
+
+### Run tests for backend assets before commit
+```
+test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"
+```
+
+### Run tests for frontend assets before commit
+```
+yarn test
+go test -v ./pkg/...
+```
+
+
+## Pull Request Checklist
+
+* Branch from the master branch and, if needed, rebase to the current master branch before submitting your pull request. If it doesn't merge cleanly with master you may be asked to rebase your changes.
+
+* Commits should be as small as possible, while ensuring that each commit is correct independently (i.e., each commit should compile and pass tests).
+
+* If your patch is not getting reviewed or you need a specific person to review it, you can @-reply a reviewer asking for a review in the pull request or a comment.
+
+* Add tests relevant to the fixed bug or new feature.
diff --git a/Gopkg.lock b/Gopkg.lock
index bd247d691dd..4286add847d 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -19,6 +19,12 @@
packages = ["."]
revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520"
+[[projects]]
+ branch = "master"
+ name = "github.com/VividCortex/mysqlerr"
+ packages = ["."]
+ revision = "6c6b55f8796f578c870b7e19bafb16103bc40095"
+
[[projects]]
name = "github.com/aws/aws-sdk-go"
packages = [
@@ -258,7 +264,7 @@
branch = "master"
name = "github.com/hashicorp/yamux"
packages = ["."]
- revision = "2658be15c5f05e76244154714161f17e3e77de2e"
+ revision = "7221087c3d281fda5f794e28c2ea4c6e4d5c4558"
[[projects]]
name = "github.com/inconshreveable/log15"
@@ -501,6 +507,8 @@
branch = "master"
name = "golang.org/x/crypto"
packages = [
+ "ed25519",
+ "ed25519/internal/edwards25519",
"md4",
"pbkdf2"
]
@@ -664,6 +672,16 @@
revision = "e6179049628164864e6e84e973cfb56335748dea"
version = "v2.3.2"
+[[projects]]
+ name = "gopkg.in/square/go-jose.v2"
+ packages = [
+ ".",
+ "cipher",
+ "json"
+ ]
+ revision = "ef984e69dd356202fd4e4910d4d9c24468bdf0b8"
+ version = "v2.1.9"
+
[[projects]]
name = "gopkg.in/yaml.v2"
packages = ["."]
@@ -673,6 +691,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
- inputs-digest = "81a37e747b875cf870c1b9486fa3147e704dea7db8ba86f7cb942d3ddc01d3e3"
+ inputs-digest = "6f7f271afd27f78b7d8ebe27436fee72c9925fb82a978bdc57fde44e01f3ca51"
solver-name = "gps-cdcl"
solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
index 6c91ec37221..e3cbdeabb5d 100644
--- a/Gopkg.toml
+++ b/Gopkg.toml
@@ -203,3 +203,11 @@ ignored = [
[[constraint]]
name = "github.com/denisenkom/go-mssqldb"
revision = "270bc3860bb94dd3a3ffd047377d746c5e276726"
+
+[[constraint]]
+ name = "github.com/VividCortex/mysqlerr"
+ branch = "master"
+
+[[constraint]]
+ name = "gopkg.in/square/go-jose.v2"
+ version = "2.1.9"
diff --git a/Gruntfile.js b/Gruntfile.js
index 8a71fb44148..2d5990b5f58 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -25,7 +25,6 @@ module.exports = function (grunt) {
}
}
- config.coverage = grunt.option('coverage');
config.phjs = grunt.option('phjsToRelease');
config.pkg.version = grunt.option('pkgVer') || config.pkg.version;
diff --git a/Makefile b/Makefile
index c6915409ed7..c9e51d897f3 100644
--- a/Makefile
+++ b/Makefile
@@ -43,6 +43,3 @@ test: test-go test-js
run:
./bin/grafana-server
-
-protoc:
- protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.
diff --git a/PLUGIN_DEV.md b/PLUGIN_DEV.md
index 4e2e080ebe6..168b21dbd88 100644
--- a/PLUGIN_DEV.md
+++ b/PLUGIN_DEV.md
@@ -6,8 +6,8 @@ upgrading Grafana please check here before creating an issue.
## Links
-- [Datasource plugin written in typescript](https://github.com/grafana/typescript-template-datasource)
-- [Simple json dataource plugin](https://github.com/grafana/simple-json-datasource)
+- [Datasource plugin written in TypeScript](https://github.com/grafana/typescript-template-datasource)
+- [Simple JSON datasource plugin](https://github.com/grafana/simple-json-datasource)
- [Plugin development guide](http://docs.grafana.org/plugins/developing/development/)
- [Webpack Grafana plugin template project](https://github.com/CorpGlory/grafana-plugin-template-webpack)
diff --git a/README.md b/README.md
index 133d9e50d07..1179385d10c 100644
--- a/README.md
+++ b/README.md
@@ -138,5 +138,5 @@ plugin development.
## License
-Grafana is distributed under Apache 2.0 License.
+Grafana is distributed under [Apache 2.0 License](https://github.com/grafana/grafana/blob/master/LICENSE.md).
diff --git a/build.go b/build.go
index 561dd70df0e..69fbf3bada8 100644
--- a/build.go
+++ b/build.go
@@ -22,6 +22,11 @@ import (
"time"
)
+const (
+ windows = "windows"
+ linux = "linux"
+)
+
var (
//versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`)
goarch string
@@ -110,17 +115,16 @@ func main() {
case "package":
grunt(gruntBuildArg("build")...)
grunt(gruntBuildArg("package")...)
- if goos == "linux" {
+ if goos == linux {
createLinuxPackages()
}
case "package-only":
grunt(gruntBuildArg("package")...)
- if goos == "linux" {
+ if goos == linux {
createLinuxPackages()
}
-
case "pkg-rpm":
grunt(gruntBuildArg("release")...)
createRpmPackages()
@@ -379,7 +383,7 @@ func ensureGoPath() {
}
func grunt(params ...string) {
- if runtime.GOOS == "windows" {
+ if runtime.GOOS == windows {
runPrint(`.\node_modules\.bin\grunt`, params...)
} else {
runPrint("./node_modules/.bin/grunt", params...)
@@ -417,11 +421,11 @@ func test(pkg string) {
func build(binaryName, pkg string, tags []string) {
binary := fmt.Sprintf("./bin/%s-%s/%s", goos, goarch, binaryName)
if isDev {
- //dont include os and arch in output path in dev environment
+ //don't include os and arch in output path in dev environment
binary = fmt.Sprintf("./bin/%s", binaryName)
}
- if goos == "windows" {
+ if goos == windows {
binary += ".exe"
}
@@ -485,11 +489,11 @@ func clean() {
func setBuildEnv() {
os.Setenv("GOOS", goos)
- if goos == "windows" {
+ if goos == windows {
// require windows >=7
os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601")
}
- if goarch != "amd64" || goos != "linux" {
+ if goarch != "amd64" || goos != linux {
// needed for all other archs
cgo = true
}
diff --git a/codecov.yml b/codecov.yml
deleted file mode 100644
index b2a839365ac..00000000000
--- a/codecov.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-coverage:
- precision: 2
- round: down
- range: "50...100"
-
- status:
- project: yes
- patch: yes
- changes: no
-
-comment: off
diff --git a/conf/defaults.ini b/conf/defaults.ini
index 85d0953c6af..eb8debc0094 100644
--- a/conf/defaults.ini
+++ b/conf/defaults.ini
@@ -321,6 +321,7 @@ allow_sign_up = true
client_id = some_id
client_secret = some_secret
scopes = user:email
+email_attribute_name = email:primary
auth_url =
token_url =
api_url =
@@ -473,6 +474,10 @@ error_or_timeout = alerting
# Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
nodata_or_nullvalues = no_data
+# Alert notifications can include images, but rendering many images at the same time can overload the server
+# This limit will protect the server from render overloading and make sure notifications are sent out quickly
+concurrent_render_limit = 5
+
#################################### Explore #############################
[explore]
# Enable the Explore section
diff --git a/conf/ldap.toml b/conf/ldap.toml
index 9a7088ed823..b684f2556d5 100644
--- a/conf/ldap.toml
+++ b/conf/ldap.toml
@@ -31,37 +31,11 @@ search_filter = "(cn=%s)"
# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]
-# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
-# This is done by enabling group_search_filter below. You must also set member_of= "cn"
-# in [servers.attributes] below.
-
-# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
-# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
-# below in such a way that the user's recursive group membership is considered.
-#
-# Nested Groups + Active Directory (AD) Example:
-#
-# AD groups store the Distinguished Names (DNs) of members, so your filter must
-# recursively search your groups for the authenticating user's DN. For example:
-#
-# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
-# group_search_filter_user_attribute = "distinguishedName"
-# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
-#
-# [servers.attributes]
-# ...
-# member_of = "distinguishedName"
-
-## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
+## For POSIX or LDAP setups that do not support the member_of attribute you can define the below settings
+## Please check the Grafana LDAP docs for examples
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
-## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
-## Defaults to the value of username in [server.attributes]
-## Valid options are any of your values in [servers.attributes]
-## If you are using nested groups you probably want to set this and member_of in
-## [servers.attributes] to "distinguishedName"
-# group_search_filter_user_attribute = "distinguishedName"
-## An array of the base DNs to search through for groups. Typically uses ou=groups
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
+# group_search_filter_user_attribute = "uid"
# Specify names of the ldap attributes your ldap uses
[servers.attributes]
diff --git a/conf/sample.ini b/conf/sample.ini
index 2ef254f79b9..e6a03718d19 100644
--- a/conf/sample.ini
+++ b/conf/sample.ini
@@ -393,6 +393,10 @@ log_queries =
# Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
;nodata_or_nullvalues = no_data
+# Alert notifications can include images, but rendering many images at the same time can overload the server
+# This limit will protect the server from render overloading and make sure notifications are sent out quickly
+;concurrent_render_limit = 5
+
#################################### Explore #############################
[explore]
# Enable the Explore section
@@ -431,7 +435,7 @@ log_queries =
;sampler_param = 1
#################################### Grafana.com integration ##########################
-# Url used to to import dashboards directly from Grafana.com
+# Url used to import dashboards directly from Grafana.com
[grafana_com]
;url = https://grafana.com
diff --git a/scripts/benchmarks/ab/ab_test.sh b/devenv/benchmarks/ab/ab_test.sh
similarity index 100%
rename from scripts/benchmarks/ab/ab_test.sh
rename to devenv/benchmarks/ab/ab_test.sh
diff --git a/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml b/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml
new file mode 100644
index 00000000000..1ede5dcd30a
--- /dev/null
+++ b/devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml
@@ -0,0 +1,9 @@
+apiVersion: 1
+
+providers:
+ - name: 'Bulk alerting dashboards'
+ folder: 'Bulk alerting dashboards'
+ type: file
+ options:
+ path: devenv/bulk_alerting_dashboards
+
diff --git a/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet b/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet
new file mode 100644
index 00000000000..a7acd57745d
--- /dev/null
+++ b/devenv/bulk_alerting_dashboards/bulkdash_alerting.jsonnet
@@ -0,0 +1,168 @@
+{
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": null,
+ "links": [],
+ "panels": [
+ {
+ "alert": {
+ "conditions": [
+ {
+ "evaluator": {
+ "params": [
+ 65
+ ],
+ "type": "gt"
+ },
+ "operator": {
+ "type": "and"
+ },
+ "query": {
+ "params": [
+ "A",
+ "5m",
+ "now"
+ ]
+ },
+ "reducer": {
+ "params": [],
+ "type": "avg"
+ },
+ "type": "query"
+ }
+ ],
+ "executionErrorState": "alerting",
+ "frequency": "10s",
+ "handler": 1,
+ "name": "bulk alerting",
+ "noDataState": "no_data",
+ "notifications": []
+ },
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-prometheus",
+ "fill": 1,
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 0
+ },
+ "id": 2,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "$$hashKey": "object:117",
+ "expr": "go_goroutines",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A"
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "critical",
+ "fill": true,
+ "line": true,
+ "op": "gt",
+ "value": 50
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "schemaVersion": 16,
+ "style": "dark",
+ "tags": [],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "New dashboard",
+ "uid": null,
+ "version": 0
+}
\ No newline at end of file
diff --git a/docker/create_docker_compose.sh b/devenv/create_docker_compose.sh
similarity index 94%
rename from docker/create_docker_compose.sh
rename to devenv/create_docker_compose.sh
index 9d28ede8e7e..5da9e8f5c8f 100755
--- a/docker/create_docker_compose.sh
+++ b/devenv/create_docker_compose.sh
@@ -1,13 +1,13 @@
#!/bin/bash
-blocks_dir=blocks
+blocks_dir=docker/blocks
docker_dir=docker
template_dir=templates
grafana_config_file=conf.tmp
grafana_config=config
-compose_header_file=compose_header.yml
+compose_header_file=docker/compose_header.yml
fig_file=docker-compose.yaml
fig_config=docker-compose.yaml
diff --git a/devenv/dev-dashboards/panel_tests_polystat.json b/devenv/dev-dashboards/panel_tests_polystat.json
new file mode 100644
index 00000000000..51d3085c438
--- /dev/null
+++ b/devenv/dev-dashboards/panel_tests_polystat.json
@@ -0,0 +1,3343 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "links": [],
+ "panels": [
+ {
+ "animationModes": [
+ {
+ "text": "Show All",
+ "value": "all"
+ },
+ {
+ "text": "Show Triggered",
+ "value": "triggered"
+ }
+ ],
+ "colors": [
+ "#299c46",
+ "rgba(237, 129, 40, 0.89)",
+ "#d44a3a"
+ ],
+ "d3DivId": "d3_svg_4",
+ "datasource": "gdev-testdata",
+ "decimals": 2,
+ "displayModes": [
+ {
+ "text": "Show All",
+ "value": "all"
+ },
+ {
+ "text": "Show Triggered",
+ "value": "triggered"
+ }
+ ],
+ "fontSizes": [
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 22,
+ 24,
+ 26,
+ 28,
+ 30,
+ 32,
+ 34,
+ 36,
+ 38,
+ 40,
+ 42,
+ 44,
+ 46,
+ 48,
+ 50,
+ 52,
+ 54,
+ 56,
+ 58,
+ 60,
+ 62,
+ 64,
+ 66,
+ 68,
+ 70
+ ],
+ "fontTypes": [
+ "Open Sans",
+ "Arial",
+ "Avant Garde",
+ "Bookman",
+ "Consolas",
+ "Courier",
+ "Courier New",
+ "Futura",
+ "Garamond",
+ "Helvetica",
+ "Palatino",
+ "Times",
+ "Times New Roman",
+ "Verdana"
+ ],
+ "format": "none",
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 0
+ },
+ "id": 4,
+ "links": [],
+ "notcolors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "operatorName": "avg",
+ "operatorOptions": [
+ {
+ "text": "Average",
+ "value": "avg"
+ },
+ {
+ "text": "Count",
+ "value": "count"
+ },
+ {
+ "text": "Current",
+ "value": "current"
+ },
+ {
+ "text": "Delta",
+ "value": "delta"
+ },
+ {
+ "text": "Difference",
+ "value": "diff"
+ },
+ {
+ "text": "First",
+ "value": "first"
+ },
+ {
+ "text": "Log Min",
+ "value": "logmin"
+ },
+ {
+ "text": "Max",
+ "value": "max"
+ },
+ {
+ "text": "Min",
+ "value": "min"
+ },
+ {
+ "text": "Name",
+ "value": "name"
+ },
+ {
+ "text": "Time of Last Point",
+ "value": "last_time"
+ },
+ {
+ "text": "Time Step",
+ "value": "time_step"
+ },
+ {
+ "text": "Total",
+ "value": "total"
+ }
+ ],
+ "polystat": {
+ "animationSpeed": 2500,
+ "columnAutoSize": true,
+ "columns": "",
+ "defaultClickThrough": "",
+ "defaultClickThroughSanitize": true,
+ "displayLimit": 100,
+ "fontAutoScale": true,
+ "fontSize": 12,
+ "globalDisplayMode": "all",
+ "globalOperatorName": "avg",
+ "gradientEnabled": true,
+ "hexagonSortByDirection": "asc",
+ "hexagonSortByField": "name",
+ "maxMetrics": 0,
+ "polygonBorderColor": "black",
+ "polygonBorderSize": 2,
+ "radius": "",
+ "radiusAutoSize": true,
+ "rowAutoSize": true,
+ "rows": "",
+ "shape": "hexagon_pointed_top",
+ "tooltipDisplayMode": "all",
+ "tooltipDisplayTextTriggeredEmpty": "OK",
+ "tooltipFontSize": 12,
+ "tooltipFontType": "Open Sans",
+ "tooltipPrimarySortDirection": "desc",
+ "tooltipPrimarySortField": "thresholdLevel",
+ "tooltipSecondarySortDirection": "desc",
+ "tooltipSecondarySortField": "value",
+ "tooltipTimestampEnabled": true
+ },
+ "savedComposites": [],
+ "savedOverrides": [],
+ "shapes": [
+ {
+ "text": "Hexagon Pointed Top",
+ "value": "hexagon_pointed_top"
+ },
+ {
+ "text": "Hexagon Flat Top",
+ "value": "hexagon_flat_top"
+ },
+ {
+ "text": "Circle",
+ "value": "circle"
+ },
+ {
+ "text": "Cross",
+ "value": "cross"
+ },
+ {
+ "text": "Diamond",
+ "value": "diamond"
+ },
+ {
+ "text": "Square",
+ "value": "square"
+ },
+ {
+ "text": "Star",
+ "value": "star"
+ },
+ {
+ "text": "Triangle",
+ "value": "triangle"
+ },
+ {
+ "text": "Wye",
+ "value": "wye"
+ }
+ ],
+ "sortDirections": [
+ {
+ "text": "Ascending",
+ "value": "asc"
+ },
+ {
+ "text": "Descending",
+ "value": "desc"
+ }
+ ],
+ "sortFields": [
+ {
+ "text": "Name",
+ "value": "name"
+ },
+ {
+ "text": "Threshold Level",
+ "value": "thresholdLevel"
+ },
+ {
+ "text": "Value",
+ "value": "value"
+ }
+ ],
+ "svgContainer": {},
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "B",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "C",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "D",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "E",
+ "scenarioId": "random_walk"
+ }
+ ],
+ "thresholdStates": [
+ {
+ "text": "ok",
+ "value": 0
+ },
+ {
+ "text": "warning",
+ "value": 1
+ },
+ {
+ "text": "critical",
+ "value": 2
+ },
+ {
+ "text": "custom",
+ "value": 3
+ }
+ ],
+ "title": "Poor use of space",
+ "type": "grafana-polystat-panel",
+ "unitFormats": [
+ {
+ "submenu": [
+ {
+ "text": "none",
+ "value": "none"
+ },
+ {
+ "text": "short",
+ "value": "short"
+ },
+ {
+ "text": "percent (0-100)",
+ "value": "percent"
+ },
+ {
+ "text": "percent (0.0-1.0)",
+ "value": "percentunit"
+ },
+ {
+ "text": "Humidity (%H)",
+ "value": "humidity"
+ },
+ {
+ "text": "decibel",
+ "value": "dB"
+ },
+ {
+ "text": "hexadecimal (0x)",
+ "value": "hex0x"
+ },
+ {
+ "text": "hexadecimal",
+ "value": "hex"
+ },
+ {
+ "text": "scientific notation",
+ "value": "sci"
+ },
+ {
+ "text": "locale format",
+ "value": "locale"
+ }
+ ],
+ "text": "none"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Dollars ($)",
+ "value": "currencyUSD"
+ },
+ {
+ "text": "Pounds (£)",
+ "value": "currencyGBP"
+ },
+ {
+ "text": "Euro (€)",
+ "value": "currencyEUR"
+ },
+ {
+ "text": "Yen (¥)",
+ "value": "currencyJPY"
+ },
+ {
+ "text": "Rubles (₽)",
+ "value": "currencyRUB"
+ },
+ {
+ "text": "Hryvnias (₴)",
+ "value": "currencyUAH"
+ },
+ {
+ "text": "Real (R$)",
+ "value": "currencyBRL"
+ },
+ {
+ "text": "Danish Krone (kr)",
+ "value": "currencyDKK"
+ },
+ {
+ "text": "Icelandic Króna (kr)",
+ "value": "currencyISK"
+ },
+ {
+ "text": "Norwegian Krone (kr)",
+ "value": "currencyNOK"
+ },
+ {
+ "text": "Swedish Krona (kr)",
+ "value": "currencySEK"
+ },
+ {
+ "text": "Czech koruna (czk)",
+ "value": "currencyCZK"
+ },
+ {
+ "text": "Swiss franc (CHF)",
+ "value": "currencyCHF"
+ },
+ {
+ "text": "Polish Złoty (PLN)",
+ "value": "currencyPLN"
+ },
+ {
+ "text": "Bitcoin (฿)",
+ "value": "currencyBTC"
+ }
+ ],
+ "text": "currency"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Hertz (1/s)",
+ "value": "hertz"
+ },
+ {
+ "text": "nanoseconds (ns)",
+ "value": "ns"
+ },
+ {
+ "text": "microseconds (µs)",
+ "value": "µs"
+ },
+ {
+ "text": "milliseconds (ms)",
+ "value": "ms"
+ },
+ {
+ "text": "seconds (s)",
+ "value": "s"
+ },
+ {
+ "text": "minutes (m)",
+ "value": "m"
+ },
+ {
+ "text": "hours (h)",
+ "value": "h"
+ },
+ {
+ "text": "days (d)",
+ "value": "d"
+ },
+ {
+ "text": "duration (ms)",
+ "value": "dtdurationms"
+ },
+ {
+ "text": "duration (s)",
+ "value": "dtdurations"
+ },
+ {
+ "text": "duration (hh:mm:ss)",
+ "value": "dthms"
+ },
+ {
+ "text": "Timeticks (s/100)",
+ "value": "timeticks"
+ }
+ ],
+ "text": "time"
+ },
+ {
+ "submenu": [
+ {
+ "text": "YYYY-MM-DD HH:mm:ss",
+ "value": "dateTimeAsIso"
+ },
+ {
+ "text": "DD/MM/YYYY h:mm:ss a",
+ "value": "dateTimeAsUS"
+ },
+ {
+ "text": "From Now",
+ "value": "dateTimeFromNow"
+ }
+ ],
+ "text": "date & time"
+ },
+ {
+ "submenu": [
+ {
+ "text": "bits",
+ "value": "bits"
+ },
+ {
+ "text": "bytes",
+ "value": "bytes"
+ },
+ {
+ "text": "kibibytes",
+ "value": "kbytes"
+ },
+ {
+ "text": "mebibytes",
+ "value": "mbytes"
+ },
+ {
+ "text": "gibibytes",
+ "value": "gbytes"
+ }
+ ],
+ "text": "data (IEC)"
+ },
+ {
+ "submenu": [
+ {
+ "text": "bits",
+ "value": "decbits"
+ },
+ {
+ "text": "bytes",
+ "value": "decbytes"
+ },
+ {
+ "text": "kilobytes",
+ "value": "deckbytes"
+ },
+ {
+ "text": "megabytes",
+ "value": "decmbytes"
+ },
+ {
+ "text": "gigabytes",
+ "value": "decgbytes"
+ }
+ ],
+ "text": "data (Metric)"
+ },
+ {
+ "submenu": [
+ {
+ "text": "packets/sec",
+ "value": "pps"
+ },
+ {
+ "text": "bits/sec",
+ "value": "bps"
+ },
+ {
+ "text": "bytes/sec",
+ "value": "Bps"
+ },
+ {
+ "text": "kilobits/sec",
+ "value": "Kbits"
+ },
+ {
+ "text": "kilobytes/sec",
+ "value": "KBs"
+ },
+ {
+ "text": "megabits/sec",
+ "value": "Mbits"
+ },
+ {
+ "text": "megabytes/sec",
+ "value": "MBs"
+ },
+ {
+ "text": "gigabytes/sec",
+ "value": "GBs"
+ },
+ {
+ "text": "gigabits/sec",
+ "value": "Gbits"
+ }
+ ],
+ "text": "data rate"
+ },
+ {
+ "submenu": [
+ {
+ "text": "hashes/sec",
+ "value": "Hs"
+ },
+ {
+ "text": "kilohashes/sec",
+ "value": "KHs"
+ },
+ {
+ "text": "megahashes/sec",
+ "value": "MHs"
+ },
+ {
+ "text": "gigahashes/sec",
+ "value": "GHs"
+ },
+ {
+ "text": "terahashes/sec",
+ "value": "THs"
+ },
+ {
+ "text": "petahashes/sec",
+ "value": "PHs"
+ },
+ {
+ "text": "exahashes/sec",
+ "value": "EHs"
+ }
+ ],
+ "text": "hash rate"
+ },
+ {
+ "submenu": [
+ {
+ "text": "ops/sec (ops)",
+ "value": "ops"
+ },
+ {
+ "text": "requests/sec (rps)",
+ "value": "reqps"
+ },
+ {
+ "text": "reads/sec (rps)",
+ "value": "rps"
+ },
+ {
+ "text": "writes/sec (wps)",
+ "value": "wps"
+ },
+ {
+ "text": "I/O ops/sec (iops)",
+ "value": "iops"
+ },
+ {
+ "text": "ops/min (opm)",
+ "value": "opm"
+ },
+ {
+ "text": "reads/min (rpm)",
+ "value": "rpm"
+ },
+ {
+ "text": "writes/min (wpm)",
+ "value": "wpm"
+ }
+ ],
+ "text": "throughput"
+ },
+ {
+ "submenu": [
+ {
+ "text": "millimetre (mm)",
+ "value": "lengthmm"
+ },
+ {
+ "text": "meter (m)",
+ "value": "lengthm"
+ },
+ {
+ "text": "feet (ft)",
+ "value": "lengthft"
+ },
+ {
+ "text": "kilometer (km)",
+ "value": "lengthkm"
+ },
+ {
+ "text": "mile (mi)",
+ "value": "lengthmi"
+ }
+ ],
+ "text": "length"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Square Meters (m²)",
+ "value": "areaM2"
+ },
+ {
+ "text": "Square Feet (ft²)",
+ "value": "areaF2"
+ },
+ {
+ "text": "Square Miles (mi²)",
+ "value": "areaMI2"
+ }
+ ],
+ "text": "area"
+ },
+ {
+ "submenu": [
+ {
+ "text": "milligram (mg)",
+ "value": "massmg"
+ },
+ {
+ "text": "gram (g)",
+ "value": "massg"
+ },
+ {
+ "text": "kilogram (kg)",
+ "value": "masskg"
+ },
+ {
+ "text": "metric ton (t)",
+ "value": "masst"
+ }
+ ],
+ "text": "mass"
+ },
+ {
+ "submenu": [
+ {
+ "text": "metres/second (m/s)",
+ "value": "velocityms"
+ },
+ {
+ "text": "kilometers/hour (km/h)",
+ "value": "velocitykmh"
+ },
+ {
+ "text": "miles/hour (mph)",
+ "value": "velocitymph"
+ },
+ {
+ "text": "knot (kn)",
+ "value": "velocityknot"
+ }
+ ],
+ "text": "velocity"
+ },
+ {
+ "submenu": [
+ {
+ "text": "millilitre (mL)",
+ "value": "mlitre"
+ },
+ {
+ "text": "litre (L)",
+ "value": "litre"
+ },
+ {
+ "text": "cubic metre",
+ "value": "m3"
+ },
+ {
+ "text": "Normal cubic metre",
+ "value": "Nm3"
+ },
+ {
+ "text": "cubic decimetre",
+ "value": "dm3"
+ },
+ {
+ "text": "gallons",
+ "value": "gallons"
+ }
+ ],
+ "text": "volume"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Watt (W)",
+ "value": "watt"
+ },
+ {
+ "text": "Kilowatt (kW)",
+ "value": "kwatt"
+ },
+ {
+ "text": "Milliwatt (mW)",
+ "value": "mwatt"
+ },
+ {
+ "text": "Watt per square metre (W/m²)",
+ "value": "Wm2"
+ },
+ {
+ "text": "Volt-ampere (VA)",
+ "value": "voltamp"
+ },
+ {
+ "text": "Kilovolt-ampere (kVA)",
+ "value": "kvoltamp"
+ },
+ {
+ "text": "Volt-ampere reactive (var)",
+ "value": "voltampreact"
+ },
+ {
+ "text": "Kilovolt-ampere reactive (kvar)",
+ "value": "kvoltampreact"
+ },
+ {
+ "text": "Watt-hour (Wh)",
+ "value": "watth"
+ },
+ {
+ "text": "Kilowatt-hour (kWh)",
+ "value": "kwatth"
+ },
+ {
+ "text": "Kilowatt-min (kWm)",
+ "value": "kwattm"
+ },
+ {
+ "text": "Joule (J)",
+ "value": "joule"
+ },
+ {
+ "text": "Electron volt (eV)",
+ "value": "ev"
+ },
+ {
+ "text": "Ampere (A)",
+ "value": "amp"
+ },
+ {
+ "text": "Kiloampere (kA)",
+ "value": "kamp"
+ },
+ {
+ "text": "Milliampere (mA)",
+ "value": "mamp"
+ },
+ {
+ "text": "Volt (V)",
+ "value": "volt"
+ },
+ {
+ "text": "Kilovolt (kV)",
+ "value": "kvolt"
+ },
+ {
+ "text": "Millivolt (mV)",
+ "value": "mvolt"
+ },
+ {
+ "text": "Decibel-milliwatt (dBm)",
+ "value": "dBm"
+ },
+ {
+ "text": "Ohm (Ω)",
+ "value": "ohm"
+ },
+ {
+ "text": "Lumens (Lm)",
+ "value": "lumens"
+ }
+ ],
+ "text": "energy"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Celsius (°C)",
+ "value": "celsius"
+ },
+ {
+ "text": "Farenheit (°F)",
+ "value": "farenheit"
+ },
+ {
+ "text": "Kelvin (K)",
+ "value": "kelvin"
+ }
+ ],
+ "text": "temperature"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Millibars",
+ "value": "pressurembar"
+ },
+ {
+ "text": "Bars",
+ "value": "pressurebar"
+ },
+ {
+ "text": "Kilobars",
+ "value": "pressurekbar"
+ },
+ {
+ "text": "Hectopascals",
+ "value": "pressurehpa"
+ },
+ {
+ "text": "Kilopascals",
+ "value": "pressurekpa"
+ },
+ {
+ "text": "Inches of mercury",
+ "value": "pressurehg"
+ },
+ {
+ "text": "PSI",
+ "value": "pressurepsi"
+ }
+ ],
+ "text": "pressure"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Newton-meters (Nm)",
+ "value": "forceNm"
+ },
+ {
+ "text": "Kilonewton-meters (kNm)",
+ "value": "forcekNm"
+ },
+ {
+ "text": "Newtons (N)",
+ "value": "forceN"
+ },
+ {
+ "text": "Kilonewtons (kN)",
+ "value": "forcekN"
+ }
+ ],
+ "text": "force"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Gallons/min (gpm)",
+ "value": "flowgpm"
+ },
+ {
+ "text": "Cubic meters/sec (cms)",
+ "value": "flowcms"
+ },
+ {
+ "text": "Cubic feet/sec (cfs)",
+ "value": "flowcfs"
+ },
+ {
+ "text": "Cubic feet/min (cfm)",
+ "value": "flowcfm"
+ },
+ {
+ "text": "Litre/hour",
+ "value": "litreh"
+ },
+ {
+ "text": "Litre/min (l/min)",
+ "value": "flowlpm"
+ },
+ {
+ "text": "milliLitre/min (mL/min)",
+ "value": "flowmlpm"
+ }
+ ],
+ "text": "flow"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Degrees (°)",
+ "value": "degree"
+ },
+ {
+ "text": "Radians",
+ "value": "radian"
+ },
+ {
+ "text": "Gradian",
+ "value": "grad"
+ }
+ ],
+ "text": "angle"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Meters/sec²",
+ "value": "accMS2"
+ },
+ {
+ "text": "Feet/sec²",
+ "value": "accFS2"
+ },
+ {
+ "text": "G unit",
+ "value": "accG"
+ }
+ ],
+ "text": "acceleration"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Becquerel (Bq)",
+ "value": "radbq"
+ },
+ {
+ "text": "curie (Ci)",
+ "value": "radci"
+ },
+ {
+ "text": "Gray (Gy)",
+ "value": "radgy"
+ },
+ {
+ "text": "rad",
+ "value": "radrad"
+ },
+ {
+ "text": "Sievert (Sv)",
+ "value": "radsv"
+ },
+ {
+ "text": "rem",
+ "value": "radrem"
+ },
+ {
+ "text": "Exposure (C/kg)",
+ "value": "radexpckg"
+ },
+ {
+ "text": "roentgen (R)",
+ "value": "radr"
+ },
+ {
+ "text": "Sievert/hour (Sv/h)",
+ "value": "radsvh"
+ }
+ ],
+ "text": "radiation"
+ },
+ {
+ "submenu": [
+ {
+ "text": "parts-per-million (ppm)",
+ "value": "ppm"
+ },
+ {
+ "text": "parts-per-billion (ppb)",
+ "value": "conppb"
+ },
+ {
+ "text": "nanogram per cubic metre (ng/m³)",
+ "value": "conngm3"
+ },
+ {
+ "text": "nanogram per normal cubic metre (ng/Nm³)",
+ "value": "conngNm3"
+ },
+ {
+ "text": "microgram per cubic metre (μg/m³)",
+ "value": "conμgm3"
+ },
+ {
+ "text": "microgram per normal cubic metre (μg/Nm³)",
+ "value": "conμgNm3"
+ },
+ {
+ "text": "milligram per cubic metre (mg/m³)",
+ "value": "conmgm3"
+ },
+ {
+ "text": "milligram per normal cubic metre (mg/Nm³)",
+ "value": "conmgNm3"
+ },
+ {
+ "text": "gram per cubic metre (g/m³)",
+ "value": "congm3"
+ },
+ {
+ "text": "gram per normal cubic metre (g/Nm³)",
+ "value": "congNm3"
+ }
+ ],
+ "text": "concentration"
+ }
+ ]
+ },
+ {
+ "animationModes": [
+ {
+ "text": "Show All",
+ "value": "all"
+ },
+ {
+ "text": "Show Triggered",
+ "value": "triggered"
+ }
+ ],
+ "colors": [
+ "#299c46",
+ "rgba(237, 129, 40, 0.89)",
+ "#d44a3a"
+ ],
+ "d3DivId": "d3_svg_5",
+ "datasource": "gdev-testdata",
+ "decimals": 2,
+ "displayModes": [
+ {
+ "text": "Show All",
+ "value": "all"
+ },
+ {
+ "text": "Show Triggered",
+ "value": "triggered"
+ }
+ ],
+ "fontSizes": [
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 22,
+ 24,
+ 26,
+ 28,
+ 30,
+ 32,
+ 34,
+ 36,
+ 38,
+ 40,
+ 42,
+ 44,
+ 46,
+ 48,
+ 50,
+ 52,
+ 54,
+ 56,
+ 58,
+ 60,
+ 62,
+ 64,
+ 66,
+ 68,
+ 70
+ ],
+ "fontTypes": [
+ "Open Sans",
+ "Arial",
+ "Avant Garde",
+ "Bookman",
+ "Consolas",
+ "Courier",
+ "Courier New",
+ "Futura",
+ "Garamond",
+ "Helvetica",
+ "Palatino",
+ "Times",
+ "Times New Roman",
+ "Verdana"
+ ],
+ "format": "none",
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 12,
+ "y": 0
+ },
+ "id": 5,
+ "links": [],
+ "notcolors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "operatorName": "avg",
+ "operatorOptions": [
+ {
+ "text": "Average",
+ "value": "avg"
+ },
+ {
+ "text": "Count",
+ "value": "count"
+ },
+ {
+ "text": "Current",
+ "value": "current"
+ },
+ {
+ "text": "Delta",
+ "value": "delta"
+ },
+ {
+ "text": "Difference",
+ "value": "diff"
+ },
+ {
+ "text": "First",
+ "value": "first"
+ },
+ {
+ "text": "Log Min",
+ "value": "logmin"
+ },
+ {
+ "text": "Max",
+ "value": "max"
+ },
+ {
+ "text": "Min",
+ "value": "min"
+ },
+ {
+ "text": "Name",
+ "value": "name"
+ },
+ {
+ "text": "Time of Last Point",
+ "value": "last_time"
+ },
+ {
+ "text": "Time Step",
+ "value": "time_step"
+ },
+ {
+ "text": "Total",
+ "value": "total"
+ }
+ ],
+ "polystat": {
+ "animationSpeed": 2500,
+ "columnAutoSize": true,
+ "columns": "",
+ "defaultClickThrough": "",
+ "defaultClickThroughSanitize": true,
+ "displayLimit": 100,
+ "fontAutoScale": true,
+ "fontSize": 12,
+ "globalDisplayMode": "all",
+ "globalOperatorName": "avg",
+ "gradientEnabled": true,
+ "hexagonSortByDirection": "asc",
+ "hexagonSortByField": "name",
+ "maxMetrics": 0,
+ "polygonBorderColor": "black",
+ "polygonBorderSize": 2,
+ "radius": "",
+ "radiusAutoSize": true,
+ "rowAutoSize": true,
+ "rows": "",
+ "shape": "hexagon_pointed_top",
+ "tooltipDisplayMode": "all",
+ "tooltipDisplayTextTriggeredEmpty": "OK",
+ "tooltipFontSize": 12,
+ "tooltipFontType": "Open Sans",
+ "tooltipPrimarySortDirection": "desc",
+ "tooltipPrimarySortField": "thresholdLevel",
+ "tooltipSecondarySortDirection": "desc",
+ "tooltipSecondarySortField": "value",
+ "tooltipTimestampEnabled": true
+ },
+ "savedComposites": [
+ {
+ "compositeName": "comp",
+ "members": [
+ {
+ "seriesName": "A-series"
+ },
+ {
+ "seriesName": "B-series"
+ }
+ ],
+ "enabled": true,
+ "clickThrough": "",
+ "hideMembers": true,
+ "showName": true,
+ "showValue": true,
+ "animateMode": "all",
+ "thresholdLevel": 0,
+ "sanitizeURLEnabled": true,
+ "sanitizedURL": ""
+ }
+ ],
+ "savedOverrides": [],
+ "shapes": [
+ {
+ "text": "Hexagon Pointed Top",
+ "value": "hexagon_pointed_top"
+ },
+ {
+ "text": "Hexagon Flat Top",
+ "value": "hexagon_flat_top"
+ },
+ {
+ "text": "Circle",
+ "value": "circle"
+ },
+ {
+ "text": "Cross",
+ "value": "cross"
+ },
+ {
+ "text": "Diamond",
+ "value": "diamond"
+ },
+ {
+ "text": "Square",
+ "value": "square"
+ },
+ {
+ "text": "Star",
+ "value": "star"
+ },
+ {
+ "text": "Triangle",
+ "value": "triangle"
+ },
+ {
+ "text": "Wye",
+ "value": "wye"
+ }
+ ],
+ "sortDirections": [
+ {
+ "text": "Ascending",
+ "value": "asc"
+ },
+ {
+ "text": "Descending",
+ "value": "desc"
+ }
+ ],
+ "sortFields": [
+ {
+ "text": "Name",
+ "value": "name"
+ },
+ {
+ "text": "Threshold Level",
+ "value": "thresholdLevel"
+ },
+ {
+ "text": "Value",
+ "value": "value"
+ }
+ ],
+ "svgContainer": {},
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "B",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "C",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "D",
+ "scenarioId": "random_walk"
+ },
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "E",
+ "scenarioId": "random_walk"
+ }
+ ],
+ "thresholdStates": [
+ {
+ "text": "ok",
+ "value": 0
+ },
+ {
+ "text": "warning",
+ "value": 1
+ },
+ {
+ "text": "critical",
+ "value": 2
+ },
+ {
+ "text": "custom",
+ "value": 3
+ }
+ ],
+ "title": "Composite crash",
+ "type": "grafana-polystat-panel",
+ "unitFormats": [
+ {
+ "submenu": [
+ {
+ "text": "none",
+ "value": "none"
+ },
+ {
+ "text": "short",
+ "value": "short"
+ },
+ {
+ "text": "percent (0-100)",
+ "value": "percent"
+ },
+ {
+ "text": "percent (0.0-1.0)",
+ "value": "percentunit"
+ },
+ {
+ "text": "Humidity (%H)",
+ "value": "humidity"
+ },
+ {
+ "text": "decibel",
+ "value": "dB"
+ },
+ {
+ "text": "hexadecimal (0x)",
+ "value": "hex0x"
+ },
+ {
+ "text": "hexadecimal",
+ "value": "hex"
+ },
+ {
+ "text": "scientific notation",
+ "value": "sci"
+ },
+ {
+ "text": "locale format",
+ "value": "locale"
+ }
+ ],
+ "text": "none"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Dollars ($)",
+ "value": "currencyUSD"
+ },
+ {
+ "text": "Pounds (£)",
+ "value": "currencyGBP"
+ },
+ {
+ "text": "Euro (€)",
+ "value": "currencyEUR"
+ },
+ {
+ "text": "Yen (¥)",
+ "value": "currencyJPY"
+ },
+ {
+ "text": "Rubles (₽)",
+ "value": "currencyRUB"
+ },
+ {
+ "text": "Hryvnias (₴)",
+ "value": "currencyUAH"
+ },
+ {
+ "text": "Real (R$)",
+ "value": "currencyBRL"
+ },
+ {
+ "text": "Danish Krone (kr)",
+ "value": "currencyDKK"
+ },
+ {
+ "text": "Icelandic Króna (kr)",
+ "value": "currencyISK"
+ },
+ {
+ "text": "Norwegian Krone (kr)",
+ "value": "currencyNOK"
+ },
+ {
+ "text": "Swedish Krona (kr)",
+ "value": "currencySEK"
+ },
+ {
+ "text": "Czech koruna (czk)",
+ "value": "currencyCZK"
+ },
+ {
+ "text": "Swiss franc (CHF)",
+ "value": "currencyCHF"
+ },
+ {
+ "text": "Polish Złoty (PLN)",
+ "value": "currencyPLN"
+ },
+ {
+ "text": "Bitcoin (฿)",
+ "value": "currencyBTC"
+ }
+ ],
+ "text": "currency"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Hertz (1/s)",
+ "value": "hertz"
+ },
+ {
+ "text": "nanoseconds (ns)",
+ "value": "ns"
+ },
+ {
+ "text": "microseconds (µs)",
+ "value": "µs"
+ },
+ {
+ "text": "milliseconds (ms)",
+ "value": "ms"
+ },
+ {
+ "text": "seconds (s)",
+ "value": "s"
+ },
+ {
+ "text": "minutes (m)",
+ "value": "m"
+ },
+ {
+ "text": "hours (h)",
+ "value": "h"
+ },
+ {
+ "text": "days (d)",
+ "value": "d"
+ },
+ {
+ "text": "duration (ms)",
+ "value": "dtdurationms"
+ },
+ {
+ "text": "duration (s)",
+ "value": "dtdurations"
+ },
+ {
+ "text": "duration (hh:mm:ss)",
+ "value": "dthms"
+ },
+ {
+ "text": "Timeticks (s/100)",
+ "value": "timeticks"
+ }
+ ],
+ "text": "time"
+ },
+ {
+ "submenu": [
+ {
+ "text": "YYYY-MM-DD HH:mm:ss",
+ "value": "dateTimeAsIso"
+ },
+ {
+ "text": "DD/MM/YYYY h:mm:ss a",
+ "value": "dateTimeAsUS"
+ },
+ {
+ "text": "From Now",
+ "value": "dateTimeFromNow"
+ }
+ ],
+ "text": "date & time"
+ },
+ {
+ "submenu": [
+ {
+ "text": "bits",
+ "value": "bits"
+ },
+ {
+ "text": "bytes",
+ "value": "bytes"
+ },
+ {
+ "text": "kibibytes",
+ "value": "kbytes"
+ },
+ {
+ "text": "mebibytes",
+ "value": "mbytes"
+ },
+ {
+ "text": "gibibytes",
+ "value": "gbytes"
+ }
+ ],
+ "text": "data (IEC)"
+ },
+ {
+ "submenu": [
+ {
+ "text": "bits",
+ "value": "decbits"
+ },
+ {
+ "text": "bytes",
+ "value": "decbytes"
+ },
+ {
+ "text": "kilobytes",
+ "value": "deckbytes"
+ },
+ {
+ "text": "megabytes",
+ "value": "decmbytes"
+ },
+ {
+ "text": "gigabytes",
+ "value": "decgbytes"
+ }
+ ],
+ "text": "data (Metric)"
+ },
+ {
+ "submenu": [
+ {
+ "text": "packets/sec",
+ "value": "pps"
+ },
+ {
+ "text": "bits/sec",
+ "value": "bps"
+ },
+ {
+ "text": "bytes/sec",
+ "value": "Bps"
+ },
+ {
+ "text": "kilobits/sec",
+ "value": "Kbits"
+ },
+ {
+ "text": "kilobytes/sec",
+ "value": "KBs"
+ },
+ {
+ "text": "megabits/sec",
+ "value": "Mbits"
+ },
+ {
+ "text": "megabytes/sec",
+ "value": "MBs"
+ },
+ {
+ "text": "gigabytes/sec",
+ "value": "GBs"
+ },
+ {
+ "text": "gigabits/sec",
+ "value": "Gbits"
+ }
+ ],
+ "text": "data rate"
+ },
+ {
+ "submenu": [
+ {
+ "text": "hashes/sec",
+ "value": "Hs"
+ },
+ {
+ "text": "kilohashes/sec",
+ "value": "KHs"
+ },
+ {
+ "text": "megahashes/sec",
+ "value": "MHs"
+ },
+ {
+ "text": "gigahashes/sec",
+ "value": "GHs"
+ },
+ {
+ "text": "terahashes/sec",
+ "value": "THs"
+ },
+ {
+ "text": "petahashes/sec",
+ "value": "PHs"
+ },
+ {
+ "text": "exahashes/sec",
+ "value": "EHs"
+ }
+ ],
+ "text": "hash rate"
+ },
+ {
+ "submenu": [
+ {
+ "text": "ops/sec (ops)",
+ "value": "ops"
+ },
+ {
+ "text": "requests/sec (rps)",
+ "value": "reqps"
+ },
+ {
+ "text": "reads/sec (rps)",
+ "value": "rps"
+ },
+ {
+ "text": "writes/sec (wps)",
+ "value": "wps"
+ },
+ {
+ "text": "I/O ops/sec (iops)",
+ "value": "iops"
+ },
+ {
+ "text": "ops/min (opm)",
+ "value": "opm"
+ },
+ {
+ "text": "reads/min (rpm)",
+ "value": "rpm"
+ },
+ {
+ "text": "writes/min (wpm)",
+ "value": "wpm"
+ }
+ ],
+ "text": "throughput"
+ },
+ {
+ "submenu": [
+ {
+ "text": "millimetre (mm)",
+ "value": "lengthmm"
+ },
+ {
+ "text": "meter (m)",
+ "value": "lengthm"
+ },
+ {
+ "text": "feet (ft)",
+ "value": "lengthft"
+ },
+ {
+ "text": "kilometer (km)",
+ "value": "lengthkm"
+ },
+ {
+ "text": "mile (mi)",
+ "value": "lengthmi"
+ }
+ ],
+ "text": "length"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Square Meters (m²)",
+ "value": "areaM2"
+ },
+ {
+ "text": "Square Feet (ft²)",
+ "value": "areaF2"
+ },
+ {
+ "text": "Square Miles (mi²)",
+ "value": "areaMI2"
+ }
+ ],
+ "text": "area"
+ },
+ {
+ "submenu": [
+ {
+ "text": "milligram (mg)",
+ "value": "massmg"
+ },
+ {
+ "text": "gram (g)",
+ "value": "massg"
+ },
+ {
+ "text": "kilogram (kg)",
+ "value": "masskg"
+ },
+ {
+ "text": "metric ton (t)",
+ "value": "masst"
+ }
+ ],
+ "text": "mass"
+ },
+ {
+ "submenu": [
+ {
+ "text": "metres/second (m/s)",
+ "value": "velocityms"
+ },
+ {
+ "text": "kilometers/hour (km/h)",
+ "value": "velocitykmh"
+ },
+ {
+ "text": "miles/hour (mph)",
+ "value": "velocitymph"
+ },
+ {
+ "text": "knot (kn)",
+ "value": "velocityknot"
+ }
+ ],
+ "text": "velocity"
+ },
+ {
+ "submenu": [
+ {
+ "text": "millilitre (mL)",
+ "value": "mlitre"
+ },
+ {
+ "text": "litre (L)",
+ "value": "litre"
+ },
+ {
+ "text": "cubic metre",
+ "value": "m3"
+ },
+ {
+ "text": "Normal cubic metre",
+ "value": "Nm3"
+ },
+ {
+ "text": "cubic decimetre",
+ "value": "dm3"
+ },
+ {
+ "text": "gallons",
+ "value": "gallons"
+ }
+ ],
+ "text": "volume"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Watt (W)",
+ "value": "watt"
+ },
+ {
+ "text": "Kilowatt (kW)",
+ "value": "kwatt"
+ },
+ {
+ "text": "Milliwatt (mW)",
+ "value": "mwatt"
+ },
+ {
+ "text": "Watt per square metre (W/m²)",
+ "value": "Wm2"
+ },
+ {
+ "text": "Volt-ampere (VA)",
+ "value": "voltamp"
+ },
+ {
+ "text": "Kilovolt-ampere (kVA)",
+ "value": "kvoltamp"
+ },
+ {
+ "text": "Volt-ampere reactive (var)",
+ "value": "voltampreact"
+ },
+ {
+ "text": "Kilovolt-ampere reactive (kvar)",
+ "value": "kvoltampreact"
+ },
+ {
+ "text": "Watt-hour (Wh)",
+ "value": "watth"
+ },
+ {
+ "text": "Kilowatt-hour (kWh)",
+ "value": "kwatth"
+ },
+ {
+ "text": "Kilowatt-min (kWm)",
+ "value": "kwattm"
+ },
+ {
+ "text": "Joule (J)",
+ "value": "joule"
+ },
+ {
+ "text": "Electron volt (eV)",
+ "value": "ev"
+ },
+ {
+ "text": "Ampere (A)",
+ "value": "amp"
+ },
+ {
+ "text": "Kiloampere (kA)",
+ "value": "kamp"
+ },
+ {
+ "text": "Milliampere (mA)",
+ "value": "mamp"
+ },
+ {
+ "text": "Volt (V)",
+ "value": "volt"
+ },
+ {
+ "text": "Kilovolt (kV)",
+ "value": "kvolt"
+ },
+ {
+ "text": "Millivolt (mV)",
+ "value": "mvolt"
+ },
+ {
+ "text": "Decibel-milliwatt (dBm)",
+ "value": "dBm"
+ },
+ {
+ "text": "Ohm (Ω)",
+ "value": "ohm"
+ },
+ {
+ "text": "Lumens (Lm)",
+ "value": "lumens"
+ }
+ ],
+ "text": "energy"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Celsius (°C)",
+ "value": "celsius"
+ },
+ {
+ "text": "Farenheit (°F)",
+ "value": "farenheit"
+ },
+ {
+ "text": "Kelvin (K)",
+ "value": "kelvin"
+ }
+ ],
+ "text": "temperature"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Millibars",
+ "value": "pressurembar"
+ },
+ {
+ "text": "Bars",
+ "value": "pressurebar"
+ },
+ {
+ "text": "Kilobars",
+ "value": "pressurekbar"
+ },
+ {
+ "text": "Hectopascals",
+ "value": "pressurehpa"
+ },
+ {
+ "text": "Kilopascals",
+ "value": "pressurekpa"
+ },
+ {
+ "text": "Inches of mercury",
+ "value": "pressurehg"
+ },
+ {
+ "text": "PSI",
+ "value": "pressurepsi"
+ }
+ ],
+ "text": "pressure"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Newton-meters (Nm)",
+ "value": "forceNm"
+ },
+ {
+ "text": "Kilonewton-meters (kNm)",
+ "value": "forcekNm"
+ },
+ {
+ "text": "Newtons (N)",
+ "value": "forceN"
+ },
+ {
+ "text": "Kilonewtons (kN)",
+ "value": "forcekN"
+ }
+ ],
+ "text": "force"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Gallons/min (gpm)",
+ "value": "flowgpm"
+ },
+ {
+ "text": "Cubic meters/sec (cms)",
+ "value": "flowcms"
+ },
+ {
+ "text": "Cubic feet/sec (cfs)",
+ "value": "flowcfs"
+ },
+ {
+ "text": "Cubic feet/min (cfm)",
+ "value": "flowcfm"
+ },
+ {
+ "text": "Litre/hour",
+ "value": "litreh"
+ },
+ {
+ "text": "Litre/min (l/min)",
+ "value": "flowlpm"
+ },
+ {
+ "text": "milliLitre/min (mL/min)",
+ "value": "flowmlpm"
+ }
+ ],
+ "text": "flow"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Degrees (°)",
+ "value": "degree"
+ },
+ {
+ "text": "Radians",
+ "value": "radian"
+ },
+ {
+ "text": "Gradian",
+ "value": "grad"
+ }
+ ],
+ "text": "angle"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Meters/sec²",
+ "value": "accMS2"
+ },
+ {
+ "text": "Feet/sec²",
+ "value": "accFS2"
+ },
+ {
+ "text": "G unit",
+ "value": "accG"
+ }
+ ],
+ "text": "acceleration"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Becquerel (Bq)",
+ "value": "radbq"
+ },
+ {
+ "text": "curie (Ci)",
+ "value": "radci"
+ },
+ {
+ "text": "Gray (Gy)",
+ "value": "radgy"
+ },
+ {
+ "text": "rad",
+ "value": "radrad"
+ },
+ {
+ "text": "Sievert (Sv)",
+ "value": "radsv"
+ },
+ {
+ "text": "rem",
+ "value": "radrem"
+ },
+ {
+ "text": "Exposure (C/kg)",
+ "value": "radexpckg"
+ },
+ {
+ "text": "roentgen (R)",
+ "value": "radr"
+ },
+ {
+ "text": "Sievert/hour (Sv/h)",
+ "value": "radsvh"
+ }
+ ],
+ "text": "radiation"
+ },
+ {
+ "submenu": [
+ {
+ "text": "parts-per-million (ppm)",
+ "value": "ppm"
+ },
+ {
+ "text": "parts-per-billion (ppb)",
+ "value": "conppb"
+ },
+ {
+ "text": "nanogram per cubic metre (ng/m³)",
+ "value": "conngm3"
+ },
+ {
+ "text": "nanogram per normal cubic metre (ng/Nm³)",
+ "value": "conngNm3"
+ },
+ {
+ "text": "microgram per cubic metre (μg/m³)",
+ "value": "conμgm3"
+ },
+ {
+ "text": "microgram per normal cubic metre (μg/Nm³)",
+ "value": "conμgNm3"
+ },
+ {
+ "text": "milligram per cubic metre (mg/m³)",
+ "value": "conmgm3"
+ },
+ {
+ "text": "milligram per normal cubic metre (mg/Nm³)",
+ "value": "conmgNm3"
+ },
+ {
+ "text": "gram per cubic metre (g/m³)",
+ "value": "congm3"
+ },
+ {
+ "text": "gram per normal cubic metre (g/Nm³)",
+ "value": "congNm3"
+ }
+ ],
+ "text": "concentration"
+ }
+ ]
+ },
+ {
+ "animationModes": [
+ {
+ "text": "Show All",
+ "value": "all"
+ },
+ {
+ "text": "Show Triggered",
+ "value": "triggered"
+ }
+ ],
+ "colors": [
+ "#299c46",
+ "rgba(237, 129, 40, 0.89)",
+ "#d44a3a"
+ ],
+ "d3DivId": "d3_svg_2",
+ "datasource": "gdev-testdata",
+ "decimals": 2,
+ "displayModes": [
+ {
+ "text": "Show All",
+ "value": "all"
+ },
+ {
+ "text": "Show Triggered",
+ "value": "triggered"
+ }
+ ],
+ "fontSizes": [
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ 22,
+ 24,
+ 26,
+ 28,
+ 30,
+ 32,
+ 34,
+ 36,
+ 38,
+ 40,
+ 42,
+ 44,
+ 46,
+ 48,
+ 50,
+ 52,
+ 54,
+ 56,
+ 58,
+ 60,
+ 62,
+ 64,
+ 66,
+ 68,
+ 70
+ ],
+ "fontTypes": [
+ "Open Sans",
+ "Arial",
+ "Avant Garde",
+ "Bookman",
+ "Consolas",
+ "Courier",
+ "Courier New",
+ "Futura",
+ "Garamond",
+ "Helvetica",
+ "Palatino",
+ "Times",
+ "Times New Roman",
+ "Verdana"
+ ],
+ "format": "none",
+ "gridPos": {
+ "h": 10,
+ "w": 12,
+ "x": 0,
+ "y": 9
+ },
+ "id": 2,
+ "links": [],
+ "notcolors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "operatorName": "avg",
+ "operatorOptions": [
+ {
+ "text": "Average",
+ "value": "avg"
+ },
+ {
+ "text": "Count",
+ "value": "count"
+ },
+ {
+ "text": "Current",
+ "value": "current"
+ },
+ {
+ "text": "Delta",
+ "value": "delta"
+ },
+ {
+ "text": "Difference",
+ "value": "diff"
+ },
+ {
+ "text": "First",
+ "value": "first"
+ },
+ {
+ "text": "Log Min",
+ "value": "logmin"
+ },
+ {
+ "text": "Max",
+ "value": "max"
+ },
+ {
+ "text": "Min",
+ "value": "min"
+ },
+ {
+ "text": "Name",
+ "value": "name"
+ },
+ {
+ "text": "Time of Last Point",
+ "value": "last_time"
+ },
+ {
+ "text": "Time Step",
+ "value": "time_step"
+ },
+ {
+ "text": "Total",
+ "value": "total"
+ }
+ ],
+ "polystat": {
+ "animationSpeed": 2500,
+ "columnAutoSize": true,
+ "columns": 1,
+ "defaultClickThrough": "",
+ "defaultClickThroughSanitize": true,
+ "displayLimit": 100,
+ "fontAutoScale": true,
+ "fontSize": 12,
+ "globalDisplayMode": "all",
+ "globalOperatorName": "avg",
+ "gradientEnabled": true,
+ "hexagonSortByDirection": "asc",
+ "hexagonSortByField": "name",
+ "maxMetrics": 0,
+ "polygonBorderColor": "black",
+ "polygonBorderSize": 2,
+ "radius": "",
+ "radiusAutoSize": true,
+ "rowAutoSize": true,
+ "rows": 1,
+ "shape": "hexagon_pointed_top",
+ "tooltipDisplayMode": "all",
+ "tooltipDisplayTextTriggeredEmpty": "OK",
+ "tooltipFontSize": 12,
+ "tooltipFontType": "Open Sans",
+ "tooltipPrimarySortDirection": "desc",
+ "tooltipPrimarySortField": "thresholdLevel",
+ "tooltipSecondarySortDirection": "desc",
+ "tooltipSecondarySortField": "value",
+ "tooltipTimestampEnabled": true
+ },
+ "savedComposites": [],
+ "savedOverrides": [],
+ "shapes": [
+ {
+ "text": "Hexagon Pointed Top",
+ "value": "hexagon_pointed_top"
+ },
+ {
+ "text": "Hexagon Flat Top",
+ "value": "hexagon_flat_top"
+ },
+ {
+ "text": "Circle",
+ "value": "circle"
+ },
+ {
+ "text": "Cross",
+ "value": "cross"
+ },
+ {
+ "text": "Diamond",
+ "value": "diamond"
+ },
+ {
+ "text": "Square",
+ "value": "square"
+ },
+ {
+ "text": "Star",
+ "value": "star"
+ },
+ {
+ "text": "Triangle",
+ "value": "triangle"
+ },
+ {
+ "text": "Wye",
+ "value": "wye"
+ }
+ ],
+ "sortDirections": [
+ {
+ "text": "Ascending",
+ "value": "asc"
+ },
+ {
+ "text": "Descending",
+ "value": "desc"
+ }
+ ],
+ "sortFields": [
+ {
+ "text": "Name",
+ "value": "name"
+ },
+ {
+ "text": "Threshold Level",
+ "value": "thresholdLevel"
+ },
+ {
+ "text": "Value",
+ "value": "value"
+ }
+ ],
+ "svgContainer": {},
+ "targets": [
+ {
+ "alias": "Sensor-A",
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "csv_metric_values",
+ "stringInput": "1,20,90,30,5,0"
+ },
+ {
+ "alias": "Sensor-B",
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "B",
+ "scenarioId": "csv_metric_values",
+ "stringInput": "3433,23432,55"
+ },
+ {
+ "alias": "Sensor-C",
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "C",
+ "scenarioId": "csv_metric_values",
+ "stringInput": "1,2,3,4,5,6"
+ },
+ {
+ "alias": "Sensor-E",
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "D",
+ "scenarioId": "csv_metric_values",
+ "stringInput": "1,20,90,30,5,0"
+ }
+ ],
+ "thresholdStates": [
+ {
+ "text": "ok",
+ "value": 0
+ },
+ {
+ "text": "warning",
+ "value": 1
+ },
+ {
+ "text": "critical",
+ "value": 2
+ },
+ {
+ "text": "custom",
+ "value": 3
+ }
+ ],
+ "title": "No Value in Sensor-C Bug",
+ "type": "grafana-polystat-panel",
+ "unitFormats": [
+ {
+ "submenu": [
+ {
+ "text": "none",
+ "value": "none"
+ },
+ {
+ "text": "short",
+ "value": "short"
+ },
+ {
+ "text": "percent (0-100)",
+ "value": "percent"
+ },
+ {
+ "text": "percent (0.0-1.0)",
+ "value": "percentunit"
+ },
+ {
+ "text": "Humidity (%H)",
+ "value": "humidity"
+ },
+ {
+ "text": "decibel",
+ "value": "dB"
+ },
+ {
+ "text": "hexadecimal (0x)",
+ "value": "hex0x"
+ },
+ {
+ "text": "hexadecimal",
+ "value": "hex"
+ },
+ {
+ "text": "scientific notation",
+ "value": "sci"
+ },
+ {
+ "text": "locale format",
+ "value": "locale"
+ }
+ ],
+ "text": "none"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Dollars ($)",
+ "value": "currencyUSD"
+ },
+ {
+ "text": "Pounds (£)",
+ "value": "currencyGBP"
+ },
+ {
+ "text": "Euro (€)",
+ "value": "currencyEUR"
+ },
+ {
+ "text": "Yen (¥)",
+ "value": "currencyJPY"
+ },
+ {
+ "text": "Rubles (₽)",
+ "value": "currencyRUB"
+ },
+ {
+ "text": "Hryvnias (₴)",
+ "value": "currencyUAH"
+ },
+ {
+ "text": "Real (R$)",
+ "value": "currencyBRL"
+ },
+ {
+ "text": "Danish Krone (kr)",
+ "value": "currencyDKK"
+ },
+ {
+ "text": "Icelandic Króna (kr)",
+ "value": "currencyISK"
+ },
+ {
+ "text": "Norwegian Krone (kr)",
+ "value": "currencyNOK"
+ },
+ {
+ "text": "Swedish Krona (kr)",
+ "value": "currencySEK"
+ },
+ {
+ "text": "Czech koruna (czk)",
+ "value": "currencyCZK"
+ },
+ {
+ "text": "Swiss franc (CHF)",
+ "value": "currencyCHF"
+ },
+ {
+ "text": "Polish Złoty (PLN)",
+ "value": "currencyPLN"
+ },
+ {
+ "text": "Bitcoin (฿)",
+ "value": "currencyBTC"
+ }
+ ],
+ "text": "currency"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Hertz (1/s)",
+ "value": "hertz"
+ },
+ {
+ "text": "nanoseconds (ns)",
+ "value": "ns"
+ },
+ {
+ "text": "microseconds (µs)",
+ "value": "µs"
+ },
+ {
+ "text": "milliseconds (ms)",
+ "value": "ms"
+ },
+ {
+ "text": "seconds (s)",
+ "value": "s"
+ },
+ {
+ "text": "minutes (m)",
+ "value": "m"
+ },
+ {
+ "text": "hours (h)",
+ "value": "h"
+ },
+ {
+ "text": "days (d)",
+ "value": "d"
+ },
+ {
+ "text": "duration (ms)",
+ "value": "dtdurationms"
+ },
+ {
+ "text": "duration (s)",
+ "value": "dtdurations"
+ },
+ {
+ "text": "duration (hh:mm:ss)",
+ "value": "dthms"
+ },
+ {
+ "text": "Timeticks (s/100)",
+ "value": "timeticks"
+ }
+ ],
+ "text": "time"
+ },
+ {
+ "submenu": [
+ {
+ "text": "YYYY-MM-DD HH:mm:ss",
+ "value": "dateTimeAsIso"
+ },
+ {
+ "text": "DD/MM/YYYY h:mm:ss a",
+ "value": "dateTimeAsUS"
+ },
+ {
+ "text": "From Now",
+ "value": "dateTimeFromNow"
+ }
+ ],
+ "text": "date & time"
+ },
+ {
+ "submenu": [
+ {
+ "text": "bits",
+ "value": "bits"
+ },
+ {
+ "text": "bytes",
+ "value": "bytes"
+ },
+ {
+ "text": "kibibytes",
+ "value": "kbytes"
+ },
+ {
+ "text": "mebibytes",
+ "value": "mbytes"
+ },
+ {
+ "text": "gibibytes",
+ "value": "gbytes"
+ }
+ ],
+ "text": "data (IEC)"
+ },
+ {
+ "submenu": [
+ {
+ "text": "bits",
+ "value": "decbits"
+ },
+ {
+ "text": "bytes",
+ "value": "decbytes"
+ },
+ {
+ "text": "kilobytes",
+ "value": "deckbytes"
+ },
+ {
+ "text": "megabytes",
+ "value": "decmbytes"
+ },
+ {
+ "text": "gigabytes",
+ "value": "decgbytes"
+ }
+ ],
+ "text": "data (Metric)"
+ },
+ {
+ "submenu": [
+ {
+ "text": "packets/sec",
+ "value": "pps"
+ },
+ {
+ "text": "bits/sec",
+ "value": "bps"
+ },
+ {
+ "text": "bytes/sec",
+ "value": "Bps"
+ },
+ {
+ "text": "kilobits/sec",
+ "value": "Kbits"
+ },
+ {
+ "text": "kilobytes/sec",
+ "value": "KBs"
+ },
+ {
+ "text": "megabits/sec",
+ "value": "Mbits"
+ },
+ {
+ "text": "megabytes/sec",
+ "value": "MBs"
+ },
+ {
+ "text": "gigabytes/sec",
+ "value": "GBs"
+ },
+ {
+ "text": "gigabits/sec",
+ "value": "Gbits"
+ }
+ ],
+ "text": "data rate"
+ },
+ {
+ "submenu": [
+ {
+ "text": "hashes/sec",
+ "value": "Hs"
+ },
+ {
+ "text": "kilohashes/sec",
+ "value": "KHs"
+ },
+ {
+ "text": "megahashes/sec",
+ "value": "MHs"
+ },
+ {
+ "text": "gigahashes/sec",
+ "value": "GHs"
+ },
+ {
+ "text": "terahashes/sec",
+ "value": "THs"
+ },
+ {
+ "text": "petahashes/sec",
+ "value": "PHs"
+ },
+ {
+ "text": "exahashes/sec",
+ "value": "EHs"
+ }
+ ],
+ "text": "hash rate"
+ },
+ {
+ "submenu": [
+ {
+ "text": "ops/sec (ops)",
+ "value": "ops"
+ },
+ {
+ "text": "requests/sec (rps)",
+ "value": "reqps"
+ },
+ {
+ "text": "reads/sec (rps)",
+ "value": "rps"
+ },
+ {
+ "text": "writes/sec (wps)",
+ "value": "wps"
+ },
+ {
+ "text": "I/O ops/sec (iops)",
+ "value": "iops"
+ },
+ {
+ "text": "ops/min (opm)",
+ "value": "opm"
+ },
+ {
+ "text": "reads/min (rpm)",
+ "value": "rpm"
+ },
+ {
+ "text": "writes/min (wpm)",
+ "value": "wpm"
+ }
+ ],
+ "text": "throughput"
+ },
+ {
+ "submenu": [
+ {
+ "text": "millimetre (mm)",
+ "value": "lengthmm"
+ },
+ {
+ "text": "meter (m)",
+ "value": "lengthm"
+ },
+ {
+ "text": "feet (ft)",
+ "value": "lengthft"
+ },
+ {
+ "text": "kilometer (km)",
+ "value": "lengthkm"
+ },
+ {
+ "text": "mile (mi)",
+ "value": "lengthmi"
+ }
+ ],
+ "text": "length"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Square Meters (m²)",
+ "value": "areaM2"
+ },
+ {
+ "text": "Square Feet (ft²)",
+ "value": "areaF2"
+ },
+ {
+ "text": "Square Miles (mi²)",
+ "value": "areaMI2"
+ }
+ ],
+ "text": "area"
+ },
+ {
+ "submenu": [
+ {
+ "text": "milligram (mg)",
+ "value": "massmg"
+ },
+ {
+ "text": "gram (g)",
+ "value": "massg"
+ },
+ {
+ "text": "kilogram (kg)",
+ "value": "masskg"
+ },
+ {
+ "text": "metric ton (t)",
+ "value": "masst"
+ }
+ ],
+ "text": "mass"
+ },
+ {
+ "submenu": [
+ {
+ "text": "metres/second (m/s)",
+ "value": "velocityms"
+ },
+ {
+ "text": "kilometers/hour (km/h)",
+ "value": "velocitykmh"
+ },
+ {
+ "text": "miles/hour (mph)",
+ "value": "velocitymph"
+ },
+ {
+ "text": "knot (kn)",
+ "value": "velocityknot"
+ }
+ ],
+ "text": "velocity"
+ },
+ {
+ "submenu": [
+ {
+ "text": "millilitre (mL)",
+ "value": "mlitre"
+ },
+ {
+ "text": "litre (L)",
+ "value": "litre"
+ },
+ {
+ "text": "cubic metre",
+ "value": "m3"
+ },
+ {
+ "text": "Normal cubic metre",
+ "value": "Nm3"
+ },
+ {
+ "text": "cubic decimetre",
+ "value": "dm3"
+ },
+ {
+ "text": "gallons",
+ "value": "gallons"
+ }
+ ],
+ "text": "volume"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Watt (W)",
+ "value": "watt"
+ },
+ {
+ "text": "Kilowatt (kW)",
+ "value": "kwatt"
+ },
+ {
+ "text": "Milliwatt (mW)",
+ "value": "mwatt"
+ },
+ {
+ "text": "Watt per square metre (W/m²)",
+ "value": "Wm2"
+ },
+ {
+ "text": "Volt-ampere (VA)",
+ "value": "voltamp"
+ },
+ {
+ "text": "Kilovolt-ampere (kVA)",
+ "value": "kvoltamp"
+ },
+ {
+ "text": "Volt-ampere reactive (var)",
+ "value": "voltampreact"
+ },
+ {
+ "text": "Kilovolt-ampere reactive (kvar)",
+ "value": "kvoltampreact"
+ },
+ {
+ "text": "Watt-hour (Wh)",
+ "value": "watth"
+ },
+ {
+ "text": "Kilowatt-hour (kWh)",
+ "value": "kwatth"
+ },
+ {
+ "text": "Kilowatt-min (kWm)",
+ "value": "kwattm"
+ },
+ {
+ "text": "Joule (J)",
+ "value": "joule"
+ },
+ {
+ "text": "Electron volt (eV)",
+ "value": "ev"
+ },
+ {
+ "text": "Ampere (A)",
+ "value": "amp"
+ },
+ {
+ "text": "Kiloampere (kA)",
+ "value": "kamp"
+ },
+ {
+ "text": "Milliampere (mA)",
+ "value": "mamp"
+ },
+ {
+ "text": "Volt (V)",
+ "value": "volt"
+ },
+ {
+ "text": "Kilovolt (kV)",
+ "value": "kvolt"
+ },
+ {
+ "text": "Millivolt (mV)",
+ "value": "mvolt"
+ },
+ {
+ "text": "Decibel-milliwatt (dBm)",
+ "value": "dBm"
+ },
+ {
+ "text": "Ohm (Ω)",
+ "value": "ohm"
+ },
+ {
+ "text": "Lumens (Lm)",
+ "value": "lumens"
+ }
+ ],
+ "text": "energy"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Celsius (°C)",
+ "value": "celsius"
+ },
+ {
+ "text": "Farenheit (°F)",
+ "value": "farenheit"
+ },
+ {
+ "text": "Kelvin (K)",
+ "value": "kelvin"
+ }
+ ],
+ "text": "temperature"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Millibars",
+ "value": "pressurembar"
+ },
+ {
+ "text": "Bars",
+ "value": "pressurebar"
+ },
+ {
+ "text": "Kilobars",
+ "value": "pressurekbar"
+ },
+ {
+ "text": "Hectopascals",
+ "value": "pressurehpa"
+ },
+ {
+ "text": "Kilopascals",
+ "value": "pressurekpa"
+ },
+ {
+ "text": "Inches of mercury",
+ "value": "pressurehg"
+ },
+ {
+ "text": "PSI",
+ "value": "pressurepsi"
+ }
+ ],
+ "text": "pressure"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Newton-meters (Nm)",
+ "value": "forceNm"
+ },
+ {
+ "text": "Kilonewton-meters (kNm)",
+ "value": "forcekNm"
+ },
+ {
+ "text": "Newtons (N)",
+ "value": "forceN"
+ },
+ {
+ "text": "Kilonewtons (kN)",
+ "value": "forcekN"
+ }
+ ],
+ "text": "force"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Gallons/min (gpm)",
+ "value": "flowgpm"
+ },
+ {
+ "text": "Cubic meters/sec (cms)",
+ "value": "flowcms"
+ },
+ {
+ "text": "Cubic feet/sec (cfs)",
+ "value": "flowcfs"
+ },
+ {
+ "text": "Cubic feet/min (cfm)",
+ "value": "flowcfm"
+ },
+ {
+ "text": "Litre/hour",
+ "value": "litreh"
+ },
+ {
+ "text": "Litre/min (l/min)",
+ "value": "flowlpm"
+ },
+ {
+ "text": "milliLitre/min (mL/min)",
+ "value": "flowmlpm"
+ }
+ ],
+ "text": "flow"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Degrees (°)",
+ "value": "degree"
+ },
+ {
+ "text": "Radians",
+ "value": "radian"
+ },
+ {
+ "text": "Gradian",
+ "value": "grad"
+ }
+ ],
+ "text": "angle"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Meters/sec²",
+ "value": "accMS2"
+ },
+ {
+ "text": "Feet/sec²",
+ "value": "accFS2"
+ },
+ {
+ "text": "G unit",
+ "value": "accG"
+ }
+ ],
+ "text": "acceleration"
+ },
+ {
+ "submenu": [
+ {
+ "text": "Becquerel (Bq)",
+ "value": "radbq"
+ },
+ {
+ "text": "curie (Ci)",
+ "value": "radci"
+ },
+ {
+ "text": "Gray (Gy)",
+ "value": "radgy"
+ },
+ {
+ "text": "rad",
+ "value": "radrad"
+ },
+ {
+ "text": "Sievert (Sv)",
+ "value": "radsv"
+ },
+ {
+ "text": "rem",
+ "value": "radrem"
+ },
+ {
+ "text": "Exposure (C/kg)",
+ "value": "radexpckg"
+ },
+ {
+ "text": "roentgen (R)",
+ "value": "radr"
+ },
+ {
+ "text": "Sievert/hour (Sv/h)",
+ "value": "radsvh"
+ }
+ ],
+ "text": "radiation"
+ },
+ {
+ "submenu": [
+ {
+ "text": "parts-per-million (ppm)",
+ "value": "ppm"
+ },
+ {
+ "text": "parts-per-billion (ppb)",
+ "value": "conppb"
+ },
+ {
+ "text": "nanogram per cubic metre (ng/m³)",
+ "value": "conngm3"
+ },
+ {
+ "text": "nanogram per normal cubic metre (ng/Nm³)",
+ "value": "conngNm3"
+ },
+ {
+ "text": "microgram per cubic metre (μg/m³)",
+ "value": "conμgm3"
+ },
+ {
+ "text": "microgram per normal cubic metre (μg/Nm³)",
+ "value": "conμgNm3"
+ },
+ {
+ "text": "milligram per cubic metre (mg/m³)",
+ "value": "conmgm3"
+ },
+ {
+ "text": "milligram per normal cubic metre (mg/Nm³)",
+ "value": "conmgNm3"
+ },
+ {
+ "text": "gram per cubic metre (g/m³)",
+ "value": "congm3"
+ },
+ {
+ "text": "gram per normal cubic metre (g/Nm³)",
+ "value": "congNm3"
+ }
+ ],
+ "text": "concentration"
+ }
+ ]
+ }
+ ],
+ "schemaVersion": 16,
+ "style": "dark",
+ "tags": [
+ "panel-test",
+ "gdev"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "Panel Tests - Polystat",
+ "uid": "Kp9Z0hTik",
+ "version": 5
+}
diff --git a/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json
new file mode 100644
index 00000000000..08bf6dce9d0
--- /dev/null
+++ b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json
@@ -0,0 +1,1166 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ },
+ {
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": false,
+ "iconColor": "rgba(255, 96, 96, 1)",
+ "limit": 100,
+ "matchAny": false,
+ "name": "annotations",
+ "showIn": 0,
+ "tags": [
+ "asd"
+ ],
+ "type": "tags"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 7,
+ "w": 13,
+ "x": 0,
+ "y": 0
+ },
+ "id": 6,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "30s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 7,
+ "w": 11,
+ "x": 13,
+ "y": 0
+ },
+ "id": 7,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "30s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 7,
+ "w": 8,
+ "x": 0,
+ "y": 7
+ },
+ "id": 8,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "30s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 7,
+ "w": 8,
+ "x": 8,
+ "y": 7
+ },
+ "id": 18,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "30s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 7,
+ "w": 8,
+ "x": 16,
+ "y": 7
+ },
+ "id": 17,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "30s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 5,
+ "w": 8,
+ "x": 0,
+ "y": 14
+ },
+ "id": 10,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 5,
+ "w": 8,
+ "x": 8,
+ "y": 14
+ },
+ "id": 9,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 5,
+ "w": 8,
+ "x": 16,
+ "y": 14
+ },
+ "id": 11,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 5,
+ "w": 8,
+ "x": 0,
+ "y": 19
+ },
+ "id": 14,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 5,
+ "w": 8,
+ "x": 8,
+ "y": 19
+ },
+ "id": 15,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 5,
+ "w": 8,
+ "x": 16,
+ "y": 19
+ },
+ "id": 12,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 6,
+ "w": 16,
+ "x": 0,
+ "y": 24
+ },
+ "id": 13,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "gdev-testdata",
+ "fill": 1,
+ "gridPos": {
+ "h": 6,
+ "w": 8,
+ "x": 16,
+ "y": 24
+ },
+ "id": 16,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A",
+ "scenarioId": "slow_query",
+ "stringInput": "5s"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
+ ],
+ "schemaVersion": 16,
+ "style": "dark",
+ "tags": [],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "Panel tests - Slow Queries & Annotations",
+ "uid": "xtY_uCAiz",
+ "version": 11
+}
diff --git a/docker/blocks/apache_proxy/Dockerfile b/devenv/docker/blocks/apache_proxy/Dockerfile
similarity index 100%
rename from docker/blocks/apache_proxy/Dockerfile
rename to devenv/docker/blocks/apache_proxy/Dockerfile
diff --git a/docker/blocks/apache_proxy/docker-compose.yaml b/devenv/docker/blocks/apache_proxy/docker-compose.yaml
similarity index 88%
rename from docker/blocks/apache_proxy/docker-compose.yaml
rename to devenv/docker/blocks/apache_proxy/docker-compose.yaml
index 86d4befadd6..3791213f05a 100644
--- a/docker/blocks/apache_proxy/docker-compose.yaml
+++ b/devenv/docker/blocks/apache_proxy/docker-compose.yaml
@@ -5,5 +5,5 @@
# root_url = %(protocol)s://%(domain)s:10081/grafana/
apacheproxy:
- build: blocks/apache_proxy
+ build: docker/blocks/apache_proxy
network_mode: host
diff --git a/docker/blocks/apache_proxy/ports.conf b/devenv/docker/blocks/apache_proxy/ports.conf
similarity index 100%
rename from docker/blocks/apache_proxy/ports.conf
rename to devenv/docker/blocks/apache_proxy/ports.conf
diff --git a/docker/blocks/apache_proxy/proxy.conf b/devenv/docker/blocks/apache_proxy/proxy.conf
similarity index 100%
rename from docker/blocks/apache_proxy/proxy.conf
rename to devenv/docker/blocks/apache_proxy/proxy.conf
diff --git a/docker/blocks/collectd/Dockerfile b/devenv/docker/blocks/collectd/Dockerfile
similarity index 100%
rename from docker/blocks/collectd/Dockerfile
rename to devenv/docker/blocks/collectd/Dockerfile
diff --git a/docker/blocks/collectd/README.md b/devenv/docker/blocks/collectd/README.md
similarity index 100%
rename from docker/blocks/collectd/README.md
rename to devenv/docker/blocks/collectd/README.md
diff --git a/docker/blocks/collectd/collectd.conf.tpl b/devenv/docker/blocks/collectd/collectd.conf.tpl
similarity index 100%
rename from docker/blocks/collectd/collectd.conf.tpl
rename to devenv/docker/blocks/collectd/collectd.conf.tpl
diff --git a/docker/blocks/collectd/docker-compose.yaml b/devenv/docker/blocks/collectd/docker-compose.yaml
similarity index 87%
rename from docker/blocks/collectd/docker-compose.yaml
rename to devenv/docker/blocks/collectd/docker-compose.yaml
index c95827f7928..c5e189b58d8 100644
--- a/docker/blocks/collectd/docker-compose.yaml
+++ b/devenv/docker/blocks/collectd/docker-compose.yaml
@@ -1,5 +1,5 @@
collectd:
- build: blocks/collectd
+ build: docker/blocks/collectd
environment:
HOST_NAME: myserver
GRAPHITE_HOST: graphite
diff --git a/docker/blocks/collectd/etc_mtab b/devenv/docker/blocks/collectd/etc_mtab
similarity index 100%
rename from docker/blocks/collectd/etc_mtab
rename to devenv/docker/blocks/collectd/etc_mtab
diff --git a/docker/blocks/collectd/start_container b/devenv/docker/blocks/collectd/start_container
similarity index 100%
rename from docker/blocks/collectd/start_container
rename to devenv/docker/blocks/collectd/start_container
diff --git a/docker/blocks/elastic/docker-compose.yaml b/devenv/docker/blocks/elastic/docker-compose.yaml
similarity index 100%
rename from docker/blocks/elastic/docker-compose.yaml
rename to devenv/docker/blocks/elastic/docker-compose.yaml
diff --git a/docker/blocks/elastic/elasticsearch.yml b/devenv/docker/blocks/elastic/elasticsearch.yml
similarity index 100%
rename from docker/blocks/elastic/elasticsearch.yml
rename to devenv/docker/blocks/elastic/elasticsearch.yml
diff --git a/docker/blocks/elastic1/docker-compose.yaml b/devenv/docker/blocks/elastic1/docker-compose.yaml
similarity index 100%
rename from docker/blocks/elastic1/docker-compose.yaml
rename to devenv/docker/blocks/elastic1/docker-compose.yaml
diff --git a/docker/blocks/elastic1/elasticsearch.yml b/devenv/docker/blocks/elastic1/elasticsearch.yml
similarity index 100%
rename from docker/blocks/elastic1/elasticsearch.yml
rename to devenv/docker/blocks/elastic1/elasticsearch.yml
diff --git a/docker/blocks/elastic5/docker-compose.yaml b/devenv/docker/blocks/elastic5/docker-compose.yaml
similarity index 100%
rename from docker/blocks/elastic5/docker-compose.yaml
rename to devenv/docker/blocks/elastic5/docker-compose.yaml
diff --git a/docker/blocks/elastic5/elasticsearch.yml b/devenv/docker/blocks/elastic5/elasticsearch.yml
similarity index 100%
rename from docker/blocks/elastic5/elasticsearch.yml
rename to devenv/docker/blocks/elastic5/elasticsearch.yml
diff --git a/docker/blocks/elastic6/docker-compose.yaml b/devenv/docker/blocks/elastic6/docker-compose.yaml
similarity index 100%
rename from docker/blocks/elastic6/docker-compose.yaml
rename to devenv/docker/blocks/elastic6/docker-compose.yaml
diff --git a/docker/blocks/elastic6/elasticsearch.yml b/devenv/docker/blocks/elastic6/elasticsearch.yml
similarity index 100%
rename from docker/blocks/elastic6/elasticsearch.yml
rename to devenv/docker/blocks/elastic6/elasticsearch.yml
diff --git a/docker/blocks/graphite/Dockerfile b/devenv/docker/blocks/graphite/Dockerfile
similarity index 100%
rename from docker/blocks/graphite/Dockerfile
rename to devenv/docker/blocks/graphite/Dockerfile
diff --git a/docker/blocks/graphite/docker-compose.yaml b/devenv/docker/blocks/graphite/docker-compose.yaml
similarity index 89%
rename from docker/blocks/graphite/docker-compose.yaml
rename to devenv/docker/blocks/graphite/docker-compose.yaml
index 606e28638f7..acebd2bd9c0 100644
--- a/docker/blocks/graphite/docker-compose.yaml
+++ b/devenv/docker/blocks/graphite/docker-compose.yaml
@@ -1,5 +1,5 @@
graphite09:
- build: blocks/graphite
+ build: docker/blocks/graphite
ports:
- "8080:80"
- "2003:2003"
diff --git a/docker/blocks/graphite/files/carbon.conf b/devenv/docker/blocks/graphite/files/carbon.conf
similarity index 100%
rename from docker/blocks/graphite/files/carbon.conf
rename to devenv/docker/blocks/graphite/files/carbon.conf
diff --git a/docker/blocks/graphite/files/events_views.py b/devenv/docker/blocks/graphite/files/events_views.py
similarity index 100%
rename from docker/blocks/graphite/files/events_views.py
rename to devenv/docker/blocks/graphite/files/events_views.py
diff --git a/docker/blocks/graphite/files/initial_data.json b/devenv/docker/blocks/graphite/files/initial_data.json
similarity index 100%
rename from docker/blocks/graphite/files/initial_data.json
rename to devenv/docker/blocks/graphite/files/initial_data.json
diff --git a/docker/blocks/graphite/files/local_settings.py b/devenv/docker/blocks/graphite/files/local_settings.py
similarity index 100%
rename from docker/blocks/graphite/files/local_settings.py
rename to devenv/docker/blocks/graphite/files/local_settings.py
diff --git a/docker/blocks/graphite/files/my_htpasswd b/devenv/docker/blocks/graphite/files/my_htpasswd
similarity index 100%
rename from docker/blocks/graphite/files/my_htpasswd
rename to devenv/docker/blocks/graphite/files/my_htpasswd
diff --git a/docker/blocks/graphite/files/nginx.conf b/devenv/docker/blocks/graphite/files/nginx.conf
similarity index 100%
rename from docker/blocks/graphite/files/nginx.conf
rename to devenv/docker/blocks/graphite/files/nginx.conf
diff --git a/docker/blocks/graphite/files/statsd_config.js b/devenv/docker/blocks/graphite/files/statsd_config.js
similarity index 100%
rename from docker/blocks/graphite/files/statsd_config.js
rename to devenv/docker/blocks/graphite/files/statsd_config.js
diff --git a/docker/blocks/graphite/files/storage-aggregation.conf b/devenv/docker/blocks/graphite/files/storage-aggregation.conf
similarity index 100%
rename from docker/blocks/graphite/files/storage-aggregation.conf
rename to devenv/docker/blocks/graphite/files/storage-aggregation.conf
diff --git a/docker/blocks/graphite/files/storage-schemas.conf b/devenv/docker/blocks/graphite/files/storage-schemas.conf
similarity index 100%
rename from docker/blocks/graphite/files/storage-schemas.conf
rename to devenv/docker/blocks/graphite/files/storage-schemas.conf
diff --git a/docker/blocks/graphite/files/supervisord.conf b/devenv/docker/blocks/graphite/files/supervisord.conf
similarity index 100%
rename from docker/blocks/graphite/files/supervisord.conf
rename to devenv/docker/blocks/graphite/files/supervisord.conf
diff --git a/docker/blocks/graphite1/Dockerfile b/devenv/docker/blocks/graphite1/Dockerfile
similarity index 100%
rename from docker/blocks/graphite1/Dockerfile
rename to devenv/docker/blocks/graphite1/Dockerfile
diff --git a/docker/blocks/graphite1/big-dashboard.json b/devenv/docker/blocks/graphite1/big-dashboard.json
similarity index 100%
rename from docker/blocks/graphite1/big-dashboard.json
rename to devenv/docker/blocks/graphite1/big-dashboard.json
diff --git a/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd b/devenv/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd
rename to devenv/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd
diff --git a/docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh b/devenv/docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh
rename to devenv/docker/blocks/graphite1/conf/etc/my_init.d/01_conf_init.sh
diff --git a/docker/blocks/graphite1/conf/etc/nginx/nginx.conf b/devenv/docker/blocks/graphite1/conf/etc/nginx/nginx.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/nginx/nginx.conf
rename to devenv/docker/blocks/graphite1/conf/etc/nginx/nginx.conf
diff --git a/docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf b/devenv/docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf
rename to devenv/docker/blocks/graphite1/conf/etc/nginx/sites-enabled/graphite-statsd.conf
diff --git a/docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run b/devenv/docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run
rename to devenv/docker/blocks/graphite1/conf/etc/service/carbon-aggregator/run
diff --git a/docker/blocks/graphite1/conf/etc/service/carbon/run b/devenv/docker/blocks/graphite1/conf/etc/service/carbon/run
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/service/carbon/run
rename to devenv/docker/blocks/graphite1/conf/etc/service/carbon/run
diff --git a/docker/blocks/graphite1/conf/etc/service/graphite/run b/devenv/docker/blocks/graphite1/conf/etc/service/graphite/run
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/service/graphite/run
rename to devenv/docker/blocks/graphite1/conf/etc/service/graphite/run
diff --git a/docker/blocks/graphite1/conf/etc/service/nginx/run b/devenv/docker/blocks/graphite1/conf/etc/service/nginx/run
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/service/nginx/run
rename to devenv/docker/blocks/graphite1/conf/etc/service/nginx/run
diff --git a/docker/blocks/graphite1/conf/etc/service/statsd/run b/devenv/docker/blocks/graphite1/conf/etc/service/statsd/run
similarity index 100%
rename from docker/blocks/graphite1/conf/etc/service/statsd/run
rename to devenv/docker/blocks/graphite1/conf/etc/service/statsd/run
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf
similarity index 96%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf
index c9520124a2a..792bbfd6857 100644
--- a/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf
+++ b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/aggregation-rules.conf
@@ -8,7 +8,7 @@
# 'avg'. The name of the aggregate metric will be derived from
# 'output_template' filling in any captured fields from 'input_pattern'.
#
-# For example, if you're metric naming scheme is:
+# For example, if your metric naming scheme is:
#
# .applications...
#
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/blacklist.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/graphTemplates.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/relay-rules.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/rewrite-rules.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-aggregation.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/storage-schemas.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf b/devenv/docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/conf/whitelist.conf
diff --git a/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py b/devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/app_settings.py
diff --git a/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py b/devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py
rename to devenv/docker/blocks/graphite1/conf/opt/graphite/webapp/graphite/local_settings.py
diff --git a/docker/blocks/graphite1/conf/opt/statsd/config.js b/devenv/docker/blocks/graphite1/conf/opt/statsd/config.js
similarity index 100%
rename from docker/blocks/graphite1/conf/opt/statsd/config.js
rename to devenv/docker/blocks/graphite1/conf/opt/statsd/config.js
diff --git a/docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp b/devenv/docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp
similarity index 100%
rename from docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp
rename to devenv/docker/blocks/graphite1/conf/usr/local/bin/django_admin_init.exp
diff --git a/docker/blocks/graphite1/conf/usr/local/bin/manage.sh b/devenv/docker/blocks/graphite1/conf/usr/local/bin/manage.sh
similarity index 100%
rename from docker/blocks/graphite1/conf/usr/local/bin/manage.sh
rename to devenv/docker/blocks/graphite1/conf/usr/local/bin/manage.sh
diff --git a/docker/blocks/graphite1/docker-compose.yaml b/devenv/docker/blocks/graphite1/docker-compose.yaml
similarity index 90%
rename from docker/blocks/graphite1/docker-compose.yaml
rename to devenv/docker/blocks/graphite1/docker-compose.yaml
index cd10593f423..1fa3e738ba8 100644
--- a/docker/blocks/graphite1/docker-compose.yaml
+++ b/devenv/docker/blocks/graphite1/docker-compose.yaml
@@ -1,6 +1,6 @@
graphite:
build:
- context: blocks/graphite1
+ context: docker/blocks/graphite1
args:
version: master
ports:
diff --git a/docker/blocks/graphite11/big-dashboard.json b/devenv/docker/blocks/graphite11/big-dashboard.json
similarity index 100%
rename from docker/blocks/graphite11/big-dashboard.json
rename to devenv/docker/blocks/graphite11/big-dashboard.json
diff --git a/docker/blocks/graphite11/docker-compose.yaml b/devenv/docker/blocks/graphite11/docker-compose.yaml
similarity index 100%
rename from docker/blocks/graphite11/docker-compose.yaml
rename to devenv/docker/blocks/graphite11/docker-compose.yaml
diff --git a/docker/blocks/influxdb/docker-compose.yaml b/devenv/docker/blocks/influxdb/docker-compose.yaml
similarity index 80%
rename from docker/blocks/influxdb/docker-compose.yaml
rename to devenv/docker/blocks/influxdb/docker-compose.yaml
index 3434f5d09b9..e1727807d41 100644
--- a/docker/blocks/influxdb/docker-compose.yaml
+++ b/devenv/docker/blocks/influxdb/docker-compose.yaml
@@ -6,7 +6,7 @@
- "8083:8083"
- "8086:8086"
volumes:
- - ./blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf
+ - ./docker/blocks/influxdb/influxdb.conf:/etc/influxdb/influxdb.conf
fake-influxdb-data:
image: grafana/fake-data-gen
diff --git a/docker/blocks/influxdb/influxdb.conf b/devenv/docker/blocks/influxdb/influxdb.conf
similarity index 100%
rename from docker/blocks/influxdb/influxdb.conf
rename to devenv/docker/blocks/influxdb/influxdb.conf
diff --git a/docker/blocks/jaeger/docker-compose.yaml b/devenv/docker/blocks/jaeger/docker-compose.yaml
similarity index 100%
rename from docker/blocks/jaeger/docker-compose.yaml
rename to devenv/docker/blocks/jaeger/docker-compose.yaml
diff --git a/docker/blocks/memcached/docker-compose.yaml b/devenv/docker/blocks/memcached/docker-compose.yaml
similarity index 100%
rename from docker/blocks/memcached/docker-compose.yaml
rename to devenv/docker/blocks/memcached/docker-compose.yaml
diff --git a/docker/blocks/mssql/build/Dockerfile b/devenv/docker/blocks/mssql/build/Dockerfile
similarity index 100%
rename from docker/blocks/mssql/build/Dockerfile
rename to devenv/docker/blocks/mssql/build/Dockerfile
diff --git a/docker/blocks/mssql/build/entrypoint.sh b/devenv/docker/blocks/mssql/build/entrypoint.sh
similarity index 100%
rename from docker/blocks/mssql/build/entrypoint.sh
rename to devenv/docker/blocks/mssql/build/entrypoint.sh
diff --git a/docker/blocks/mssql/build/setup.sh b/devenv/docker/blocks/mssql/build/setup.sh
similarity index 100%
rename from docker/blocks/mssql/build/setup.sh
rename to devenv/docker/blocks/mssql/build/setup.sh
diff --git a/docker/blocks/mssql/build/setup.sql.template b/devenv/docker/blocks/mssql/build/setup.sql.template
similarity index 100%
rename from docker/blocks/mssql/build/setup.sql.template
rename to devenv/docker/blocks/mssql/build/setup.sql.template
diff --git a/docker/blocks/mssql/docker-compose.yaml b/devenv/docker/blocks/mssql/docker-compose.yaml
similarity index 85%
rename from docker/blocks/mssql/docker-compose.yaml
rename to devenv/docker/blocks/mssql/docker-compose.yaml
index a346fb791f7..05a93629e73 100644
--- a/docker/blocks/mssql/docker-compose.yaml
+++ b/devenv/docker/blocks/mssql/docker-compose.yaml
@@ -1,6 +1,6 @@
mssql:
build:
- context: blocks/mssql/build
+ context: docker/blocks/mssql/build
environment:
ACCEPT_EULA: Y
MSSQL_SA_PASSWORD: Password!
diff --git a/docker/blocks/mssql_tests/docker-compose.yaml b/devenv/docker/blocks/mssql_tests/docker-compose.yaml
similarity index 79%
rename from docker/blocks/mssql_tests/docker-compose.yaml
rename to devenv/docker/blocks/mssql_tests/docker-compose.yaml
index 5da6aad82af..eea4d1e3561 100644
--- a/docker/blocks/mssql_tests/docker-compose.yaml
+++ b/devenv/docker/blocks/mssql_tests/docker-compose.yaml
@@ -1,6 +1,6 @@
mssqltests:
build:
- context: blocks/mssql/build
+ context: docker/blocks/mssql/build
environment:
ACCEPT_EULA: Y
MSSQL_SA_PASSWORD: Password!
diff --git a/docker/blocks/mysql/config b/devenv/docker/blocks/mysql/config
similarity index 100%
rename from docker/blocks/mysql/config
rename to devenv/docker/blocks/mysql/config
diff --git a/docker/blocks/mysql/docker-compose.yaml b/devenv/docker/blocks/mysql/docker-compose.yaml
similarity index 100%
rename from docker/blocks/mysql/docker-compose.yaml
rename to devenv/docker/blocks/mysql/docker-compose.yaml
diff --git a/docker/blocks/mysql_opendata/Dockerfile b/devenv/docker/blocks/mysql_opendata/Dockerfile
similarity index 100%
rename from docker/blocks/mysql_opendata/Dockerfile
rename to devenv/docker/blocks/mysql_opendata/Dockerfile
diff --git a/docker/blocks/mysql_opendata/docker-compose.yaml b/devenv/docker/blocks/mysql_opendata/docker-compose.yaml
similarity index 82%
rename from docker/blocks/mysql_opendata/docker-compose.yaml
rename to devenv/docker/blocks/mysql_opendata/docker-compose.yaml
index 594eeed284a..4d478ee0860 100644
--- a/docker/blocks/mysql_opendata/docker-compose.yaml
+++ b/devenv/docker/blocks/mysql_opendata/docker-compose.yaml
@@ -1,5 +1,5 @@
mysql_opendata:
- build: blocks/mysql_opendata
+ build: docker/blocks/mysql_opendata
environment:
MYSQL_ROOT_PASSWORD: rootpass
MYSQL_DATABASE: testdata
diff --git a/docker/blocks/mysql_opendata/import_csv.sql b/devenv/docker/blocks/mysql_opendata/import_csv.sql
similarity index 100%
rename from docker/blocks/mysql_opendata/import_csv.sql
rename to devenv/docker/blocks/mysql_opendata/import_csv.sql
diff --git a/docker/blocks/mysql_tests/Dockerfile b/devenv/docker/blocks/mysql_tests/Dockerfile
similarity index 100%
rename from docker/blocks/mysql_tests/Dockerfile
rename to devenv/docker/blocks/mysql_tests/Dockerfile
diff --git a/docker/blocks/mysql_tests/docker-compose.yaml b/devenv/docker/blocks/mysql_tests/docker-compose.yaml
similarity index 84%
rename from docker/blocks/mysql_tests/docker-compose.yaml
rename to devenv/docker/blocks/mysql_tests/docker-compose.yaml
index 035a6167017..a7509d47880 100644
--- a/docker/blocks/mysql_tests/docker-compose.yaml
+++ b/devenv/docker/blocks/mysql_tests/docker-compose.yaml
@@ -1,6 +1,6 @@
mysqltests:
build:
- context: blocks/mysql_tests
+ context: docker/blocks/mysql_tests
environment:
MYSQL_ROOT_PASSWORD: rootpass
MYSQL_DATABASE: grafana_tests
diff --git a/docker/blocks/mysql_tests/setup.sql b/devenv/docker/blocks/mysql_tests/setup.sql
similarity index 100%
rename from docker/blocks/mysql_tests/setup.sql
rename to devenv/docker/blocks/mysql_tests/setup.sql
diff --git a/docker/blocks/nginx_proxy/Dockerfile b/devenv/docker/blocks/nginx_proxy/Dockerfile
similarity index 100%
rename from docker/blocks/nginx_proxy/Dockerfile
rename to devenv/docker/blocks/nginx_proxy/Dockerfile
diff --git a/docker/blocks/nginx_proxy/docker-compose.yaml b/devenv/docker/blocks/nginx_proxy/docker-compose.yaml
similarity index 88%
rename from docker/blocks/nginx_proxy/docker-compose.yaml
rename to devenv/docker/blocks/nginx_proxy/docker-compose.yaml
index a0ceceb83ac..aefd7226f36 100644
--- a/docker/blocks/nginx_proxy/docker-compose.yaml
+++ b/devenv/docker/blocks/nginx_proxy/docker-compose.yaml
@@ -5,5 +5,5 @@
# root_url = %(protocol)s://%(domain)s:10080/grafana/
nginxproxy:
- build: blocks/nginx_proxy
+ build: docker/blocks/nginx_proxy
network_mode: host
diff --git a/docker/blocks/nginx_proxy/htpasswd b/devenv/docker/blocks/nginx_proxy/htpasswd
similarity index 100%
rename from docker/blocks/nginx_proxy/htpasswd
rename to devenv/docker/blocks/nginx_proxy/htpasswd
diff --git a/docker/blocks/nginx_proxy/nginx.conf b/devenv/docker/blocks/nginx_proxy/nginx.conf
similarity index 100%
rename from docker/blocks/nginx_proxy/nginx.conf
rename to devenv/docker/blocks/nginx_proxy/nginx.conf
diff --git a/docker/blocks/openldap/Dockerfile b/devenv/docker/blocks/openldap/Dockerfile
similarity index 100%
rename from docker/blocks/openldap/Dockerfile
rename to devenv/docker/blocks/openldap/Dockerfile
diff --git a/docker/blocks/openldap/docker-compose.yaml b/devenv/docker/blocks/openldap/docker-compose.yaml
similarity index 82%
rename from docker/blocks/openldap/docker-compose.yaml
rename to devenv/docker/blocks/openldap/docker-compose.yaml
index be06524a57d..d11858ccfb9 100644
--- a/docker/blocks/openldap/docker-compose.yaml
+++ b/devenv/docker/blocks/openldap/docker-compose.yaml
@@ -1,5 +1,5 @@
openldap:
- build: blocks/openldap
+ build: docker/blocks/openldap
environment:
SLAPD_PASSWORD: grafana
SLAPD_DOMAIN: grafana.org
diff --git a/docker/blocks/openldap/entrypoint.sh b/devenv/docker/blocks/openldap/entrypoint.sh
similarity index 100%
rename from docker/blocks/openldap/entrypoint.sh
rename to devenv/docker/blocks/openldap/entrypoint.sh
diff --git a/docker/blocks/openldap/ldap_dev.toml b/devenv/docker/blocks/openldap/ldap_dev.toml
similarity index 99%
rename from docker/blocks/openldap/ldap_dev.toml
rename to devenv/docker/blocks/openldap/ldap_dev.toml
index e79771b57de..8767ff3c64a 100644
--- a/docker/blocks/openldap/ldap_dev.toml
+++ b/devenv/docker/blocks/openldap/ldap_dev.toml
@@ -72,6 +72,7 @@ email = "email"
[[servers.group_mappings]]
group_dn = "cn=admins,ou=groups,dc=grafana,dc=org"
org_role = "Admin"
+grafana_admin = true
# The Grafana organization database id, optional, if left out the default org (id 1) will be used
# org_id = 1
diff --git a/docker/blocks/openldap/modules/memberof.ldif b/devenv/docker/blocks/openldap/modules/memberof.ldif
similarity index 100%
rename from docker/blocks/openldap/modules/memberof.ldif
rename to devenv/docker/blocks/openldap/modules/memberof.ldif
diff --git a/docker/blocks/openldap/notes.md b/devenv/docker/blocks/openldap/notes.md
similarity index 100%
rename from docker/blocks/openldap/notes.md
rename to devenv/docker/blocks/openldap/notes.md
diff --git a/docker/blocks/openldap/prepopulate.sh b/devenv/docker/blocks/openldap/prepopulate.sh
similarity index 100%
rename from docker/blocks/openldap/prepopulate.sh
rename to devenv/docker/blocks/openldap/prepopulate.sh
diff --git a/docker/blocks/openldap/prepopulate/1_units.ldif b/devenv/docker/blocks/openldap/prepopulate/1_units.ldif
similarity index 100%
rename from docker/blocks/openldap/prepopulate/1_units.ldif
rename to devenv/docker/blocks/openldap/prepopulate/1_units.ldif
diff --git a/docker/blocks/openldap/prepopulate/2_users.ldif b/devenv/docker/blocks/openldap/prepopulate/2_users.ldif
similarity index 100%
rename from docker/blocks/openldap/prepopulate/2_users.ldif
rename to devenv/docker/blocks/openldap/prepopulate/2_users.ldif
diff --git a/docker/blocks/openldap/prepopulate/3_groups.ldif b/devenv/docker/blocks/openldap/prepopulate/3_groups.ldif
similarity index 100%
rename from docker/blocks/openldap/prepopulate/3_groups.ldif
rename to devenv/docker/blocks/openldap/prepopulate/3_groups.ldif
diff --git a/docker/blocks/opentsdb/docker-compose.yaml b/devenv/docker/blocks/opentsdb/docker-compose.yaml
similarity index 100%
rename from docker/blocks/opentsdb/docker-compose.yaml
rename to devenv/docker/blocks/opentsdb/docker-compose.yaml
diff --git a/docker/blocks/postgres/docker-compose.yaml b/devenv/docker/blocks/postgres/docker-compose.yaml
similarity index 100%
rename from docker/blocks/postgres/docker-compose.yaml
rename to devenv/docker/blocks/postgres/docker-compose.yaml
diff --git a/docker/blocks/postgres_tests/Dockerfile b/devenv/docker/blocks/postgres_tests/Dockerfile
similarity index 100%
rename from docker/blocks/postgres_tests/Dockerfile
rename to devenv/docker/blocks/postgres_tests/Dockerfile
diff --git a/docker/blocks/postgres_tests/docker-compose.yaml b/devenv/docker/blocks/postgres_tests/docker-compose.yaml
similarity index 63%
rename from docker/blocks/postgres_tests/docker-compose.yaml
rename to devenv/docker/blocks/postgres_tests/docker-compose.yaml
index f5ce0a5a3d3..7e6da7d8517 100644
--- a/docker/blocks/postgres_tests/docker-compose.yaml
+++ b/devenv/docker/blocks/postgres_tests/docker-compose.yaml
@@ -1,6 +1,6 @@
postgrestest:
build:
- context: blocks/postgres_tests
+ context: docker/blocks/postgres_tests
environment:
POSTGRES_USER: grafanatest
POSTGRES_PASSWORD: grafanatest
diff --git a/docker/blocks/postgres_tests/setup.sql b/devenv/docker/blocks/postgres_tests/setup.sql
similarity index 100%
rename from docker/blocks/postgres_tests/setup.sql
rename to devenv/docker/blocks/postgres_tests/setup.sql
diff --git a/docker/blocks/prometheus/Dockerfile b/devenv/docker/blocks/prometheus/Dockerfile
similarity index 100%
rename from docker/blocks/prometheus/Dockerfile
rename to devenv/docker/blocks/prometheus/Dockerfile
diff --git a/docker/blocks/prometheus/alert.rules b/devenv/docker/blocks/prometheus/alert.rules
similarity index 100%
rename from docker/blocks/prometheus/alert.rules
rename to devenv/docker/blocks/prometheus/alert.rules
diff --git a/docker/blocks/prometheus2/docker-compose.yaml b/devenv/docker/blocks/prometheus/docker-compose.yaml
similarity index 86%
rename from docker/blocks/prometheus2/docker-compose.yaml
rename to devenv/docker/blocks/prometheus/docker-compose.yaml
index 589df868084..db778060dde 100644
--- a/docker/blocks/prometheus2/docker-compose.yaml
+++ b/devenv/docker/blocks/prometheus/docker-compose.yaml
@@ -1,5 +1,5 @@
prometheus:
- build: blocks/prometheus2
+ build: docker/blocks/prometheus
network_mode: host
ports:
- "9090:9090"
@@ -25,7 +25,7 @@
- "9093:9093"
prometheus-random-data:
- build: blocks/prometheus_random_data
+ build: docker/blocks/prometheus_random_data
network_mode: host
ports:
- "8081:8080"
diff --git a/docker/blocks/prometheus/prometheus.yml b/devenv/docker/blocks/prometheus/prometheus.yml
similarity index 100%
rename from docker/blocks/prometheus/prometheus.yml
rename to devenv/docker/blocks/prometheus/prometheus.yml
diff --git a/docker/blocks/prometheus2/Dockerfile b/devenv/docker/blocks/prometheus2/Dockerfile
similarity index 100%
rename from docker/blocks/prometheus2/Dockerfile
rename to devenv/docker/blocks/prometheus2/Dockerfile
diff --git a/docker/blocks/prometheus2/alert.rules b/devenv/docker/blocks/prometheus2/alert.rules
similarity index 100%
rename from docker/blocks/prometheus2/alert.rules
rename to devenv/docker/blocks/prometheus2/alert.rules
diff --git a/docker/blocks/prometheus/docker-compose.yaml b/devenv/docker/blocks/prometheus2/docker-compose.yaml
similarity index 85%
rename from docker/blocks/prometheus/docker-compose.yaml
rename to devenv/docker/blocks/prometheus2/docker-compose.yaml
index 3c304cc74ad..d586b4b5742 100644
--- a/docker/blocks/prometheus/docker-compose.yaml
+++ b/devenv/docker/blocks/prometheus2/docker-compose.yaml
@@ -1,5 +1,5 @@
prometheus:
- build: blocks/prometheus
+ build: docker/blocks/prometheus2
network_mode: host
ports:
- "9090:9090"
@@ -25,7 +25,7 @@
- "9093:9093"
prometheus-random-data:
- build: blocks/prometheus_random_data
+ build: docker/blocks/prometheus_random_data
network_mode: host
ports:
- "8081:8080"
diff --git a/docker/blocks/prometheus2/prometheus.yml b/devenv/docker/blocks/prometheus2/prometheus.yml
similarity index 100%
rename from docker/blocks/prometheus2/prometheus.yml
rename to devenv/docker/blocks/prometheus2/prometheus.yml
diff --git a/docker/blocks/prometheus_mac/Dockerfile b/devenv/docker/blocks/prometheus_mac/Dockerfile
similarity index 100%
rename from docker/blocks/prometheus_mac/Dockerfile
rename to devenv/docker/blocks/prometheus_mac/Dockerfile
diff --git a/docker/blocks/prometheus_mac/alert.rules b/devenv/docker/blocks/prometheus_mac/alert.rules
similarity index 100%
rename from docker/blocks/prometheus_mac/alert.rules
rename to devenv/docker/blocks/prometheus_mac/alert.rules
diff --git a/docker/blocks/prometheus_mac/docker-compose.yaml b/devenv/docker/blocks/prometheus_mac/docker-compose.yaml
similarity index 82%
rename from docker/blocks/prometheus_mac/docker-compose.yaml
rename to devenv/docker/blocks/prometheus_mac/docker-compose.yaml
index ef53b07418a..b73d278fae2 100644
--- a/docker/blocks/prometheus_mac/docker-compose.yaml
+++ b/devenv/docker/blocks/prometheus_mac/docker-compose.yaml
@@ -1,5 +1,5 @@
prometheus:
- build: blocks/prometheus_mac
+ build: docker/blocks/prometheus_mac
ports:
- "9090:9090"
@@ -21,6 +21,6 @@
- "9093:9093"
prometheus-random-data:
- build: blocks/prometheus_random_data
+ build: docker/blocks/prometheus_random_data
ports:
- "8081:8080"
diff --git a/docker/blocks/prometheus_mac/prometheus.yml b/devenv/docker/blocks/prometheus_mac/prometheus.yml
similarity index 100%
rename from docker/blocks/prometheus_mac/prometheus.yml
rename to devenv/docker/blocks/prometheus_mac/prometheus.yml
diff --git a/docker/blocks/prometheus_random_data/Dockerfile b/devenv/docker/blocks/prometheus_random_data/Dockerfile
similarity index 100%
rename from docker/blocks/prometheus_random_data/Dockerfile
rename to devenv/docker/blocks/prometheus_random_data/Dockerfile
diff --git a/docker/blocks/smtp/Dockerfile b/devenv/docker/blocks/smtp/Dockerfile
similarity index 100%
rename from docker/blocks/smtp/Dockerfile
rename to devenv/docker/blocks/smtp/Dockerfile
diff --git a/docker/blocks/smtp/bootstrap.sh b/devenv/docker/blocks/smtp/bootstrap.sh
similarity index 100%
rename from docker/blocks/smtp/bootstrap.sh
rename to devenv/docker/blocks/smtp/bootstrap.sh
diff --git a/docker/blocks/smtp/docker-compose.yaml b/devenv/docker/blocks/smtp/docker-compose.yaml
similarity index 100%
rename from docker/blocks/smtp/docker-compose.yaml
rename to devenv/docker/blocks/smtp/docker-compose.yaml
diff --git a/docker/buildcontainer/Dockerfile b/devenv/docker/buildcontainer/Dockerfile
similarity index 100%
rename from docker/buildcontainer/Dockerfile
rename to devenv/docker/buildcontainer/Dockerfile
diff --git a/docker/buildcontainer/build.sh b/devenv/docker/buildcontainer/build.sh
similarity index 100%
rename from docker/buildcontainer/build.sh
rename to devenv/docker/buildcontainer/build.sh
diff --git a/docker/buildcontainer/build_circle.sh b/devenv/docker/buildcontainer/build_circle.sh
similarity index 100%
rename from docker/buildcontainer/build_circle.sh
rename to devenv/docker/buildcontainer/build_circle.sh
diff --git a/docker/buildcontainer/run_circle.sh b/devenv/docker/buildcontainer/run_circle.sh
similarity index 100%
rename from docker/buildcontainer/run_circle.sh
rename to devenv/docker/buildcontainer/run_circle.sh
diff --git a/docker/compose_header.yml b/devenv/docker/compose_header.yml
similarity index 100%
rename from docker/compose_header.yml
rename to devenv/docker/compose_header.yml
diff --git a/docker/debtest/Dockerfile b/devenv/docker/debtest/Dockerfile
similarity index 100%
rename from docker/debtest/Dockerfile
rename to devenv/docker/debtest/Dockerfile
diff --git a/docker/debtest/build.sh b/devenv/docker/debtest/build.sh
similarity index 100%
rename from docker/debtest/build.sh
rename to devenv/docker/debtest/build.sh
diff --git a/devenv/docker/ha_test/.gitignore b/devenv/docker/ha_test/.gitignore
new file mode 100644
index 00000000000..0f4e139e204
--- /dev/null
+++ b/devenv/docker/ha_test/.gitignore
@@ -0,0 +1 @@
+grafana/provisioning/dashboards/alerts/alert-*
\ No newline at end of file
diff --git a/devenv/docker/ha_test/README.md b/devenv/docker/ha_test/README.md
new file mode 100644
index 00000000000..bc93727ceae
--- /dev/null
+++ b/devenv/docker/ha_test/README.md
@@ -0,0 +1,137 @@
+# Grafana High Availability (HA) test setup
+
+A set of docker compose services which together creates a Grafana HA test setup with capability of easily
+scaling up/down number of Grafana instances.
+
+Included services
+
+* Grafana
+* Mysql - Grafana configuration database and session storage
+* Prometheus - Monitoring of Grafana and used as datasource of provisioned alert rules
+* Nginx - Reverse proxy for Grafana and Prometheus. Enables browsing Grafana/Prometheus UI using a hostname
+
+## Prerequisites
+
+### Build grafana docker container
+
+Build a Grafana docker container from current branch and commit and tag it as grafana/grafana:dev.
+
+```bash
+$ cd <grafana repo>
+$ make build-docker-full
+```
+
+### Virtual host names
+
+#### Alternative 1 - Use dnsmasq
+
+```bash
+$ sudo apt-get install dnsmasq
+$ echo 'address=/loc/127.0.0.1' | sudo tee /etc/dnsmasq.d/dnsmasq-loc.conf > /dev/null
+$ sudo /etc/init.d/dnsmasq restart
+$ ping whatever.loc
+PING whatever.loc (127.0.0.1) 56(84) bytes of data.
+64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.076 ms
+--- whatever.loc ping statistics ---
+1 packet transmitted, 1 received, 0% packet loss, time 1998ms
+```
+
+#### Alternative 2 - Manually update /etc/hosts
+
+Update your `/etc/hosts` to be able to access Grafana and/or Prometheus UI using a hostname.
+
+```bash
+$ cat /etc/hosts
+127.0.0.1 grafana.loc
+127.0.0.1 prometheus.loc
+```
+
+## Start services
+
+```bash
+$ docker-compose up -d
+```
+
+Browse
+* http://grafana.loc/
+* http://prometheus.loc/
+
+Check for any errors
+
+```bash
+$ docker-compose logs | grep error
+```
+
+### Scale Grafana instances up/down
+
+Scale number of Grafana instances to `<instances>`
+
+```bash
+$ docker-compose up --scale grafana=<instances> -d
+# for example 3 instances
+$ docker-compose up --scale grafana=3 -d
+```
+
+## Test alerting
+
+### Create notification channels
+
+Creates default notification channels, if they don't already exist
+
+```bash
+$ ./alerts.sh setup
+```
+
+### Slack notifications
+
+Disable
+
+```bash
+$ ./alerts.sh slack -d
+```
+
+Enable and configure url
+
+```bash
+$ ./alerts.sh slack -u https://hooks.slack.com/services/...
+```
+
+Enable, configure url and enable reminders
+
+```bash
+$ ./alerts.sh slack -u https://hooks.slack.com/services/... -r -e 10m
+```
+
+### Provision alert dashboards with alert rules
+
+Provision 1 dashboard/alert rule (default)
+
+```bash
+$ ./alerts.sh provision
+```
+
+Provision 10 dashboards/alert rules
+
+```bash
+$ ./alerts.sh provision -a 10
+```
+
+Provision 10 dashboards/alert rules and change condition to `gt > 100`
+
+```bash
+$ ./alerts.sh provision -a 10 -c 100
+```
+
+### Pause/unpause all alert rules
+
+Pause
+
+```bash
+$ ./alerts.sh pause
+```
+
+Unpause
+
+```bash
+$ ./alerts.sh unpause
+```
diff --git a/devenv/docker/ha_test/alerts.sh b/devenv/docker/ha_test/alerts.sh
new file mode 100755
index 00000000000..a05a4581739
--- /dev/null
+++ b/devenv/docker/ha_test/alerts.sh
@@ -0,0 +1,156 @@
+#!/bin/bash
+
+requiresJsonnet() {
+ if ! type "jsonnet" > /dev/null; then
+ echo "you need to install jsonnet to run this script"
+ echo "follow the instructions on https://github.com/google/jsonnet"
+ exit 1
+ fi
+}
+
+setup() {
+ STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/1)
+ if [ $STATUS -eq 200 ]; then
+ echo "Email already exists, skipping..."
+ else
+ curl -H "Content-Type: application/json" \
+ -d '{
+ "name": "Email",
+ "type": "email",
+ "isDefault": false,
+ "sendReminder": false,
+ "uploadImage": true,
+ "settings": {
+ "addresses": "user@test.com"
+ }
+ }' \
+ http://admin:admin@grafana.loc/api/alert-notifications
+ fi
+
+ STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/2)
+ if [ $STATUS -eq 200 ]; then
+ echo "Slack already exists, skipping..."
+ else
+ curl -H "Content-Type: application/json" \
+ -d '{
+ "name": "Slack",
+ "type": "slack",
+ "isDefault": false,
+ "sendReminder": false,
+ "uploadImage": true
+ }' \
+ http://admin:admin@grafana.loc/api/alert-notifications
+ fi
+}
+
+slack() {
+ enabled=true
+ url=''
+ remind=false
+ remindEvery='10m'
+
+ while getopts ":e:u:dr" o; do
+ case "${o}" in
+ e)
+ remindEvery=${OPTARG}
+ ;;
+ u)
+ url=${OPTARG}
+ ;;
+ d)
+ enabled=false
+ ;;
+ r)
+ remind=true
+ ;;
+ esac
+ done
+ shift $((OPTIND-1))
+
+ curl -X PUT \
+ -H "Content-Type: application/json" \
+ -d '{
+ "id": 2,
+ "name": "Slack",
+ "type": "slack",
+ "isDefault": '$enabled',
+ "sendReminder": '$remind',
+ "frequency": "'$remindEvery'",
+ "uploadImage": true,
+ "settings": {
+ "url": "'$url'"
+ }
+ }' \
+ http://admin:admin@grafana.loc/api/alert-notifications/2
+}
+
+provision() {
+ alerts=1
+ condition=65
+ while getopts ":a:c:" o; do
+ case "${o}" in
+ a)
+ alerts=${OPTARG}
+ ;;
+ c)
+ condition=${OPTARG}
+ ;;
+ esac
+ done
+ shift $((OPTIND-1))
+
+ requiresJsonnet
+
+ rm -rf grafana/provisioning/dashboards/alerts/alert-*.json
+ jsonnet -m grafana/provisioning/dashboards/alerts grafana/provisioning/alerts.jsonnet --ext-code alerts=$alerts --ext-code condition=$condition
+}
+
+pause() {
+ curl -H "Content-Type: application/json" \
+ -d '{"paused":true}' \
+ http://admin:admin@grafana.loc/api/admin/pause-all-alerts
+}
+
+unpause() {
+ curl -H "Content-Type: application/json" \
+ -d '{"paused":false}' \
+ http://admin:admin@grafana.loc/api/admin/pause-all-alerts
+}
+
+usage() {
+ echo -e "Usage: ./alerts.sh COMMAND [OPTIONS]\n"
+ echo -e "Commands"
+ echo -e " setup\t\t creates default alert notification channels"
+ echo -e " slack\t\t configure slack notification channel"
+ echo -e " [-d]\t\t\t disable notifier, default enabled"
+ echo -e " [-u]\t\t\t url"
+ echo -e " [-r]\t\t\t send reminders"
+ echo -e " [-e <remind every>]\t\t default 10m\n"
+ echo -e " provision\t provision alerts"
+ echo -e " [-a <alert rule count>]\t default 1"
+ echo -e " [-c <condition value>]\t default 65\n"
+ echo -e " pause\t\t pause all alerts"
+ echo -e " unpause\t unpause all alerts"
+}
+
+main() {
+ local cmd=$1
+
+ if [[ $cmd == "setup" ]]; then
+ setup
+ elif [[ $cmd == "slack" ]]; then
+ slack "${@:2}"
+ elif [[ $cmd == "provision" ]]; then
+ provision "${@:2}"
+ elif [[ $cmd == "pause" ]]; then
+ pause
+ elif [[ $cmd == "unpause" ]]; then
+ unpause
+ fi
+
+ if [[ -z "$cmd" ]]; then
+ usage
+ fi
+}
+
+main "$@"
diff --git a/devenv/docker/ha_test/docker-compose.yaml b/devenv/docker/ha_test/docker-compose.yaml
new file mode 100644
index 00000000000..ce8630d88a4
--- /dev/null
+++ b/devenv/docker/ha_test/docker-compose.yaml
@@ -0,0 +1,78 @@
+version: "2.1"
+
+services:
+ nginx-proxy:
+ image: jwilder/nginx-proxy
+ ports:
+ - "80:80"
+ volumes:
+ - /var/run/docker.sock:/tmp/docker.sock:ro
+
+ db:
+ image: mysql
+ environment:
+ MYSQL_ROOT_PASSWORD: rootpass
+ MYSQL_DATABASE: grafana
+ MYSQL_USER: grafana
+ MYSQL_PASSWORD: password
+ ports:
+ - 3306
+ healthcheck:
+ test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"]
+ timeout: 10s
+ retries: 10
+
+ # db:
+ # image: postgres:9.3
+ # environment:
+ # POSTGRES_DATABASE: grafana
+ # POSTGRES_USER: grafana
+ # POSTGRES_PASSWORD: password
+ # ports:
+ # - 5432
+ # healthcheck:
+ # test: ["CMD-SHELL", "pg_isready -d grafana -U grafana"]
+ # timeout: 10s
+ # retries: 10
+
+ grafana:
+ image: grafana/grafana:dev
+ volumes:
+ - ./grafana/provisioning/:/etc/grafana/provisioning/
+ environment:
+ - VIRTUAL_HOST=grafana.loc
+ - GF_SERVER_ROOT_URL=http://grafana.loc
+ - GF_DATABASE_NAME=grafana
+ - GF_DATABASE_USER=grafana
+ - GF_DATABASE_PASSWORD=password
+ - GF_DATABASE_TYPE=mysql
+ - GF_DATABASE_HOST=db:3306
+ - GF_SESSION_PROVIDER=mysql
+ - GF_SESSION_PROVIDER_CONFIG=grafana:password@tcp(db:3306)/grafana?allowNativePasswords=true
+ # - GF_DATABASE_TYPE=postgres
+ # - GF_DATABASE_HOST=db:5432
+ # - GF_DATABASE_SSL_MODE=disable
+ # - GF_SESSION_PROVIDER=postgres
+ # - GF_SESSION_PROVIDER_CONFIG=user=grafana password=password host=db port=5432 dbname=grafana sslmode=disable
+ - GF_LOG_FILTERS=alerting.notifier:debug,alerting.notifier.slack:debug
+ ports:
+ - 3000
+ depends_on:
+ db:
+ condition: service_healthy
+
+ prometheus:
+ image: prom/prometheus:v2.4.2
+ volumes:
+ - ./prometheus/:/etc/prometheus/
+ environment:
+ - VIRTUAL_HOST=prometheus.loc
+ ports:
+ - 9090
+
+ # mysqld-exporter:
+ # image: prom/mysqld-exporter
+ # environment:
+ # - DATA_SOURCE_NAME=grafana:password@(mysql:3306)/
+ # ports:
+ # - 9104
diff --git a/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet
new file mode 100644
index 00000000000..86ded7e79d6
--- /dev/null
+++ b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet
@@ -0,0 +1,202 @@
+local numAlerts = std.extVar('alerts');
+local condition = std.extVar('condition');
+local arr = std.range(1, numAlerts);
+
+local alertDashboardTemplate = {
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": null,
+ "links": [],
+ "panels": [
+ {
+ "alert": {
+ "conditions": [
+ {
+ "evaluator": {
+ "params": [
+ 65
+ ],
+ "type": "gt"
+ },
+ "operator": {
+ "type": "and"
+ },
+ "query": {
+ "params": [
+ "A",
+ "5m",
+ "now"
+ ]
+ },
+ "reducer": {
+ "params": [],
+ "type": "avg"
+ },
+ "type": "query"
+ }
+ ],
+ "executionErrorState": "alerting",
+ "frequency": "10s",
+ "handler": 1,
+ "name": "bulk alerting",
+ "noDataState": "no_data",
+ "notifications": [
+ {
+ "id": 2
+ }
+ ]
+ },
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "Prometheus",
+ "fill": 1,
+ "gridPos": {
+ "h": 9,
+ "w": 12,
+ "x": 0,
+ "y": 0
+ },
+ "id": 2,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "$$hashKey": "object:117",
+ "expr": "go_goroutines",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "refId": "A"
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "critical",
+ "fill": true,
+ "line": true,
+ "op": "gt",
+ "value": 50
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Panel Title",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "schemaVersion": 16,
+ "style": "dark",
+ "tags": [],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "New dashboard",
+ "uid": null,
+ "version": 0
+};
+
+
+{
+ ['alert-' + std.toString(x) + '.json']:
+ alertDashboardTemplate + {
+ panels: [
+ alertDashboardTemplate.panels[0] +
+ {
+ alert+: {
+ name: 'Alert rule ' + x,
+ conditions: [
+ alertDashboardTemplate.panels[0].alert.conditions[0] +
+ {
+ evaluator+: {
+ params: [condition]
+ }
+ },
+ ],
+ },
+ },
+ ],
+ uid: 'alert-' + x,
+ title: 'Alert ' + x
+ },
+ for x in arr
+}
\ No newline at end of file
diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml
new file mode 100644
index 00000000000..60b6cd4bb04
--- /dev/null
+++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml
@@ -0,0 +1,8 @@
+apiVersion: 1
+
+providers:
+ - name: 'Alerts'
+ folder: 'Alerts'
+ type: file
+ options:
+ path: /etc/grafana/provisioning/dashboards/alerts
diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json
new file mode 100644
index 00000000000..53e33c37b1f
--- /dev/null
+++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json
@@ -0,0 +1,172 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "links": [],
+ "panels": [
+ {
+ "aliasColors": {
+ "Active alerts": "#bf1b00"
+ },
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "Prometheus",
+ "fill": 1,
+ "gridPos": {
+ "h": 12,
+ "w": 24,
+ "x": 0,
+ "y": 0
+ },
+ "id": 2,
+ "interval": "",
+ "legend": {
+ "alignAsTable": true,
+ "avg": false,
+ "current": true,
+ "max": false,
+ "min": false,
+ "rightSide": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "null",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [
+ {
+ "alias": "Active grafana instances",
+ "dashes": true,
+ "fill": 0
+ }
+ ],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "expr": "sum(increase(grafana_alerting_notification_sent_total[1m])) by(job)",
+ "format": "time_series",
+ "instant": false,
+ "interval": "1m",
+ "intervalFactor": 1,
+ "legendFormat": "Notifications sent",
+ "refId": "A"
+ },
+ {
+ "expr": "min(grafana_alerting_active_alerts) without(instance)",
+ "format": "time_series",
+ "interval": "1m",
+ "intervalFactor": 1,
+ "legendFormat": "Active alerts",
+ "refId": "B"
+ },
+ {
+ "expr": "count(up{job=\"grafana\"})",
+ "format": "time_series",
+ "intervalFactor": 1,
+ "legendFormat": "Active grafana instances",
+ "refId": "C"
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Notifications sent vs active alerts",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": 3
+ }
+ }
+ ],
+ "schemaVersion": 16,
+ "style": "dark",
+ "tags": [],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-1h",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "",
+ "title": "Overview",
+ "uid": "xHy7-hAik",
+ "version": 6
+}
\ No newline at end of file
diff --git a/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml
new file mode 100644
index 00000000000..8d59793be16
--- /dev/null
+++ b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml
@@ -0,0 +1,11 @@
+apiVersion: 1
+
+datasources:
+ - name: Prometheus
+ type: prometheus
+ access: proxy
+ url: http://prometheus:9090
+ jsonData:
+ timeInterval: 10s
+ queryTimeout: 30s
+ httpMethod: POST
\ No newline at end of file
diff --git a/devenv/docker/ha_test/prometheus/prometheus.yml b/devenv/docker/ha_test/prometheus/prometheus.yml
new file mode 100644
index 00000000000..ea97ba8ba05
--- /dev/null
+++ b/devenv/docker/ha_test/prometheus/prometheus.yml
@@ -0,0 +1,39 @@
+# my global config
+global:
+ scrape_interval: 10s # Scrape targets every 10 seconds.
+ evaluation_interval: 10s # Evaluate rules every 10 seconds.
+ # scrape_timeout is set to the global default (10s).
+
+# Load and evaluate rules in this file every 'evaluation_interval' seconds.
+#rule_files:
+# - "alert.rules"
+# - "first.rules"
+# - "second.rules"
+
+# alerting:
+# alertmanagers:
+# - scheme: http
+# static_configs:
+# - targets:
+# - "127.0.0.1:9093"
+
+scrape_configs:
+ - job_name: 'prometheus'
+ static_configs:
+ - targets: ['localhost:9090']
+
+ - job_name: 'grafana'
+ dns_sd_configs:
+ - names:
+ - 'grafana'
+ type: 'A'
+ port: 3000
+ refresh_interval: 10s
+
+ # - job_name: 'mysql'
+ # dns_sd_configs:
+ # - names:
+ # - 'mysqld-exporter'
+ # type: 'A'
+ # port: 9104
+ # refresh_interval: 10s
\ No newline at end of file
diff --git a/docker/rpmtest/build.sh b/devenv/docker/rpmtest/build.sh
similarity index 100%
rename from docker/rpmtest/build.sh
rename to devenv/docker/rpmtest/build.sh
diff --git a/tests/api/clearState.test.ts b/devenv/e2e-api-tests/clearState.test.ts
similarity index 100%
rename from tests/api/clearState.test.ts
rename to devenv/e2e-api-tests/clearState.test.ts
diff --git a/tests/api/client.ts b/devenv/e2e-api-tests/client.ts
similarity index 100%
rename from tests/api/client.ts
rename to devenv/e2e-api-tests/client.ts
diff --git a/tests/api/dashboard.test.ts b/devenv/e2e-api-tests/dashboard.test.ts
similarity index 100%
rename from tests/api/dashboard.test.ts
rename to devenv/e2e-api-tests/dashboard.test.ts
diff --git a/tests/api/folder.test.ts b/devenv/e2e-api-tests/folder.test.ts
similarity index 100%
rename from tests/api/folder.test.ts
rename to devenv/e2e-api-tests/folder.test.ts
diff --git a/tests/api/jest.js b/devenv/e2e-api-tests/jest.js
similarity index 100%
rename from tests/api/jest.js
rename to devenv/e2e-api-tests/jest.js
diff --git a/tests/api/search.test.ts b/devenv/e2e-api-tests/search.test.ts
similarity index 100%
rename from tests/api/search.test.ts
rename to devenv/e2e-api-tests/search.test.ts
diff --git a/tests/api/setup.ts b/devenv/e2e-api-tests/setup.ts
similarity index 100%
rename from tests/api/setup.ts
rename to devenv/e2e-api-tests/setup.ts
diff --git a/tests/api/tsconfig.json b/devenv/e2e-api-tests/tsconfig.json
similarity index 100%
rename from tests/api/tsconfig.json
rename to devenv/e2e-api-tests/tsconfig.json
diff --git a/tests/api/user.test.ts b/devenv/e2e-api-tests/user.test.ts
similarity index 100%
rename from tests/api/user.test.ts
rename to devenv/e2e-api-tests/user.test.ts
diff --git a/devenv/setup.sh b/devenv/setup.sh
index cc71ecc71bf..c9cc0d47a6f 100755
--- a/devenv/setup.sh
+++ b/devenv/setup.sh
@@ -11,7 +11,21 @@ bulkDashboard() {
let COUNTER=COUNTER+1
done
- ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
+ ln -s -f ../../../devenv/bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
+}
+
+bulkAlertingDashboard() {
+
+ requiresJsonnet
+
+ COUNTER=0
+ MAX=100
+ while [ $COUNTER -lt $MAX ]; do
+ jsonnet -o "bulk_alerting_dashboards/alerting_dashboard${COUNTER}.json" -e "local bulkDash = import 'bulk_alerting_dashboards/bulkdash_alerting.jsonnet'; bulkDash + { uid: 'bd-${COUNTER}', title: 'alerting-title-${COUNTER}' }"
+ let COUNTER=COUNTER+1
+ done
+
+ ln -s -f ../../../devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
}
requiresJsonnet() {
@@ -36,8 +50,9 @@ devDatasources() {
usage() {
echo -e "\n"
echo "Usage:"
- echo " bulk-dashboards - create and provisioning 400 dashboards"
- echo " no args - provisiong core datasources and dev dashboards"
+ echo " bulk-dashboards - create and provisioning 400 dashboards"
+ echo " bulk-alerting-dashboards - create and provisioning 400 dashboards with alerts"
+ echo " no args - provisioning core datasources and dev dashboards"
}
main() {
@@ -48,7 +63,9 @@ main() {
local cmd=$1
- if [[ $cmd == "bulk-dashboards" ]]; then
+ if [[ $cmd == "bulk-alerting-dashboards" ]]; then
+ bulkAlertingDashboard
+ elif [[ $cmd == "bulk-dashboards" ]]; then
bulkDashboard
else
devDashboards
diff --git a/docs/README.md b/docs/README.md
index ff5ef6a4131..7310f184a60 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -65,7 +65,7 @@ make docs-build
This will rebuild the docs docker container.
-To be able to use the image your have to quit (CTRL-C) the `make watch` command (that you run in the same directory as this README). Then simply rerun `make watch`, it will restart the docs server but now with access to your image.
+To be able to use the image you have to quit (CTRL-C) the `make watch` command (that you run in the same directory as this README). Then simply rerun `make watch`, it will restart the docs server but now with access to your image.
### Editing content
diff --git a/docs/sources/administration/permissions.md b/docs/sources/administration/permissions.md
index 1d1a70607c8..0d374f03647 100644
--- a/docs/sources/administration/permissions.md
+++ b/docs/sources/administration/permissions.md
@@ -55,7 +55,7 @@ This admin flag makes a user a `Super Admin`. This means they can access the `Se
{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}}
For dashboards and dashboard folders there is a **Permissions** page that make it possible to
-remove the default role based permssions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**.
+remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**.
You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**.
@@ -102,7 +102,7 @@ Permissions for a dashboard:
Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins.
-- **View**: Can only view existing dashboars/folders.
+- **View**: Can only view existing dashboards/folders.
- You cannot override permissions for users with **Org Admin Role**
- A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md
index c8d83ea1c54..e4c78d692b3 100644
--- a/docs/sources/administration/provisioning.md
+++ b/docs/sources/administration/provisioning.md
@@ -71,6 +71,7 @@ Puppet | [https://forge.puppet.com/puppet/grafana](https://forge.puppet.com/pupp
Ansible | [https://github.com/cloudalchemy/ansible-grafana](https://github.com/cloudalchemy/ansible-grafana)
Chef | [https://github.com/JonathanTron/chef-grafana](https://github.com/JonathanTron/chef-grafana)
Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://github.com/salt-formulas/salt-formula-grafana)
+Jsonnet | [https://github.com/grafana/grafonnet-lib/](https://github.com/grafana/grafonnet-lib/)
## Datasources
@@ -122,7 +123,7 @@ datasources:
withCredentials:
# mark as default datasource. Max one per org
isDefault:
- # fields that will be converted to json and stored in json_data
+ # fields that will be converted to json and stored in jsonData
jsonData:
graphiteVersion: "1.1"
tlsAuth: true
@@ -146,7 +147,7 @@ Please refer to each datasource documentation for specific provisioning examples
#### Json Data
-Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use.
+Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `jsonData` field. Here are the most common settings that the core datasources use.
| Name | Type | Datasource | Description |
| ---- | ---- | ---- | ---- |
@@ -155,9 +156,9 @@ Since not all datasources have the same configuration settings we only have the
| tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. |
| graphiteVersion | string | Graphite | Graphite version |
| timeInterval | string | Prometheus, Elasticsearch, InfluxDB, MySQL, PostgreSQL & MSSQL | Lowest interval/step value that should be used for this data source |
-| esVersion | number | Elastic | Elasticsearch version as a number (2/5/56) |
-| timeField | string | Elastic | Which field that should be used as timestamp |
-| interval | string | Elastic | Index date time format |
+| esVersion | number | Elasticsearch | Elasticsearch version as a number (2/5/56) |
+| timeField | string | Elasticsearch | Which field that should be used as timestamp |
+| interval | string | Elasticsearch | Index date time format |
| authType | string | Cloudwatch | Auth provider. keys/credentials/arn |
| assumeRoleArn | string | Cloudwatch | ARN of Assume Role |
| defaultRegion | string | Cloudwatch | AWS region |
@@ -167,6 +168,9 @@ Since not all datasources have the same configuration settings we only have the
| sslmode | string | PostgreSQL | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |
| postgresVersion | number | PostgreSQL | Postgres version as a number (903/904/905/906/1000) meaning v9.3, v9.4, ..., v10 |
| timescaledb | boolean | PostgreSQL | Enable usage of TimescaleDB extension |
+| maxOpenConns | number | MySQL, PostgreSQL & MSSQL | Maximum number of open connections to the database (Grafana v5.4+) |
+| maxIdleConns | number | MySQL, PostgreSQL & MSSQL | Maximum number of connections in the idle connection pool (Grafana v5.4+) |
+| connMaxLifetime | number | MySQL, PostgreSQL & MSSQL | Maximum amount of time in seconds a connection may be reused (Grafana v5.4+) |
#### Secure Json Data
@@ -199,7 +203,7 @@ providers:
folder: ''
type: file
disableDeletion: false
- updateIntervalSeconds: 3 #how often Grafana will scan for changed dashboards
+ updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards
options:
path: /var/lib/grafana/dashboards
```
@@ -216,7 +220,7 @@ Note: The JSON shown in input field and when using `Copy JSON to Clipboard` and/
{{< docs-imagebox img="/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="500px" class="docs-image--no-shadow" >}}
-### Reuseable Dashboard Urls
+### Reusable Dashboard Urls
If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifier.
When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated.
diff --git a/docs/sources/auth/generic-oauth.md b/docs/sources/auth/generic-oauth.md
index bec5a98e04a..6fa6531fc98 100644
--- a/docs/sources/auth/generic-oauth.md
+++ b/docs/sources/auth/generic-oauth.md
@@ -17,6 +17,9 @@ can find examples using Okta, BitBucket, OneLogin and Azure.
This callback URL must match the full HTTP address that you use in your browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
+You may have to set the `root_url` option of `[server]` for the callback URL to be
+correct. For example in case you are serving Grafana behind a proxy.
+
Example config:
```bash
@@ -32,7 +35,14 @@ allowed_domains = mycompany.com mycompany.org
allow_sign_up = true
```
-Set api_url to the resource that returns [OpenID UserInfo](https://connect2id.com/products/server/docs/api/userinfo) compatible information.
+Set `api_url` to the resource that returns [OpenID UserInfo](https://connect2id.com/products/server/docs/api/userinfo) compatible information.
+
+Grafana will attempt to determine the user's e-mail address by querying the OAuth provider as described below in the following order until an e-mail address is found:
+
+1. Check for the presence of an e-mail address via the `email` field encoded in the OAuth `id_token` parameter.
+2. Check for the presence of an e-mail address in the `attributes` map encoded in the OAuth `id_token` parameter. By default Grafana will perform a lookup into the attributes map using the `email:primary` key, however, this is configurable and can be adjusted by using the `email_attribute_name` configuration option.
+3. Query the `/emails` endpoint of the OAuth provider's API (configured with `api_url`) and check for the presence of an e-mail address marked as a primary address.
+4. If no e-mail address is found in steps (1-3), then the e-mail address of the user is set to the empty string.
## Set up OAuth2 with Okta
@@ -167,6 +177,38 @@ allowed_organizations =
allowed_organizations =
```
+> Note: It's important to ensure that the [root_url](/installation/configuration/#root-url) in Grafana is set in your Azure Application Reply URLs (App -> Settings -> Reply URLs)
+
+## Set up OAuth2 with Centrify
+
+1. Create a new Custom OpenID Connect application configuration in the Centrify dashboard.
+
+2. Create a memorable unique Application ID, e.g. "grafana", "grafana_aws", etc.
+
+3. Put in other basic configuration (name, description, logo, category)
+
+4. On the Trust tab, generate a long password and put it into the OpenID Connect Client Secret field.
+
+5. Put the URL to the front page of your Grafana instance into the "Resource Application URL" field.
+
+6. Add an authorized Redirect URI like https://your-grafana-server/login/generic_oauth
+
+7. Set up permissions, policies, etc. just like any other Centrify app
+
+8. Configure Grafana as follows:
+
+ ```bash
+ [auth.generic_oauth]
+ name = Centrify
+ enabled = true
+ allow_sign_up = true
+    client_id = <your application ID>
+    client_secret = <your generated client secret>
+    auth_url = https://<tenant id>.my.centrify.com/OAuth2/Authorize/<your application ID>
+    token_url = https://<tenant id>.my.centrify.com/OAuth2/Token/<your application ID>
+ ```
+
diff --git a/docs/sources/auth/github.md b/docs/sources/auth/github.md
index 263b3cc5d4d..b4ffc0fc2d4 100644
--- a/docs/sources/auth/github.md
+++ b/docs/sources/auth/github.md
@@ -46,6 +46,9 @@ team_ids =
allowed_organizations =
```
+You may have to set the `root_url` option of `[server]` for the callback URL to be
+correct. For example in case you are serving Grafana behind a proxy.
+
Restart the Grafana back-end. You should now see a GitHub login button
on the login page. You can now login or sign up with your GitHub
accounts.
diff --git a/docs/sources/auth/gitlab.md b/docs/sources/auth/gitlab.md
index 32910167f16..e3a450f9fc7 100644
--- a/docs/sources/auth/gitlab.md
+++ b/docs/sources/auth/gitlab.md
@@ -58,6 +58,9 @@ api_url = https://gitlab.com/api/v4
allowed_groups =
```
+You may have to set the `root_url` option of `[server]` for the callback URL to be
+correct. For example in case you are serving Grafana behind a proxy.
+
Restart the Grafana backend for your changes to take effect.
If you use your own instance of GitLab instead of `gitlab.com`, adjust
diff --git a/docs/sources/auth/google.md b/docs/sources/auth/google.md
index eeb78044d3e..f7faf1a1097 100644
--- a/docs/sources/auth/google.md
+++ b/docs/sources/auth/google.md
@@ -45,6 +45,9 @@ allowed_domains = mycompany.com mycompany.org
allow_sign_up = true
```
+You may have to set the `root_url` option of `[server]` for the callback URL to be
+correct. For example in case you are serving Grafana behind a proxy.
+
Restart the Grafana back-end. You should now see a Google login button
on the login page. You can now login or sign up with your Google
accounts. The `allowed_domains` option is optional, and domains were separated by space.
diff --git a/docs/sources/auth/ldap.md b/docs/sources/auth/ldap.md
index f63a44e1750..4a884a60d15 100644
--- a/docs/sources/auth/ldap.md
+++ b/docs/sources/auth/ldap.md
@@ -1,7 +1,7 @@
+++
title = "LDAP Authentication"
description = "Grafana LDAP Authentication Guide "
-keywords = ["grafana", "configuration", "documentation", "ldap"]
+keywords = ["grafana", "configuration", "documentation", "ldap", "active directory"]
type = "docs"
[menu.docs]
name = "LDAP"
@@ -10,35 +10,42 @@ parent = "authentication"
weight = 2
+++
-# LDAP
+# LDAP Authentication
The LDAP integration in Grafana allows your Grafana users to login with their LDAP credentials. You can also specify mappings between LDAP
-group memberships and Grafana Organization user roles. Below we detail grafana.ini config file
-settings and ldap.toml config file options.
+group memberships and Grafana Organization user roles.
+
+## Supported LDAP Servers
+
+Grafana uses a [third-party LDAP library](https://github.com/go-ldap/ldap) under the hood that supports basic LDAP v3 functionality.
+This means that you should be able to configure LDAP integration using any compliant LDAPv3 server, for example [OpenLDAP](#openldap) or
+[Active Directory](#active-directory) among [others](https://en.wikipedia.org/wiki/Directory_service#LDAP_implementations).
## Enable LDAP
-You turn on LDAP in the [main config file]({{< relref "installation/configuration.md" >}}) as well as specify the path to the LDAP
+In order to use LDAP integration you'll first need to enable LDAP in the [main config file]({{< relref "installation/configuration.md" >}}) as well as specify the path to the LDAP
specific configuration file (default: `/etc/grafana/ldap.toml`).
```bash
[auth.ldap]
# Set to `true` to enable LDAP integration (default: `false`)
enabled = true
+
# Path to the LDAP specific configuration file (default: `/etc/grafana/ldap.toml`)
-config_file = /etc/grafana/ldap.toml`
+config_file = /etc/grafana/ldap.toml
+
# Allow sign up should almost always be true (default) to allow new Grafana users to be created (if ldap authentication is ok). If set to
# false only pre-existing Grafana users will be able to login (if ldap authentication is ok).
allow_sign_up = true
```
-## LDAP Configuration
+## Grafana LDAP Configuration
+Depending on which LDAP server you're using and how that's configured, your Grafana LDAP configuration may vary.
+See [configuration examples](#configuration-examples) for more information.
+
+**LDAP specific configuration file (ldap.toml) example:**
```bash
-# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
-# [log]
-# filters = ldap:debug
-
[[servers]]
# Ldap server host (specify multiple hosts space separated)
host = "127.0.0.1"
@@ -69,13 +76,8 @@ search_filter = "(cn=%s)"
# An array of base dns to search through
search_base_dns = ["dc=grafana,dc=org"]
-# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
-# This is done by enabling group_search_filter below. You must also set member_of= "cn"
-# in [servers.attributes] below.
-
-## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
-## An array of the base DNs to search through for groups. Typically uses ou=groups
+# group_search_filter_user_attribute = "distinguishedName"
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
# Specify names of the ldap attributes your ldap uses
@@ -85,28 +87,11 @@ surname = "sn"
username = "cn"
member_of = "memberOf"
email = "email"
-
-# Map ldap groups to grafana org roles
-[[servers.group_mappings]]
-group_dn = "cn=admins,dc=grafana,dc=org"
-org_role = "Admin"
-# To make user an instance admin (Grafana Admin) uncomment line below
-# grafana_admin = true
-# The Grafana organization database id, optional, if left out the default org (id 1) will be used. Setting this allows for multiple group_dn's to be assigned to the same org_role provided the org_id differs
-# org_id = 1
-
-[[servers.group_mappings]]
-group_dn = "cn=users,dc=grafana,dc=org"
-org_role = "Editor"
-
-[[servers.group_mappings]]
-# If you want to match all (or no ldap groups) then you can use wildcard
-group_dn = "*"
-org_role = "Viewer"
-
```
-## Bind & Bind Password
+### Bind
+
+#### Bind & Bind Password
By default the configuration expects you to specify a bind DN and bind password. This should be a read only user that can perform LDAP searches.
When the user DN is found a second bind is performed with the user provided username & password (in the normal Grafana login form).
@@ -116,7 +101,7 @@ bind_dn = "cn=admin,dc=grafana,dc=org"
bind_password = "grafana"
```
-### Single Bind Example
+#### Single Bind Example
If you can provide a single bind expression that matches all possible users, you can skip the second bind and bind against the user DN directly.
This allows you to not specify a bind_password in the configuration file.
@@ -128,7 +113,7 @@ bind_dn = "cn=%s,o=users,dc=grafana,dc=org"
In this case you skip providing a `bind_password` and instead provide a `bind_dn` value with a `%s` somewhere. This will be replaced with the username entered in on the Grafana login page.
The search filter and search bases settings are still needed to perform the LDAP search to retrieve the other LDAP information (like LDAP groups and email).
-## POSIX schema (no memberOf attribute)
+### POSIX schema
If your ldap server does not support the memberOf attribute add these options:
```bash
@@ -136,27 +121,141 @@ If your ldap server does not support the memberOf attribute add these options:
group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
## An array of the base DNs to search through for groups. Typically uses ou=groups
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
+## the %s in the search filter will be replaced with the attribute defined below
+group_search_filter_user_attribute = "uid"
```
-Also change set `member_of = "cn"` in the `[servers.attributes]` section.
+Also set `member_of = "dn"` in the `[servers.attributes]` section.
+### Group Mappings
-## LDAP to Grafana Org Role Sync
+In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being
+the authoritative source. So, if you change a user's role in the Grafana Org. Users page, this change will be reset the next time the user logs in. If you
+change the LDAP groups of a user, the change will take effect the next time the user logs in.
-### Mappings
-In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization
-and role. These will be synced every time the user logs in, with LDAP being
-the authoritative source. So, if you change a user's role in the Grafana Org.
-Users page, this change will be reset the next time the user logs in. If you
-change the LDAP groups of a user, the change will take effect the next
-time the user logs in.
+The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the
+TOML config will be used.
-### Grafana Admin
-with a servers.group_mappings section you can set grafana_admin = true or false to sync Grafana Admin permission. A Grafana server admin has admin access over all orgs &
-users.
+**LDAP specific configuration file (ldap.toml) example:**
+```bash
+[[servers]]
+# other settings omitted for clarity
-### Priority
-The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the TOML config will be used.
+[[servers.group_mappings]]
+group_dn = "cn=superadmins,dc=grafana,dc=org"
+org_role = "Admin"
+grafana_admin = true # Available in Grafana v5.3 and above
+[[servers.group_mappings]]
+group_dn = "cn=admins,dc=grafana,dc=org"
+org_role = "Admin"
+[[servers.group_mappings]]
+group_dn = "cn=users,dc=grafana,dc=org"
+org_role = "Editor"
+[[servers.group_mappings]]
+group_dn = "*"
+org_role = "Viewer"
+```
+
+Setting | Required | Description | Default
+------------ | ------------ | ------------- | -------------
+`group_dn` | Yes | LDAP distinguished name (DN) of LDAP group. If you want to match all (or no LDAP groups) then you can use wildcard (`"*"`) |
+`org_role` | Yes | Assign users of `group_dn` the organisation role `"Admin"`, `"Editor"` or `"Viewer"` |
+`org_id` | No | The Grafana organization database id. Setting this allows for multiple group_dn's to be assigned to the same `org_role` provided the `org_id` differs | `1` (default org id)
+`grafana_admin` | No | When `true` makes user of `group_dn` Grafana server admin. A Grafana server admin has admin access over all organisations and users. Available in Grafana v5.3 and above | `false`
+
+### Nested/recursive group membership
+
+Users with nested/recursive group membership must have an LDAP server that supports `LDAP_MATCHING_RULE_IN_CHAIN`
+and configure `group_search_filter` in a way that it returns the groups the submitted username is a member of.
+
+**Active Directory example:**
+
+Active Directory groups store the Distinguished Names (DNs) of members, so your filter will need to know the DN for the user based only on the submitted username.
+Multiple DN templates can be searched by combining filters with the LDAP OR-operator. Examples:
+
+```bash
+group_search_filter = "(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])"
+group_search_filter = "(|(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])(member:1.2.840.113556.1.4.1941:=CN=%s,[another user container/OU]))"
+group_search_filter_user_attribute = "cn"
+```
+For more information on AD searches see [Microsoft's Search Filter Syntax](https://docs.microsoft.com/en-us/windows/desktop/adsi/search-filter-syntax) documentation.
+
+For troubleshooting, by changing `member_of` in `[servers.attributes]` to "dn" it will show you more accurate group memberships when [debug is enabled](#troubleshooting).
+
+## Configuration examples
+
+### OpenLDAP
+
+[OpenLDAP](http://www.openldap.org/) is an open source directory service.
+
+**LDAP specific configuration file (ldap.toml):**
+```bash
+[[servers]]
+host = "127.0.0.1"
+port = 389
+use_ssl = false
+start_tls = false
+ssl_skip_verify = false
+bind_dn = "cn=admin,dc=grafana,dc=org"
+bind_password = 'grafana'
+search_filter = "(cn=%s)"
+search_base_dns = ["dc=grafana,dc=org"]
+
+[servers.attributes]
+name = "givenName"
+surname = "sn"
+username = "cn"
+member_of = "memberOf"
+email = "email"
+
+# [[servers.group_mappings]] omitted for clarity
+```
+
+### Active Directory
+
+[Active Directory](https://technet.microsoft.com/en-us/library/hh831484(v=ws.11).aspx) is a directory service which is commonly used in Windows environments.
+
+Assuming the following Active Directory server setup:
+
+* IP address: `10.0.0.1`
+* Domain: `CORP`
+* DNS name: `corp.local`
+
+**LDAP specific configuration file (ldap.toml):**
+```bash
+[[servers]]
+host = "10.0.0.1"
+port = 3269
+use_ssl = true
+start_tls = false
+ssl_skip_verify = true
+bind_dn = "CORP\\%s"
+search_filter = "(sAMAccountName=%s)"
+search_base_dns = ["dc=corp,dc=local"]
+
+[servers.attributes]
+name = "givenName"
+surname = "sn"
+username = "sAMAccountName"
+member_of = "memberOf"
+email = "mail"
+
+# [[servers.group_mappings]] omitted for clarity
+```
+
+#### Port requirements
+
+In the above example, SSL is enabled and an encrypted port has been configured. If your Active Directory doesn't support SSL, please change `use_ssl = false` and `port = 389`.
+Please inspect your Active Directory configuration and documentation to find the correct settings. For more information about Active Directory and port requirements see [link](https://technet.microsoft.com/en-us/library/dd772723(v=ws.10)).
+
+## Troubleshooting
+
+To troubleshoot and get more log info enable ldap debug logging in the [main config file]({{< relref "installation/configuration.md" >}}).
+
+```bash
+[log]
+filters = ldap:debug
+```
diff --git a/docs/sources/auth/overview.md b/docs/sources/auth/overview.md
index 3a38ed83988..a372600ac46 100644
--- a/docs/sources/auth/overview.md
+++ b/docs/sources/auth/overview.md
@@ -32,11 +32,11 @@ permissions and org memberships.
## Grafana Auth
-Grafana of course has a built in user authentication system with password authenticaten enabled by default. You can
+Grafana of course has a built in user authentication system with password authentication enabled by default. You can
disable authentication by enabling anonymous access. You can also hide login form and only allow login through an auth
provider (listed above). There is also options for allowing self sign up.
-### Anonymous authenticaten
+### Anonymous authentication
You can make Grafana accessible without any login required by enabling anonymous access in the configuration file.
@@ -58,7 +58,7 @@ If you change your organization name in the Grafana UI this setting needs to be
### Basic authentication
Basic auth is enabled by default and works with the built in Grafana user password authentication system and LDAP
-authenticaten integration.
+authentication integration.
To disable basic auth:
@@ -84,4 +84,3 @@ Set to the option detailed below to true to hide sign-out menu link. Useful if y
[auth]
disable_signout_menu = true
```
-
diff --git a/docs/sources/contribute/cla.md b/docs/sources/contribute/cla.md
index ffb2aaef1b9..a073a9a4eae 100644
--- a/docs/sources/contribute/cla.md
+++ b/docs/sources/contribute/cla.md
@@ -101,4 +101,4 @@ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU [OR US]
-This CLA agreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/)
+This CLA agreement is based on the [Harmony Contributor Agreement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/)
diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
index 6bfcfd807f1..4a3478b161a 100644
--- a/docs/sources/features/datasources/mssql.md
+++ b/docs/sources/features/datasources/mssql.md
@@ -32,6 +32,9 @@ Name | Description
*Database* | Name of your MSSQL database.
*User* | Database user's login/username
*Password* | Database user's password
+*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+).
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+).
+*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours (Grafana v5.4+).
### Min time interval
@@ -174,6 +177,8 @@ The resulting table panel:
If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric.
If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
+Resultsets of time series queries need to be sorted by time.
+
**Example database table:**
```sql
@@ -223,7 +228,7 @@ When above query are used in a graph panel the result will be two series named `
{{< docs-imagebox img="/img/docs/v51/mssql_time_series_two.png" class="docs-image--no-shadow docs-image--right" >}}
-**Example with multiple `value` culumns:**
+**Example with multiple `value` columns:**
```sql
SELECT
@@ -583,6 +588,10 @@ datasources:
url: localhost:1433
database: grafana
user: grafana
+ jsonData:
+ maxOpenConns: 0 # Grafana v5.4+
+ maxIdleConns: 2 # Grafana v5.4+
+ connMaxLifetime: 14400 # Grafana v5.4+
secureJsonData:
password: "Password!"
diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
index e13abcf80a2..988f632bff3 100644
--- a/docs/sources/features/datasources/mysql.md
+++ b/docs/sources/features/datasources/mysql.md
@@ -35,6 +35,9 @@ Name | Description
*Database* | Name of your MySQL database.
*User* | Database user's login/username
*Password* | Database user's password
+*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+).
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+).
+*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours. This should always be lower than configured [wait_timeout](https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_wait_timeout) in MySQL (Grafana v5.4+).
### Min time interval
@@ -59,7 +62,7 @@ Identifier | Description
The database user you specify when you add the data source should only be granted SELECT permissions on
the specified database & tables you want to query. Grafana does not validate that the query is safe. The query
could include any SQL statement. For example, statements like `USE otherdb;` and `DROP TABLE user;` would be
-executed. To protect against this we **Highly** recommmend you create a specific mysql user with restricted permissions.
+executed. To protect against this we **Highly** recommend you create a specific mysql user with restricted permissions.
Example:
@@ -129,6 +132,8 @@ Any column except `time` and `metric` is treated as a value column.
You may return a column named `metric` that is used as metric name for the value column.
If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
+Resultsets of time series queries need to be sorted by time.
+
**Example with `metric` column:**
```sql
@@ -314,4 +319,8 @@ datasources:
database: grafana
user: grafana
password: password
+ jsonData:
+ maxOpenConns: 0 # Grafana v5.4+
+ maxIdleConns: 2 # Grafana v5.4+
+ connMaxLifetime: 14400 # Grafana v5.4+
```
diff --git a/docs/sources/features/datasources/opentsdb.md b/docs/sources/features/datasources/opentsdb.md
index 1f6f022a18c..d2cd0b1dc0e 100644
--- a/docs/sources/features/datasources/opentsdb.md
+++ b/docs/sources/features/datasources/opentsdb.md
@@ -84,7 +84,7 @@ Some examples are mentioned below to make nested template queries work successfu
Query | Description
------------ | -------------
*tag_values(cpu, hostname, env=$env)* | Return tag values for cpu metric, selected env tag value and tag key hostname
-*tag_values(cpu, hostanme, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname
+*tag_values(cpu, hostname, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname
For details on OpenTSDB metric queries checkout the official [OpenTSDB documentation](http://opentsdb.net/docs/build/html/index.html)
diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md
index 013d6342634..52f8804f27f 100644
--- a/docs/sources/features/datasources/postgres.md
+++ b/docs/sources/features/datasources/postgres.md
@@ -16,7 +16,7 @@ Grafana ships with a built-in PostgreSQL data source plugin that allows you to q
## Adding the data source
1. Open the side menu by clicking the Grafana icon in the top header.
-2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`.
+2. In the side menu under the `Configuration` icon you should find a link named `Data Sources`.
3. Click the `+ Add data source` button in the top header.
4. Select *PostgreSQL* from the *Type* dropdown.
@@ -31,6 +31,9 @@ Name | Description
*User* | Database user's login/username
*Password* | Database user's password
*SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
+*Max open* | The maximum number of open connections to the database, default `unlimited` (Grafana v5.4+).
+*Max idle* | The maximum number of connections in the idle connection pool, default `2` (Grafana v5.4+).
+*Max lifetime* | The maximum amount of time in seconds a connection may be reused, default `14400`/4 hours (Grafana v5.4+).
*Version* | This option determines which functions are available in the query builder (only available in Grafana 5.3+).
*TimescaleDB* | TimescaleDB is a time-series database built as a PostgreSQL extension. If enabled, Grafana will use `time_bucket` in the `$__timeGroup` macro and display TimescaleDB specific aggregate functions in the query builder (only available in Grafana 5.3+).
@@ -57,7 +60,7 @@ Identifier | Description
The database user you specify when you add the data source should only be granted SELECT permissions on
the specified database & tables you want to query. Grafana does not validate that the query is safe. The query
could include any SQL statement. For example, statements like `DELETE FROM user;` and `DROP TABLE user;` would be
-executed. To protect against this we **Highly** recommmend you create a specific postgresql user with restricted permissions.
+executed. To protect against this we **highly** recommend you create a specific PostgreSQL user with restricted permissions.
Example:
@@ -69,9 +72,72 @@ Example:
Make sure the user does not get any unwanted privileges from the public role.
+## Query Editor
+
+> Only available in Grafana v5.3+.
+
+{{< docs-imagebox img="/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v53/postgres_query.gif" >}}
+
+You find the PostgreSQL query editor in the metrics tab in Graph or Singlestat panel's edit mode. You enter edit mode by clicking the
+panel title, then edit.
+
+The query editor has a link named `Generated SQL` that shows up after a query has been executed, while in panel edit mode. Click on it and it will expand and show the raw interpolated SQL string that was executed.
+
+### Select table, time column and metric column (FROM)
+
+When you enter edit mode for the first time or add a new query Grafana will try to prefill the query builder with the first table that has a timestamp column and a numeric column.
+
+In the FROM field, Grafana will suggest tables that are in the `search_path` of the database user. To select a table or view not in your `search_path`
+you can manually enter a fully qualified name (schema.table) like `public.metrics`.
+
+The Time column field refers to the name of the column holding your time values. Selecting a value for the Metric column field is optional. If a value is selected, the Metric column field will be used as the series name.
+
+The metric column suggestions will only contain columns with a text datatype (char,varchar,text).
+If you want to use a column with a different datatype as metric column you may enter the column name with a cast: `ip::text`.
+You may also enter arbitrary SQL expressions in the metric column field that evaluate to a text datatype like
+`hostname || ' ' || container_name`.
+
+### Columns, Window and Aggregation functions (SELECT)
+
+In the `SELECT` row you can specify what columns and functions you want to use.
+In the column field you may write arbitrary expressions instead of a column name like `column1 * column2 / column3`.
+
+The available functions in the query editor depend on the PostgreSQL version you selected when configuring the datasource.
+If you use aggregate functions you need to group your resultset. The editor will automatically add a `GROUP BY time` if you add an aggregate function.
+
+The editor tries to simplify and unify this part of the query. For example:
+
+
+The above will generate the following PostgreSQL `SELECT` clause:
+
+```sql
+avg(tx_bytes) OVER (ORDER BY "time" ROWS 5 PRECEDING) AS "tx_bytes"
+```
+
+You may add further value columns by clicking the plus button and selecting `Column` from the menu. Multiple value columns will be plotted as separate series in the graph panel.
+
+### Filter data (WHERE)
+To add a filter click the plus icon to the right of the `WHERE` condition. You can remove filters by clicking on
+the filter and selecting `Remove`. A filter for the current selected timerange is automatically added to new queries.
+
+### Group By
+To group by time or any other columns click the plus icon at the end of the GROUP BY row. The suggestion dropdown will only show text columns of your currently selected table but you may manually enter any column.
+You can remove the group by clicking on the item and then selecting `Remove`.
+
+If you add any grouping, all selected columns need to have an aggregate function applied. The query builder will automatically add aggregate functions to all columns without aggregate functions when you add groupings.
+
+#### Gap Filling
+
+Grafana can fill in missing values when you group by time. The time function accepts two arguments. The first argument is the time window that you would like to group by, and the second argument is the value you want Grafana to fill missing items with.
+
+### Text Editor Mode (RAW)
+You can switch to the raw query editor mode by clicking the hamburger icon and selecting `Switch editor mode` or by clicking `Edit SQL` below the query.
+
+> If you use the raw query editor, be sure your query at minimum has `ORDER BY time` and a filter on the returned time range.
+
## Macros
-To simplify syntax and to allow for dynamic parts, like date range filters, the query can contain macros.
+Macros can be used within a query to simplify syntax and allow for dynamic parts.
Macro example | Description
------------ | -------------
@@ -80,21 +146,19 @@ Macro example | Description
*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
-*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300*
-*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
+*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in a GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300*
+*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by Grafana and 0 will be used as the value.
*$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.
-*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value if no value has been seen yet NULL will be used (only available in Grafana 5.3+).
-*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identical to $__timeGroup but with an added column alias (only available in Grafana 5.3+).
-*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
+*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value. If no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
+*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced with an expression identical to $__timeGroup, but with an added column alias (only available in Grafana 5.3+).
+*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamps. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
-*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
-*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
+*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup, but for times stored as unix timestamp (only available in Grafana 5.3+).
+*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above, but also adds a column alias (only available in Grafana 5.3+).
We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
-The query editor has a link named `Generated SQL` that shows up after a query as been executed, while in panel edit mode. Click on it and it will expand and show the raw interpolated SQL string that was executed.
-
## Table queries
If the `Format as` query option is set to `Table` then you can basically do any type of SQL query. The table panel will automatically show the results of whatever columns & rows your query returns.
@@ -124,11 +188,13 @@ The resulting table panel:
## Time series queries
-If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
-Any column except `time` and `metric` is treated as a value column.
+If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a SQL datetime or any numeric datatype representing unix epoch.
+Any column except `time` and `metric` is treated as a value column.
You may return a column named `metric` that is used as metric name for the value column.
If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
+Result sets of time series queries need to be sorted by time.
+
**Example with `metric` column:**
```sql
@@ -204,7 +270,7 @@ Another option is a query that can create a key/value variable. The query should
SELECT hostname AS __text, id AS __value FROM host
```
-You can also create nested variables. For example if you had another variable named `region`. Then you could have
+You can also create nested variables. Using a variable named `region`, you could have
the hosts variable only show hosts from the current selected region with a query like this (if `region` is a multi-value variable then use the `IN` comparison operator rather than `=` to match against multiple values):
```sql
@@ -213,7 +279,7 @@ SELECT hostname FROM host WHERE region IN($region)
### Using Variables in Queries
-From Grafana 4.3.0 to 4.6.0, template variables are always quoted automatically so if it is a string value do not wrap them in quotes in where clauses.
+From Grafana 4.3.0 to 4.6.0, template variables are always quoted automatically. If your template variables are strings, do not wrap them in quotes in where clauses.
From Grafana 4.7.0, template variable values are only quoted when the template variable is a `multi-value`.
@@ -245,7 +311,7 @@ ORDER BY atimestamp ASC
#### Disabling Quoting for Multi-value Variables
-Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. Do disable quoting, use the csv formatting option for variables:
+Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. To disable quoting, use the csv formatting option for variables:
`${servers:csv}`
@@ -289,7 +355,7 @@ tags | Optional field name to use for event tags as a comma separated string.
## Alerting
-Time series queries should work in alerting conditions. Table formatted queries is not yet supported in alert rule
+Time series queries should work in alerting conditions. Table formatted queries are not yet supported in alert rule
conditions.
## Configure the Datasource with Provisioning
@@ -311,6 +377,9 @@ datasources:
password: "Password!"
jsonData:
sslmode: "disable" # disable/require/verify-ca/verify-full
+ maxOpenConns: 0 # Grafana v5.4+
+ maxIdleConns: 2 # Grafana v5.4+
+ connMaxLifetime: 14400 # Grafana v5.4+
postgresVersion: 903 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10
timescaledb: false
```
diff --git a/docs/sources/features/datasources/stackdriver.md b/docs/sources/features/datasources/stackdriver.md
new file mode 100644
index 00000000000..cea86e96faf
--- /dev/null
+++ b/docs/sources/features/datasources/stackdriver.md
@@ -0,0 +1,216 @@
++++
+title = "Using Stackdriver in Grafana"
+description = "Guide for using Stackdriver in Grafana"
+keywords = ["grafana", "stackdriver", "google", "guide"]
+type = "docs"
+aliases = ["/datasources/stackdriver"]
+[menu.docs]
+name = "Stackdriver"
+parent = "datasources"
+weight = 11
++++
+
+# Using Google Stackdriver in Grafana
+
+> Only available in Grafana v5.3+.
+> The datasource is currently a beta feature and is subject to change.
+
+Grafana ships with built-in support for Google Stackdriver. Just add it as a datasource and you are ready to build dashboards for your Stackdriver metrics.
+
+## Adding the data source to Grafana
+
+1. Open the side menu by clicking the Grafana icon in the top header.
+2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`.
+3. Click the `+ Add data source` button in the top header.
+4. Select `Stackdriver` from the _Type_ dropdown.
+5. Upload or paste in the Service Account Key file. See below for steps on how to create a Service Account Key file.
+
+> NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization.
+
+| Name | Description |
+| --------------------- | ----------------------------------------------------------------------------------- |
+| _Name_ | The datasource name. This is how you refer to the datasource in panels & queries. |
+| _Default_ | Default datasource means that it will be pre-selected for new panels. |
+| _Service Account Key_ | Service Account Key File for a GCP Project. Instructions below on how to create it. |
+
+## Authentication
+
+### Service Account Credentials - Private Key File
+
+To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to visualize data from multiple GCP Projects then you need to create one datasource per GCP Project.
+
+#### Enable APIs
+
+The following APIs need to be enabled first:
+
+* [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com)
+* [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com)
+
+Click on the links above and click the `Enable` button:
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver_enable_api.png" class="docs-image--no-shadow" caption="Enable GCP APIs" >}}
+
+#### Create a GCP Service Account for a Project
+
+1. Navigate to the [APIs & Services Credentials page](https://console.cloud.google.com/apis/credentials).
+2. Click on the `Create credentials` dropdown/button and choose the `Service account key` option.
+
+ {{< docs-imagebox img="/img/docs/v53/stackdriver_create_service_account_button.png" class="docs-image--no-shadow" caption="Create service account button" >}}
+
+3. On the `Create service account key` page, choose key type `JSON`. Then in the `Service Account` dropdown, choose the `New service account` option:
+
+ {{< docs-imagebox img="/img/docs/v53/stackdriver_create_service_account_key.png" class="docs-image--no-shadow" caption="Create service account key" >}}
+
+4. Some new fields will appear. Fill in a name for the service account in the `Service account name` field and then choose the `Monitoring Viewer` role from the `Role` dropdown:
+
+ {{< docs-imagebox img="/img/docs/v53/stackdriver_service_account_choose_role.png" class="docs-image--no-shadow" caption="Choose role" >}}
+
+5. Click the Create button. A JSON key file will be created and downloaded to your computer. Store this file in a secure place as it allows access to your Stackdriver data.
+6. Upload it to Grafana on the datasource Configuration page. You can either upload the file or paste in the contents of the file.
+
+ {{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_upload_key.png" class="docs-image--no-shadow" caption="Upload service key file to Grafana" >}}
+
+7. The file contents will be encrypted and saved in the Grafana database. Don't forget to save after uploading the file!
+
+ {{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_key_uploaded.png" class="docs-image--no-shadow" caption="Service key file is uploaded to Grafana" >}}
+
+## Metric Query Editor
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver_query_editor.png" max-width= "400px" class="docs-image--right" >}}
+
+The Stackdriver query editor allows you to select metrics, group/aggregate by labels and by time, and use filters to specify which time series you want in the results.
+
+Begin by choosing a `Service` and then a metric from the `Metric` dropdown. Use the plus and minus icons in the filter and group by sections to add/remove filters or group by clauses.
+
+Stackdriver metrics can be of different kinds (GAUGE, DELTA, CUMULATIVE) and these kinds have support for different aggregation options (reducers and aligners). The Grafana query editor shows the list of available aggregation methods for a selected metric and sets a default reducer and aligner when you select the metric. Units for the Y-axis are also automatically selected by the query editor.
+
+### Filter
+
+To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1`. You can remove the filter by clicking on the filter name and selecting `--remove filter--`.
+
+#### Simple wildcards
+
+When the operator is set to `=` or `!=` it is possible to add wildcards to the filter value field. E.g. `us-*` will capture all values that start with "us-" and `*central-a` will capture all values that end with "central-a". `*-central-*` captures all values that contain the substring -central-. Simple wildcards are less expensive than regular expressions.
+
+#### Regular expressions
+
+When the operator is set to `=~` or `!=~` it is possible to add regular expressions to the filter value field. E.g. `us-central[1-3]-[af]` would match all values that start with "us-central", are followed by a number in the range of 1 to 3, a dash and then either an "a" or an "f". Leading and trailing slashes are not needed when creating regular expressions.
+
+### Aggregation
+
+The aggregation field lets you combine time series based on common statistics. Read more about this option [here](https://cloud.google.com/monitoring/charts/metrics-selector#aggregation-options).
+
+The `Aligner` field allows you to align multiple time series after the same group by time interval. Read more about how it works [here](https://cloud.google.com/monitoring/charts/metrics-selector#alignment).
+
+#### Alignment Period/Group by Time
+
+The `Alignment Period` groups a metric by time if an aggregation is chosen. The default is to use the GCP Stackdriver default groupings (which allows you to compare graphs in Grafana with graphs in the Stackdriver UI).
+The option is called `Stackdriver auto` and the defaults are:
+
+* 1m for time ranges < 23 hours
+* 5m for time ranges >= 23 hours and < 6 days
+* 1h for time ranges >= 6 days
+
+The other automatic option is `Grafana auto`. This will automatically set the group by time depending on the time range chosen and the width of the graph panel. Read more about the details [here](http://docs.grafana.org/reference/templating/#the-interval-variable).
+
+It is also possible to choose fixed time intervals to group by, like `1h` or `1d`.
+
+### Group By
+
+Group by resource or metric labels to reduce the number of time series and to aggregate the results by a group by. E.g. Group by instance_name to see an aggregated metric for a Compute instance.
+
+### Alias Patterns
+
+The Alias By field allows you to control the format of the legend keys. The default is to show the metric name and labels. This can be long and hard to read. Using the following patterns in the alias field, you can format the legend key the way you want it.
+
+#### Metric Type Patterns
+
+| Alias Pattern | Description | Example Result |
+| -------------------- | ---------------------------- | ------------------------------------------------- |
+| `{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization` |
+| `{{metric.name}}` | returns the metric name part | `instance/cpu/utilization` |
+| `{{metric.service}}` | returns the service part | `compute` |
+
+#### Label Patterns
+
+In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns.
+
+| Alias Pattern Format | Description | Alias Pattern Example | Example Result |
+| ------------------------ | -------------------------------- | -------------------------------- | ---------------- |
+| `{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod` |
+| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b` |
+
+Example Alias By: `{{metric.type}} - {{metric.label.instance_name}}`
+
+Example Result: `compute.googleapis.com/instance/cpu/usage_time - server1-prod`
+
+## Templating
+
+Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place.
+Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data
+being displayed in your dashboard.
+
+Check out the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different
+types of template variables.
+
+### Query Variable
+
+Writing variable queries is not supported yet.
+
+### Using variables in queries
+
+There are two syntaxes:
+
+* `$` Example: `metric.label.$metric_label`
+* `[[varname]]` Example: `metric.label.[[metric_label]]`
+
+Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the _Multi-value_ or _Include all value_ options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`.
+
+## Annotations
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver_annotations_query_editor.png" max-width= "400px" class="docs-image--right" >}}
+
+[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation
+queries via the Dashboard menu / Annotations view. Annotation rendering is expensive so it is important to limit the number of rows returned. There is no support for showing Stackdriver annotations and events yet but it works well with [custom metrics](https://cloud.google.com/monitoring/custom-metrics/) in Stackdriver.
+
+With the query editor for annotations, you can select a metric and filters. The `Title` and `Text` fields support templating and can use data returned from the query. For example, the Title field could have the following text:
+
+`{{metric.type}} has value: {{metric.value}}`
+
+Example Result: `monitoring.googleapis.com/uptime_check/http_status has value: 502`
+
+### Patterns for the Annotation Query Editor
+
+| Alias Pattern Format | Description | Alias Pattern Example | Example Result |
+| ------------------------ | -------------------------------- | -------------------------------- | ------------------------------------------------- |
+| `{{metric.value}}` | value of the metric/point | `{{metric.value}}` | `555` |
+| `{{metric.type}}` | returns the full Metric Type | `{{metric.type}}` | `compute.googleapis.com/instance/cpu/utilization` |
+| `{{metric.name}}` | returns the metric name part | `{{metric.name}}` | `instance/cpu/utilization` |
+| `{{metric.service}}` | returns the service part | `{{metric.service}}` | `compute` |
+| `{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod` |
+| `{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b` |
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources)
+
+Here is a provisioning example for this datasource.
+
+```yaml
+apiVersion: 1
+
+datasources:
+ - name: Stackdriver
+ type: stackdriver
+ access: proxy
+ jsonData:
+ tokenUri: https://oauth2.googleapis.com/token
+ clientEmail: stackdriver@myproject.iam.gserviceaccount.com
+ secureJsonData:
+ privateKey: |
+ -----BEGIN PRIVATE KEY-----
+ POSEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCb1u1Srw8ICYHS
+ ...
+ yA+23427282348234=
+ -----END PRIVATE KEY-----
+```
diff --git a/docs/sources/features/panels/alertlist.md b/docs/sources/features/panels/alertlist.md
index 58aa2c0966a..a1ea8f0f600 100644
--- a/docs/sources/features/panels/alertlist.md
+++ b/docs/sources/features/panels/alertlist.md
@@ -22,6 +22,6 @@ The alert list panel allows you to display your dashboards alerts. The list can
1. **Show**: Lets you choose between current state or recent state changes.
2. **Max Items**: Max items set the maximum of items in a list.
-3. **Sort Order**: Lets you sort your list alphabeticaly(asc/desc) or by importance.
+3. **Sort Order**: Lets you sort your list alphabetically(asc/desc) or by importance.
4. **Alerts From** This Dashboard`: Shows alerts only from the dashboard the alert list is in.
5. **State Filter**: Here you can filter your list by one or more parameters.
diff --git a/docs/sources/features/panels/heatmap.md b/docs/sources/features/panels/heatmap.md
index 56ffe29f20f..aa87fbef1df 100644
--- a/docs/sources/features/panels/heatmap.md
+++ b/docs/sources/features/panels/heatmap.md
@@ -80,7 +80,7 @@ the upper or lower bound of the interval.
There are a number of datasources supporting histogram over time like Elasticsearch (by using a Histogram bucket
aggregation) or Prometheus (with [histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) metric type
and *Format as* option set to Heatmap). But generally, any datasource could be used if it meets the requirements:
-returns series with names representing bucket bound or returns sereis sorted by the bound in ascending order.
+returns series with names representing bucket bound or returns series sorted by the bound in ascending order.
With Elasticsearch you control the size of the buckets using the Histogram interval (Y-Axis) and the Date Histogram interval (X-axis).
diff --git a/docs/sources/guides/getting_started.md b/docs/sources/guides/getting_started.md
index a27c6ca4c99..27957990265 100644
--- a/docs/sources/guides/getting_started.md
+++ b/docs/sources/guides/getting_started.md
@@ -69,7 +69,7 @@ The image above shows you the top header for a Dashboard.
## Dashboards, Panels, the building blocks of Grafana...
-Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a grid. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently InfluxDB, Graphite, OpenTSDB, Prometheus and Cloudwatch). The [Basic Concepts](/guides/basic_concepts) guide explores these key ideas in detail.
+Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a grid. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently Graphite, Prometheus, Elasticsearch, InfluxDB, OpenTSDB, MySQL, PostgreSQL, Microsoft SQL Server and AWS Cloudwatch). The [Basic Concepts](/guides/basic_concepts) guide explores these key ideas in detail.
diff --git a/docs/sources/guides/whats-new-in-v2-5.md b/docs/sources/guides/whats-new-in-v2-5.md
index 90270ea1121..08d51ba5bd7 100644
--- a/docs/sources/guides/whats-new-in-v2-5.md
+++ b/docs/sources/guides/whats-new-in-v2-5.md
@@ -25,7 +25,7 @@ correctly in UTC mode.
This release brings a fully featured query editor for Elasticsearch. You will now be able to visualize
-logs or any kind of data stored in Elasticserarch. The query editor allows you to build both simple
+logs or any kind of data stored in Elasticsearch. The query editor allows you to build both simple
and complex queries for logs or metrics.
- Compute metrics from your documents, supported Elasticsearch aggregations:
diff --git a/docs/sources/guides/whats-new-in-v2.md b/docs/sources/guides/whats-new-in-v2.md
index 499849c8d83..28d068b1cd6 100644
--- a/docs/sources/guides/whats-new-in-v2.md
+++ b/docs/sources/guides/whats-new-in-v2.md
@@ -34,7 +34,7 @@ Organizations via a role. That role can be:
There are currently no permissions on individual dashboards.
-Read more about Grafanas new user model on the [Admin section](../reference/admin/)
+Read more about Grafana's new user model on the [Admin section](../reference/admin/)
## Dashboard Snapshot sharing
diff --git a/docs/sources/guides/whats-new-in-v3-1.md b/docs/sources/guides/whats-new-in-v3-1.md
index 1e8ef87297b..ab6c5281275 100644
--- a/docs/sources/guides/whats-new-in-v3-1.md
+++ b/docs/sources/guides/whats-new-in-v3-1.md
@@ -21,7 +21,7 @@ The export feature is now accessed from the share menu.
Dashboards exported from Grafana 3.1 are now more portable and easier for others to import than before.
The export process extracts information data source types used by panels and adds these to a new `inputs`
section in the dashboard json. So when you or another person tries to import the dashboard they will be asked to
-select data source and optional metrix prefix options.
+select data source and optional metric prefix options.
@@ -53,7 +53,7 @@ Grafana url to share with a colleague without having to use the Share modal.
## Internal metrics
-Do you want metrics about viewing metrics? Ofc you do! In this release we added support for sending metrics about Grafana to graphite.
+Do you want metrics about viewing metrics? Of course you do! In this release we added support for sending metrics about Grafana to graphite.
You can configure interval and server in the config file.
## Logging
diff --git a/docs/sources/guides/whats-new-in-v3.md b/docs/sources/guides/whats-new-in-v3.md
index d82a833ec90..dbd9b685a2b 100644
--- a/docs/sources/guides/whats-new-in-v3.md
+++ b/docs/sources/guides/whats-new-in-v3.md
@@ -197,7 +197,7 @@ you can install it manually from [Grafana.com](https://grafana.com)
## Plugin showcase
Discovering and installing plugins is very quick and easy with Grafana 3.0 and [Grafana.com](https://grafana.com). Here
-are a couple that I incurage you try!
+are a couple that I encourage you try!
#### [Clock Panel](https://grafana.com/plugins/grafana-clock-panel)
Support's both current time and count down mode.
diff --git a/docs/sources/guides/whats-new-in-v4-2.md b/docs/sources/guides/whats-new-in-v4-2.md
index e976ed24700..7a00023172a 100644
--- a/docs/sources/guides/whats-new-in-v4-2.md
+++ b/docs/sources/guides/whats-new-in-v4-2.md
@@ -45,7 +45,7 @@ We might add more global built in variables in the future and if we do we will p
### Dedupe alert notifications when running multiple servers
-In this release we will dedupe alert notificiations when you are running multiple servers.
+In this release we will dedupe alert notifications when you are running multiple servers.
This makes it possible to run alerting on multiple servers and only get one notification.
We currently solve this with sql transactions which puts some limitations for how many servers you can use to execute the same rules.
@@ -67,7 +67,7 @@ Making it possible to have users in multiple groups and have detailed access con
## Upgrade & Breaking changes
-If your using https in grafana we now force you to use tls 1.2 and the most secure ciphers.
+If you're using https in grafana we now force you to use tls 1.2 and the most secure ciphers.
We think its better to be secure by default rather then making it configurable.
If you want to run https with lower versions of tls we suggest you put a reserve proxy in front of grafana.
diff --git a/docs/sources/guides/whats-new-in-v4-5.md b/docs/sources/guides/whats-new-in-v4-5.md
index a5cd3ca982d..c6cfcf64720 100644
--- a/docs/sources/guides/whats-new-in-v4-5.md
+++ b/docs/sources/guides/whats-new-in-v4-5.md
@@ -45,7 +45,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins
### Enhancements
* **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
-* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboad time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
+* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
* **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
- **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).
@@ -53,7 +53,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins
* **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit.
-This option is now rennamed (and moved to Options sub section above your queries):
+This option is now renamed (and moved to Options sub section above your queries):

Datas source selection & options & help are now above your metric queries.
diff --git a/docs/sources/guides/whats-new-in-v4-6.md b/docs/sources/guides/whats-new-in-v4-6.md
index ee0c4ea7a04..91fa74084a8 100644
--- a/docs/sources/guides/whats-new-in-v4-6.md
+++ b/docs/sources/guides/whats-new-in-v4-6.md
@@ -61,7 +61,7 @@ This makes exploring and filtering Prometheus data much easier.
### Minor Changes
* **SMTP**: Make it possible to set specific EHLO for smtp client. [#9319](https://github.com/grafana/grafana/issues/9319)
-* **Dataproxy**: Allow grafan to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250)
+* **Dataproxy**: Allow Grafana to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250)
* **HTTP**: set net.Dialer.DualStack to true for all http clients [#9367](https://github.com/grafana/grafana/pull/9367)
* **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739)
* **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8)
diff --git a/docs/sources/guides/whats-new-in-v5-3.md b/docs/sources/guides/whats-new-in-v5-3.md
index 4a2674c9b39..5dcadc0813d 100644
--- a/docs/sources/guides/whats-new-in-v5-3.md
+++ b/docs/sources/guides/whats-new-in-v5-3.md
@@ -12,6 +12,80 @@ weight = -9
# What's New in Grafana v5.3
+Grafana v5.3 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements.
+
+- [Google Stackdriver]({{< relref "#google-stackdriver" >}}) as a core datasource!
+- [TV mode]({{< relref "#tv-and-kiosk-mode" >}}) is improved and more accessible
+- [Alerting]({{< relref "#notification-reminders" >}}) with notification reminders
+- [Postgres]({{< relref "#postgres-query-builder" >}}) gets a new query builder!
+- [OAuth]({{< relref "#improved-oauth-support-for-gitlab" >}}) support for Gitlab is improved
+- [Annotations]({{< relref "#annotations" >}}) with template variable filtering
+- [Variables]({{< relref "#variables" >}}) with free text support
+
+## Google Stackdriver
+
+{{< docs-imagebox img="/img/docs/v53/stackdriver-with-heatmap.png" max-width= "600px" class="docs-image--no-shadow docs-image--right" >}}
+
+Grafana v5.3 ships with built-in support for [Google Stackdriver](https://cloud.google.com/stackdriver/) and enables you to visualize your Stackdriver metrics in Grafana.
+
+Getting started with the plugin is easy. Simply create a GCE Service account that has access to the Stackdriver API scope, download the Service Account key file from Google and upload it on the Stackdriver datasource config page in Grafana and you should have a secure server-to-server authentication setup. Like other core plugins, Stackdriver has built-in support for alerting. It also comes with support for heatmaps and basic variables.
+
+If you're already accustomed to the Stackdriver Metrics Explorer UI, you'll notice that there are a lot of similarities to the query editor in Grafana. It is possible to add filters using wildcards and regular expressions. You can do Group By, Primary Aggregation and Alignment.
+
+Alias By allows you to format the legend the way you want, and it's a feature that is not yet present in the Metrics Explorer. Two other features that are only supported in the Grafana plugin are the abilities to manually set the Alignment Period in the query editor and to add Annotations queries.
+
+The Grafana Stackdriver plugin comes with support for automatic unit detection. Grafana will try to map the Stackdriver unit type to a corresponding unit type in Grafana, and if successful the panel Y-axes will be updated accordingly to display the correct unit of measure. This is the first core plugin to provide support for unit detection, and it is our intention to provide support for this in other core plugins in the near future.
+
+The datasource is still in the `beta` phase, meaning it's currently in active development and is still missing one important feature - templating queries.
+Please try it out, but be aware that it might be subject to changes and possible bugs. We would love to hear your feedback.
+
+Please read [Using Google Stackdriver in Grafana](/features/datasources/stackdriver/) for more detailed information on how to get started and use it.
+
+## TV and Kiosk Mode
+
+{{< docs-imagebox img="/img/docs/v53/tv_mode_still.png" max-width="600px" class="docs-image--no-shadow docs-image--right" animated-gif="/img/docs/v53/tv_mode.gif" >}}
+
+We've improved the TV & kiosk mode to make it easier to use. There's now an icon in the top bar that will let you cycle through the different view modes.
+
+1. In the first view mode, the sidebar and most of the buttons in the top bar will be hidden.
+2. In the second view mode, the top bar is completely hidden so that only the dashboard itself is shown.
+3. Hit the escape key to go back to the default view mode.
+
+When switching view modes, the url will be updated to reflect the view mode selected. This allows a dashboard to be opened with a
+certain view mode enabled. Additionally, this also enables [playlists](/reference/playlist) to be started with a certain view mode enabled.
+
+
+
+## Notification Reminders
+
+Do you use Grafana alerting and have some notifications that are more important than others? Then it's possible to set reminders so that you continue to be alerted until the problem is fixed. This is done on the notification channel itself and will affect all alerts that use that channel.
+For additional examples of why reminders might be useful for you, see [multiple series](/alerting/rules/#multiple-series).
+
+Learn how to enable and configure reminders [here](/alerting/notifications/#send-reminders).
+
+## Postgres Query Builder
+
+Grafana 5.3 comes with a new graphical query builder for Postgres. This brings Postgres integration more in line with some of the other datasources and makes it easier for both advanced users and beginners to work with timeseries in Postgres. Learn more about it in the [documentation](/features/datasources/postgres/#query-editor).
+
+{{< docs-imagebox img="/img/docs/v53/postgres_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v53/postgres_query.gif" >}}
+
+## Improved OAuth Support for Gitlab
+
+Grafana 5.3 comes with a new OAuth integration for Gitlab that enables configuration to only allow users that are a member of certain Gitlab groups to authenticate. This makes it possible to use Gitlab OAuth with Grafana in a shared environment without giving everyone access to Grafana.
+Learn how to enable and configure it in the [documentation](/auth/gitlab/).
+
+## Annotations
+
+Grafana 5.3 brings improved support for [native annotations](/reference/annotations/#native-annotations) and makes it possible to use template variables when filtering by tags.
+Learn more about it in the [documentation](/reference/annotations/#query-by-tag).
+
+{{< docs-imagebox img="/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}}
+
+## Variables
+
+Grafana 5.3 ships with a brand new variable type named `Text box` which makes it easier and more convenient to provide free text input to a variable.
+This new variable type will display as a free text input field with an optional prefilled default value.
+
## Changelog
Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md
index 032fd508dd0..103de190793 100644
--- a/docs/sources/http_api/alerting.md
+++ b/docs/sources/http_api/alerting.md
@@ -227,7 +227,7 @@ Content-Type: application/json
## Create alert notification
-You can find the full list of [supported notifers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page.
+You can find the full list of [supported notifiers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page.
`POST /api/alert-notifications`
diff --git a/docs/sources/http_api/dashboard_versions.md b/docs/sources/http_api/dashboard_versions.md
index 3d0ec27a3a3..0be22674997 100644
--- a/docs/sources/http_api/dashboard_versions.md
+++ b/docs/sources/http_api/dashboard_versions.md
@@ -291,7 +291,7 @@ Content-Type: text/html; charset=UTF-8
```
-The response is a textual respresentation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
+The response is a textual representation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
Status Codes:
diff --git a/docs/sources/index.md b/docs/sources/index.md
index da977b73e0c..e9a900d93f1 100644
--- a/docs/sources/index.md
+++ b/docs/sources/index.md
@@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]
Provisioning
A guide to help you automate your Grafana setup & configuration.
- }}" class="nav-cards__item nav-cards__item--guide">
- What's new in v5.2
- Article on all the new cool features and enhancements in v5.2
+ }}" class="nav-cards__item nav-cards__item--guide">
+ What's new in v5.3
+ Article on all the new cool features and enhancements in v5.3
}}" class="nav-cards__item nav-cards__item--guide">
Screencasts
@@ -88,9 +88,13 @@ aliases = ["v1.1", "guides/reference/admin"]
Prometheus
- }}" class="nav-cards__item nav-cards__item--ds">
-
- OpenTSDB
+ }}" class="nav-cards__item nav-cards__item--ds">
+
+ Google Stackdriver
+
+ }}" class="nav-cards__item nav-cards__item--ds">
+
+ Cloudwatch
}}" class="nav-cards__item nav-cards__item--ds">
@@ -100,8 +104,12 @@ aliases = ["v1.1", "guides/reference/admin"]
Postgres
- }}" class="nav-cards__item nav-cards__item--ds">
-
- Cloudwatch
+ }}" class="nav-cards__item nav-cards__item--ds">
+
+ Microsoft SQL Server
+
+ }}" class="nav-cards__item nav-cards__item--ds">
+
+ OpenTSDB
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 2bf4789257d..8d156e739bf 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -127,10 +127,13 @@ Another way is put a webserver like Nginx or Apache in front of Grafana and have
### protocol
-`http` or `https`
+`http`, `https` or `socket`
> **Note** Grafana versions earlier than 3.0 are vulnerable to [POODLE](https://en.wikipedia.org/wiki/POODLE). So we strongly recommend to upgrade to 3.x or use a reverse proxy for ssl termination.
+### socket
+Path where the socket should be created when `protocol=socket`. Please make sure that Grafana has appropriate permissions.
+
### domain
This setting is only used in as a part of the `root_url` setting (see below). Important if you
@@ -566,3 +569,11 @@ Default setting for new alert rules. Defaults to categorize error and timeouts a
> Available in 5.3 and above
Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
+
+### concurrent_render_limit
+
+> Available in 5.3 and above
+
+Alert notifications can include images, but rendering many images at the same time can overload the server.
+This limit will protect the server from render overloading and make sure notifications are sent out quickly. Default
+value is `5`.
diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md
index 13fa3440170..2ae2e9dc40d 100644
--- a/docs/sources/installation/debian.md
+++ b/docs/sources/installation/debian.md
@@ -100,6 +100,8 @@ This will start the `grafana-server` process as the `grafana` user,
which was created during the package installation. The default HTTP port
is `3000` and default user and group is `admin`.
+Default login and password `admin`/`admin`
+
To configure the Grafana server to start at boot time:
```bash
diff --git a/docs/sources/installation/mac.md b/docs/sources/installation/mac.md
index fbc00c01737..336e46c895d 100644
--- a/docs/sources/installation/mac.md
+++ b/docs/sources/installation/mac.md
@@ -60,6 +60,8 @@ Then start Grafana using:
brew services start grafana
```
+Default login and password `admin`/`admin`
+
### Configuration
diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md
index 24c301c5763..5bf3b7ed745 100644
--- a/docs/sources/installation/rpm.md
+++ b/docs/sources/installation/rpm.md
@@ -115,6 +115,8 @@ This will start the `grafana-server` process as the `grafana` user,
which is created during package installation. The default HTTP port is
`3000`, and default user and group is `admin`.
+Default login and password `admin`/`admin`
+
To configure the Grafana server to start at boot time:
```bash
diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md
index 572081a1c54..b17d625a76e 100644
--- a/docs/sources/installation/windows.md
+++ b/docs/sources/installation/windows.md
@@ -31,6 +31,9 @@ on windows. Edit `custom.ini` and uncomment the `http_port`
configuration option (`;` is the comment character in ini files) and change it to something like `8080` or similar.
That port should not require extra Windows privileges.
+Default login and password `admin`/`admin`
+
+
Start Grafana by executing `grafana-server.exe`, located in the `bin` directory, preferably from the
command line. If you want to run Grafana as windows service, download
[NSSM](https://nssm.cc/). It is very easy to add Grafana as a Windows
diff --git a/docs/sources/plugins/developing/development.md b/docs/sources/plugins/developing/development.md
index f2e70a50c6a..48410b06732 100644
--- a/docs/sources/plugins/developing/development.md
+++ b/docs/sources/plugins/developing/development.md
@@ -10,7 +10,7 @@ weight = 1
# Developer Guide
-You can extend Grafana by writing your own plugins and then share then with other users in [our plugin repository](https://grafana.com/plugins).
+You can extend Grafana by writing your own plugins and then share them with other users in [our plugin repository](https://grafana.com/plugins).
## Short version
@@ -33,7 +33,7 @@ There are two blog posts about authoring a plugin that might also be of interest
## What languages?
Since everything turns into javascript it's up to you to choose which language you want. That said it's probably a good idea to choose es6 or typescript since
-we use es6 classes in Grafana. So it's easier to get inspiration from the Grafana repo is you choose one of those languages.
+we use es6 classes in Grafana. So it's easier to get inspiration from the Grafana repo if you choose one of those languages.
## Buildscript
@@ -60,7 +60,6 @@ and [apps]({{< relref "apps.md" >}}) plugins in the documentation.
The Grafana SDK is quite small so far and can be found here:
- [SDK file in Grafana](https://github.com/grafana/grafana/blob/master/public/app/plugins/sdk.ts)
-- [SDK Readme](https://github.com/grafana/grafana/blob/master/public/app/plugins/plugin_api.md)
The SDK contains three different plugin classes: PanelCtrl, MetricsPanelCtrl and QueryCtrl. For plugins of the panel type, the module.js file should export one of these. There are some extra classes for [data sources]({{< relref "datasources.md" >}}).
diff --git a/docs/sources/reference/annotations.md b/docs/sources/reference/annotations.md
index bfc104ef522..8732c8c709b 100644
--- a/docs/sources/reference/annotations.md
+++ b/docs/sources/reference/annotations.md
@@ -45,8 +45,11 @@ can still show them if you add a new **Annotation Query** and filter by tags. Bu
### Query by tag
You can create new annotation queries that fetch annotations from the native annotation store via the `-- Grafana --` data source and by setting *Filter by* to `Tags`. Specify at least
-one tag. For example create an annotation query name `outages` and specify a tag named `outage`. This query will show all annotations you create (from any dashboard or via API) that
-have the `outage` tag.
+one tag. For example create an annotation query name `outages` and specify a tag named `outage`. This query will show all annotations you create (from any dashboard or via API) that have the `outage` tag. By default, if you add multiple tags in the annotation query, Grafana will only show annotations that have all the tags you supplied. You can invert the behavior by enabling `Match any` which means that Grafana will show annotations that contain at least one of the tags you supplied.
+
+In Grafana v5.3+ it's possible to use template variables in the tag query. So if you have a dashboard showing stats for different services and a template variable that dictates which services to show, you can now use the same template variable in your annotation query to only show annotations for those services.
+
+{{< docs-imagebox img="/img/docs/v53/annotation_tag_filter_variable.png" max-width="600px" >}}
## Querying other data sources
diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md
index 31251fd6389..403dabba8ae 100644
--- a/docs/sources/reference/templating.md
+++ b/docs/sources/reference/templating.md
@@ -90,6 +90,7 @@ Type | Description
*Custom* | Define the variable options manually using a comma separated list.
*Constant* | Define a hidden constant. Useful for metric path prefixes for dashboards you want to share. During dashboard export, constant variables will be made into an import option.
*Ad hoc filters* | Very special kind of variable that only works with some data sources, InfluxDB & Elasticsearch currently. It allows you to add key/value filters that will automatically be added to all metric queries that use the specified data source.
+*Text box* | This variable type will display as a free text input field with an optional default value.
### Query options
diff --git a/docs/sources/tutorials/ha_setup.md b/docs/sources/tutorials/ha_setup.md
index 0f138b20a17..f141392e223 100644
--- a/docs/sources/tutorials/ha_setup.md
+++ b/docs/sources/tutorials/ha_setup.md
@@ -22,13 +22,13 @@ Setting up Grafana for high availability is fairly simple. It comes down to two
First, you need to do is to setup MySQL or Postgres on another server and configure Grafana to use that database.
You can find the configuration for doing that in the [[database]]({{< relref "configuration.md" >}}#database) section in the grafana config.
-Grafana will now persist all long term data in the database. How to configure the database for high availability is out of scope for this guide. We recommend finding an expert on for the database your using.
+Grafana will now persist all long term data in the database. How to configure the database for high availability is out of scope for this guide. We recommend finding an expert on the database you're using.
## User sessions
-The second thing to consider is how to deal with user sessions and how to configure your load balancer infront of Grafana.
+The second thing to consider is how to deal with user sessions and how to configure your load balancer in front of Grafana.
Grafana supports two ways of storing session data: locally on disk or in a database/cache-server.
-If you want to store sessions on disk you can use `sticky sessions` in your load balanacer. If you prefer to store session data in a database/cache-server
+If you want to store sessions on disk you can use `sticky sessions` in your load balancer. If you prefer to store session data in a database/cache-server
you can use any stateless routing strategy in your load balancer (ex round robin or least connections).
### Sticky sessions
diff --git a/docs/versions.json b/docs/versions.json
index caefbe198d6..48962a783ae 100644
--- a/docs/versions.json
+++ b/docs/versions.json
@@ -1,5 +1,6 @@
[
- { "version": "v5.2", "path": "/", "archived": false, "current": true },
+ { "version": "v5.3", "path": "/", "archived": false, "current": true },
+ { "version": "v5.2", "path": "/v5.2", "archived": true },
{ "version": "v5.1", "path": "/v5.1", "archived": true },
{ "version": "v5.0", "path": "/v5.0", "archived": true },
{ "version": "v4.6", "path": "/v4.6", "archived": true },
diff --git a/examples/README.md b/examples/README.md
deleted file mode 100644
index 75f1f9a9a86..00000000000
--- a/examples/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-## Example plugin implementations
-
-datasource:[simple-json-datasource](https://github.com/grafana/simple-json-datasource)
-app: [example-app](https://github.com/grafana/example-app)
-panel: [grafana-piechart-panel](https://github.com/grafana/piechart-panel)
diff --git a/examples/alerting-dashboard.json b/examples/alerting-dashboard.json
deleted file mode 100644
index 744460d7847..00000000000
--- a/examples/alerting-dashboard.json
+++ /dev/null
@@ -1,800 +0,0 @@
-{
- "__inputs": [
- {
- "name": "DS_GRAPHITE",
- "label": "graphite",
- "description": "",
- "type": "datasource",
- "pluginId": "graphite",
- "pluginName": "Graphite"
- }
- ],
- "__requires": [
- {
- "type": "panel",
- "id": "graph",
- "name": "Graph",
- "version": ""
- },
- {
- "type": "grafana",
- "id": "grafana",
- "name": "Grafana",
- "version": "3.1.0"
- },
- {
- "type": "datasource",
- "id": "graphite",
- "name": "Graphite",
- "version": "1.0.0"
- }
- ],
- "id": null,
- "title": "Alerting example",
- "tags": [],
- "style": "dark",
- "timezone": "browser",
- "editable": true,
- "hideControls": false,
- "sharedCrosshair": false,
- "rows": [
- {
- "collapse": false,
- "editable": true,
- "height": "250px",
- "panels": [
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 355
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Critical alert panel",
- "notifications": [],
- "severity": "critical"
- },
- "alerting": {},
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "grid": {},
- "id": 1,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 4,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- },
- {
- "refId": "B",
- "target": "aliasByNode(scale(statsd.$apa.counters.session_start.*.count, 10), 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "line": true,
- "op": "gt",
- "value": 355
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 20
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Warning panel alert",
- "notifications": [],
- "severity": "warning"
- },
- "alerting": {},
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 2,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 4,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "warning",
- "fill": true,
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "line": true,
- "lineColor": "rgba(247, 149, 32, 0.60)",
- "op": "gt",
- "value": 20
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 1
- ],
- "type": "lt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "count"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "No datapoints",
- "notifications": [],
- "severity": "critical"
- },
- "alerting": {},
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 20,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 4,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "value": 1,
- "op": "lt",
- "fill": true,
- "line": true,
- "colorMode": "critical"
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Count datapoints",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- }
- ],
- "title": "Row"
- },
- {
- "collapse": false,
- "editable": true,
- "height": "250px",
- "panels": [
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 20
- ],
- "type": "lt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Alert below value",
- "notifications": [],
- "severity": "critical"
- },
- "alerting": {},
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 17,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "lt",
- "value": 20
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Alert below value",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 10,
- 80
- ],
- "type": "outside_range"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "10s",
- "handler": 1,
- "name": "Alert is outside range",
- "notifications": [],
- "severity": "critical"
- },
- "alerting": {},
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 18,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "lt",
- "value": 10
- },
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 80
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Alert is outside range",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 60,
- 80
- ],
- "type": "within_range"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "10s",
- "handler": 1,
- "name": "Alert is within range",
- "notifications": [],
- "severity": "critical"
- },
- "alerting": {},
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 19,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 60
- },
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "lt",
- "value": 80
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Alert is within range",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- }
- ],
- "title": "New row"
- }
- ],
- "time": {
- "from": "now-6h",
- "to": "now"
- },
- "timepicker": {
- "refresh_intervals": [
- "5s",
- "10s",
- "30s",
- "1m",
- "5m",
- "15m",
- "30m",
- "1h",
- "2h",
- "1d"
- ],
- "time_options": [
- "5m",
- "15m",
- "1h",
- "6h",
- "12h",
- "24h",
- "2d",
- "7d",
- "30d"
- ]
- },
- "templating": {
- "list": [
- {
- "current": {
- "text": "fakesite",
- "value": "fakesite"
- },
- "datasource": null,
- "hide": 0,
- "includeAll": false,
- "multi": false,
- "name": "apa",
- "options": [
- {
- "selected": true,
- "text": "fakesite",
- "value": "fakesite"
- }
- ],
- "query": "fakesite",
- "refresh": 0,
- "type": "custom"
- }
- ]
- },
- "annotations": {
- "list": []
- },
- "schemaVersion": 13,
- "version": 15,
- "links": [],
- "gnetId": null
-}
\ No newline at end of file
diff --git a/examples/alerting-multiple-alerts.json b/examples/alerting-multiple-alerts.json
deleted file mode 100644
index e6e729ecc06..00000000000
--- a/examples/alerting-multiple-alerts.json
+++ /dev/null
@@ -1,2216 +0,0 @@
-{
- "__inputs": [
- {
- "name": "DS_GRAPHITE",
- "label": "graphite",
- "description": "",
- "type": "datasource",
- "pluginId": "graphite",
- "pluginName": "Graphite"
- }
- ],
- "__requires": [
- {
- "type": "panel",
- "id": "graph",
- "name": "Graph",
- "version": ""
- },
- {
- "type": "grafana",
- "id": "grafana",
- "name": "Grafana",
- "version": "3.1.0"
- },
- {
- "type": "datasource",
- "id": "graphite",
- "name": "Graphite",
- "version": "1.0.0"
- }
- ],
- "id": null,
- "title": "Dashboard with many alerts",
- "tags": [],
- "style": "dark",
- "timezone": "browser",
- "editable": true,
- "hideControls": false,
- "sharedCrosshair": false,
- "rows": [
- {
- "collapse": false,
- "editable": true,
- "height": "250px",
- "panels": [
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 30
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "sum"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Critical alert panel",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "grid": {},
- "id": 1,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 30
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 30
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "sum"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Critical alert panel",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "grid": {},
- "id": 5,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 30
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 30
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "sum"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Critical alert panel",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "grid": {},
- "id": 6,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 30
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 30
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "sum"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Critical alert panel",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "grid": {},
- "id": 8,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 30
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- }
- ],
- "title": "Row"
- },
- {
- "collapse": false,
- "editable": true,
- "height": "250px",
- "panels": [
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 20
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Warning panel alert",
- "notifications": [],
- "severity": "warning"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 2,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "warning",
- "fill": true,
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "line": true,
- "lineColor": "rgba(247, 149, 32, 0.60)",
- "op": "gt",
- "value": 20
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 20
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Warning panel alert",
- "notifications": [],
- "severity": "warning"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 3,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "warning",
- "fill": true,
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "line": true,
- "lineColor": "rgba(247, 149, 32, 0.60)",
- "op": "gt",
- "value": 20
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 20
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Warning panel alert",
- "notifications": [],
- "severity": "warning"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 4,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "warning",
- "fill": true,
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "line": true,
- "lineColor": "rgba(247, 149, 32, 0.60)",
- "op": "gt",
- "value": 20
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 20
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "60s",
- "handler": 1,
- "name": "Warning panel alert",
- "notifications": [],
- "severity": "warning"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 7,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "warning",
- "fill": true,
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "line": true,
- "lineColor": "rgba(247, 149, 32, 0.60)",
- "op": "gt",
- "value": 20
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- }
- ],
- "title": "New row"
- },
- {
- "collapse": false,
- "editable": true,
- "height": "250px",
- "panels": [
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 50
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "10s",
- "handler": 1,
- "name": "Fast Critical panel alert",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 9,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 50
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 50
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "10s",
- "handler": 1,
- "name": "Fast Critical panel alert",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 10,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 50
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 50
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "10s",
- "handler": 1,
- "name": "Fast Critical panel alert",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 11,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 50
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "conditions": [
- {
- "evaluator": {
- "params": [
- 50
- ],
- "type": "gt"
- },
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "params": [],
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "enabled": true,
- "frequency": "10s",
- "handler": 1,
- "name": "Fast Critical panel alert",
- "notifications": [],
- "severity": "critical"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 12,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "colorMode": "critical",
- "fill": true,
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "line": true,
- "lineColor": "rgba(237, 46, 24, 0.60)",
- "op": "gt",
- "value": 50
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Critical panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- }
- ],
- "title": "New row"
- },
- {
- "collapse": false,
- "editable": true,
- "height": "250px",
- "panels": [
- {
- "alert": {
- "enabled": true,
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "gt",
- "params": [
- 10
- ]
- }
- }
- ],
- "severity": "warning",
- "frequency": "1s",
- "handler": 1,
- "notifications": [],
- "name": "Fast Warning panel alert"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 13,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "value": 10,
- "op": "gt",
- "fill": true,
- "line": true,
- "colorMode": "warning",
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "lineColor": "rgba(247, 149, 32, 0.60)"
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "enabled": true,
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "gt",
- "params": [
- 10
- ]
- }
- }
- ],
- "severity": "warning",
- "frequency": "1s",
- "handler": 1,
- "notifications": [],
- "name": "Fast Warning panel alert"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 14,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "value": 10,
- "op": "gt",
- "fill": true,
- "line": true,
- "colorMode": "warning",
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "lineColor": "rgba(247, 149, 32, 0.60)"
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "enabled": true,
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "gt",
- "params": [
- 10
- ]
- }
- }
- ],
- "severity": "warning",
- "frequency": "1s",
- "handler": 1,
- "notifications": [],
- "name": "Fast Warning panel alert"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 15,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "value": 10,
- "op": "gt",
- "fill": true,
- "line": true,
- "colorMode": "warning",
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "lineColor": "rgba(247, 149, 32, 0.60)"
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- },
- {
- "alert": {
- "enabled": true,
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "gt",
- "params": [
- 10
- ]
- }
- }
- ],
- "severity": "warning",
- "frequency": "1s",
- "handler": 1,
- "notifications": [],
- "name": "Fast Warning panel alert"
- },
- "aliasColors": {},
- "bars": false,
- "datasource": "${DS_GRAPHITE}",
- "editable": true,
- "error": false,
- "fill": 1,
- "id": 16,
- "isNew": true,
- "legend": {
- "avg": false,
- "current": false,
- "max": false,
- "min": false,
- "show": true,
- "total": false,
- "values": false
- },
- "lines": true,
- "linewidth": 2,
- "links": [],
- "nullPointMode": "connected",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [],
- "span": 3,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "refId": "A",
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)"
- }
- ],
- "thresholds": [
- {
- "value": 10,
- "op": "gt",
- "fill": true,
- "line": true,
- "colorMode": "warning",
- "fillColor": "rgba(235, 138, 14, 0.12)",
- "lineColor": "rgba(247, 149, 32, 0.60)"
- }
- ],
- "timeFrom": null,
- "timeShift": null,
- "title": "Fast Warning panel",
- "tooltip": {
- "msResolution": false,
- "shared": true,
- "sort": 0,
- "value_type": "cumulative"
- },
- "type": "graph",
- "xaxis": {
- "show": true
- },
- "yaxes": [
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ]
- }
- ],
- "title": "New row"
- },
- {
- "title": "New row",
- "height": "250px",
- "editable": true,
- "collapse": false,
- "panels": [
- {
- "title": "Alert below value",
- "error": false,
- "span": 3,
- "editable": true,
- "type": "graph",
- "isNew": true,
- "id": 17,
- "targets": [
- {
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)",
- "refId": "A"
- }
- ],
- "datasource": "${DS_GRAPHITE}",
- "renderer": "flot",
- "yaxes": [
- {
- "label": null,
- "show": true,
- "logBase": 1,
- "min": null,
- "max": null,
- "format": "short"
- },
- {
- "label": null,
- "show": true,
- "logBase": 1,
- "min": null,
- "max": null,
- "format": "short"
- }
- ],
- "xaxis": {
- "show": true
- },
- "alert": {
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "lt",
- "params": [
- 20
- ]
- }
- }
- ],
- "severity": "critical",
- "frequency": "60s",
- "handler": 1,
- "notifications": [],
- "name": "Alert below value",
- "enabled": true
- },
- "lines": true,
- "fill": 1,
- "linewidth": 2,
- "points": false,
- "pointradius": 5,
- "bars": false,
- "stack": false,
- "percentage": false,
- "legend": {
- "show": true,
- "values": false,
- "min": false,
- "max": false,
- "current": false,
- "total": false,
- "avg": false
- },
- "nullPointMode": "connected",
- "steppedLine": false,
- "tooltip": {
- "value_type": "cumulative",
- "shared": true,
- "sort": 0,
- "msResolution": false
- },
- "timeFrom": null,
- "timeShift": null,
- "aliasColors": {},
- "seriesOverrides": [],
- "thresholds": [
- {
- "value": 20,
- "op": "lt",
- "fill": true,
- "line": true,
- "colorMode": "critical",
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "lineColor": "rgba(237, 46, 24, 0.60)"
- }
- ],
- "links": []
- },
- {
- "title": "Alert is outside range",
- "error": false,
- "span": 3,
- "editable": true,
- "type": "graph",
- "isNew": true,
- "id": 18,
- "targets": [
- {
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)",
- "refId": "A"
- }
- ],
- "datasource": "${DS_GRAPHITE}",
- "renderer": "flot",
- "yaxes": [
- {
- "label": null,
- "show": true,
- "logBase": 1,
- "min": null,
- "max": null,
- "format": "short"
- },
- {
- "label": null,
- "show": true,
- "logBase": 1,
- "min": null,
- "max": null,
- "format": "short"
- }
- ],
- "xaxis": {
- "show": true
- },
- "alert": {
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "outside_range",
- "params": [
- 10,
- 80
- ]
- }
- }
- ],
- "severity": "critical",
- "frequency": "10s",
- "handler": 1,
- "notifications": [],
- "name": "Alert is outside range",
- "enabled": true
- },
- "lines": true,
- "fill": 1,
- "linewidth": 2,
- "points": false,
- "pointradius": 5,
- "bars": false,
- "stack": false,
- "percentage": false,
- "legend": {
- "show": true,
- "values": false,
- "min": false,
- "max": false,
- "current": false,
- "total": false,
- "avg": false
- },
- "nullPointMode": "connected",
- "steppedLine": false,
- "tooltip": {
- "value_type": "cumulative",
- "shared": true,
- "sort": 0,
- "msResolution": false
- },
- "timeFrom": null,
- "timeShift": null,
- "aliasColors": {},
- "seriesOverrides": [],
- "thresholds": [
- {
- "value": 10,
- "op": "lt",
- "fill": true,
- "line": true,
- "colorMode": "critical",
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "lineColor": "rgba(237, 46, 24, 0.60)"
- },
- {
- "value": 80,
- "op": "gt",
- "fill": true,
- "line": true,
- "colorMode": "critical",
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "lineColor": "rgba(237, 46, 24, 0.60)"
- }
- ],
- "links": []
- },
- {
- "title": "Alert is within range",
- "error": false,
- "span": 3,
- "editable": true,
- "type": "graph",
- "isNew": true,
- "id": 19,
- "targets": [
- {
- "target": "aliasByNode(statsd.fakesite.counters.session_start.*.count, 4)",
- "refId": "A"
- }
- ],
- "datasource": "${DS_GRAPHITE}",
- "renderer": "flot",
- "yaxes": [
- {
- "label": null,
- "show": true,
- "logBase": 1,
- "min": null,
- "max": null,
- "format": "short"
- },
- {
- "label": null,
- "show": true,
- "logBase": 1,
- "min": null,
- "max": null,
- "format": "short"
- }
- ],
- "xaxis": {
- "show": true
- },
- "alert": {
- "conditions": [
- {
- "type": "query",
- "query": {
- "params": [
- "A",
- "5m",
- "now"
- ]
- },
- "reducer": {
- "type": "avg",
- "params": []
- },
- "evaluator": {
- "type": "within_range",
- "params": [
- 60,
- 80
- ]
- }
- }
- ],
- "severity": "critical",
- "frequency": "10s",
- "handler": 1,
- "notifications": [],
- "name": "Alert is within range",
- "enabled": true
- },
- "lines": true,
- "fill": 1,
- "linewidth": 2,
- "points": false,
- "pointradius": 5,
- "bars": false,
- "stack": false,
- "percentage": false,
- "legend": {
- "show": true,
- "values": false,
- "min": false,
- "max": false,
- "current": false,
- "total": false,
- "avg": false
- },
- "nullPointMode": "connected",
- "steppedLine": false,
- "tooltip": {
- "value_type": "cumulative",
- "shared": true,
- "sort": 0,
- "msResolution": false
- },
- "timeFrom": null,
- "timeShift": null,
- "aliasColors": {},
- "seriesOverrides": [],
- "thresholds": [
- {
- "value": 60,
- "op": "gt",
- "fill": true,
- "line": true,
- "colorMode": "critical",
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "lineColor": "rgba(237, 46, 24, 0.60)"
- },
- {
- "value": 80,
- "op": "lt",
- "fill": true,
- "line": true,
- "colorMode": "critical",
- "fillColor": "rgba(234, 112, 112, 0.12)",
- "lineColor": "rgba(237, 46, 24, 0.60)"
- }
- ],
- "links": []
- }
- ]
- }
- ],
- "time": {
- "from": "now-6h",
- "to": "now"
- },
- "timepicker": {
- "refresh_intervals": [
- "5s",
- "10s",
- "30s",
- "1m",
- "5m",
- "15m",
- "30m",
- "1h",
- "2h",
- "1d"
- ],
- "time_options": [
- "5m",
- "15m",
- "1h",
- "6h",
- "12h",
- "24h",
- "2d",
- "7d",
- "30d"
- ]
- },
- "templating": {
- "list": []
- },
- "annotations": {
- "list": []
- },
- "schemaVersion": 13,
- "version": 50,
- "links": [],
- "gnetId": null
-}
\ No newline at end of file
diff --git a/jest.config.js b/jest.config.js
index a5cd3416f75..cac634fbf10 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -1,13 +1,8 @@
module.exports = {
verbose: false,
- "globals": {
- "ts-jest": {
- "tsConfigFile": "tsconfig.json"
- }
- },
"transform": {
- "^.+\\.tsx?$": "/node_modules/ts-jest/preprocessor.js"
+ "^.+\\.(ts|tsx)$": "ts-jest"
},
"moduleDirectories": ["node_modules", "public"],
"roots": [
diff --git a/latest.json b/latest.json
index 7b36131fea2..3b5a9e0c227 100644
--- a/latest.json
+++ b/latest.json
@@ -1,4 +1,4 @@
{
- "stable": "5.2.3",
- "testing": "5.2.3"
+ "stable": "5.3.0",
+ "testing": "5.3.0"
}
diff --git a/package.json b/package.json
index 9ee81d7f8ac..0517c0d5ba5 100644
--- a/package.json
+++ b/package.json
@@ -4,19 +4,20 @@
"company": "Grafana Labs"
},
"name": "grafana",
- "version": "5.3.0-pre1",
+ "version": "5.4.0-pre1",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"
},
"devDependencies": {
"@types/d3": "^4.10.1",
- "@types/enzyme": "^2.8.9",
- "@types/jest": "^21.1.4",
+ "@types/enzyme": "^3.1.13",
+ "@types/jest": "^23.3.2",
"@types/node": "^8.0.31",
- "@types/react": "^16.0.25",
+ "@types/react": "^16.4.14",
"@types/react-custom-scrollbars": "^4.0.5",
- "@types/react-dom": "^16.0.3",
+ "@types/react-dom": "^16.0.7",
+ "@types/react-select": "^2.0.4",
"angular-mocks": "1.6.6",
"autoprefixer": "^6.4.0",
"axios": "^0.17.1",
@@ -26,15 +27,15 @@
"babel-preset-es2015": "^6.24.1",
"clean-webpack-plugin": "^0.1.19",
"css-loader": "^0.28.7",
- "enzyme": "^3.1.0",
- "enzyme-adapter-react-16": "^1.0.1",
- "enzyme-to-json": "^3.3.0",
+ "enzyme": "^3.6.0",
+ "enzyme-adapter-react-16": "^1.5.0",
+ "enzyme-to-json": "^3.3.4",
"es6-promise": "^3.0.2",
"es6-shim": "^0.35.3",
"expect.js": "~0.2.0",
"expose-loader": "^0.7.3",
"file-loader": "^1.1.11",
- "fork-ts-checker-webpack-plugin": "^0.4.2",
+ "fork-ts-checker-webpack-plugin": "^0.4.9",
"gaze": "^1.1.2",
"glob": "~7.0.0",
"grunt": "1.0.1",
@@ -56,11 +57,10 @@
"html-webpack-harddisk-plugin": "^0.2.0",
"html-webpack-plugin": "^3.2.0",
"husky": "^0.14.3",
- "jest": "^22.0.4",
+ "jest": "^23.6.0",
"lint-staged": "^6.0.0",
"load-grunt-tasks": "3.5.2",
"mini-css-extract-plugin": "^0.4.0",
- "mobx-react-devtools": "^4.2.15",
"mocha": "^4.0.1",
"ng-annotate-loader": "^0.6.1",
"ng-annotate-webpack-plugin": "^0.3.0",
@@ -72,22 +72,22 @@
"postcss-loader": "^2.0.6",
"postcss-reporter": "^5.0.0",
"prettier": "1.9.2",
- "react-hot-loader": "^4.2.0",
- "react-test-renderer": "^16.0.0",
+ "react-hot-loader": "^4.3.6",
+ "react-test-renderer": "^16.5.0",
"sass-lint": "^1.10.2",
"sass-loader": "^7.0.1",
"sinon": "1.17.6",
"style-loader": "^0.21.0",
"systemjs": "0.20.19",
"systemjs-plugin-css": "^0.1.36",
- "ts-jest": "^22.4.6",
- "ts-loader": "^4.3.0",
+ "ts-jest": "^23.10.4",
+ "ts-loader": "^5.1.0",
"tslib": "^1.9.3",
"tslint": "^5.8.0",
"tslint-loader": "^3.5.3",
- "typescript": "^2.6.2",
+ "typescript": "^3.0.3",
"uglifyjs-webpack-plugin": "^1.2.7",
- "webpack": "^4.8.0",
+ "webpack": "4.19.1",
"webpack-bundle-analyzer": "^2.9.0",
"webpack-cleanup-plugin": "^0.5.1",
"webpack-cli": "^2.1.4",
@@ -101,7 +101,6 @@
"watch": "webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js",
"build": "grunt build",
"test": "grunt test",
- "test:coverage": "grunt test --coverage=true",
"lint": "tslint -c tslint.json --project tsconfig.json",
"jest": "jest --notify --watch",
"api-tests": "jest --notify --watch --config=tests/api/jest.js",
@@ -133,6 +132,7 @@
"angular-native-dragdrop": "1.2.2",
"angular-route": "1.6.6",
"angular-sanitize": "1.6.6",
+ "babel-jest": "^23.6.0",
"babel-polyfill": "^6.26.0",
"baron": "^3.0.3",
"brace": "^0.10.0",
@@ -145,29 +145,31 @@
"immutable": "^3.8.2",
"jquery": "^3.2.1",
"lodash": "^4.17.10",
- "mobx": "^3.4.1",
- "mobx-react": "^4.3.5",
- "mobx-state-tree": "^1.3.1",
"moment": "^2.22.2",
"mousetrap": "^1.6.0",
"mousetrap-global-bind": "^1.1.0",
"prismjs": "^1.6.0",
- "prop-types": "^15.6.0",
+ "prop-types": "^15.6.2",
"rc-cascader": "^0.14.0",
- "react": "^16.2.0",
+ "react": "^16.5.0",
"react-custom-scrollbars": "^4.2.1",
- "react-dom": "^16.2.0",
+ "react-dom": "^16.5.0",
"react-grid-layout": "0.16.6",
"react-highlight-words": "^0.10.0",
"react-popper": "^0.7.5",
- "react-select": "^1.1.0",
+ "react-redux": "^5.0.7",
+ "react-select": "2.1.0",
"react-sizeme": "^2.3.6",
"react-transition-group": "^2.2.1",
+ "redux": "^4.0.0",
+ "redux-logger": "^3.0.6",
+ "redux-thunk": "^2.3.0",
"remarkable": "^1.7.1",
"rst2html": "github:thoward/rst2html#990cb89",
"rxjs": "^5.4.3",
"slate": "^0.33.4",
"slate-plain-serializer": "^0.5.10",
+ "slate-prism": "^0.5.0",
"slate-react": "^0.12.4",
"tether": "^1.4.0",
"tether-drop": "https://github.com/torkelo/drop/tarball/master",
diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go
index 55c9c954940..242b5531f51 100644
--- a/pkg/api/annotations.go
+++ b/pkg/api/annotations.go
@@ -24,6 +24,7 @@ func GetAnnotations(c *m.ReqContext) Response {
Limit: c.QueryInt64("limit"),
Tags: c.QueryStrings("tags"),
Type: c.Query("type"),
+ MatchAny: c.QueryBool("matchAny"),
}
repo := annotations.GetRepository()
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 906481bbb8a..5c5596d5da2 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -10,10 +10,10 @@ import (
)
func (hs *HTTPServer) registerRoutes() {
- reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true})
- reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
- reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
- reqOrgAdmin := middleware.RoleAuth(m.ROLE_ADMIN)
+ reqSignedIn := middleware.ReqSignedIn
+ reqGrafanaAdmin := middleware.ReqGrafanaAdmin
+ reqEditorRole := middleware.ReqEditorRole
+ reqOrgAdmin := middleware.ReqOrgAdmin
redirectFromLegacyDashboardURL := middleware.RedirectFromLegacyDashboardURL()
redirectFromLegacyDashboardSoloURL := middleware.RedirectFromLegacyDashboardSoloURL()
quota := middleware.Quota
@@ -22,66 +22,66 @@ func (hs *HTTPServer) registerRoutes() {
r := hs.RouteRegister
// not logged in views
- r.Get("/", reqSignedIn, Index)
+ r.Get("/", reqSignedIn, hs.Index)
r.Get("/logout", Logout)
r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), Wrap(LoginPost))
r.Get("/login/:name", quota("session"), OAuthLogin)
- r.Get("/login", LoginView)
- r.Get("/invite/:code", Index)
+ r.Get("/login", hs.LoginView)
+ r.Get("/invite/:code", hs.Index)
// authed views
- r.Get("/profile/", reqSignedIn, Index)
- r.Get("/profile/password", reqSignedIn, Index)
- r.Get("/profile/switch-org/:id", reqSignedIn, ChangeActiveOrgAndRedirectToHome)
- r.Get("/org/", reqSignedIn, Index)
- r.Get("/org/new", reqSignedIn, Index)
- r.Get("/datasources/", reqSignedIn, Index)
- r.Get("/datasources/new", reqSignedIn, Index)
- r.Get("/datasources/edit/*", reqSignedIn, Index)
- r.Get("/org/users", reqSignedIn, Index)
- r.Get("/org/users/new", reqSignedIn, Index)
- r.Get("/org/users/invite", reqSignedIn, Index)
- r.Get("/org/teams", reqSignedIn, Index)
- r.Get("/org/teams/*", reqSignedIn, Index)
- r.Get("/org/apikeys/", reqSignedIn, Index)
- r.Get("/dashboard/import/", reqSignedIn, Index)
- r.Get("/configuration", reqGrafanaAdmin, Index)
- r.Get("/admin", reqGrafanaAdmin, Index)
- r.Get("/admin/settings", reqGrafanaAdmin, Index)
- r.Get("/admin/users", reqGrafanaAdmin, Index)
- r.Get("/admin/users/create", reqGrafanaAdmin, Index)
- r.Get("/admin/users/edit/:id", reqGrafanaAdmin, Index)
- r.Get("/admin/orgs", reqGrafanaAdmin, Index)
- r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, Index)
- r.Get("/admin/stats", reqGrafanaAdmin, Index)
+ r.Get("/profile/", reqSignedIn, hs.Index)
+ r.Get("/profile/password", reqSignedIn, hs.Index)
+ r.Get("/profile/switch-org/:id", reqSignedIn, hs.ChangeActiveOrgAndRedirectToHome)
+ r.Get("/org/", reqSignedIn, hs.Index)
+ r.Get("/org/new", reqSignedIn, hs.Index)
+ r.Get("/datasources/", reqSignedIn, hs.Index)
+ r.Get("/datasources/new", reqSignedIn, hs.Index)
+ r.Get("/datasources/edit/*", reqSignedIn, hs.Index)
+ r.Get("/org/users", reqSignedIn, hs.Index)
+ r.Get("/org/users/new", reqSignedIn, hs.Index)
+ r.Get("/org/users/invite", reqSignedIn, hs.Index)
+ r.Get("/org/teams", reqSignedIn, hs.Index)
+ r.Get("/org/teams/*", reqSignedIn, hs.Index)
+ r.Get("/org/apikeys/", reqSignedIn, hs.Index)
+ r.Get("/dashboard/import/", reqSignedIn, hs.Index)
+ r.Get("/configuration", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/settings", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/users", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/users/create", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/users/edit/:id", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/orgs", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, hs.Index)
+ r.Get("/admin/stats", reqGrafanaAdmin, hs.Index)
- r.Get("/styleguide", reqSignedIn, Index)
+ r.Get("/styleguide", reqSignedIn, hs.Index)
- r.Get("/plugins", reqSignedIn, Index)
- r.Get("/plugins/:id/edit", reqSignedIn, Index)
- r.Get("/plugins/:id/page/:page", reqSignedIn, Index)
+ r.Get("/plugins", reqSignedIn, hs.Index)
+ r.Get("/plugins/:id/edit", reqSignedIn, hs.Index)
+ r.Get("/plugins/:id/page/:page", reqSignedIn, hs.Index)
- r.Get("/d/:uid/:slug", reqSignedIn, Index)
- r.Get("/d/:uid", reqSignedIn, Index)
- r.Get("/dashboard/db/:slug", reqSignedIn, redirectFromLegacyDashboardURL, Index)
- r.Get("/dashboard/script/*", reqSignedIn, Index)
- r.Get("/dashboard-solo/snapshot/*", Index)
- r.Get("/d-solo/:uid/:slug", reqSignedIn, Index)
- r.Get("/dashboard-solo/db/:slug", reqSignedIn, redirectFromLegacyDashboardSoloURL, Index)
- r.Get("/dashboard-solo/script/*", reqSignedIn, Index)
- r.Get("/import/dashboard", reqSignedIn, Index)
- r.Get("/dashboards/", reqSignedIn, Index)
- r.Get("/dashboards/*", reqSignedIn, Index)
+ r.Get("/d/:uid/:slug", reqSignedIn, hs.Index)
+ r.Get("/d/:uid", reqSignedIn, hs.Index)
+ r.Get("/dashboard/db/:slug", reqSignedIn, redirectFromLegacyDashboardURL, hs.Index)
+ r.Get("/dashboard/script/*", reqSignedIn, hs.Index)
+ r.Get("/dashboard-solo/snapshot/*", hs.Index)
+ r.Get("/d-solo/:uid/:slug", reqSignedIn, hs.Index)
+ r.Get("/dashboard-solo/db/:slug", reqSignedIn, redirectFromLegacyDashboardSoloURL, hs.Index)
+ r.Get("/dashboard-solo/script/*", reqSignedIn, hs.Index)
+ r.Get("/import/dashboard", reqSignedIn, hs.Index)
+ r.Get("/dashboards/", reqSignedIn, hs.Index)
+ r.Get("/dashboards/*", reqSignedIn, hs.Index)
- r.Get("/explore", reqEditorRole, Index)
+ r.Get("/explore", reqEditorRole, hs.Index)
- r.Get("/playlists/", reqSignedIn, Index)
- r.Get("/playlists/*", reqSignedIn, Index)
- r.Get("/alerting/", reqSignedIn, Index)
- r.Get("/alerting/*", reqSignedIn, Index)
+ r.Get("/playlists/", reqSignedIn, hs.Index)
+ r.Get("/playlists/*", reqSignedIn, hs.Index)
+ r.Get("/alerting/", reqSignedIn, hs.Index)
+ r.Get("/alerting/*", reqSignedIn, hs.Index)
// sign up
- r.Get("/signup", Index)
+ r.Get("/signup", hs.Index)
r.Get("/api/user/signup/options", Wrap(GetSignUpOptions))
r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), Wrap(SignUp))
r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), Wrap(SignUpStep2))
@@ -91,15 +91,15 @@ func (hs *HTTPServer) registerRoutes() {
r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), Wrap(CompleteInvite))
// reset password
- r.Get("/user/password/send-reset-email", Index)
- r.Get("/user/password/reset", Index)
+ r.Get("/user/password/send-reset-email", hs.Index)
+ r.Get("/user/password/reset", hs.Index)
r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), Wrap(SendResetPasswordEmail))
r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), Wrap(ResetPassword))
// dashboard snapshots
- r.Get("/dashboard/snapshot/*", Index)
- r.Get("/dashboard/snapshots/", reqSignedIn, Index)
+ r.Get("/dashboard/snapshot/*", hs.Index)
+ r.Get("/dashboard/snapshots/", reqSignedIn, hs.Index)
// api for dashboard snapshots
r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
@@ -320,7 +320,7 @@ func (hs *HTTPServer) registerRoutes() {
apiRoute.Get("/search/", Search)
// metrics
- apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(QueryMetrics))
+ apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(hs.QueryMetrics))
apiRoute.Get("/tsdb/testdata/scenarios", Wrap(GetTestDataScenarios))
apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, Wrap(GenerateSQLTestData))
apiRoute.Get("/tsdb/testdata/random-walk", Wrap(GetTestDataRandomWalk))
diff --git a/pkg/api/avatar/avatar.go b/pkg/api/avatar/avatar.go
index 5becf90ca35..6cf164285bf 100644
--- a/pkg/api/avatar/avatar.go
+++ b/pkg/api/avatar/avatar.go
@@ -97,15 +97,6 @@ type CacheServer struct {
cache *gocache.Cache
}
-func (this *CacheServer) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) {
- for _, k := range keys {
- if _, err := fmt.Sscanf(r.FormValue(k), "%d", &v); err == nil {
- defaultValue = v
- }
- }
- return defaultValue
-}
-
func (this *CacheServer) Handler(ctx *macaron.Context) {
urlPath := ctx.Req.URL.Path
hash := urlPath[strings.LastIndex(urlPath, "/")+1:]
diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go
index c2ab6dd9a1a..02248334b9c 100644
--- a/pkg/api/dashboard.go
+++ b/pkg/api/dashboard.go
@@ -6,6 +6,7 @@ import (
"os"
"path"
+ "github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/api/dtos"
@@ -22,6 +23,10 @@ import (
"github.com/grafana/grafana/pkg/util"
)
+const (
+ anonString = "Anonymous"
+)
+
func isDashboardStarredByUser(c *m.ReqContext, dashID int64) (bool, error) {
if !c.IsSignedIn {
return false, nil
@@ -64,7 +69,7 @@ func GetDashboard(c *m.ReqContext) Response {
}
// Finding creator and last updater of the dashboard
- updater, creator := "Anonymous", "Anonymous"
+ updater, creator := anonString, anonString
if dash.UpdatedBy > 0 {
updater = getUserLogin(dash.UpdatedBy)
}
@@ -128,7 +133,7 @@ func getUserLogin(userID int64) string {
query := m.GetUserByIdQuery{Id: userID}
err := bus.Dispatch(&query)
if err != nil {
- return "Anonymous"
+ return anonString
}
return query.Result.Login
}
@@ -247,8 +252,8 @@ func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response {
return Error(403, err.Error(), err)
}
- if err == m.ErrDashboardContainsInvalidAlertData {
- return Error(500, "Invalid alert data. Cannot save dashboard", err)
+ if validationErr, ok := err.(alerting.ValidationError); ok {
+ return Error(422, validationErr.Error(), nil)
}
if err != nil {
@@ -403,7 +408,7 @@ func GetDashboardVersion(c *m.ReqContext) Response {
return Error(500, fmt.Sprintf("Dashboard version %d not found for dashboardId %d", query.Version, dashID), err)
}
- creator := "Anonymous"
+ creator := anonString
if query.Result.CreatedBy > 0 {
creator = getUserLogin(query.Result.CreatedBy)
}
diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go
index 283a9b5f12c..2726623c242 100644
--- a/pkg/api/dashboard_test.go
+++ b/pkg/api/dashboard_test.go
@@ -9,6 +9,7 @@ import (
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/setting"
@@ -725,7 +726,7 @@ func TestDashboardApiEndpoint(t *testing.T) {
{SaveError: m.ErrDashboardVersionMismatch, ExpectedStatusCode: 412},
{SaveError: m.ErrDashboardTitleEmpty, ExpectedStatusCode: 400},
{SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400},
- {SaveError: m.ErrDashboardContainsInvalidAlertData, ExpectedStatusCode: 500},
+ {SaveError: alerting.ValidationError{Reason: "Mu"}, ExpectedStatusCode: 422},
{SaveError: m.ErrDashboardFailedToUpdateAlertData, ExpectedStatusCode: 500},
{SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500},
{SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400},
diff --git a/pkg/api/dataproxy.go b/pkg/api/dataproxy.go
index 33839ca985d..eddfb884f8f 100644
--- a/pkg/api/dataproxy.go
+++ b/pkg/api/dataproxy.go
@@ -13,19 +13,20 @@ import (
const HeaderNameNoBackendCache = "X-Grafana-NoCache"
-func (hs *HTTPServer) getDatasourceByID(id int64, orgID int64, nocache bool) (*m.DataSource, error) {
+func (hs *HTTPServer) getDatasourceFromCache(id int64, c *m.ReqContext) (*m.DataSource, error) {
+ nocache := c.Req.Header.Get(HeaderNameNoBackendCache) == "true"
cacheKey := fmt.Sprintf("ds-%d", id)
if !nocache {
if cached, found := hs.cache.Get(cacheKey); found {
ds := cached.(*m.DataSource)
- if ds.OrgId == orgID {
+ if ds.OrgId == c.OrgId {
return ds, nil
}
}
}
- query := m.GetDataSourceByIdQuery{Id: id, OrgId: orgID}
+ query := m.GetDataSourceByIdQuery{Id: id, OrgId: c.OrgId}
if err := bus.Dispatch(&query); err != nil {
return nil, err
}
@@ -37,10 +38,7 @@ func (hs *HTTPServer) getDatasourceByID(id int64, orgID int64, nocache bool) (*m
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
- nocache := c.Req.Header.Get(HeaderNameNoBackendCache) == "true"
-
- ds, err := hs.getDatasourceByID(c.ParamsInt64(":id"), c.OrgId, nocache)
-
+ ds, err := hs.getDatasourceFromCache(c.ParamsInt64(":id"), c)
if err != nil {
c.JsonApiErr(500, "Unable to load datasource meta data", err)
return
@@ -53,7 +51,21 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
return
}
- proxyPath := c.Params("*")
+ // macaron does not include trailing slashes when resolving a wildcard path
+ proxyPath := ensureProxyPathTrailingSlash(c.Req.URL.Path, c.Params("*"))
+
proxy := pluginproxy.NewDataSourceProxy(ds, plugin, c, proxyPath)
proxy.HandleRequest()
}
+
+// ensureProxyPathTrailingSlash Check for a trailing slash in original path and makes
+// sure that a trailing slash is added to proxy path, if not already exists.
+func ensureProxyPathTrailingSlash(originalPath, proxyPath string) string {
+ if len(proxyPath) > 1 {
+ if originalPath[len(originalPath)-1] == '/' && proxyPath[len(proxyPath)-1] != '/' {
+ return proxyPath + "/"
+ }
+ }
+
+ return proxyPath
+}
diff --git a/pkg/api/dataproxy_test.go b/pkg/api/dataproxy_test.go
new file mode 100644
index 00000000000..a1d7cf68a37
--- /dev/null
+++ b/pkg/api/dataproxy_test.go
@@ -0,0 +1,19 @@
+package api
+
+import (
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestDataProxy(t *testing.T) {
+ Convey("Data proxy test", t, func() {
+ Convey("Should append trailing slash to proxy path if original path has a trailing slash", func() {
+ So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range/", "api/v1/query_range/"), ShouldEqual, "api/v1/query_range/")
+ })
+
+ Convey("Should not append trailing slash to proxy path if original path doesn't have a trailing slash", func() {
+ So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range", "api/v1/query_range"), ShouldEqual, "api/v1/query_range")
+ })
+ })
+}
diff --git a/pkg/api/dtos/alerting_test.go b/pkg/api/dtos/alerting_test.go
index c38f281be9c..f4c09f202cb 100644
--- a/pkg/api/dtos/alerting_test.go
+++ b/pkg/api/dtos/alerting_test.go
@@ -29,7 +29,7 @@ func TestFormatShort(t *testing.T) {
}
if parsed != tc.interval {
- t.Errorf("expectes the parsed duration to equal the interval. Got %v expected: %v", parsed, tc.interval)
+ t.Errorf("expects the parsed duration to equal the interval. Got %v expected: %v", parsed, tc.interval)
}
}
}
diff --git a/pkg/api/folder.go b/pkg/api/folder.go
index f0cdff24d20..0e08343b556 100644
--- a/pkg/api/folder.go
+++ b/pkg/api/folder.go
@@ -95,7 +95,7 @@ func toFolderDto(g guardian.DashboardGuardian, folder *m.Folder) dtos.Folder {
canAdmin, _ := g.CanAdmin()
// Finding creator and last updater of the folder
- updater, creator := "Anonymous", "Anonymous"
+ updater, creator := anonString, anonString
if folder.CreatedBy > 0 {
creator = getUserLogin(folder.CreatedBy)
}
diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go
index 6e24e432535..880de338c8f 100644
--- a/pkg/api/folder_test.go
+++ b/pkg/api/folder_test.go
@@ -133,16 +133,6 @@ func TestFoldersApiEndpoint(t *testing.T) {
})
}
-func callGetFolderByUID(sc *scenarioContext) {
- sc.handlerFunc = GetFolderByUID
- sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
-}
-
-func callDeleteFolder(sc *scenarioContext) {
- sc.handlerFunc = DeleteFolder
- sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
-}
-
func callCreateFolder(sc *scenarioContext) {
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
}
diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go
index 0de63ce5e08..858b3c5a8c5 100644
--- a/pkg/api/http_server.go
+++ b/pkg/api/http_server.go
@@ -28,6 +28,7 @@ import (
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/registry"
+ "github.com/grafana/grafana/pkg/services/hooks"
"github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/setting"
)
@@ -52,6 +53,7 @@ type HTTPServer struct {
Bus bus.Bus `inject:""`
RenderService rendering.Service `inject:""`
Cfg *setting.Cfg `inject:""`
+ HooksService *hooks.HooksService `inject:""`
}
func (hs *HTTPServer) Init() error {
@@ -184,7 +186,7 @@ func (hs *HTTPServer) applyRoutes() {
// then custom app proxy routes
hs.initAppPluginRoutes(hs.macaron)
// lastly not found route
- hs.macaron.NotFound(NotFoundHandler)
+ hs.macaron.NotFound(hs.NotFoundHandler)
}
func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
@@ -233,6 +235,10 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
}
func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
+ if !hs.Cfg.MetricsEndpointEnabled {
+ return
+ }
+
if ctx.Req.Method != "GET" || ctx.Req.URL.Path != "/metrics" {
return
}
diff --git a/pkg/api/index.go b/pkg/api/index.go
index ea10940d3ba..9f867d51cad 100644
--- a/pkg/api/index.go
+++ b/pkg/api/index.go
@@ -11,7 +11,13 @@ import (
"github.com/grafana/grafana/pkg/setting"
)
-func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
+const (
+ // Themes
+ lightName = "light"
+ darkName = "dark"
+)
+
+func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
settings, err := getFrontendSettingsMap(c)
if err != nil {
return nil, err
@@ -60,7 +66,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
OrgRole: c.OrgRole,
GravatarUrl: dtos.GetGravatarUrl(c.Email),
IsGrafanaAdmin: c.IsGrafanaAdmin,
- LightTheme: prefs.Theme == "light",
+ LightTheme: prefs.Theme == lightName,
Timezone: prefs.Timezone,
Locale: locale,
HelpFlags1: c.HelpFlags1,
@@ -88,9 +94,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
}
themeURLParam := c.Query("theme")
- if themeURLParam == "light" {
+ if themeURLParam == lightName {
data.User.LightTheme = true
- data.Theme = "light"
+ data.Theme = lightName
+ } else if themeURLParam == darkName {
+ data.User.LightTheme = false
+ data.Theme = darkName
}
if hasEditPermissionInFoldersQuery.Result {
@@ -341,11 +350,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
},
})
+ hs.HooksService.RunIndexDataHooks(&data)
return &data, nil
}
-func Index(c *m.ReqContext) {
- data, err := setIndexViewData(c)
+func (hs *HTTPServer) Index(c *m.ReqContext) {
+ data, err := hs.setIndexViewData(c)
if err != nil {
c.Handle(500, "Failed to get settings", err)
return
@@ -353,13 +363,13 @@ func Index(c *m.ReqContext) {
c.HTML(200, "index", data)
}
-func NotFoundHandler(c *m.ReqContext) {
+func (hs *HTTPServer) NotFoundHandler(c *m.ReqContext) {
if c.IsApiRequest() {
c.JsonApiErr(404, "Not found", nil)
return
}
- data, err := setIndexViewData(c)
+ data, err := hs.setIndexViewData(c)
if err != nil {
c.Handle(500, "Failed to get settings", err)
return
diff --git a/pkg/api/live/hub.go b/pkg/api/live/hub.go
index 37ab5667e55..9708bc515d1 100644
--- a/pkg/api/live/hub.go
+++ b/pkg/api/live/hub.go
@@ -37,9 +37,6 @@ func newHub() *hub {
}
}
-func (h *hub) removeConnection() {
-}
-
func (h *hub) run(ctx context.Context) {
for {
select {
diff --git a/pkg/api/login.go b/pkg/api/login.go
index 632d04e37f1..1083f89adfd 100644
--- a/pkg/api/login.go
+++ b/pkg/api/login.go
@@ -17,8 +17,8 @@ const (
ViewIndex = "index"
)
-func LoginView(c *m.ReqContext) {
- viewData, err := setIndexViewData(c)
+func (hs *HTTPServer) LoginView(c *m.ReqContext) {
+ viewData, err := hs.setIndexViewData(c)
if err != nil {
c.Handle(500, "Failed to get settings", err)
return
diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go
index f2bc79df7ad..cb80bd346b8 100644
--- a/pkg/api/metrics.go
+++ b/pkg/api/metrics.go
@@ -13,21 +13,21 @@ import (
)
// POST /api/tsdb/query
-func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response {
+func (hs *HTTPServer) QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response {
timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To)
if len(reqDto.Queries) == 0 {
return Error(400, "No queries found in query", nil)
}
- dsID, err := reqDto.Queries[0].Get("datasourceId").Int64()
+ datasourceId, err := reqDto.Queries[0].Get("datasourceId").Int64()
if err != nil {
return Error(400, "Query missing datasourceId", nil)
}
- dsQuery := m.GetDataSourceByIdQuery{Id: dsID, OrgId: c.OrgId}
- if err := bus.Dispatch(&dsQuery); err != nil {
- return Error(500, "failed to fetch data source", err)
+ ds, err := hs.getDatasourceFromCache(datasourceId, c)
+ if err != nil {
+ return Error(500, "Unable to load datasource meta data", err)
}
request := &tsdb.TsdbQuery{TimeRange: timeRange}
@@ -38,11 +38,11 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response {
MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
IntervalMs: query.Get("intervalMs").MustInt64(1000),
Model: query,
- DataSource: dsQuery.Result,
+ DataSource: ds,
})
}
- resp, err := tsdb.HandleRequest(context.Background(), dsQuery.Result, request)
+ resp, err := tsdb.HandleRequest(c.Req.Context(), ds, request)
if err != nil {
return Error(500, "Metric request error", err)
}
diff --git a/pkg/api/org_users.go b/pkg/api/org_users.go
index 4e2ed36431e..e750662c764 100644
--- a/pkg/api/org_users.go
+++ b/pkg/api/org_users.go
@@ -45,7 +45,7 @@ func addOrgUserHelper(cmd m.AddOrgUserCommand) Response {
// GET /api/org/users
func GetOrgUsersForCurrentOrg(c *m.ReqContext) Response {
- return getOrgUsersHelper(c.OrgId, c.Params("query"), c.ParamsInt("limit"))
+ return getOrgUsersHelper(c.OrgId, c.Query("query"), c.QueryInt("limit"))
}
// GET /api/orgs/:orgId/users
@@ -102,26 +102,32 @@ func updateOrgUserHelper(cmd m.UpdateOrgUserCommand) Response {
// DELETE /api/org/users/:userId
func RemoveOrgUserForCurrentOrg(c *m.ReqContext) Response {
- userID := c.ParamsInt64(":userId")
- return removeOrgUserHelper(c.OrgId, userID)
+ return removeOrgUserHelper(&m.RemoveOrgUserCommand{
+ UserId: c.ParamsInt64(":userId"),
+ OrgId: c.OrgId,
+ ShouldDeleteOrphanedUser: true,
+ })
}
// DELETE /api/orgs/:orgId/users/:userId
func RemoveOrgUser(c *m.ReqContext) Response {
- userID := c.ParamsInt64(":userId")
- orgID := c.ParamsInt64(":orgId")
- return removeOrgUserHelper(orgID, userID)
+ return removeOrgUserHelper(&m.RemoveOrgUserCommand{
+ UserId: c.ParamsInt64(":userId"),
+ OrgId: c.ParamsInt64(":orgId"),
+ })
}
-func removeOrgUserHelper(orgID int64, userID int64) Response {
- cmd := m.RemoveOrgUserCommand{OrgId: orgID, UserId: userID}
-
- if err := bus.Dispatch(&cmd); err != nil {
+func removeOrgUserHelper(cmd *m.RemoveOrgUserCommand) Response {
+ if err := bus.Dispatch(cmd); err != nil {
if err == m.ErrLastOrgAdmin {
return Error(400, "Cannot remove last organization admin", nil)
}
return Error(500, "Failed to remove user from organization", err)
}
+ if cmd.UserWasDeleted {
+ return Success("User deleted")
+ }
+
return Success("User removed from organization")
}
diff --git a/pkg/api/pluginproxy/access_token_provider.go b/pkg/api/pluginproxy/access_token_provider.go
new file mode 100644
index 00000000000..22407823ff9
--- /dev/null
+++ b/pkg/api/pluginproxy/access_token_provider.go
@@ -0,0 +1,171 @@
+package pluginproxy
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strconv"
+ "sync"
+ "time"
+
+ "golang.org/x/oauth2"
+
+ "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/plugins"
+ "golang.org/x/oauth2/jwt"
+)
+
+var (
+ tokenCache = tokenCacheType{
+ cache: map[string]*jwtToken{},
+ }
+ oauthJwtTokenCache = oauthJwtTokenCacheType{
+ cache: map[string]*oauth2.Token{},
+ }
+)
+
+type tokenCacheType struct {
+ cache map[string]*jwtToken
+ sync.Mutex
+}
+
+type oauthJwtTokenCacheType struct {
+ cache map[string]*oauth2.Token
+ sync.Mutex
+}
+
+type accessTokenProvider struct {
+ route *plugins.AppPluginRoute
+ datasourceId int64
+ datasourceVersion int
+}
+
+type jwtToken struct {
+ ExpiresOn time.Time `json:"-"`
+ ExpiresOnString string `json:"expires_on"`
+ AccessToken string `json:"access_token"`
+}
+
+func newAccessTokenProvider(ds *models.DataSource, pluginRoute *plugins.AppPluginRoute) *accessTokenProvider {
+ return &accessTokenProvider{
+ datasourceId: ds.Id,
+ datasourceVersion: ds.Version,
+ route: pluginRoute,
+ }
+}
+
+func (provider *accessTokenProvider) getAccessToken(data templateData) (string, error) {
+ tokenCache.Lock()
+ defer tokenCache.Unlock()
+ if cachedToken, found := tokenCache.cache[provider.getAccessTokenCacheKey()]; found {
+ if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) {
+ logger.Info("Using token from cache")
+ return cachedToken.AccessToken, nil
+ }
+ }
+
+ urlInterpolated, err := interpolateString(provider.route.TokenAuth.Url, data)
+ if err != nil {
+ return "", err
+ }
+
+ params := make(url.Values)
+ for key, value := range provider.route.TokenAuth.Params {
+ interpolatedParam, err := interpolateString(value, data)
+ if err != nil {
+ return "", err
+ }
+ params.Add(key, interpolatedParam)
+ }
+
+ getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode()))
+ getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
+ getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode())))
+
+ resp, err := client.Do(getTokenReq)
+ if err != nil {
+ return "", err
+ }
+
+ defer resp.Body.Close()
+
+ var token jwtToken
+ if err := json.NewDecoder(resp.Body).Decode(&token); err != nil {
+ return "", err
+ }
+
+ expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64)
+ token.ExpiresOn = time.Unix(expiresOnEpoch, 0)
+ tokenCache.cache[provider.getAccessTokenCacheKey()] = &token
+
+ logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn)
+
+ return token.AccessToken, nil
+}
+
+func (provider *accessTokenProvider) getJwtAccessToken(ctx context.Context, data templateData) (string, error) {
+ oauthJwtTokenCache.Lock()
+ defer oauthJwtTokenCache.Unlock()
+ if cachedToken, found := oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()]; found {
+ if cachedToken.Expiry.After(time.Now().Add(time.Second * 10)) {
+ logger.Debug("Using token from cache")
+ return cachedToken.AccessToken, nil
+ }
+ }
+
+ conf := &jwt.Config{}
+
+ if val, ok := provider.route.JwtTokenAuth.Params["client_email"]; ok {
+ interpolatedVal, err := interpolateString(val, data)
+ if err != nil {
+ return "", err
+ }
+ conf.Email = interpolatedVal
+ }
+
+ if val, ok := provider.route.JwtTokenAuth.Params["private_key"]; ok {
+ interpolatedVal, err := interpolateString(val, data)
+ if err != nil {
+ return "", err
+ }
+ conf.PrivateKey = []byte(interpolatedVal)
+ }
+
+ if val, ok := provider.route.JwtTokenAuth.Params["token_uri"]; ok {
+ interpolatedVal, err := interpolateString(val, data)
+ if err != nil {
+ return "", err
+ }
+ conf.TokenURL = interpolatedVal
+ }
+
+ conf.Scopes = provider.route.JwtTokenAuth.Scopes
+
+ token, err := getTokenSource(conf, ctx)
+ if err != nil {
+ return "", err
+ }
+
+ oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()] = token
+
+ logger.Info("Got new access token", "ExpiresOn", token.Expiry)
+
+ return token.AccessToken, nil
+}
+
+var getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
+ tokenSrc := conf.TokenSource(ctx)
+ token, err := tokenSrc.Token()
+ if err != nil {
+ return nil, err
+ }
+
+ return token, nil
+}
+
+func (provider *accessTokenProvider) getAccessTokenCacheKey() string {
+ return fmt.Sprintf("%v_%v_%v_%v", provider.datasourceId, provider.datasourceVersion, provider.route.Path, provider.route.Method)
+}
diff --git a/pkg/api/pluginproxy/access_token_provider_test.go b/pkg/api/pluginproxy/access_token_provider_test.go
new file mode 100644
index 00000000000..e75748e4660
--- /dev/null
+++ b/pkg/api/pluginproxy/access_token_provider_test.go
@@ -0,0 +1,94 @@
+package pluginproxy
+
+import (
+ "context"
+ "testing"
+ "time"
+
+ "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/plugins"
+ . "github.com/smartystreets/goconvey/convey"
+ "golang.org/x/oauth2"
+ "golang.org/x/oauth2/jwt"
+)
+
+func TestAccessToken(t *testing.T) {
+ Convey("Plugin with JWT token auth route", t, func() {
+ pluginRoute := &plugins.AppPluginRoute{
+ Path: "pathwithjwttoken1",
+ Url: "https://api.jwt.io/some/path",
+ Method: "GET",
+ JwtTokenAuth: &plugins.JwtTokenAuth{
+ Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token",
+ Scopes: []string{
+ "https://www.testapi.com/auth/monitoring.read",
+ "https://www.testapi.com/auth/cloudplatformprojects.readonly",
+ },
+ Params: map[string]string{
+ "token_uri": "{{.JsonData.tokenUri}}",
+ "client_email": "{{.JsonData.clientEmail}}",
+ "private_key": "{{.SecureJsonData.privateKey}}",
+ },
+ },
+ }
+
+ templateData := templateData{
+ JsonData: map[string]interface{}{
+ "clientEmail": "test@test.com",
+ "tokenUri": "login.url.com/token",
+ },
+ SecureJsonData: map[string]string{
+ "privateKey": "testkey",
+ },
+ }
+
+ ds := &models.DataSource{Id: 1, Version: 2}
+
+ Convey("should fetch token using jwt private key", func() {
+ getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
+ return &oauth2.Token{AccessToken: "abc"}, nil
+ }
+ provider := newAccessTokenProvider(ds, pluginRoute)
+ token, err := provider.getJwtAccessToken(context.Background(), templateData)
+ So(err, ShouldBeNil)
+
+ So(token, ShouldEqual, "abc")
+ })
+
+ Convey("should set jwt config values", func() {
+ getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
+ So(conf.Email, ShouldEqual, "test@test.com")
+ So(conf.PrivateKey, ShouldResemble, []byte("testkey"))
+ So(len(conf.Scopes), ShouldEqual, 2)
+ So(conf.Scopes[0], ShouldEqual, "https://www.testapi.com/auth/monitoring.read")
+ So(conf.Scopes[1], ShouldEqual, "https://www.testapi.com/auth/cloudplatformprojects.readonly")
+ So(conf.TokenURL, ShouldEqual, "login.url.com/token")
+
+ return &oauth2.Token{AccessToken: "abc"}, nil
+ }
+
+ provider := newAccessTokenProvider(ds, pluginRoute)
+ _, err := provider.getJwtAccessToken(context.Background(), templateData)
+ So(err, ShouldBeNil)
+ })
+
+ Convey("should use cached token on second call", func() {
+ getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
+ return &oauth2.Token{
+ AccessToken: "abc",
+ Expiry: time.Now().Add(1 * time.Minute)}, nil
+ }
+ provider := newAccessTokenProvider(ds, pluginRoute)
+ token1, err := provider.getJwtAccessToken(context.Background(), templateData)
+ So(err, ShouldBeNil)
+ So(token1, ShouldEqual, "abc")
+
+ getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
+ return &oauth2.Token{AccessToken: "error: cache not used"}, nil
+ }
+ token2, err := provider.getJwtAccessToken(context.Background(), templateData)
+ So(err, ShouldBeNil)
+ So(token2, ShouldEqual, "abc")
+ })
+ })
+}
diff --git a/pkg/api/pluginproxy/ds_auth_provider.go b/pkg/api/pluginproxy/ds_auth_provider.go
new file mode 100644
index 00000000000..c68da839d13
--- /dev/null
+++ b/pkg/api/pluginproxy/ds_auth_provider.go
@@ -0,0 +1,93 @@
+package pluginproxy
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "text/template"
+
+ m "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/plugins"
+ "github.com/grafana/grafana/pkg/util"
+)
+
+// ApplyRoute uses the plugin route data to set auth headers and custom headers on the proxied request.
+func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *m.DataSource) {
+ proxyPath = strings.TrimPrefix(proxyPath, route.Path)
+
+ data := templateData{
+ JsonData: ds.JsonData.Interface().(map[string]interface{}),
+ SecureJsonData: ds.SecureJsonData.Decrypt(),
+ }
+
+ interpolatedURL, err := interpolateString(route.Url, data)
+ if err != nil {
+ logger.Error("Error interpolating proxy url", "error", err)
+ return
+ }
+
+ routeURL, err := url.Parse(interpolatedURL)
+ if err != nil {
+ logger.Error("Error parsing plugin route url", "error", err)
+ return
+ }
+
+ req.URL.Scheme = routeURL.Scheme
+ req.URL.Host = routeURL.Host
+ req.Host = routeURL.Host
+ req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxyPath)
+
+ if err := addHeaders(&req.Header, route, data); err != nil {
+ logger.Error("Failed to render plugin headers", "error", err)
+ }
+
+ tokenProvider := newAccessTokenProvider(ds, route)
+
+ if route.TokenAuth != nil {
+ if token, err := tokenProvider.getAccessToken(data); err != nil {
+ logger.Error("Failed to get access token", "error", err)
+ } else {
+ req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+ }
+ }
+
+ if route.JwtTokenAuth != nil {
+ if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
+ logger.Error("Failed to get access token", "error", err)
+ } else {
+ req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
+ }
+ }
+ logger.Info("Requesting", "url", req.URL.String())
+
+}
+
+func interpolateString(text string, data templateData) (string, error) {
+ t, err := template.New("content").Parse(text)
+ if err != nil {
+ return "", fmt.Errorf("could not parse template %s", text)
+ }
+
+ var contentBuf bytes.Buffer
+ err = t.Execute(&contentBuf, data)
+ if err != nil {
+ return "", fmt.Errorf("failed to execute template %s", text)
+ }
+
+ return contentBuf.String(), nil
+}
+
+func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error {
+ for _, header := range route.Headers {
+ interpolated, err := interpolateString(header.Content, data)
+ if err != nil {
+ return err
+ }
+ reqHeaders.Add(header.Name, interpolated)
+ }
+
+ return nil
+}
diff --git a/pkg/api/pluginproxy/ds_auth_provider_test.go b/pkg/api/pluginproxy/ds_auth_provider_test.go
new file mode 100644
index 00000000000..9bd98a339e5
--- /dev/null
+++ b/pkg/api/pluginproxy/ds_auth_provider_test.go
@@ -0,0 +1,21 @@
+package pluginproxy
+
+import (
+ "testing"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestDsAuthProvider(t *testing.T) {
+ Convey("When interpolating string", t, func() {
+ data := templateData{
+ SecureJsonData: map[string]string{
+ "Test": "0asd+asd",
+ },
+ }
+
+ interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data)
+ So(err, ShouldBeNil)
+ So(interpolated, ShouldEqual, "0asd+asd")
+ })
+}
diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go
index fb2cab9b9b1..0c000058e4b 100644
--- a/pkg/api/pluginproxy/ds_proxy.go
+++ b/pkg/api/pluginproxy/ds_proxy.go
@@ -2,7 +2,6 @@ package pluginproxy
import (
"bytes"
- "encoding/json"
"errors"
"fmt"
"io/ioutil"
@@ -12,7 +11,6 @@ import (
"net/url"
"strconv"
"strings"
- "text/template"
"time"
"github.com/opentracing/opentracing-go"
@@ -25,17 +23,10 @@ import (
)
var (
- logger = log.New("data-proxy-log")
- tokenCache = map[string]*jwtToken{}
- client = newHTTPClient()
+ logger = log.New("data-proxy-log")
+ client = newHTTPClient()
)
-type jwtToken struct {
- ExpiresOn time.Time `json:"-"`
- ExpiresOnString string `json:"expires_on"`
- AccessToken string `json:"access_token"`
-}
-
type DataSourceProxy struct {
ds *m.DataSource
ctx *m.ReqContext
@@ -162,7 +153,6 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
} else {
req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath)
}
-
if proxy.ds.BasicAuth {
req.Header.Del("Authorization")
req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword))
@@ -219,7 +209,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
}
if proxy.route != nil {
- proxy.applyRoute(req)
+ ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds)
}
}
}
@@ -311,120 +301,3 @@ func checkWhiteList(c *m.ReqContext, host string) bool {
return true
}
-
-func (proxy *DataSourceProxy) applyRoute(req *http.Request) {
- proxy.proxyPath = strings.TrimPrefix(proxy.proxyPath, proxy.route.Path)
-
- data := templateData{
- JsonData: proxy.ds.JsonData.Interface().(map[string]interface{}),
- SecureJsonData: proxy.ds.SecureJsonData.Decrypt(),
- }
-
- interpolatedURL, err := interpolateString(proxy.route.Url, data)
- if err != nil {
- logger.Error("Error interpolating proxy url", "error", err)
- return
- }
-
- routeURL, err := url.Parse(interpolatedURL)
- if err != nil {
- logger.Error("Error parsing plugin route url", "error", err)
- return
- }
-
- req.URL.Scheme = routeURL.Scheme
- req.URL.Host = routeURL.Host
- req.Host = routeURL.Host
- req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxy.proxyPath)
-
- if err := addHeaders(&req.Header, proxy.route, data); err != nil {
- logger.Error("Failed to render plugin headers", "error", err)
- }
-
- if proxy.route.TokenAuth != nil {
- if token, err := proxy.getAccessToken(data); err != nil {
- logger.Error("Failed to get access token", "error", err)
- } else {
- req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
- }
- }
-
- logger.Info("Requesting", "url", req.URL.String())
-}
-
-func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) {
- if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found {
- if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) {
- logger.Info("Using token from cache")
- return cachedToken.AccessToken, nil
- }
- }
-
- urlInterpolated, err := interpolateString(proxy.route.TokenAuth.Url, data)
- if err != nil {
- return "", err
- }
-
- params := make(url.Values)
- for key, value := range proxy.route.TokenAuth.Params {
- interpolatedParam, err := interpolateString(value, data)
- if err != nil {
- return "", err
- }
- params.Add(key, interpolatedParam)
- }
-
- getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode()))
- getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
- getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode())))
-
- resp, err := client.Do(getTokenReq)
- if err != nil {
- return "", err
- }
-
- defer resp.Body.Close()
-
- var token jwtToken
- if err := json.NewDecoder(resp.Body).Decode(&token); err != nil {
- return "", err
- }
-
- expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64)
- token.ExpiresOn = time.Unix(expiresOnEpoch, 0)
- tokenCache[proxy.getAccessTokenCacheKey()] = &token
-
- logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn)
- return token.AccessToken, nil
-}
-
-func (proxy *DataSourceProxy) getAccessTokenCacheKey() string {
- return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method)
-}
-
-func interpolateString(text string, data templateData) (string, error) {
- t, err := template.New("content").Parse(text)
- if err != nil {
- return "", fmt.Errorf("could not parse template %s", text)
- }
-
- var contentBuf bytes.Buffer
- err = t.Execute(&contentBuf, data)
- if err != nil {
- return "", fmt.Errorf("failed to execute template %s", text)
- }
-
- return contentBuf.String(), nil
-}
-
-func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error {
- for _, header := range route.Headers {
- interpolated, err := interpolateString(header.Content, data)
- if err != nil {
- return err
- }
- reqHeaders.Add(header.Name, interpolated)
- }
-
- return nil
-}
diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go
index e6d05872787..ab0effb298f 100644
--- a/pkg/api/pluginproxy/ds_proxy_test.go
+++ b/pkg/api/pluginproxy/ds_proxy_test.go
@@ -83,7 +83,7 @@ func TestDSRouteRule(t *testing.T) {
Convey("When matching route path", func() {
proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method")
proxy.route = plugin.Routes[0]
- proxy.applyRoute(req)
+ ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds)
Convey("should add headers and update url", func() {
So(req.URL.String(), ShouldEqual, "https://www.google.com/some/method")
@@ -94,7 +94,7 @@ func TestDSRouteRule(t *testing.T) {
Convey("When matching route path and has dynamic url", func() {
proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method")
proxy.route = plugin.Routes[3]
- proxy.applyRoute(req)
+ ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds)
Convey("should add headers and interpolate the url", func() {
So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method")
@@ -188,7 +188,7 @@ func TestDSRouteRule(t *testing.T) {
client = newFakeHTTPClient(json)
proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1")
proxy1.route = plugin.Routes[0]
- proxy1.applyRoute(req)
+ ApplyRoute(proxy1.ctx.Req.Context(), req, proxy1.proxyPath, proxy1.route, proxy1.ds)
authorizationHeaderCall1 = req.Header.Get("Authorization")
So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path")
@@ -202,7 +202,7 @@ func TestDSRouteRule(t *testing.T) {
client = newFakeHTTPClient(json2)
proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2")
proxy2.route = plugin.Routes[1]
- proxy2.applyRoute(req)
+ ApplyRoute(proxy2.ctx.Req.Context(), req, proxy2.proxyPath, proxy2.route, proxy2.ds)
authorizationHeaderCall2 = req.Header.Get("Authorization")
@@ -217,7 +217,7 @@ func TestDSRouteRule(t *testing.T) {
client = newFakeHTTPClient([]byte{})
proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1")
proxy3.route = plugin.Routes[0]
- proxy3.applyRoute(req)
+ ApplyRoute(proxy3.ctx.Req.Context(), req, proxy3.proxyPath, proxy3.route, proxy3.ds)
authorizationHeaderCall3 := req.Header.Get("Authorization")
So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path")
@@ -331,18 +331,6 @@ func TestDSRouteRule(t *testing.T) {
})
})
- Convey("When interpolating string", func() {
- data := templateData{
- SecureJsonData: map[string]string{
- "Test": "0asd+asd",
- },
- }
-
- interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data)
- So(err, ShouldBeNil)
- So(interpolated, ShouldEqual, "0asd+asd")
- })
-
Convey("When proxying a data source with custom headers specified", func() {
plugin := &plugins.DataSourcePlugin{}
@@ -374,6 +362,23 @@ func TestDSRouteRule(t *testing.T) {
})
})
+ Convey("When proxying a custom datasource", func() {
+ plugin := &plugins.DataSourcePlugin{}
+ ds := &m.DataSource{
+ Type: "custom-datasource",
+ Url: "http://host/root/",
+ }
+ ctx := &m.ReqContext{}
+ proxy := NewDataSourceProxy(ds, plugin, ctx, "/path/to/folder/")
+ req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
+ So(err, ShouldBeNil)
+
+ proxy.getDirector()(req)
+
+ Convey("Should keep user request (including trailing slash)", func() {
+ So(req.URL.String(), ShouldEqual, "http://host/root/path/to/folder/")
+ })
+ })
})
}
diff --git a/pkg/api/render.go b/pkg/api/render.go
index b8ef6cc5cb6..cf672af9bea 100644
--- a/pkg/api/render.go
+++ b/pkg/api/render.go
@@ -41,15 +41,16 @@ func (hs *HTTPServer) RenderToPng(c *m.ReqContext) {
}
result, err := hs.RenderService.Render(c.Req.Context(), rendering.Opts{
- Width: width,
- Height: height,
- Timeout: time.Duration(timeout) * time.Second,
- OrgId: c.OrgId,
- UserId: c.UserId,
- OrgRole: c.OrgRole,
- Path: c.Params("*") + queryParams,
- Timezone: queryReader.Get("tz", ""),
- Encoding: queryReader.Get("encoding", ""),
+ Width: width,
+ Height: height,
+ Timeout: time.Duration(timeout) * time.Second,
+ OrgId: c.OrgId,
+ UserId: c.UserId,
+ OrgRole: c.OrgRole,
+ Path: c.Params("*") + queryParams,
+ Timezone: queryReader.Get("tz", ""),
+ Encoding: queryReader.Get("encoding", ""),
+ ConcurrentLimit: 30,
})
if err != nil && err == rendering.ErrTimeout {
diff --git a/pkg/api/team_members.go b/pkg/api/team_members.go
index 60a170a8c31..5b5970de6ad 100644
--- a/pkg/api/team_members.go
+++ b/pkg/api/team_members.go
@@ -4,6 +4,7 @@ import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
@@ -17,6 +18,11 @@ func GetTeamMembers(c *m.ReqContext) Response {
for _, member := range query.Result {
member.AvatarUrl = dtos.GetGravatarUrl(member.Email)
+ member.Labels = []string{}
+
+ if setting.IsEnterprise && setting.LdapEnabled && member.External {
+ member.Labels = append(member.Labels, "LDAP")
+ }
}
return JSON(200, query.Result)
diff --git a/pkg/api/user.go b/pkg/api/user.go
index 4b916202e65..7116ad83f3f 100644
--- a/pkg/api/user.go
+++ b/pkg/api/user.go
@@ -177,17 +177,17 @@ func UserSetUsingOrg(c *m.ReqContext) Response {
}
// GET /profile/switch-org/:id
-func ChangeActiveOrgAndRedirectToHome(c *m.ReqContext) {
+func (hs *HTTPServer) ChangeActiveOrgAndRedirectToHome(c *m.ReqContext) {
orgID := c.ParamsInt64(":id")
if !validateUsingOrg(c.UserId, orgID) {
- NotFoundHandler(c)
+ hs.NotFoundHandler(c)
}
cmd := m.SetUsingOrgCommand{UserId: c.UserId, OrgId: orgID}
if err := bus.Dispatch(&cmd); err != nil {
- NotFoundHandler(c)
+ hs.NotFoundHandler(c)
}
c.Redirect(setting.AppSubUrl + "/")
diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go
index 5e69559b9fa..902fd415977 100644
--- a/pkg/cmd/grafana-cli/commands/commands.go
+++ b/pkg/cmd/grafana-cli/commands/commands.go
@@ -6,6 +6,7 @@ import (
"github.com/codegangsta/cli"
"github.com/fatih/color"
+ "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
@@ -24,6 +25,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli
engine := &sqlstore.SqlStore{}
engine.Cfg = cfg
+ engine.Bus = bus.GetBus()
engine.Init()
if err := command(cmd); err != nil {
diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go
index 5d4969e06af..f88bb9bbfff 100644
--- a/pkg/cmd/grafana-cli/commands/install_command.go
+++ b/pkg/cmd/grafana-cli/commands/install_command.go
@@ -112,7 +112,7 @@ func SelectVersion(plugin m.Plugin, version string) (m.Version, error) {
}
}
- return m.Version{}, errors.New("Could not find the version your looking for")
+ return m.Version{}, errors.New("Could not find the version you're looking for")
}
func RemoveGitBuildFromName(pluginName, filename string) string {
diff --git a/pkg/cmd/grafana-cli/commands/upgrade_command.go b/pkg/cmd/grafana-cli/commands/upgrade_command.go
index 355ccab3d1c..396371d3577 100644
--- a/pkg/cmd/grafana-cli/commands/upgrade_command.go
+++ b/pkg/cmd/grafana-cli/commands/upgrade_command.go
@@ -16,7 +16,7 @@ func upgradeCommand(c CommandLine) error {
return err
}
- v, err2 := s.GetPlugin(localPlugin.Id, c.RepoDirectory())
+ v, err2 := s.GetPlugin(pluginName, c.RepoDirectory())
if err2 != nil {
return err2
@@ -24,9 +24,9 @@ func upgradeCommand(c CommandLine) error {
if ShouldUpgrade(localPlugin.Info.Version, v) {
s.RemoveInstalledPlugin(pluginsDir, pluginName)
- return InstallPlugin(localPlugin.Id, "", c)
+ return InstallPlugin(pluginName, "", c)
}
- logger.Infof("%s %s is up to date \n", color.GreenString("✔"), localPlugin.Id)
+ logger.Infof("%s %s is up to date \n", color.GreenString("✔"), pluginName)
return nil
}
diff --git a/pkg/cmd/grafana-cli/services/services.go b/pkg/cmd/grafana-cli/services/services.go
index b4e50ac84df..338975bc130 100644
--- a/pkg/cmd/grafana-cli/services/services.go
+++ b/pkg/cmd/grafana-cli/services/services.go
@@ -63,7 +63,7 @@ func ListAllPlugins(repoUrl string) (m.PluginRepo, error) {
var data m.PluginRepo
err = json.Unmarshal(body, &data)
if err != nil {
- logger.Info("Failed to unmarshal graphite response error:", err)
+ logger.Info("Failed to unmarshal plugin repo response error:", err)
return m.PluginRepo{}, err
}
@@ -140,7 +140,7 @@ func GetPlugin(pluginId, repoUrl string) (m.Plugin, error) {
var data m.Plugin
err = json.Unmarshal(body, &data)
if err != nil {
- logger.Info("Failed to unmarshal graphite response error:", err)
+ logger.Info("Failed to unmarshal plugin repo response error:", err)
return m.Plugin{}, err
}
diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go
index f1e298671d7..06c07a2887c 100644
--- a/pkg/cmd/grafana-server/main.go
+++ b/pkg/cmd/grafana-server/main.go
@@ -29,6 +29,7 @@ import (
_ "github.com/grafana/grafana/pkg/tsdb/opentsdb"
_ "github.com/grafana/grafana/pkg/tsdb/postgres"
_ "github.com/grafana/grafana/pkg/tsdb/prometheus"
+ _ "github.com/grafana/grafana/pkg/tsdb/stackdriver"
_ "github.com/grafana/grafana/pkg/tsdb/testdata"
)
@@ -99,11 +100,11 @@ func listenToSystemSignals(server *GrafanaServerImpl) {
sighupChan := make(chan os.Signal, 1)
signal.Notify(sighupChan, syscall.SIGHUP)
- signal.Notify(signalChan, os.Interrupt, os.Kill, syscall.SIGTERM)
+ signal.Notify(signalChan, os.Interrupt, syscall.SIGTERM)
for {
select {
- case _ = <-sighupChan:
+ case <-sighupChan:
log.Reload()
case sig := <-signalChan:
server.Shutdown(fmt.Sprintf("System signal: %s", sig))
diff --git a/pkg/components/imguploader/azureblobuploader.go b/pkg/components/imguploader/azureblobuploader.go
index 3c0ac5b8884..b37763931c8 100644
--- a/pkg/components/imguploader/azureblobuploader.go
+++ b/pkg/components/imguploader/azureblobuploader.go
@@ -52,7 +52,7 @@ func (az *AzureBlobUploader) Upload(ctx context.Context, imageDiskPath string) (
}
randomFileName := util.GetRandomString(30) + ".png"
// upload image
- az.log.Debug("Uploading image to azure_blob", "conatiner_name", az.container_name, "blob_name", randomFileName)
+ az.log.Debug("Uploading image to azure_blob", "container_name", az.container_name, "blob_name", randomFileName)
resp, err := blob.FileUpload(az.container_name, randomFileName, file)
if err != nil {
return "", err
@@ -127,8 +127,6 @@ type xmlError struct {
const ms_date_layout = "Mon, 02 Jan 2006 15:04:05 GMT"
const version = "2017-04-17"
-var client = &http.Client{}
-
type StorageClient struct {
Auth *Auth
Transport http.RoundTripper
@@ -274,10 +272,10 @@ func (a *Auth) canonicalizedHeaders(req *http.Request) string {
}
}
- splitted := strings.Split(buffer.String(), "\n")
- sort.Strings(splitted)
+ split := strings.Split(buffer.String(), "\n")
+ sort.Strings(split)
- return strings.Join(splitted, "\n")
+ return strings.Join(split, "\n")
}
/*
@@ -313,8 +311,8 @@ func (a *Auth) canonicalizedResource(req *http.Request) string {
buffer.WriteString(fmt.Sprintf("\n%s:%s", key, strings.Join(values, ",")))
}
- splitted := strings.Split(buffer.String(), "\n")
- sort.Strings(splitted)
+ split := strings.Split(buffer.String(), "\n")
+ sort.Strings(split)
- return strings.Join(splitted, "\n")
+ return strings.Join(split, "\n")
}
diff --git a/pkg/components/imguploader/s3uploader.go b/pkg/components/imguploader/s3uploader.go
index a1e4aed0f47..9c8af21e39e 100644
--- a/pkg/components/imguploader/s3uploader.go
+++ b/pkg/components/imguploader/s3uploader.go
@@ -2,12 +2,15 @@ package imguploader
import (
"context"
+ "fmt"
"os"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
+ "github.com/aws/aws-sdk-go/aws/credentials/endpointcreds"
+ "github.com/aws/aws-sdk-go/aws/defaults"
"github.com/aws/aws-sdk-go/aws/ec2metadata"
"github.com/aws/aws-sdk-go/aws/endpoints"
"github.com/aws/aws-sdk-go/aws/session"
@@ -50,7 +53,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string,
SecretAccessKey: u.secretKey,
}},
&credentials.EnvProvider{},
- &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute},
+ remoteCredProvider(sess),
})
cfg := &aws.Config{
Region: aws.String(u.region),
@@ -85,3 +88,27 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string,
}
return image_url, nil
}
+
+func remoteCredProvider(sess *session.Session) credentials.Provider {
+ ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
+
+ if len(ecsCredURI) > 0 {
+ return ecsCredProvider(sess, ecsCredURI)
+ }
+ return ec2RoleProvider(sess)
+}
+
+func ecsCredProvider(sess *session.Session, uri string) credentials.Provider {
+ const host = `169.254.170.2`
+
+ d := defaults.Get()
+ return endpointcreds.NewProviderClient(
+ *d.Config,
+ d.Handlers,
+ fmt.Sprintf("http://%s%s", host, uri),
+ func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute })
+}
+
+func ec2RoleProvider(sess *session.Session) credentials.Provider {
+ return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}
+}
diff --git a/pkg/components/null/float.go b/pkg/components/null/float.go
index 4f783f2c584..9082c831084 100644
--- a/pkg/components/null/float.go
+++ b/pkg/components/null/float.go
@@ -8,6 +8,10 @@ import (
"strconv"
)
+const (
+ nullString = "null"
+)
+
// Float is a nullable float64.
// It does not consider zero values to be null.
// It will decode to null, not zero, if null.
@@ -68,7 +72,7 @@ func (f *Float) UnmarshalJSON(data []byte) error {
// It will return an error if the input is not an integer, blank, or "null".
func (f *Float) UnmarshalText(text []byte) error {
str := string(text)
- if str == "" || str == "null" {
+ if str == "" || str == nullString {
f.Valid = false
return nil
}
@@ -82,7 +86,7 @@ func (f *Float) UnmarshalText(text []byte) error {
// It will encode null if this Float is null.
func (f Float) MarshalJSON() ([]byte, error) {
if !f.Valid {
- return []byte("null"), nil
+ return []byte(nullString), nil
}
return []byte(strconv.FormatFloat(f.Float64, 'f', -1, 64)), nil
}
@@ -100,7 +104,7 @@ func (f Float) MarshalText() ([]byte, error) {
// It will encode a blank string if this Float is null.
func (f Float) String() string {
if !f.Valid {
- return "null"
+ return nullString
}
return fmt.Sprintf("%1.3f", f.Float64)
@@ -109,7 +113,7 @@ func (f Float) String() string {
// FullString returns float as string in full precision
func (f Float) FullString() string {
if !f.Valid {
- return "null"
+ return nullString
}
return fmt.Sprintf("%f", f.Float64)
diff --git a/pkg/components/simplejson/simplejson.go b/pkg/components/simplejson/simplejson.go
index 85e2f955943..35e305eb414 100644
--- a/pkg/components/simplejson/simplejson.go
+++ b/pkg/components/simplejson/simplejson.go
@@ -256,7 +256,7 @@ func (j *Json) StringArray() ([]string, error) {
// MustArray guarantees the return of a `[]interface{}` (with optional default)
//
-// useful when you want to interate over array values in a succinct manner:
+// useful when you want to iterate over array values in a succinct manner:
// for i, v := range js.Get("results").MustArray() {
// fmt.Println(i, v)
// }
@@ -281,7 +281,7 @@ func (j *Json) MustArray(args ...[]interface{}) []interface{} {
// MustMap guarantees the return of a `map[string]interface{}` (with optional default)
//
-// useful when you want to interate over map values in a succinct manner:
+// useful when you want to iterate over map values in a succinct manner:
// for k, v := range js.Get("dictionary").MustMap() {
// fmt.Println(k, v)
// }
@@ -329,7 +329,7 @@ func (j *Json) MustString(args ...string) string {
// MustStringArray guarantees the return of a `[]string` (with optional default)
//
-// useful when you want to interate over array values in a succinct manner:
+// useful when you want to iterate over array values in a succinct manner:
// for i, s := range js.Get("results").MustStringArray() {
// fmt.Println(i, s)
// }
diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go
index 6e3461da8a8..1d8bbce03f3 100644
--- a/pkg/extensions/main.go
+++ b/pkg/extensions/main.go
@@ -1,3 +1,7 @@
package extensions
+import (
+ _ "gopkg.in/square/go-jose.v2"
+)
+
var IsEnterprise bool = false
diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go
index 053778e8deb..43f45f900d9 100644
--- a/pkg/login/ldap.go
+++ b/pkg/login/ldap.go
@@ -326,15 +326,19 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
a.log.Info("Searching for user's groups", "filter", filter)
+ // support old way of reading settings
+ groupIdAttribute := a.server.Attr.MemberOf
+ // but prefer dn attribute if default settings are used
+ if groupIdAttribute == "" || groupIdAttribute == "memberOf" {
+ groupIdAttribute = "dn"
+ }
+
groupSearchReq := ldap.SearchRequest{
BaseDN: groupSearchBase,
Scope: ldap.ScopeWholeSubtree,
DerefAliases: ldap.NeverDerefAliases,
- Attributes: []string{
- // Here MemberOf would be the thing that identifies the group, which is normally 'cn'
- a.server.Attr.MemberOf,
- },
- Filter: filter,
+ Attributes: []string{groupIdAttribute},
+ Filter: filter,
}
groupSearchResult, err = a.conn.Search(&groupSearchReq)
@@ -344,7 +348,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
if len(groupSearchResult.Entries) > 0 {
for i := range groupSearchResult.Entries {
- memberOf = append(memberOf, getLdapAttrN(a.server.Attr.MemberOf, groupSearchResult, i))
+ memberOf = append(memberOf, getLdapAttrN(groupIdAttribute, groupSearchResult, i))
}
break
}
diff --git a/pkg/login/ldap_settings.go b/pkg/login/ldap_settings.go
index 7ebfbc79ba8..40791a509db 100644
--- a/pkg/login/ldap_settings.go
+++ b/pkg/login/ldap_settings.go
@@ -48,7 +48,7 @@ type LdapAttributeMap struct {
type LdapGroupToOrgRole struct {
GroupDN string `toml:"group_dn"`
OrgId int64 `toml:"org_id"`
- IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatability)
+ IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatibility)
OrgRole m.RoleType `toml:"org_role"`
}
diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go
index dcdfbf124e1..9a514fdb6f3 100644
--- a/pkg/metrics/metrics.go
+++ b/pkg/metrics/metrics.go
@@ -61,6 +61,23 @@ var (
M_Grafana_Version *prometheus.GaugeVec
)
+func newCounterVecStartingAtZero(opts prometheus.CounterOpts, labels []string, labelValues ...string) *prometheus.CounterVec {
+ counter := prometheus.NewCounterVec(opts, labels)
+
+ for _, label := range labelValues {
+ counter.WithLabelValues(label).Add(0)
+ }
+
+ return counter
+}
+
+func newCounterStartingAtZero(opts prometheus.CounterOpts, labelValues ...string) prometheus.Counter {
+ counter := prometheus.NewCounter(opts)
+ counter.Add(0)
+
+ return counter
+}
+
func init() {
M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{
Name: "instance_start_total",
@@ -68,32 +85,27 @@ func init() {
Namespace: exporterName,
})
- M_Page_Status = prometheus.NewCounterVec(
+ httpStatusCodes := []string{"200", "404", "500", "unknown"}
+ M_Page_Status = newCounterVecStartingAtZero(
prometheus.CounterOpts{
Name: "page_response_status_total",
Help: "page http response status",
Namespace: exporterName,
- },
- []string{"code"},
- )
+ }, []string{"code"}, httpStatusCodes...)
- M_Api_Status = prometheus.NewCounterVec(
+ M_Api_Status = newCounterVecStartingAtZero(
prometheus.CounterOpts{
Name: "api_response_status_total",
Help: "api http response status",
Namespace: exporterName,
- },
- []string{"code"},
- )
+ }, []string{"code"}, httpStatusCodes...)
- M_Proxy_Status = prometheus.NewCounterVec(
+ M_Proxy_Status = newCounterVecStartingAtZero(
prometheus.CounterOpts{
Name: "proxy_response_status_total",
Help: "proxy http response status",
Namespace: exporterName,
- },
- []string{"code"},
- )
+ }, []string{"code"}, httpStatusCodes...)
M_Http_Request_Total = prometheus.NewCounterVec(
prometheus.CounterOpts{
@@ -111,19 +123,19 @@ func init() {
[]string{"handler", "statuscode", "method"},
)
- M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_User_SignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_user_signup_started_total",
Help: "amount of users who started the signup flow",
Namespace: exporterName,
})
- M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_User_SignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_user_signup_completed_total",
Help: "amount of users who completed the signup flow",
Namespace: exporterName,
})
- M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_User_SignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_user_signup_invite_total",
Help: "amount of users who have been invited",
Namespace: exporterName,
@@ -147,49 +159,49 @@ func init() {
Namespace: exporterName,
})
- M_Api_Admin_User_Create = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Admin_User_Create = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_admin_user_created_total",
Help: "api admin user created counter",
Namespace: exporterName,
})
- M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Login_Post = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_login_post_total",
Help: "api login post counter",
Namespace: exporterName,
})
- M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Login_OAuth = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_login_oauth_total",
Help: "api login oauth counter",
Namespace: exporterName,
})
- M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Org_Create = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_org_create_total",
Help: "api org created counter",
Namespace: exporterName,
})
- M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Dashboard_Snapshot_Create = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_dashboard_snapshot_create_total",
Help: "dashboard snapshots created",
Namespace: exporterName,
})
- M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Dashboard_Snapshot_External = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_dashboard_snapshot_external_total",
Help: "external dashboard snapshots created",
Namespace: exporterName,
})
- M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Dashboard_Snapshot_Get = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_dashboard_snapshot_get_total",
Help: "loaded dashboards",
Namespace: exporterName,
})
- M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Api_Dashboard_Insert = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "api_models_dashboard_insert_total",
Help: "dashboards inserted ",
Namespace: exporterName,
@@ -207,25 +219,25 @@ func init() {
Namespace: exporterName,
}, []string{"type"})
- M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Aws_CloudWatch_GetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "aws_cloudwatch_get_metric_statistics_total",
Help: "counter for getting metric statistics from aws",
Namespace: exporterName,
})
- M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Aws_CloudWatch_ListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "aws_cloudwatch_list_metrics_total",
Help: "counter for getting list of metrics from aws",
Namespace: exporterName,
})
- M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{
+ M_Aws_CloudWatch_GetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "aws_cloudwatch_get_metric_data_total",
Help: "counter for getting metric data time series from aws",
Namespace: exporterName,
})
- M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{
+ M_DB_DataSource_QueryById = newCounterStartingAtZero(prometheus.CounterOpts{
Name: "db_datasource_query_by_id_total",
Help: "counter for getting datasource by id",
Namespace: exporterName,
@@ -350,7 +362,7 @@ func getEdition() string {
}
}
-func sendUsageStats() {
+func sendUsageStats(oauthProviders map[string]bool) {
if !setting.ReportingEnabled {
return
}
@@ -450,6 +462,24 @@ func sendUsageStats() {
metrics["stats.alert_notifiers."+stats.Type+".count"] = stats.Count
}
+ authTypes := map[string]bool{}
+ authTypes["anonymous"] = setting.AnonymousEnabled
+ authTypes["basic_auth"] = setting.BasicAuthEnabled
+ authTypes["ldap"] = setting.LdapEnabled
+ authTypes["auth_proxy"] = setting.AuthProxyEnabled
+
+ for provider, enabled := range oauthProviders {
+ authTypes["oauth_"+provider] = enabled
+ }
+
+ for authType, enabled := range authTypes {
+ enabledValue := 0
+ if enabled {
+ enabledValue = 1
+ }
+ metrics["stats.auth_enabled."+authType+".count"] = enabledValue
+ }
+
out, _ := json.MarshalIndent(report, "", " ")
data := bytes.NewBuffer(out)
diff --git a/pkg/metrics/metrics_test.go b/pkg/metrics/metrics_test.go
index 9fbfd0c26a2..43739221f1e 100644
--- a/pkg/metrics/metrics_test.go
+++ b/pkg/metrics/metrics_test.go
@@ -147,11 +147,19 @@ func TestMetrics(t *testing.T) {
}))
usageStatsURL = ts.URL
- sendUsageStats()
+ oauthProviders := map[string]bool{
+ "github": true,
+ "gitlab": true,
+ "google": true,
+ "generic_oauth": true,
+ "grafana_com": true,
+ }
+
+ sendUsageStats(oauthProviders)
Convey("Given reporting not enabled and sending usage stats", func() {
setting.ReportingEnabled = false
- sendUsageStats()
+ sendUsageStats(oauthProviders)
Convey("Should not gather stats or call http endpoint", func() {
So(getSystemStatsQuery, ShouldBeNil)
@@ -164,8 +172,13 @@ func TestMetrics(t *testing.T) {
Convey("Given reporting enabled and sending usage stats", func() {
setting.ReportingEnabled = true
setting.BuildVersion = "5.0.0"
+ setting.AnonymousEnabled = true
+ setting.BasicAuthEnabled = true
+ setting.LdapEnabled = true
+ setting.AuthProxyEnabled = true
+
wg.Add(1)
- sendUsageStats()
+ sendUsageStats(oauthProviders)
Convey("Should gather stats and call http endpoint", func() {
if waitTimeout(&wg, 2*time.Second) {
@@ -220,6 +233,16 @@ func TestMetrics(t *testing.T) {
So(metrics.Get("stats.alert_notifiers.slack.count").MustInt(), ShouldEqual, 1)
So(metrics.Get("stats.alert_notifiers.webhook.count").MustInt(), ShouldEqual, 2)
+
+ So(metrics.Get("stats.auth_enabled.anonymous.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.basic_auth.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.ldap.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.auth_proxy.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.oauth_github.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.oauth_gitlab.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.oauth_google.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.oauth_generic_oauth.count").MustInt(), ShouldEqual, 1)
+ So(metrics.Get("stats.auth_enabled.oauth_grafana_com.count").MustInt(), ShouldEqual, 1)
})
})
diff --git a/pkg/metrics/service.go b/pkg/metrics/service.go
index ec38e0acfec..d2c0c815da9 100644
--- a/pkg/metrics/service.go
+++ b/pkg/metrics/service.go
@@ -28,9 +28,9 @@ func init() {
type InternalMetricsService struct {
Cfg *setting.Cfg `inject:""`
- enabled bool
intervalSeconds int64
graphiteCfg *graphitebridge.Config
+ oauthProviders map[string]bool
}
func (im *InternalMetricsService) Init() error {
@@ -61,7 +61,7 @@ func (im *InternalMetricsService) Run(ctx context.Context) error {
for {
select {
case <-onceEveryDayTick.C:
- sendUsageStats()
+ sendUsageStats(im.oauthProviders)
case <-everyMinuteTicker.C:
updateTotalStats()
case <-ctx.Done():
diff --git a/pkg/metrics/settings.go b/pkg/metrics/settings.go
index 58b84a7192f..18b9e78d6ff 100644
--- a/pkg/metrics/settings.go
+++ b/pkg/metrics/settings.go
@@ -5,6 +5,8 @@ import (
"strings"
"time"
+ "github.com/grafana/grafana/pkg/social"
+
"github.com/grafana/grafana/pkg/metrics/graphitebridge"
"github.com/grafana/grafana/pkg/setting"
"github.com/prometheus/client_golang/prometheus"
@@ -16,17 +18,14 @@ func (im *InternalMetricsService) readSettings() error {
return fmt.Errorf("Unable to find metrics config section %v", err)
}
- im.enabled = section.Key("enabled").MustBool(false)
im.intervalSeconds = section.Key("interval_seconds").MustInt64(10)
- if !im.enabled {
- return nil
- }
-
if err := im.parseGraphiteSettings(); err != nil {
return fmt.Errorf("Unable to parse metrics graphite section, %v", err)
}
+ im.oauthProviders = social.GetOAuthProviders(im.Cfg)
+
return nil
}
diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go
index 475dce089b1..3e83a60f94b 100644
--- a/pkg/middleware/middleware.go
+++ b/pkg/middleware/middleware.go
@@ -14,6 +14,13 @@ import (
"github.com/grafana/grafana/pkg/util"
)
+var (
+ ReqGrafanaAdmin = Auth(&AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
+ ReqSignedIn = Auth(&AuthOptions{ReqSignedIn: true})
+ ReqEditorRole = RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
+ ReqOrgAdmin = RoleAuth(m.ROLE_ADMIN)
+)
+
func GetContextHandler() macaron.Handler {
return func(c *macaron.Context) {
ctx := &m.ReqContext{
diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go
index 87c23a7b49a..1830b3eb161 100644
--- a/pkg/middleware/middleware_test.go
+++ b/pkg/middleware/middleware_test.go
@@ -435,11 +435,6 @@ func (sc *scenarioContext) withValidApiKey() *scenarioContext {
return sc
}
-func (sc *scenarioContext) withInvalidApiKey() *scenarioContext {
- sc.apiKey = "nvalidhhhhds"
- return sc
-}
-
func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext {
sc.authHeader = authHeader
return sc
diff --git a/pkg/models/alert.go b/pkg/models/alert.go
index fba2aa63df9..ba1fc0779ba 100644
--- a/pkg/models/alert.go
+++ b/pkg/models/alert.go
@@ -75,7 +75,7 @@ type Alert struct {
EvalData *simplejson.Json
NewStateDate time.Time
- StateChanges int
+ StateChanges int64
Created time.Time
Updated time.Time
@@ -156,7 +156,7 @@ type SetAlertStateCommand struct {
Error string
EvalData *simplejson.Json
- Timestamp time.Time
+ Result Alert
}
//Queries
diff --git a/pkg/models/alert_notifications.go b/pkg/models/alert_notifications.go
index 42d33d5ed22..2128b469fa4 100644
--- a/pkg/models/alert_notifications.go
+++ b/pkg/models/alert_notifications.go
@@ -8,8 +8,18 @@ import (
)
var (
- ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified")
- ErrJournalingNotFound = errors.New("alert notification journaling not found")
+ ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified")
+ ErrAlertNotificationStateNotFound = errors.New("alert notification state not found")
+ ErrAlertNotificationStateVersionConflict = errors.New("alert notification state update version conflict")
+ ErrAlertNotificationStateAlreadyExist = errors.New("alert notification state already exists.")
+)
+
+type AlertNotificationStateType string
+
+var (
+ AlertNotificationStatePending = AlertNotificationStateType("pending")
+ AlertNotificationStateCompleted = AlertNotificationStateType("completed")
+ AlertNotificationStateUnknown = AlertNotificationStateType("unknown")
)
type AlertNotification struct {
@@ -76,33 +86,34 @@ type GetAllAlertNotificationsQuery struct {
Result []*AlertNotification
}
-type AlertNotificationJournal struct {
- Id int64
- OrgId int64
- AlertId int64
- NotifierId int64
- SentAt int64
- Success bool
+type AlertNotificationState struct {
+ Id int64
+ OrgId int64
+ AlertId int64
+ NotifierId int64
+ State AlertNotificationStateType
+ Version int64
+ UpdatedAt int64
+ AlertRuleStateUpdatedVersion int64
}
-type RecordNotificationJournalCommand struct {
- OrgId int64
- AlertId int64
- NotifierId int64
- SentAt int64
- Success bool
+type SetAlertNotificationStateToPendingCommand struct {
+ Id int64
+ AlertRuleStateUpdatedVersion int64
+ Version int64
+
+ ResultVersion int64
}
-type GetLatestNotificationQuery struct {
+type SetAlertNotificationStateToCompleteCommand struct {
+ Id int64
+ Version int64
+}
+
+type GetOrCreateNotificationStateQuery struct {
OrgId int64
AlertId int64
NotifierId int64
- Result *AlertNotificationJournal
-}
-
-type CleanNotificationJournalCommand struct {
- OrgId int64
- AlertId int64
- NotifierId int64
+ Result *AlertNotificationState
}
diff --git a/pkg/models/dashboards.go b/pkg/models/dashboards.go
index 4b84d840113..e8aebb1d1f4 100644
--- a/pkg/models/dashboards.go
+++ b/pkg/models/dashboards.go
@@ -21,7 +21,6 @@ var (
ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else")
ErrDashboardTitleEmpty = errors.New("Dashboard title cannot be empty")
ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder")
- ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. Cannot save dashboard")
ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data")
ErrDashboardsWithSameSlugExists = errors.New("Multiple dashboards with the same slug exists")
ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id")
diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go
index cbdd0136f4d..c730622512f 100644
--- a/pkg/models/datasource.go
+++ b/pkg/models/datasource.go
@@ -22,6 +22,7 @@ const (
DS_MSSQL = "mssql"
DS_ACCESS_DIRECT = "direct"
DS_ACCESS_PROXY = "proxy"
+ DS_STACKDRIVER = "stackdriver"
)
var (
@@ -70,12 +71,12 @@ var knownDatasourcePlugins = map[string]bool{
DS_POSTGRES: true,
DS_MYSQL: true,
DS_MSSQL: true,
+ DS_STACKDRIVER: true,
"opennms": true,
"abhisant-druid-datasource": true,
"dalmatinerdb-datasource": true,
"gnocci": true,
"zabbix": true,
- "alexanderzobnin-zabbix-datasource": true,
"newrelic-app": true,
"grafana-datadog-datasource": true,
"grafana-simple-json": true,
@@ -88,6 +89,7 @@ var knownDatasourcePlugins = map[string]bool{
"ayoungprogrammer-finance-datasource": true,
"monasca-datasource": true,
"vertamedia-clickhouse-datasource": true,
+ "alexanderzobnin-zabbix-datasource": true,
}
func IsKnownDataSourcePlugin(dsType string) bool {
diff --git a/pkg/models/org_user.go b/pkg/models/org_user.go
index 9231d18cfd6..b6ecd924e9a 100644
--- a/pkg/models/org_user.go
+++ b/pkg/models/org_user.go
@@ -72,8 +72,10 @@ type OrgUser struct {
// COMMANDS
type RemoveOrgUserCommand struct {
- UserId int64
- OrgId int64
+ UserId int64
+ OrgId int64
+ ShouldDeleteOrphanedUser bool
+ UserWasDeleted bool
}
type AddOrgUserCommand struct {
diff --git a/pkg/models/team_member.go b/pkg/models/team_member.go
index 9434dad8ecd..dd64787f465 100644
--- a/pkg/models/team_member.go
+++ b/pkg/models/team_member.go
@@ -12,10 +12,11 @@ var (
// TeamMember model
type TeamMember struct {
- Id int64
- OrgId int64
- TeamId int64
- UserId int64
+ Id int64
+ OrgId int64
+ TeamId int64
+ UserId int64
+ External bool
Created time.Time
Updated time.Time
@@ -25,9 +26,10 @@ type TeamMember struct {
// COMMANDS
type AddTeamMemberCommand struct {
- UserId int64 `json:"userId" binding:"Required"`
- OrgId int64 `json:"-"`
- TeamId int64 `json:"-"`
+ UserId int64 `json:"userId" binding:"Required"`
+ OrgId int64 `json:"-"`
+ TeamId int64 `json:"-"`
+ External bool `json:"-"`
}
type RemoveTeamMemberCommand struct {
@@ -40,20 +42,23 @@ type RemoveTeamMemberCommand struct {
// QUERIES
type GetTeamMembersQuery struct {
- OrgId int64
- TeamId int64
- UserId int64
- Result []*TeamMemberDTO
+ OrgId int64
+ TeamId int64
+ UserId int64
+ External bool
+ Result []*TeamMemberDTO
}
// ----------------------
// Projections and DTOs
type TeamMemberDTO struct {
- OrgId int64 `json:"orgId"`
- TeamId int64 `json:"teamId"`
- UserId int64 `json:"userId"`
- Email string `json:"email"`
- Login string `json:"login"`
- AvatarUrl string `json:"avatarUrl"`
+ OrgId int64 `json:"orgId"`
+ TeamId int64 `json:"teamId"`
+ UserId int64 `json:"userId"`
+ External bool `json:"-"`
+ Email string `json:"email"`
+ Login string `json:"login"`
+ AvatarUrl string `json:"avatarUrl"`
+ Labels []string `json:"labels"`
}
diff --git a/pkg/plugins/app_plugin.go b/pkg/plugins/app_plugin.go
index b070ba592f0..922b2444b7b 100644
--- a/pkg/plugins/app_plugin.go
+++ b/pkg/plugins/app_plugin.go
@@ -23,12 +23,13 @@ type AppPlugin struct {
}
type AppPluginRoute struct {
- Path string `json:"path"`
- Method string `json:"method"`
- ReqRole models.RoleType `json:"reqRole"`
- Url string `json:"url"`
- Headers []AppPluginRouteHeader `json:"headers"`
- TokenAuth *JwtTokenAuth `json:"tokenAuth"`
+ Path string `json:"path"`
+ Method string `json:"method"`
+ ReqRole models.RoleType `json:"reqRole"`
+ Url string `json:"url"`
+ Headers []AppPluginRouteHeader `json:"headers"`
+ TokenAuth *JwtTokenAuth `json:"tokenAuth"`
+ JwtTokenAuth *JwtTokenAuth `json:"jwtTokenAuth"`
}
type AppPluginRouteHeader struct {
@@ -36,8 +37,11 @@ type AppPluginRouteHeader struct {
Content string `json:"content"`
}
+// JwtTokenAuth struct is both for normal Token Auth and JWT Token Auth with
+// an uploaded JWT file.
type JwtTokenAuth struct {
Url string `json:"url"`
+ Scopes []string `json:"scopes"`
Params map[string]string `json:"params"`
}
diff --git a/pkg/plugins/dashboard_importer_test.go b/pkg/plugins/dashboard_importer_test.go
index 6f31b49f99d..ca8dfcd515c 100644
--- a/pkg/plugins/dashboard_importer_test.go
+++ b/pkg/plugins/dashboard_importer_test.go
@@ -35,7 +35,7 @@ func TestDashboardImport(t *testing.T) {
So(cmd.Result, ShouldNotBeNil)
resultStr, _ := mock.SavedDashboards[0].Dashboard.Data.EncodePretty()
- expectedBytes, _ := ioutil.ReadFile("../../tests/test-app/dashboards/connections_result.json")
+ expectedBytes, _ := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json")
expectedJson, _ := simplejson.NewJson(expectedBytes)
expectedStr, _ := expectedJson.EncodePretty()
@@ -89,7 +89,7 @@ func pluginScenario(desc string, t *testing.T, fn func()) {
Convey("Given a plugin", t, func() {
setting.Raw = ini.Empty()
sec, _ := setting.Raw.NewSection("plugin.test-app")
- sec.NewKey("path", "../../tests/test-app")
+ sec.NewKey("path", "testdata/test-app")
pm := &PluginManager{}
err := pm.Init()
diff --git a/pkg/plugins/dashboards_test.go b/pkg/plugins/dashboards_test.go
index c422a1431c0..6fc6ace0e00 100644
--- a/pkg/plugins/dashboards_test.go
+++ b/pkg/plugins/dashboards_test.go
@@ -16,7 +16,7 @@ func TestPluginDashboards(t *testing.T) {
Convey("When asking plugin dashboard info", t, func() {
setting.Raw = ini.Empty()
sec, _ := setting.Raw.NewSection("plugin.test-app")
- sec.NewKey("path", "../../tests/test-app")
+ sec.NewKey("path", "testdata/test-app")
pm := &PluginManager{}
err := pm.Init()
diff --git a/pkg/plugins/dashboards_updater.go b/pkg/plugins/dashboards_updater.go
index ebe11ed32d4..616d4541bec 100644
--- a/pkg/plugins/dashboards_updater.go
+++ b/pkg/plugins/dashboards_updater.go
@@ -48,11 +48,7 @@ func autoUpdateAppDashboard(pluginDashInfo *PluginDashboardInfoDTO, orgId int64)
Path: pluginDashInfo.Path,
}
- if err := bus.Dispatch(&updateCmd); err != nil {
- return err
- }
-
- return nil
+ return bus.Dispatch(&updateCmd)
}
func syncPluginDashboards(pluginDef *PluginBase, orgId int64) {
diff --git a/pkg/plugins/plugins_test.go b/pkg/plugins/plugins_test.go
index fa68ae4389d..d16e6abb4c7 100644
--- a/pkg/plugins/plugins_test.go
+++ b/pkg/plugins/plugins_test.go
@@ -30,7 +30,7 @@ func TestPluginScans(t *testing.T) {
Convey("When reading app plugin definition", t, func() {
setting.Raw = ini.Empty()
sec, _ := setting.Raw.NewSection("plugin.nginx-app")
- sec.NewKey("path", "../../tests/test-app")
+ sec.NewKey("path", "testdata/test-app")
pm := &PluginManager{}
err := pm.Init()
diff --git a/tests/datasource-test/module.js b/pkg/plugins/testdata/datasource-test/module.js
similarity index 100%
rename from tests/datasource-test/module.js
rename to pkg/plugins/testdata/datasource-test/module.js
diff --git a/tests/datasource-test/plugin.json b/pkg/plugins/testdata/datasource-test/plugin.json
similarity index 100%
rename from tests/datasource-test/plugin.json
rename to pkg/plugins/testdata/datasource-test/plugin.json
diff --git a/tests/test-app/dashboards/connections.json b/pkg/plugins/testdata/test-app/dashboards/connections.json
similarity index 100%
rename from tests/test-app/dashboards/connections.json
rename to pkg/plugins/testdata/test-app/dashboards/connections.json
diff --git a/tests/test-app/dashboards/connections_result.json b/pkg/plugins/testdata/test-app/dashboards/connections_result.json
similarity index 100%
rename from tests/test-app/dashboards/connections_result.json
rename to pkg/plugins/testdata/test-app/dashboards/connections_result.json
diff --git a/tests/test-app/dashboards/memory.json b/pkg/plugins/testdata/test-app/dashboards/memory.json
similarity index 100%
rename from tests/test-app/dashboards/memory.json
rename to pkg/plugins/testdata/test-app/dashboards/memory.json
diff --git a/tests/test-app/plugin.json b/pkg/plugins/testdata/test-app/plugin.json
similarity index 100%
rename from tests/test-app/plugin.json
rename to pkg/plugins/testdata/test-app/plugin.json
diff --git a/pkg/services/alerting/conditions/evaluator.go b/pkg/services/alerting/conditions/evaluator.go
index 8d7ca57f010..eef593d39e2 100644
--- a/pkg/services/alerting/conditions/evaluator.go
+++ b/pkg/services/alerting/conditions/evaluator.go
@@ -2,6 +2,7 @@ package conditions
import (
"encoding/json"
+ "fmt"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
@@ -31,12 +32,12 @@ type ThresholdEvaluator struct {
func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) {
params := model.Get("params").MustArray()
if len(params) == 0 {
- return nil, alerting.ValidationError{Reason: "Evaluator missing threshold parameter"}
+ return nil, fmt.Errorf("Evaluator missing threshold parameter")
}
firstParam, ok := params[0].(json.Number)
if !ok {
- return nil, alerting.ValidationError{Reason: "Evaluator has invalid parameter"}
+ return nil, fmt.Errorf("Evaluator has invalid parameter")
}
defaultEval := &ThresholdEvaluator{Type: typ}
@@ -107,7 +108,7 @@ func (e *RangedEvaluator) Eval(reducedValue null.Float) bool {
func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) {
typ := model.Get("type").MustString()
if typ == "" {
- return nil, alerting.ValidationError{Reason: "Evaluator missing type property"}
+ return nil, fmt.Errorf("Evaluator missing type property")
}
if inSlice(typ, defaultTypes) {
@@ -122,7 +123,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) {
return &NoValueEvaluator{}, nil
}
- return nil, alerting.ValidationError{Reason: "Evaluator invalid evaluator type: " + typ}
+ return nil, fmt.Errorf("Evaluator invalid evaluator type: %s", typ)
}
func inSlice(a string, list []string) bool {
diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go
index e1c1bfacb2e..edfab2dedee 100644
--- a/pkg/services/alerting/extractor.go
+++ b/pkg/services/alerting/extractor.go
@@ -82,12 +82,12 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json,
if collapsed && collapsedJSON.MustBool() {
// extract alerts from sub panels for collapsed panels
- als, err := e.getAlertFromPanels(panel, validateAlertFunc)
+ alertSlice, err := e.getAlertFromPanels(panel, validateAlertFunc)
if err != nil {
return nil, err
}
- alerts = append(alerts, als...)
+ alerts = append(alerts, alertSlice...)
continue
}
@@ -99,7 +99,7 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json,
panelID, err := panel.Get("id").Int64()
if err != nil {
- return nil, fmt.Errorf("panel id is required. err %v", err)
+ return nil, ValidationError{Reason: "A numeric panel id property is missing"}
}
// backward compatibility check, can be removed later
@@ -145,7 +145,8 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json,
datasource, err := e.lookupDatasourceID(dsName)
if err != nil {
- return nil, err
+ e.log.Debug("Error looking up datasource", "error", err)
+ return nil, ValidationError{Reason: fmt.Sprintf("Data source used by alert rule not found, alertName=%v, datasource=%s", alert.Name, dsName)}
}
jsonQuery.SetPath([]string{"datasourceId"}, datasource.Id)
@@ -166,8 +167,7 @@ func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json,
}
if !validateAlertFunc(alert) {
- e.log.Debug("Invalid Alert Data. Dashboard, Org or Panel ID is not correct", "alertName", alert.Name, "panelId", alert.PanelId)
- return nil, m.ErrDashboardContainsInvalidAlertData
+ return nil, ValidationError{Reason: fmt.Sprintf("Panel id is not correct, alertName=%v, panelId=%v", alert.Name, alert.PanelId)}
}
alerts = append(alerts, alert)
diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go
index c7212e48174..e2dc01a1181 100644
--- a/pkg/services/alerting/extractor_test.go
+++ b/pkg/services/alerting/extractor_test.go
@@ -258,7 +258,7 @@ func TestAlertRuleExtraction(t *testing.T) {
Convey("Should fail on save", func() {
_, err := extractor.GetAlerts()
- So(err, ShouldEqual, m.ErrDashboardContainsInvalidAlertData)
+ So(err.Error(), ShouldEqual, "Alert validation error: Panel id is not correct, alertName=Influxdb, panelId=1")
})
})
})
diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go
index 46f8b3c769c..96294f0624f 100644
--- a/pkg/services/alerting/interfaces.go
+++ b/pkg/services/alerting/interfaces.go
@@ -3,6 +3,8 @@ package alerting
import (
"context"
"time"
+
+ "github.com/grafana/grafana/pkg/models"
)
type EvalHandler interface {
@@ -20,7 +22,7 @@ type Notifier interface {
NeedsImage() bool
// ShouldNotify checks this evaluation should send an alert notification
- ShouldNotify(ctx context.Context, evalContext *EvalContext) bool
+ ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool
GetNotifierId() int64
GetIsDefault() bool
@@ -28,11 +30,16 @@ type Notifier interface {
GetFrequency() time.Duration
}
-type NotifierSlice []Notifier
+type notifierState struct {
+ notifier Notifier
+ state *models.AlertNotificationState
+}
-func (notifiers NotifierSlice) ShouldUploadImage() bool {
- for _, notifier := range notifiers {
- if notifier.NeedsImage() {
+type notifierStateSlice []*notifierState
+
+func (notifiers notifierStateSlice) ShouldUploadImage() bool {
+ for _, ns := range notifiers {
+ if ns.notifier.NeedsImage() {
return true
}
}
diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go
index 7fbd956f4f9..9ce50eadd6b 100644
--- a/pkg/services/alerting/notifier.go
+++ b/pkg/services/alerting/notifier.go
@@ -1,16 +1,15 @@
package alerting
import (
- "context"
"errors"
"fmt"
- "time"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/imguploader"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/services/rendering"
+ "github.com/grafana/grafana/pkg/setting"
m "github.com/grafana/grafana/pkg/models"
)
@@ -40,61 +39,78 @@ type notificationService struct {
}
func (n *notificationService) SendIfNeeded(context *EvalContext) error {
- notifiers, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context)
+ notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context)
if err != nil {
return err
}
- if len(notifiers) == 0 {
+ if len(notifierStates) == 0 {
return nil
}
- if notifiers.ShouldUploadImage() {
+ if notifierStates.ShouldUploadImage() {
if err = n.uploadImage(context); err != nil {
n.log.Error("Failed to upload alert panel image.", "error", err)
}
}
- return n.sendNotifications(context, notifiers)
+ return n.sendNotifications(context, notifierStates)
}
-func (n *notificationService) sendNotifications(evalContext *EvalContext, notifiers []Notifier) error {
- for _, notifier := range notifiers {
- not := notifier
+func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error {
+ notifier := notifierState.notifier
- err := bus.InTransaction(evalContext.Ctx, func(ctx context.Context) error {
- n.log.Debug("trying to send notification", "id", not.GetNotifierId())
+ n.log.Debug("Sending notification", "type", notifier.GetType(), "id", notifier.GetNotifierId(), "isDefault", notifier.GetIsDefault())
+ metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc()
- // Verify that we can send the notification again
- // but this time within the same transaction.
- if !evalContext.IsTestRun && !not.ShouldNotify(context.Background(), evalContext) {
- return nil
- }
+ err := notifier.Notify(evalContext)
- n.log.Debug("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault())
- metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc()
+ if err != nil {
+ n.log.Error("failed to send notification", "id", notifier.GetNotifierId(), "error", err)
+ }
- //send notification
- success := not.Notify(evalContext) == nil
+ if evalContext.IsTestRun {
+ return nil
+ }
- if evalContext.IsTestRun {
- return nil
- }
+ cmd := &m.SetAlertNotificationStateToCompleteCommand{
+ Id: notifierState.state.Id,
+ Version: notifierState.state.Version,
+ }
- //write result to db.
- cmd := &m.RecordNotificationJournalCommand{
- OrgId: evalContext.Rule.OrgId,
- AlertId: evalContext.Rule.Id,
- NotifierId: not.GetNotifierId(),
- SentAt: time.Now().Unix(),
- Success: success,
- }
+ return bus.DispatchCtx(evalContext.Ctx, cmd)
+}
- return bus.DispatchCtx(ctx, cmd)
- })
+func (n *notificationService) sendNotification(evalContext *EvalContext, notifierState *notifierState) error {
+ if !evalContext.IsTestRun {
+ setPendingCmd := &m.SetAlertNotificationStateToPendingCommand{
+ Id: notifierState.state.Id,
+ Version: notifierState.state.Version,
+ AlertRuleStateUpdatedVersion: evalContext.Rule.StateChanges,
+ }
+
+ err := bus.DispatchCtx(evalContext.Ctx, setPendingCmd)
+ if err == m.ErrAlertNotificationStateVersionConflict {
+ return nil
+ }
if err != nil {
- n.log.Error("failed to send notification", "id", not.GetNotifierId())
+ return err
+ }
+
+ // We need to update state version to be able to log
+ // unexpected version conflicts when marking notifications as ok
+ notifierState.state.Version = setPendingCmd.ResultVersion
+ }
+
+ return n.sendAndMarkAsComplete(evalContext, notifierState)
+}
+
+func (n *notificationService) sendNotifications(evalContext *EvalContext, notifierStates notifierStateSlice) error {
+ for _, notifierState := range notifierStates {
+ err := n.sendNotification(evalContext, notifierState)
+ if err != nil {
+ n.log.Error("failed to send notification", "id", notifierState.notifier.GetNotifierId(), "error", err)
}
}
@@ -108,11 +124,12 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
}
renderOpts := rendering.Opts{
- Width: 1000,
- Height: 500,
- Timeout: alertTimeout / 2,
- OrgId: context.Rule.OrgId,
- OrgRole: m.ROLE_ADMIN,
+ Width: 1000,
+ Height: 500,
+ Timeout: alertTimeout / 2,
+ OrgId: context.Rule.OrgId,
+ OrgRole: m.ROLE_ADMIN,
+ ConcurrentLimit: setting.AlertingRenderLimit,
}
ref, err := context.GetDashboardUID()
@@ -140,22 +157,38 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
return nil
}
-func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (NotifierSlice, error) {
+func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (notifierStateSlice, error) {
query := &m.GetAlertNotificationsToSendQuery{OrgId: orgId, Ids: notificationIds}
if err := bus.Dispatch(query); err != nil {
return nil, err
}
- var result []Notifier
+ var result notifierStateSlice
for _, notification := range query.Result {
not, err := n.createNotifierFor(notification)
if err != nil {
- return nil, err
+ n.log.Error("Could not create notifier", "notifier", notification.Id, "error", err)
+ continue
}
- if not.ShouldNotify(evalContext.Ctx, evalContext) {
- result = append(result, not)
+ query := &m.GetOrCreateNotificationStateQuery{
+ NotifierId: notification.Id,
+ AlertId: evalContext.Rule.Id,
+ OrgId: evalContext.Rule.OrgId,
+ }
+
+ err = bus.DispatchCtx(evalContext.Ctx, query)
+ if err != nil {
+ n.log.Error("Could not get notification state.", "notifier", notification.Id, "error", err)
+ continue
+ }
+
+ if not.ShouldNotify(evalContext.Ctx, evalContext, query.Result) {
+ result = append(result, ¬ifierState{
+ notifier: not,
+ state: query.Result,
+ })
}
}
diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go
index 9826dd1dffb..2caa4d5ab58 100644
--- a/pkg/services/alerting/notifiers/alertmanager.go
+++ b/pkg/services/alerting/notifiers/alertmanager.go
@@ -46,7 +46,7 @@ type AlertmanagerNotifier struct {
log log.Logger
}
-func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext) bool {
+func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *m.AlertNotificationState) bool {
this.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState)
// Do not notify when we become OK for the first time.
diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go
index ca011356247..fbade2eccac 100644
--- a/pkg/services/alerting/notifiers/base.go
+++ b/pkg/services/alerting/notifiers/base.go
@@ -4,13 +4,16 @@ import (
"context"
"time"
- "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
)
+const (
+ triggMetrString = "Triggered metrics:\n\n"
+)
+
type NotifierBase struct {
Name string
Type string
@@ -42,55 +45,47 @@ func NewNotifierBase(model *models.AlertNotification) NotifierBase {
}
}
-func defaultShouldNotify(context *alerting.EvalContext, sendReminder bool, frequency time.Duration, lastNotify time.Time) bool {
+// ShouldNotify checks this evaluation should send an alert notification
+func (n *NotifierBase) ShouldNotify(ctx context.Context, context *alerting.EvalContext, notiferState *models.AlertNotificationState) bool {
// Only notify on state change.
- if context.PrevAlertState == context.Rule.State && !sendReminder {
+ if context.PrevAlertState == context.Rule.State && !n.SendReminder {
return false
}
- // Do not notify if interval has not elapsed
- if sendReminder && !lastNotify.IsZero() && lastNotify.Add(frequency).After(time.Now()) {
- return false
- }
+ if context.PrevAlertState == context.Rule.State && n.SendReminder {
+ // Do not notify if interval has not elapsed
+ lastNotify := time.Unix(notiferState.UpdatedAt, 0)
+ if notiferState.UpdatedAt != 0 && lastNotify.Add(n.Frequency).After(time.Now()) {
+ return false
+ }
- // Do not notify if alert state if OK or pending even on repeated notify
- if sendReminder && (context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending) {
- return false
+ // Do not notify if alert state is OK or pending even on repeated notify
+ if context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending {
+ return false
+ }
}
// Do not notify when we become OK for the first time.
- if (context.PrevAlertState == models.AlertStatePending) && (context.Rule.State == models.AlertStateOK) {
+ if context.PrevAlertState == models.AlertStatePending && context.Rule.State == models.AlertStateOK {
return false
}
+ // Do not notify when we go from OK -> Pending
+ if context.PrevAlertState == models.AlertStateOK && context.Rule.State == models.AlertStatePending {
+ return false
+ }
+
+ // Do not notify if the state is pending and it has been updated within the last minute
+ if notiferState.State == models.AlertNotificationStatePending {
+ lastUpdated := time.Unix(notiferState.UpdatedAt, 0)
+ if lastUpdated.Add(1 * time.Minute).After(time.Now()) {
+ return false
+ }
+ }
+
return true
}
-// ShouldNotify checks this evaluation should send an alert notification
-func (n *NotifierBase) ShouldNotify(ctx context.Context, c *alerting.EvalContext) bool {
- cmd := &models.GetLatestNotificationQuery{
- OrgId: c.Rule.OrgId,
- AlertId: c.Rule.Id,
- NotifierId: n.Id,
- }
-
- err := bus.DispatchCtx(ctx, cmd)
- if err == models.ErrJournalingNotFound {
- return true
- }
-
- if err != nil {
- n.log.Error("Could not determine last time alert notifier fired", "Alert name", c.Rule.Name, "Error", err)
- return false
- }
-
- if !cmd.Result.Success {
- return true
- }
-
- return defaultShouldNotify(c, n.SendReminder, n.Frequency, time.Unix(cmd.Result.SentAt, 0))
-}
-
func (n *NotifierBase) GetType() string {
return n.Type
}
diff --git a/pkg/services/alerting/notifiers/base_test.go b/pkg/services/alerting/notifiers/base_test.go
index 57b82f32466..5e46d3ad72e 100644
--- a/pkg/services/alerting/notifiers/base_test.go
+++ b/pkg/services/alerting/notifiers/base_test.go
@@ -2,12 +2,9 @@ package notifiers
import (
"context"
- "errors"
"testing"
"time"
- "github.com/grafana/grafana/pkg/bus"
-
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
@@ -15,100 +12,144 @@ import (
)
func TestShouldSendAlertNotification(t *testing.T) {
+ tnow := time.Now()
+
tcs := []struct {
name string
prevState m.AlertStateType
newState m.AlertStateType
- expected bool
sendReminder bool
+ frequency time.Duration
+ state *m.AlertNotificationState
+
+ expect bool
}{
{
- name: "pending -> ok should not trigger an notification",
- newState: m.AlertStatePending,
- prevState: m.AlertStateOK,
- expected: false,
+ name: "pending -> ok should not trigger a notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStatePending,
+ sendReminder: false,
+ state: &m.AlertNotificationState{},
+
+ expect: false,
},
{
- name: "ok -> alerting should trigger an notification",
- newState: m.AlertStateOK,
- prevState: m.AlertStateAlerting,
- expected: true,
+ name: "ok -> alerting should trigger a notification",
+ newState: m.AlertStateAlerting,
+ prevState: m.AlertStateOK,
+ sendReminder: false,
+ state: &m.AlertNotificationState{},
+
+ expect: true,
},
{
- name: "ok -> pending should not trigger an notification",
- newState: m.AlertStateOK,
- prevState: m.AlertStatePending,
- expected: false,
+ name: "ok -> pending should not trigger a notification",
+ newState: m.AlertStatePending,
+ prevState: m.AlertStateOK,
+ sendReminder: false,
+ state: &m.AlertNotificationState{},
+
+ expect: false,
},
{
name: "ok -> ok should not trigger an notification",
newState: m.AlertStateOK,
prevState: m.AlertStateOK,
- expected: false,
sendReminder: false,
- },
- {
- name: "ok -> alerting should not trigger an notification",
- newState: m.AlertStateOK,
- prevState: m.AlertStateAlerting,
- expected: true,
- sendReminder: true,
+ state: &m.AlertNotificationState{},
+
+ expect: false,
},
{
name: "ok -> ok with reminder should not trigger an notification",
newState: m.AlertStateOK,
prevState: m.AlertStateOK,
- expected: false,
sendReminder: true,
+ state: &m.AlertNotificationState{},
+
+ expect: false,
+ },
+ {
+ name: "alerting -> ok should trigger a notification",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStateAlerting,
+ sendReminder: false,
+ state: &m.AlertNotificationState{},
+
+ expect: true,
+ },
+ {
+ name: "alerting -> ok should trigger a notification when reminders enabled",
+ newState: m.AlertStateOK,
+ prevState: m.AlertStateAlerting,
+ frequency: time.Minute * 10,
+ sendReminder: true,
+ state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()},
+
+ expect: true,
+ },
+ {
+ name: "alerting -> alerting with reminder and no state should trigger",
+ newState: m.AlertStateAlerting,
+ prevState: m.AlertStateAlerting,
+ frequency: time.Minute * 10,
+ sendReminder: true,
+ state: &m.AlertNotificationState{},
+
+ expect: true,
+ },
+ {
+ name: "alerting -> alerting with reminder and last notification sent 1 minute ago should not trigger",
+ newState: m.AlertStateAlerting,
+ prevState: m.AlertStateAlerting,
+ frequency: time.Minute * 10,
+ sendReminder: true,
+ state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()},
+
+ expect: false,
+ },
+ {
+ name: "alerting -> alerting with reminder and last notification sent 11 minutes ago should trigger",
+ newState: m.AlertStateAlerting,
+ prevState: m.AlertStateAlerting,
+ frequency: time.Minute * 10,
+ sendReminder: true,
+ state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-11 * time.Minute).Unix()},
+
+ expect: true,
+ },
+ {
+ name: "OK -> alerting with notification state pending and updated 30 seconds ago should not trigger",
+ newState: m.AlertStateAlerting,
+ prevState: m.AlertStateOK,
+ state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-30 * time.Second).Unix()},
+
+ expect: false,
+ },
+ {
+ name: "OK -> alerting with notification state pending and updated 2 minutes ago should trigger",
+ newState: m.AlertStateAlerting,
+ prevState: m.AlertStateOK,
+ state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-2 * time.Minute).Unix()},
+
+ expect: true,
},
}
for _, tc := range tcs {
evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{
- State: tc.newState,
+ State: tc.prevState,
})
- evalContext.Rule.State = tc.prevState
- if defaultShouldNotify(evalContext, true, 0, time.Now()) != tc.expected {
- t.Errorf("failed %s. expected %+v to return %v", tc.name, tc, tc.expected)
+ evalContext.Rule.State = tc.newState
+ nb := &NotifierBase{SendReminder: tc.sendReminder, Frequency: tc.frequency}
+
+ if nb.ShouldNotify(evalContext.Ctx, evalContext, tc.state) != tc.expect {
+ t.Errorf("failed test %s.\n expected \n%+v \nto return: %v", tc.name, tc, tc.expect)
}
}
}
-func TestShouldNotifyWhenNoJournalingIsFound(t *testing.T) {
- Convey("base notifier", t, func() {
- bus.ClearBusHandlers()
-
- notifier := NewNotifierBase(&m.AlertNotification{
- Id: 1,
- Name: "name",
- Type: "email",
- Settings: simplejson.New(),
- })
- evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{})
-
- Convey("should notify if no journaling is found", func() {
- bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error {
- return m.ErrJournalingNotFound
- })
-
- if !notifier.ShouldNotify(context.Background(), evalContext) {
- t.Errorf("should send notifications when ErrJournalingNotFound is returned")
- }
- })
-
- Convey("should not notify query returns error", func() {
- bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error {
- return errors.New("some kind of error unknown error")
- })
-
- if notifier.ShouldNotify(context.Background(), evalContext) {
- t.Errorf("should not send notifications when query returns error")
- }
- })
- })
-}
-
func TestBaseNotifier(t *testing.T) {
Convey("default constructor for notifiers", t, func() {
bJson := simplejson.New()
diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go
index 1c284ec3d2b..388cec79597 100644
--- a/pkg/services/alerting/notifiers/hipchat.go
+++ b/pkg/services/alerting/notifiers/hipchat.go
@@ -125,7 +125,7 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error {
case models.AlertStateOK:
color = "green"
case models.AlertStateNoData:
- color = "grey"
+ color = "gray"
case models.AlertStateAlerting:
color = "red"
}
diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go
index d8d19fc5dae..a8a424c87a7 100644
--- a/pkg/services/alerting/notifiers/kafka.go
+++ b/pkg/services/alerting/notifiers/kafka.go
@@ -61,7 +61,7 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error {
state := evalContext.Rule.State
- customData := "Triggered metrics:\n\n"
+ customData := triggMetrString
for _, evt := range evalContext.EvalMatches {
customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
}
diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go
index 84148a0d99c..629968b5102 100644
--- a/pkg/services/alerting/notifiers/opsgenie.go
+++ b/pkg/services/alerting/notifiers/opsgenie.go
@@ -95,7 +95,7 @@ func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) err
return err
}
- customData := "Triggered metrics:\n\n"
+ customData := triggMetrString
for _, evt := range evalContext.EvalMatches {
customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
}
diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go
index bf85466388f..9f6ce3c2dc8 100644
--- a/pkg/services/alerting/notifiers/pagerduty.go
+++ b/pkg/services/alerting/notifiers/pagerduty.go
@@ -76,7 +76,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
if evalContext.Rule.State == m.AlertStateOK {
eventType = "resolve"
}
- customData := "Triggered metrics:\n\n"
+ customData := triggMetrString
for _, evt := range evalContext.EvalMatches {
customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
}
diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go
index 7beb71e5c65..2dad11285b4 100644
--- a/pkg/services/alerting/notifiers/teams.go
+++ b/pkg/services/alerting/notifiers/teams.go
@@ -74,7 +74,7 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error {
}
message := ""
- if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok.
+ if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok.
message = evalContext.Rule.Message
}
diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go
index 5492de45d39..6c47c92972c 100644
--- a/pkg/services/alerting/notifiers/telegram.go
+++ b/pkg/services/alerting/notifiers/telegram.go
@@ -127,7 +127,13 @@ func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.Eval
var err error
imageFile, err = os.Open(evalContext.ImageOnDiskPath)
- defer imageFile.Close()
+ defer func() {
+ err := imageFile.Close()
+ if err != nil {
+ log.Error2("Could not close Telegram inline image.", "err", err)
+ }
+ }()
+
if err != nil {
return nil, err
}
diff --git a/pkg/services/alerting/notifiers/telegram_test.go b/pkg/services/alerting/notifiers/telegram_test.go
index 98c8d884ad0..911323ae9d1 100644
--- a/pkg/services/alerting/notifiers/telegram_test.go
+++ b/pkg/services/alerting/notifiers/telegram_test.go
@@ -1,6 +1,7 @@
package notifiers
import (
+ "context"
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
@@ -52,11 +53,12 @@ func TestTelegramNotifier(t *testing.T) {
})
Convey("generateCaption should generate a message with all pertinent details", func() {
- evalContext := alerting.NewEvalContext(nil, &alerting.Rule{
- Name: "This is an alarm",
- Message: "Some kind of message.",
- State: m.AlertStateOK,
- })
+ evalContext := alerting.NewEvalContext(context.Background(),
+ &alerting.Rule{
+ Name: "This is an alarm",
+ Message: "Some kind of message.",
+ State: m.AlertStateOK,
+ })
caption := generateImageCaption(evalContext, "http://grafa.url/abcdef", "")
So(len(caption), ShouldBeLessThanOrEqualTo, 200)
@@ -68,11 +70,12 @@ func TestTelegramNotifier(t *testing.T) {
Convey("When generating a message", func() {
Convey("URL should be skipped if it's too long", func() {
- evalContext := alerting.NewEvalContext(nil, &alerting.Rule{
- Name: "This is an alarm",
- Message: "Some kind of message.",
- State: m.AlertStateOK,
- })
+ evalContext := alerting.NewEvalContext(context.Background(),
+ &alerting.Rule{
+ Name: "This is an alarm",
+ Message: "Some kind of message.",
+ State: m.AlertStateOK,
+ })
caption := generateImageCaption(evalContext,
"http://grafa.url/abcdefaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
@@ -85,11 +88,12 @@ func TestTelegramNotifier(t *testing.T) {
})
Convey("Message should be trimmed if it's too long", func() {
- evalContext := alerting.NewEvalContext(nil, &alerting.Rule{
- Name: "This is an alarm",
- Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.",
- State: m.AlertStateOK,
- })
+ evalContext := alerting.NewEvalContext(context.Background(),
+ &alerting.Rule{
+ Name: "This is an alarm",
+ Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.",
+ State: m.AlertStateOK,
+ })
caption := generateImageCaption(evalContext,
"http://grafa.url/foo",
@@ -101,11 +105,12 @@ func TestTelegramNotifier(t *testing.T) {
})
Convey("Metrics should be skipped if they don't fit", func() {
- evalContext := alerting.NewEvalContext(nil, &alerting.Rule{
- Name: "This is an alarm",
- Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ",
- State: m.AlertStateOK,
- })
+ evalContext := alerting.NewEvalContext(context.Background(),
+ &alerting.Rule{
+ Name: "This is an alarm",
+ Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ",
+ State: m.AlertStateOK,
+ })
caption := generateImageCaption(evalContext,
"http://grafa.url/foo",
diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go
index 363d06d1132..420ffeb9a55 100644
--- a/pkg/services/alerting/result_handler.go
+++ b/pkg/services/alerting/result_handler.go
@@ -67,6 +67,12 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error {
}
handler.log.Error("Failed to save state", "error", err)
+ } else {
+
+ // StateChanges is used for de duping alert notifications
+ // when two servers are raising. This makes sure that the server
+ // with the last state change always sends a notification.
+ evalContext.Rule.StateChanges = cmd.Result.StateChanges
}
// save annotation
@@ -88,19 +94,6 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error {
}
}
- if evalContext.Rule.State == m.AlertStateOK && evalContext.PrevAlertState != m.AlertStateOK {
- for _, notifierId := range evalContext.Rule.Notifications {
- cmd := &m.CleanNotificationJournalCommand{
- AlertId: evalContext.Rule.Id,
- NotifierId: notifierId,
- OrgId: evalContext.Rule.OrgId,
- }
- if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil {
- handler.log.Error("Failed to clean up old notification records", "notifier", notifierId, "alert", evalContext.Rule.Id, "Error", err)
- }
- }
- }
handler.notifier.SendIfNeeded(evalContext)
-
return nil
}
diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go
index 018d138dbe4..999611f15c4 100644
--- a/pkg/services/alerting/rule.go
+++ b/pkg/services/alerting/rule.go
@@ -23,6 +23,8 @@ type Rule struct {
State m.AlertStateType
Conditions []Condition
Notifications []int64
+
+ StateChanges int64
}
type ValidationError struct {
@@ -34,13 +36,13 @@ type ValidationError struct {
}
func (e ValidationError) Error() string {
- extraInfo := ""
+ extraInfo := e.Reason
if e.Alertid != 0 {
extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid)
}
if e.PanelId != 0 {
- extraInfo = fmt.Sprintf("%s PanelId: %v ", extraInfo, e.PanelId)
+ extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelId)
}
if e.DashboardId != 0 {
@@ -48,10 +50,10 @@ func (e ValidationError) Error() string {
}
if e.Err != nil {
- return fmt.Sprintf("%s %s%s", e.Err.Error(), e.Reason, extraInfo)
+ return fmt.Sprintf("Alert validation error: %s%s", e.Err.Error(), extraInfo)
}
- return fmt.Sprintf("Failed to extract alert.Reason: %s %s", e.Reason, extraInfo)
+ return fmt.Sprintf("Alert validation error: %s", extraInfo)
}
var (
@@ -100,6 +102,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
model.State = ruleDef.State
model.NoDataState = m.NoDataOption(ruleDef.Settings.Get("noDataState").MustString("no_data"))
model.ExecutionErrorState = m.ExecutionErrorOption(ruleDef.Settings.Get("executionErrorState").MustString("alerting"))
+ model.StateChanges = ruleDef.StateChanges
for _, v := range ruleDef.Settings.Get("notifications").MustArray() {
jsonModel := simplejson.NewFromAny(v)
@@ -125,7 +128,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
}
if len(model.Conditions) == 0 {
- return nil, fmt.Errorf("Alert is missing conditions")
+ return nil, ValidationError{Reason: "Alert is missing conditions"}
}
return model, nil
diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go
index 8421360b5ed..8aa1b80aa22 100644
--- a/pkg/services/alerting/test_notification.go
+++ b/pkg/services/alerting/test_notification.go
@@ -39,7 +39,7 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error {
return err
}
- return notifier.sendNotifications(createTestEvalContext(cmd), []Notifier{notifiers})
+ return notifier.sendNotifications(createTestEvalContext(cmd), notifierStateSlice{{notifier: notifiers}})
}
func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext {
diff --git a/pkg/services/alerting/ticker.go b/pkg/services/alerting/ticker.go
index 5ce19b1b232..8cee2653ee9 100644
--- a/pkg/services/alerting/ticker.go
+++ b/pkg/services/alerting/ticker.go
@@ -37,10 +37,6 @@ func NewTicker(last time.Time, initialOffset time.Duration, c clock.Clock) *Tick
return t
}
-func (t *Ticker) updateOffset(offset time.Duration) {
- t.newOffset <- offset
-}
-
func (t *Ticker) run() {
for {
next := t.last.Add(time.Duration(1) * time.Second)
diff --git a/pkg/services/annotations/annotations.go b/pkg/services/annotations/annotations.go
index 9b490169d3b..60a92aa897a 100644
--- a/pkg/services/annotations/annotations.go
+++ b/pkg/services/annotations/annotations.go
@@ -21,6 +21,7 @@ type ItemQuery struct {
RegionId int64 `json:"regionId"`
Tags []string `json:"tags"`
Type string `json:"type"`
+ MatchAny bool `json:"matchAny"`
Limit int64 `json:"limit"`
}
diff --git a/pkg/services/cleanup/cleanup.go b/pkg/services/cleanup/cleanup.go
index 521601a358b..c15ae8ef36c 100644
--- a/pkg/services/cleanup/cleanup.go
+++ b/pkg/services/cleanup/cleanup.go
@@ -73,7 +73,7 @@ func (srv *CleanUpService) cleanUpTmpFiles() {
}
}
- srv.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "keept", len(files))
+ srv.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "kept", len(files))
}
func (srv *CleanUpService) shouldCleanupTempFile(filemtime time.Time, now time.Time) bool {
diff --git a/pkg/services/dashboards/dashboard_service.go b/pkg/services/dashboards/dashboard_service.go
index 278421e6be7..8eb7f4a6e72 100644
--- a/pkg/services/dashboards/dashboard_service.go
+++ b/pkg/services/dashboards/dashboard_service.go
@@ -5,6 +5,7 @@ import (
"time"
"github.com/grafana/grafana/pkg/bus"
+ "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/guardian"
"github.com/grafana/grafana/pkg/util"
@@ -25,7 +26,9 @@ type DashboardProvisioningService interface {
// NewService factory for creating a new dashboard service
var NewService = func() DashboardService {
- return &dashboardServiceImpl{}
+ return &dashboardServiceImpl{
+ log: log.New("dashboard-service"),
+ }
}
// NewProvisioningService factory for creating a new dashboard provisioning service
@@ -45,6 +48,7 @@ type SaveDashboardDTO struct {
type dashboardServiceImpl struct {
orgId int64
user *models.SignedInUser
+ log log.Logger
}
func (dr *dashboardServiceImpl) GetProvisionedDashboardData(name string) ([]*models.DashboardProvisioning, error) {
@@ -89,7 +93,7 @@ func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO,
}
if err := bus.Dispatch(&validateAlertsCmd); err != nil {
- return nil, models.ErrDashboardContainsInvalidAlertData
+ return nil, err
}
}
diff --git a/pkg/services/dashboards/dashboard_service_test.go b/pkg/services/dashboards/dashboard_service_test.go
index f9d487f625c..b8300a5af8d 100644
--- a/pkg/services/dashboards/dashboard_service_test.go
+++ b/pkg/services/dashboards/dashboard_service_test.go
@@ -117,12 +117,12 @@ func TestDashboardService(t *testing.T) {
})
bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error {
- return errors.New("error")
+ return errors.New("Alert validation error")
})
dto.Dashboard = models.NewDashboard("Dash")
_, err := service.SaveDashboard(dto)
- So(err, ShouldEqual, models.ErrDashboardContainsInvalidAlertData)
+ So(err.Error(), ShouldEqual, "Alert validation error")
})
})
diff --git a/pkg/services/hooks/hooks.go b/pkg/services/hooks/hooks.go
new file mode 100644
index 00000000000..c51650cf6c9
--- /dev/null
+++ b/pkg/services/hooks/hooks.go
@@ -0,0 +1,30 @@
+package hooks
+
+import (
+ "github.com/grafana/grafana/pkg/api/dtos"
+ "github.com/grafana/grafana/pkg/registry"
+)
+
+type IndexDataHook func(indexData *dtos.IndexViewData)
+
+type HooksService struct {
+ indexDataHooks []IndexDataHook
+}
+
+func init() {
+ registry.RegisterService(&HooksService{})
+}
+
+func (srv *HooksService) Init() error {
+ return nil
+}
+
+func (srv *HooksService) AddIndexDataHook(hook IndexDataHook) {
+ srv.indexDataHooks = append(srv.indexDataHooks, hook)
+}
+
+func (srv *HooksService) RunIndexDataHooks(indexData *dtos.IndexViewData) {
+ for _, hook := range srv.indexDataHooks {
+ hook(indexData)
+ }
+}
diff --git a/pkg/services/notifications/notifications_test.go b/pkg/services/notifications/notifications_test.go
index 504c10c22ec..d54b70e704f 100644
--- a/pkg/services/notifications/notifications_test.go
+++ b/pkg/services/notifications/notifications_test.go
@@ -9,12 +9,6 @@ import (
. "github.com/smartystreets/goconvey/convey"
)
-type testTriggeredAlert struct {
- ActualValue float64
- Name string
- State string
-}
-
func TestNotifications(t *testing.T) {
Convey("Given the notifications service", t, func() {
diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go
index 7508550838f..bfef06b558e 100644
--- a/pkg/services/provisioning/dashboards/config_reader.go
+++ b/pkg/services/provisioning/dashboards/config_reader.go
@@ -83,7 +83,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) {
}
if dashboards[i].UpdateIntervalSeconds == 0 {
- dashboards[i].UpdateIntervalSeconds = 3
+ dashboards[i].UpdateIntervalSeconds = 10
}
}
diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go
index df0d2ae038e..d386e42349d 100644
--- a/pkg/services/provisioning/dashboards/config_reader_test.go
+++ b/pkg/services/provisioning/dashboards/config_reader_test.go
@@ -70,7 +70,7 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) {
So(len(ds.Options), ShouldEqual, 1)
So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards")
So(ds.DisableDeletion, ShouldBeTrue)
- So(ds.UpdateIntervalSeconds, ShouldEqual, 10)
+ So(ds.UpdateIntervalSeconds, ShouldEqual, 15)
ds2 := cfg[1]
So(ds2.Name, ShouldEqual, "default")
@@ -81,5 +81,5 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) {
So(len(ds2.Options), ShouldEqual, 1)
So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards")
So(ds2.DisableDeletion, ShouldBeFalse)
- So(ds2.UpdateIntervalSeconds, ShouldEqual, 3)
+ So(ds2.UpdateIntervalSeconds, ShouldEqual, 10)
}
diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index ef27ba97235..ea093860f3e 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -43,26 +43,6 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
log.Warn("[Deprecated] The folder property is deprecated. Please use path instead.")
}
- if _, err := os.Stat(path); os.IsNotExist(err) {
- log.Error("Cannot read directory", "error", err)
- }
-
- copy := path
- path, err := filepath.Abs(path)
- if err != nil {
- log.Error("Could not create absolute path ", "path", path)
- }
-
- path, err = filepath.EvalSymlinks(path)
- if err != nil {
- log.Error("Failed to read content of symlinked path: %s", path)
- }
-
- if path == "" {
- path = copy
- log.Info("falling back to original path due to EvalSymlink/Abs failure")
- }
-
return &fileReader{
Cfg: cfg,
Path: path,
@@ -99,7 +79,8 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error {
}
func (fr *fileReader) startWalkingDisk() error {
- if _, err := os.Stat(fr.Path); err != nil {
+ resolvedPath := fr.resolvePath(fr.Path)
+ if _, err := os.Stat(resolvedPath); err != nil {
if os.IsNotExist(err) {
return err
}
@@ -116,7 +97,7 @@ func (fr *fileReader) startWalkingDisk() error {
}
filesFoundOnDisk := map[string]os.FileInfo{}
- err = filepath.Walk(fr.Path, createWalkFn(filesFoundOnDisk))
+ err = filepath.Walk(resolvedPath, createWalkFn(filesFoundOnDisk))
if err != nil {
return err
}
@@ -156,7 +137,7 @@ func (fr *fileReader) deleteDashboardIfFileIsMissing(provisionedDashboardRefs ma
cmd := &models.DeleteDashboardCommand{OrgId: fr.Cfg.OrgId, Id: dashboardId}
err := bus.Dispatch(cmd)
if err != nil {
- fr.log.Error("failed to delete dashboard", "id", cmd.Id)
+ fr.log.Error("failed to delete dashboard", "id", cmd.Id, "error", err)
}
}
}
@@ -344,6 +325,29 @@ func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time,
}, nil
}
+func (fr *fileReader) resolvePath(path string) string {
+ if _, err := os.Stat(path); os.IsNotExist(err) {
+ fr.log.Error("Cannot read directory", "error", err)
+ }
+
+ copy := path
+ path, err := filepath.Abs(path)
+ if err != nil {
+ fr.log.Error("Could not create absolute path ", "path", path)
+ }
+
+ path, err = filepath.EvalSymlinks(path)
+ if err != nil {
+ fr.log.Error("Failed to read content of symlinked path: %s", path)
+ }
+
+ if path == "" {
+ path = copy
+ fr.log.Info("falling back to original path due to EvalSymlink/Abs failure")
+ }
+ return path
+}
+
type provisioningMetadata struct {
uid string
title string
diff --git a/pkg/services/provisioning/dashboards/file_reader_linux_test.go b/pkg/services/provisioning/dashboards/file_reader_linux_test.go
index 9d4cdae8609..77f488ebcfb 100644
--- a/pkg/services/provisioning/dashboards/file_reader_linux_test.go
+++ b/pkg/services/provisioning/dashboards/file_reader_linux_test.go
@@ -30,10 +30,11 @@ func TestProvsionedSymlinkedFolder(t *testing.T) {
want, err := filepath.Abs(containingId)
if err != nil {
- t.Errorf("expected err to be nill")
+ t.Errorf("expected err to be nil")
}
- if reader.Path != want {
- t.Errorf("got %s want %s", reader.Path, want)
+ resolvedPath := reader.resolvePath(reader.Path)
+ if resolvedPath != want {
+ t.Errorf("got %s want %s", resolvedPath, want)
}
}
diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go
index bdc1e95aafe..fe849816553 100644
--- a/pkg/services/provisioning/dashboards/file_reader_test.go
+++ b/pkg/services/provisioning/dashboards/file_reader_test.go
@@ -67,7 +67,8 @@ func TestCreatingNewDashboardFileReader(t *testing.T) {
reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
So(err, ShouldBeNil)
- So(filepath.IsAbs(reader.Path), ShouldBeTrue)
+ resolvedPath := reader.resolvePath(reader.Path)
+ So(filepath.IsAbs(resolvedPath), ShouldBeTrue)
})
})
}
diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml
index e26c329f87c..c43c4a14c53 100644
--- a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml
+++ b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml
@@ -6,7 +6,7 @@ providers:
folder: 'developers'
editable: true
disableDeletion: true
- updateIntervalSeconds: 10
+ updateIntervalSeconds: 15
type: file
options:
path: /var/lib/grafana/dashboards
diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml
index 69a317fb396..8b7b8991759 100644
--- a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml
+++ b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml
@@ -3,7 +3,7 @@
folder: 'developers'
editable: true
disableDeletion: true
- updateIntervalSeconds: 10
+ updateIntervalSeconds: 15
type: file
options:
path: /var/lib/grafana/dashboards
diff --git a/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml
index 1bb9cb53b45..b532c9012ec 100644
--- a/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml
+++ b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml
@@ -4,7 +4,7 @@
# org_id: 1
# # list of datasources to insert/update depending
-# # whats available in the datbase
+# # what's available in the database
#datasources:
# # name of the datasource. Required
# - name: Graphite
diff --git a/pkg/services/rendering/http_mode.go b/pkg/services/rendering/http_mode.go
index d47dfaeaae1..40259c44746 100644
--- a/pkg/services/rendering/http_mode.go
+++ b/pkg/services/rendering/http_mode.go
@@ -70,7 +70,7 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend
return nil, ErrTimeout
}
- // if we didnt get a 200 response, something went wrong.
+ // if we didn't get a 200 response, something went wrong.
if resp.StatusCode != http.StatusOK {
rs.log.Error("Remote rendering request failed", "error", resp.Status)
return nil, fmt.Errorf("Remote rendering request failed. %d: %s", resp.StatusCode, resp.Status)
@@ -83,7 +83,7 @@ func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*Rend
defer out.Close()
_, err = io.Copy(out, resp.Body)
if err != nil {
- // check that we didnt timeout while receiving the response.
+ // check that we didn't timeout while receiving the response.
if reqContext.Err() == context.DeadlineExceeded {
rs.log.Info("Rendering timed out")
return nil, ErrTimeout
diff --git a/pkg/services/rendering/interface.go b/pkg/services/rendering/interface.go
index 85c139cfc04..39cb1ada0f5 100644
--- a/pkg/services/rendering/interface.go
+++ b/pkg/services/rendering/interface.go
@@ -13,15 +13,16 @@ var ErrNoRenderer = errors.New("No renderer plugin found nor is an external rend
var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found")
type Opts struct {
- Width int
- Height int
- Timeout time.Duration
- OrgId int64
- UserId int64
- OrgRole models.RoleType
- Path string
- Encoding string
- Timezone string
+ Width int
+ Height int
+ Timeout time.Duration
+ OrgId int64
+ UserId int64
+ OrgRole models.RoleType
+ Path string
+ Encoding string
+ Timezone string
+ ConcurrentLimit int
}
type RenderResult struct {
diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go
index ff4a67cc9b6..0b4f23e93b4 100644
--- a/pkg/services/rendering/rendering.go
+++ b/pkg/services/rendering/rendering.go
@@ -24,12 +24,13 @@ func init() {
}
type RenderingService struct {
- log log.Logger
- pluginClient *plugin.Client
- grpcPlugin pluginModel.RendererPlugin
- pluginInfo *plugins.RendererPlugin
- renderAction renderFunc
- domain string
+ log log.Logger
+ pluginClient *plugin.Client
+ grpcPlugin pluginModel.RendererPlugin
+ pluginInfo *plugins.RendererPlugin
+ renderAction renderFunc
+ domain string
+ inProgressCount int
Cfg *setting.Cfg `inject:""`
}
@@ -45,7 +46,7 @@ func (rs *RenderingService) Init() error {
// set value used for domain attribute of renderKey cookie
if rs.Cfg.RendererUrl != "" {
- // RendererCallbackUrl has already been passed, it wont generate an error.
+ // RendererCallbackUrl has already been passed, it won't generate an error.
u, _ := url.Parse(rs.Cfg.RendererCallbackUrl)
rs.domain = u.Hostname()
} else if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR {
@@ -90,6 +91,18 @@ func (rs *RenderingService) Run(ctx context.Context) error {
}
func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) {
+ if rs.inProgressCount > opts.ConcurrentLimit {
+ return &RenderResult{
+ FilePath: filepath.Join(setting.HomePath, "public/img/rendering_limit.png"),
+ }, nil
+ }
+
+ defer func() {
+ rs.inProgressCount -= 1
+ }()
+
+ rs.inProgressCount += 1
+
if rs.renderAction != nil {
return rs.renderAction(ctx, opts)
} else {
diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go
index af911dc22e6..2f17402b80c 100644
--- a/pkg/services/sqlstore/alert.go
+++ b/pkg/services/sqlstore/alert.go
@@ -40,7 +40,7 @@ func GetAlertById(query *m.GetAlertByIdQuery) error {
func GetAllAlertQueryHandler(query *m.GetAllAlertsQuery) error {
var alerts []*m.Alert
- err := x.Sql("select * from alert").Find(&alerts)
+ err := x.SQL("select * from alert").Find(&alerts)
if err != nil {
return err
}
@@ -60,6 +60,10 @@ func deleteAlertByIdInternal(alertId int64, reason string, sess *DBSession) erro
return err
}
+ if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_id = ?", alertId); err != nil {
+ return err
+ }
+
return nil
}
@@ -190,7 +194,7 @@ func updateAlerts(existingAlerts []*m.Alert, cmd *m.SaveAlertsCommand, sess *DBS
alert.Updated = timeNow()
alert.State = alertToUpdate.State
sess.MustCols("message")
- _, err := sess.Id(alert.Id).Update(alert)
+ _, err := sess.ID(alert.Id).Update(alert)
if err != nil {
return err
}
@@ -249,7 +253,7 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error {
return inTransaction(func(sess *DBSession) error {
alert := m.Alert{}
- if has, err := sess.Id(cmd.AlertId).Get(&alert); err != nil {
+ if has, err := sess.ID(cmd.AlertId).Get(&alert); err != nil {
return err
} else if !has {
return fmt.Errorf("Could not find alert")
@@ -275,6 +279,8 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error {
}
sess.ID(alert.Id).Update(&alert)
+
+ cmd.Result = alert
return nil
})
}
diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go
index 8fb1e2212a9..daaef945b96 100644
--- a/pkg/services/sqlstore/alert_notification.go
+++ b/pkg/services/sqlstore/alert_notification.go
@@ -3,6 +3,7 @@ package sqlstore
import (
"bytes"
"context"
+ "errors"
"fmt"
"strings"
"time"
@@ -18,16 +19,23 @@ func init() {
bus.AddHandler("sql", DeleteAlertNotification)
bus.AddHandler("sql", GetAlertNotificationsToSend)
bus.AddHandler("sql", GetAllAlertNotifications)
- bus.AddHandlerCtx("sql", RecordNotificationJournal)
- bus.AddHandlerCtx("sql", GetLatestNotification)
- bus.AddHandlerCtx("sql", CleanNotificationJournal)
+ bus.AddHandlerCtx("sql", GetOrCreateAlertNotificationState)
+ bus.AddHandlerCtx("sql", SetAlertNotificationStateToCompleteCommand)
+ bus.AddHandlerCtx("sql", SetAlertNotificationStateToPendingCommand)
}
func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error {
return inTransaction(func(sess *DBSession) error {
sql := "DELETE FROM alert_notification WHERE alert_notification.org_id = ? AND alert_notification.id = ?"
- _, err := sess.Exec(sql, cmd.OrgId, cmd.Id)
- return err
+ if _, err := sess.Exec(sql, cmd.OrgId, cmd.Id); err != nil {
+ return err
+ }
+
+ if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_notification_state.org_id = ? AND alert_notification_state.notifier_id = ?", cmd.OrgId, cmd.Id); err != nil {
+ return err
+ }
+
+ return nil
})
}
@@ -119,7 +127,7 @@ func getAlertNotificationInternal(query *m.GetAlertNotificationsQuery, sess *DBS
}
results := make([]*m.AlertNotification, 0)
- if err := sess.Sql(sql.String(), params...).Find(&results); err != nil {
+ if err := sess.SQL(sql.String(), params...).Find(&results); err != nil {
return err
}
@@ -229,49 +237,123 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error {
})
}
-func RecordNotificationJournal(ctx context.Context, cmd *m.RecordNotificationJournalCommand) error {
+func SetAlertNotificationStateToCompleteCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToCompleteCommand) error {
return inTransactionCtx(ctx, func(sess *DBSession) error {
- journalEntry := &m.AlertNotificationJournal{
- OrgId: cmd.OrgId,
- AlertId: cmd.AlertId,
- NotifierId: cmd.NotifierId,
- SentAt: cmd.SentAt,
- Success: cmd.Success,
- }
+ version := cmd.Version
+ var current m.AlertNotificationState
+ sess.ID(cmd.Id).Get(&current)
- if _, err := sess.Insert(journalEntry); err != nil {
- return err
- }
+ newVersion := cmd.Version + 1
- return nil
- })
-}
+ sql := `UPDATE alert_notification_state SET
+ state = ?,
+ version = ?,
+ updated_at = ?
+ WHERE
+ id = ?`
-func GetLatestNotification(ctx context.Context, cmd *m.GetLatestNotificationQuery) error {
- return inTransactionCtx(ctx, func(sess *DBSession) error {
- nj := &m.AlertNotificationJournal{}
-
- _, err := sess.Desc("alert_notification_journal.sent_at").
- Limit(1).
- Where("alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?", cmd.OrgId, cmd.AlertId, cmd.NotifierId).Get(nj)
+ _, err := sess.Exec(sql, m.AlertNotificationStateCompleted, newVersion, timeNow().Unix(), cmd.Id)
if err != nil {
return err
}
- if nj.AlertId == 0 && nj.Id == 0 && nj.NotifierId == 0 && nj.OrgId == 0 {
- return m.ErrJournalingNotFound
+ if current.Version != version {
+ sqlog.Error("notification state out of sync. The notification is marked as complete but was modified between being set as pending and completion.", "notifierId", current.NotifierId)
}
- cmd.Result = nj
return nil
})
}
-func CleanNotificationJournal(ctx context.Context, cmd *m.CleanNotificationJournalCommand) error {
- return inTransactionCtx(ctx, func(sess *DBSession) error {
- sql := "DELETE FROM alert_notification_journal WHERE alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?"
- _, err := sess.Exec(sql, cmd.OrgId, cmd.AlertId, cmd.NotifierId)
- return err
+func SetAlertNotificationStateToPendingCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToPendingCommand) error {
+ return withDbSession(ctx, func(sess *DBSession) error {
+ newVersion := cmd.Version + 1
+ sql := `UPDATE alert_notification_state SET
+ state = ?,
+ version = ?,
+ updated_at = ?,
+ alert_rule_state_updated_version = ?
+ WHERE
+ id = ? AND
+ (version = ? OR alert_rule_state_updated_version < ?)`
+
+ res, err := sess.Exec(sql,
+ m.AlertNotificationStatePending,
+ newVersion,
+ timeNow().Unix(),
+ cmd.AlertRuleStateUpdatedVersion,
+ cmd.Id,
+ cmd.Version,
+ cmd.AlertRuleStateUpdatedVersion)
+
+ if err != nil {
+ return err
+ }
+
+ affected, _ := res.RowsAffected()
+ if affected == 0 {
+ return m.ErrAlertNotificationStateVersionConflict
+ }
+
+ cmd.ResultVersion = newVersion
+
+ return nil
})
}
+
+func GetOrCreateAlertNotificationState(ctx context.Context, cmd *m.GetOrCreateNotificationStateQuery) error {
+ return inTransactionCtx(ctx, func(sess *DBSession) error {
+ nj := &m.AlertNotificationState{}
+
+ exist, err := getAlertNotificationState(sess, cmd, nj)
+
+ // if exists, return it, otherwise create it with default values
+ if err != nil {
+ return err
+ }
+
+ if exist {
+ cmd.Result = nj
+ return nil
+ }
+
+ notificationState := &m.AlertNotificationState{
+ OrgId: cmd.OrgId,
+ AlertId: cmd.AlertId,
+ NotifierId: cmd.NotifierId,
+ State: m.AlertNotificationStateUnknown,
+ UpdatedAt: timeNow().Unix(),
+ }
+
+ if _, err := sess.Insert(notificationState); err != nil {
+ if dialect.IsUniqueConstraintViolation(err) {
+ exist, err = getAlertNotificationState(sess, cmd, nj)
+
+ if err != nil {
+ return err
+ }
+
+ if !exist {
+ return errors.New("Should not happen")
+ }
+
+ cmd.Result = nj
+ return nil
+ }
+
+ return err
+ }
+
+ cmd.Result = notificationState
+ return nil
+ })
+}
+
+func getAlertNotificationState(sess *DBSession, cmd *m.GetOrCreateNotificationStateQuery, nj *m.AlertNotificationState) (bool, error) {
+ return sess.
+ Where("alert_notification_state.org_id = ?", cmd.OrgId).
+ Where("alert_notification_state.alert_id = ?", cmd.AlertId).
+ Where("alert_notification_state.notifier_id = ?", cmd.NotifierId).
+ Get(nj)
+}
diff --git a/pkg/services/sqlstore/alert_notification_test.go b/pkg/services/sqlstore/alert_notification_test.go
index 83fb42db9bb..ed682bae5c6 100644
--- a/pkg/services/sqlstore/alert_notification_test.go
+++ b/pkg/services/sqlstore/alert_notification_test.go
@@ -6,7 +6,7 @@ import (
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
- m "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
@@ -14,50 +14,133 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
Convey("Testing Alert notification sql access", t, func() {
InitTestDB(t)
- Convey("Alert notification journal", func() {
- var alertId int64 = 5
- var orgId int64 = 5
- var notifierId int64 = 5
+ Convey("Alert notification state", func() {
+ var alertID int64 = 7
+ var orgID int64 = 5
+ var notifierID int64 = 10
+ oldTimeNow := timeNow
+ now := time.Date(2018, 9, 30, 0, 0, 0, 0, time.UTC)
+ timeNow = func() time.Time { return now }
- Convey("Getting last journal should raise error if no one exists", func() {
- query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId}
- err := GetLatestNotification(context.Background(), query)
- So(err, ShouldEqual, m.ErrJournalingNotFound)
+ Convey("Get no existing state should create a new state", func() {
+ query := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID}
+ err := GetOrCreateAlertNotificationState(context.Background(), query)
+ So(err, ShouldBeNil)
+ So(query.Result, ShouldNotBeNil)
+ So(query.Result.State, ShouldEqual, "unknown")
+ So(query.Result.Version, ShouldEqual, 0)
+ So(query.Result.UpdatedAt, ShouldEqual, now.Unix())
- Convey("shoulbe be able to record two journaling events", func() {
- createCmd := &m.RecordNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId, Success: true, SentAt: 1}
-
- err := RecordNotificationJournal(context.Background(), createCmd)
+ Convey("Get existing state should not create a new state", func() {
+ query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID}
+ err := GetOrCreateAlertNotificationState(context.Background(), query2)
So(err, ShouldBeNil)
+ So(query2.Result, ShouldNotBeNil)
+ So(query2.Result.Id, ShouldEqual, query.Result.Id)
+ So(query2.Result.UpdatedAt, ShouldEqual, now.Unix())
+ })
- createCmd.SentAt += 1000 //increase epoch
+ Convey("Update existing state to pending with correct version should update database", func() {
+ s := *query.Result
- err = RecordNotificationJournal(context.Background(), createCmd)
+ cmd := models.SetAlertNotificationStateToPendingCommand{
+ Id: s.Id,
+ Version: s.Version,
+ AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion,
+ }
+
+ err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd)
So(err, ShouldBeNil)
+ So(cmd.ResultVersion, ShouldEqual, 1)
- Convey("get last journaling event", func() {
- err := GetLatestNotification(context.Background(), query)
+ query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID}
+ err = GetOrCreateAlertNotificationState(context.Background(), query2)
+ So(err, ShouldBeNil)
+ So(query2.Result.Version, ShouldEqual, 1)
+ So(query2.Result.State, ShouldEqual, models.AlertNotificationStatePending)
+ So(query2.Result.UpdatedAt, ShouldEqual, now.Unix())
+
+ Convey("Update existing state to completed should update database", func() {
+ s := *query.Result
+ setStateCmd := models.SetAlertNotificationStateToCompleteCommand{
+ Id: s.Id,
+ Version: cmd.ResultVersion,
+ }
+ err := SetAlertNotificationStateToCompleteCommand(context.Background(), &setStateCmd)
So(err, ShouldBeNil)
- So(query.Result.SentAt, ShouldEqual, 1001)
- Convey("be able to clear all journaling for an notifier", func() {
- cmd := &m.CleanNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId}
- err := CleanNotificationJournal(context.Background(), cmd)
- So(err, ShouldBeNil)
+ query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID}
+ err = GetOrCreateAlertNotificationState(context.Background(), query3)
+ So(err, ShouldBeNil)
+ So(query3.Result.Version, ShouldEqual, 2)
+ So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted)
+ So(query3.Result.UpdatedAt, ShouldEqual, now.Unix())
+ })
- Convey("querying for last junaling should raise error", func() {
- query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId}
- err := GetLatestNotification(context.Background(), query)
- So(err, ShouldEqual, m.ErrJournalingNotFound)
- })
- })
+ Convey("Update existing state to completed should update database. regardless of version", func() {
+ s := *query.Result
+ unknownVersion := int64(1000)
+ cmd := models.SetAlertNotificationStateToCompleteCommand{
+ Id: s.Id,
+ Version: unknownVersion,
+ }
+ err := SetAlertNotificationStateToCompleteCommand(context.Background(), &cmd)
+ So(err, ShouldBeNil)
+
+ query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID}
+ err = GetOrCreateAlertNotificationState(context.Background(), query3)
+ So(err, ShouldBeNil)
+ So(query3.Result.Version, ShouldEqual, unknownVersion+1)
+ So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted)
+ So(query3.Result.UpdatedAt, ShouldEqual, now.Unix())
})
})
+
+ Convey("Update existing state to pending with incorrect version should return version mismatch error", func() {
+ s := *query.Result
+ s.Version = 1000
+ cmd := models.SetAlertNotificationStateToPendingCommand{
+ Id: s.NotifierId,
+ Version: s.Version,
+ AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion,
+ }
+ err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd)
+ So(err, ShouldEqual, models.ErrAlertNotificationStateVersionConflict)
+ })
+
+ Convey("Updating existing state to pending with incorrect version since alert rule state update version is higher", func() {
+ s := *query.Result
+ cmd := models.SetAlertNotificationStateToPendingCommand{
+ Id: s.Id,
+ Version: s.Version,
+ AlertRuleStateUpdatedVersion: 1000,
+ }
+ err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd)
+ So(err, ShouldBeNil)
+
+ So(cmd.ResultVersion, ShouldEqual, 1)
+ })
+
+ Convey("different version and same alert state change version should return error", func() {
+ s := *query.Result
+ s.Version = 1000
+ cmd := models.SetAlertNotificationStateToPendingCommand{
+ Id: s.Id,
+ Version: s.Version,
+ AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion,
+ }
+ err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd)
+ So(err, ShouldNotBeNil)
+ })
+ })
+
+ Reset(func() {
+ timeNow = oldTimeNow
})
})
Convey("Alert notifications should be empty", func() {
- cmd := &m.GetAlertNotificationsQuery{
+ cmd := &models.GetAlertNotificationsQuery{
OrgId: 2,
Name: "email",
}
@@ -68,7 +151,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("Cannot save alert notifier with send reminder = true", func() {
- cmd := &m.CreateAlertNotificationCommand{
+ cmd := &models.CreateAlertNotificationCommand{
Name: "ops",
Type: "email",
OrgId: 1,
@@ -78,7 +161,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
Convey("and missing frequency", func() {
err := CreateAlertNotificationCommand(cmd)
- So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound)
+ So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound)
})
Convey("invalid frequency", func() {
@@ -90,7 +173,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("Cannot update alert notifier with send reminder = false", func() {
- cmd := &m.CreateAlertNotificationCommand{
+ cmd := &models.CreateAlertNotificationCommand{
Name: "ops update",
Type: "email",
OrgId: 1,
@@ -101,14 +184,14 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
err := CreateAlertNotificationCommand(cmd)
So(err, ShouldBeNil)
- updateCmd := &m.UpdateAlertNotificationCommand{
+ updateCmd := &models.UpdateAlertNotificationCommand{
Id: cmd.Result.Id,
SendReminder: true,
}
Convey("and missing frequency", func() {
err := UpdateAlertNotification(updateCmd)
- So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound)
+ So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound)
})
Convey("invalid frequency", func() {
@@ -121,7 +204,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("Can save Alert Notification", func() {
- cmd := &m.CreateAlertNotificationCommand{
+ cmd := &models.CreateAlertNotificationCommand{
Name: "ops",
Type: "email",
OrgId: 1,
@@ -143,7 +226,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("Can update alert notification", func() {
- newCmd := &m.UpdateAlertNotificationCommand{
+ newCmd := &models.UpdateAlertNotificationCommand{
Name: "NewName",
Type: "webhook",
OrgId: cmd.Result.OrgId,
@@ -159,7 +242,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("Can update alert notification to disable sending of reminders", func() {
- newCmd := &m.UpdateAlertNotificationCommand{
+ newCmd := &models.UpdateAlertNotificationCommand{
Name: "NewName",
Type: "webhook",
OrgId: cmd.Result.OrgId,
@@ -174,12 +257,12 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("Can search using an array of ids", func() {
- cmd1 := m.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
- cmd2 := m.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
- cmd3 := m.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
- cmd4 := m.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd1 := models.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd2 := models.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd3 := models.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ cmd4 := models.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
- otherOrg := m.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
+ otherOrg := models.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()}
So(CreateAlertNotificationCommand(&cmd1), ShouldBeNil)
So(CreateAlertNotificationCommand(&cmd2), ShouldBeNil)
@@ -188,7 +271,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
So(CreateAlertNotificationCommand(&otherOrg), ShouldBeNil)
Convey("search", func() {
- query := &m.GetAlertNotificationsToSendQuery{
+ query := &models.GetAlertNotificationsToSendQuery{
Ids: []int64{cmd1.Result.Id, cmd2.Result.Id, 112341231},
OrgId: 1,
}
@@ -199,7 +282,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
})
Convey("all", func() {
- query := &m.GetAllAlertNotificationsQuery{
+ query := &models.GetAllAlertNotificationsQuery{
OrgId: 1,
}
diff --git a/pkg/services/sqlstore/annotation.go b/pkg/services/sqlstore/annotation.go
index a65bc136554..274481baeca 100644
--- a/pkg/services/sqlstore/annotation.go
+++ b/pkg/services/sqlstore/annotation.go
@@ -110,7 +110,7 @@ func (r *SqlAnnotationRepo) Update(item *annotations.Item) error {
existing.Tags = item.Tags
- _, err = sess.Table("annotation").Id(existing.Id).Cols("epoch", "text", "region_id", "updated", "tags").Update(existing)
+ _, err = sess.Table("annotation").ID(existing.Id).Cols("epoch", "text", "region_id", "updated", "tags").Update(existing)
return err
})
}
@@ -211,7 +211,12 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I
)
`, strings.Join(keyValueFilters, " OR "))
- sql.WriteString(fmt.Sprintf(" AND (%s) = %d ", tagsSubQuery, len(tags)))
+ if query.MatchAny {
+ sql.WriteString(fmt.Sprintf(" AND (%s) > 0 ", tagsSubQuery))
+ } else {
+ sql.WriteString(fmt.Sprintf(" AND (%s) = %d ", tagsSubQuery, len(tags)))
+ }
+
}
}
@@ -223,7 +228,7 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I
items := make([]*annotations.ItemDTO, 0)
- if err := x.Sql(sql.String(), params...).Find(&items); err != nil {
+ if err := x.SQL(sql.String(), params...).Find(&items); err != nil {
return nil, err
}
diff --git a/pkg/services/sqlstore/annotation_test.go b/pkg/services/sqlstore/annotation_test.go
index c0d267f2578..d3459527e7d 100644
--- a/pkg/services/sqlstore/annotation_test.go
+++ b/pkg/services/sqlstore/annotation_test.go
@@ -78,7 +78,31 @@ func TestAnnotations(t *testing.T) {
So(err, ShouldBeNil)
So(annotation2.Id, ShouldBeGreaterThan, 0)
- Convey("Can query for annotation", func() {
+ globalAnnotation1 := &annotations.Item{
+ OrgId: 1,
+ UserId: 1,
+ Text: "deploy",
+ Type: "",
+ Epoch: 15,
+ Tags: []string{"deploy"},
+ }
+ err = repo.Save(globalAnnotation1)
+ So(err, ShouldBeNil)
+ So(globalAnnotation1.Id, ShouldBeGreaterThan, 0)
+
+ globalAnnotation2 := &annotations.Item{
+ OrgId: 1,
+ UserId: 1,
+ Text: "rollback",
+ Type: "",
+ Epoch: 17,
+ Tags: []string{"rollback"},
+ }
+ err = repo.Save(globalAnnotation2)
+ So(err, ShouldBeNil)
+ So(globalAnnotation2.Id, ShouldBeGreaterThan, 0)
+
+ Convey("Can query for annotation by dashboard id", func() {
items, err := repo.Find(&annotations.ItemQuery{
OrgId: 1,
DashboardId: 1,
@@ -165,7 +189,7 @@ func TestAnnotations(t *testing.T) {
OrgId: 1,
DashboardId: 1,
From: 1,
- To: 15,
+ To: 15, // this will exclude the second test annotation
Tags: []string{"outage", "error"},
})
@@ -173,6 +197,19 @@ func TestAnnotations(t *testing.T) {
So(items, ShouldHaveLength, 1)
})
+ Convey("Should find two annotations using partial match", func() {
+ items, err := repo.Find(&annotations.ItemQuery{
+ OrgId: 1,
+ From: 1,
+ To: 25,
+ MatchAny: true,
+ Tags: []string{"rollback", "deploy"},
+ })
+
+ So(err, ShouldBeNil)
+ So(items, ShouldHaveLength, 2)
+ })
+
Convey("Should find one when all key value tag filters does match", func() {
items, err := repo.Find(&annotations.ItemQuery{
OrgId: 1,
diff --git a/pkg/services/sqlstore/dashboard.go b/pkg/services/sqlstore/dashboard.go
index aff532bb3b5..e43279208e7 100644
--- a/pkg/services/sqlstore/dashboard.go
+++ b/pkg/services/sqlstore/dashboard.go
@@ -225,7 +225,7 @@ func findDashboards(query *search.FindPersistedDashboardsQuery) ([]DashboardSear
var res []DashboardSearchProjection
sql, params := sb.ToSql()
- err := x.Sql(sql, params...).Find(&res)
+ err := x.SQL(sql, params...).Find(&res)
if err != nil {
return nil, err
}
@@ -295,10 +295,11 @@ func GetDashboardTags(query *m.GetDashboardTagsQuery) error {
FROM dashboard
INNER JOIN dashboard_tag on dashboard_tag.dashboard_id = dashboard.id
WHERE dashboard.org_id=?
- GROUP BY term`
+ GROUP BY term
+ ORDER BY term`
query.Result = make([]*m.DashboardTagCloudItem, 0)
- sess := x.Sql(sql, query.OrgId)
+ sess := x.SQL(sql, query.OrgId)
err := sess.Find(&query.Result)
return err
}
@@ -412,7 +413,7 @@ func GetDashboardPermissionsForUser(query *m.GetDashboardPermissionsForUserQuery
params = append(params, query.UserId)
params = append(params, dialect.BooleanStr(false))
- err := x.Sql(sql, params...).Find(&query.Result)
+ err := x.SQL(sql, params...).Find(&query.Result)
for _, p := range query.Result {
p.PermissionName = p.Permission.String()
@@ -631,7 +632,7 @@ func HasEditPermissionInFolders(query *m.HasEditPermissionInFoldersQuery) error
}
resp := make([]*folderCount, 0)
- if err := x.Sql(builder.GetSqlString(), builder.params...).Find(&resp); err != nil {
+ if err := x.SQL(builder.GetSqlString(), builder.params...).Find(&resp); err != nil {
return err
}
diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go b/pkg/services/sqlstore/dashboard_service_integration_test.go
index a9658f7ab76..a4e76aca340 100644
--- a/pkg/services/sqlstore/dashboard_service_integration_test.go
+++ b/pkg/services/sqlstore/dashboard_service_integration_test.go
@@ -932,29 +932,6 @@ func TestIntegratedDashboardService(t *testing.T) {
})
}
-type scenarioContext struct {
- dashboardGuardianMock *guardian.FakeDashboardGuardian
-}
-
-type scenarioFunc func(c *scenarioContext)
-
-func dashboardGuardianScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) {
- Convey(desc, func() {
- origNewDashboardGuardian := guardian.New
- guardian.MockDashboardGuardian(mock)
-
- sc := &scenarioContext{
- dashboardGuardianMock: mock,
- }
-
- defer func() {
- guardian.New = origNewDashboardGuardian
- }()
-
- fn(sc)
- })
-}
-
type dashboardPermissionScenarioContext struct {
dashboardGuardianMock *guardian.FakeDashboardGuardian
}
diff --git a/pkg/services/sqlstore/migrations/alert_mig.go b/pkg/services/sqlstore/migrations/alert_mig.go
index e27e64c6124..cadcccf6c95 100644
--- a/pkg/services/sqlstore/migrations/alert_mig.go
+++ b/pkg/services/sqlstore/migrations/alert_mig.go
@@ -107,4 +107,27 @@ func addAlertMigrations(mg *Migrator) {
mg.AddMigration("create notification_journal table v1", NewAddTableMigration(notification_journal))
mg.AddMigration("add index notification_journal org_id & alert_id & notifier_id", NewAddIndexMigration(notification_journal, notification_journal.Indices[0]))
+
+ mg.AddMigration("drop alert_notification_journal", NewDropTableMigration("alert_notification_journal"))
+
+ alert_notification_state := Table{
+ Name: "alert_notification_state",
+ Columns: []*Column{
+ {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
+ {Name: "org_id", Type: DB_BigInt, Nullable: false},
+ {Name: "alert_id", Type: DB_BigInt, Nullable: false},
+ {Name: "notifier_id", Type: DB_BigInt, Nullable: false},
+ {Name: "state", Type: DB_NVarchar, Length: 50, Nullable: false},
+ {Name: "version", Type: DB_BigInt, Nullable: false},
+ {Name: "updated_at", Type: DB_BigInt, Nullable: false},
+ {Name: "alert_rule_state_updated_version", Type: DB_BigInt, Nullable: false},
+ },
+ Indices: []*Index{
+ {Cols: []string{"org_id", "alert_id", "notifier_id"}, Type: UniqueIndex},
+ },
+ }
+
+ mg.AddMigration("create alert_notification_state table v1", NewAddTableMigration(alert_notification_state))
+ mg.AddMigration("add index alert_notification_state org_id & alert_id & notifier_id",
+ NewAddIndexMigration(alert_notification_state, alert_notification_state.Indices[0]))
}
diff --git a/pkg/services/sqlstore/migrations/annotation_mig.go b/pkg/services/sqlstore/migrations/annotation_mig.go
index d231d3283e2..49920dee490 100644
--- a/pkg/services/sqlstore/migrations/annotation_mig.go
+++ b/pkg/services/sqlstore/migrations/annotation_mig.go
@@ -105,7 +105,7 @@ func addAnnotationMig(mg *Migrator) {
}))
//
- // Convert epoch saved as seconds to miliseconds
+ // Convert epoch saved as seconds to milliseconds
//
updateEpochSql := "UPDATE annotation SET epoch = (epoch*1000) where epoch < 9999999999"
mg.AddMigration("Convert existing annotations from seconds to milliseconds", NewRawSqlMigration(updateEpochSql))
diff --git a/pkg/services/sqlstore/migrations/team_mig.go b/pkg/services/sqlstore/migrations/team_mig.go
index 9800d27f8ab..34c46ad13cf 100644
--- a/pkg/services/sqlstore/migrations/team_mig.go
+++ b/pkg/services/sqlstore/migrations/team_mig.go
@@ -51,4 +51,7 @@ func addTeamMigrations(mg *Migrator) {
Name: "email", Type: DB_NVarchar, Nullable: true, Length: 190,
}))
+ mg.AddMigration("Add column external to team_member table", NewAddColumnMigration(teamMemberV1, &Column{
+ Name: "external", Type: DB_Bool, Nullable: true,
+ }))
}
diff --git a/pkg/services/sqlstore/migrations/user_mig.go b/pkg/services/sqlstore/migrations/user_mig.go
index 400033aaa33..e273cb7d542 100644
--- a/pkg/services/sqlstore/migrations/user_mig.go
+++ b/pkg/services/sqlstore/migrations/user_mig.go
@@ -134,7 +134,7 @@ type TempUserDTO struct {
func (m *AddMissingUserSaltAndRandsMigration) Exec(sess *xorm.Session, mg *Migrator) error {
users := make([]*TempUserDTO, 0)
- err := sess.Sql(fmt.Sprintf("SELECT id, login from %s WHERE rands = ''", mg.Dialect.Quote("user"))).Find(&users)
+ err := sess.SQL(fmt.Sprintf("SELECT id, login from %s WHERE rands = ''", mg.Dialect.Quote("user"))).Find(&users)
if err != nil {
return err
}
diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go
index 427d102b280..506a01c3ed8 100644
--- a/pkg/services/sqlstore/migrator/dialect.go
+++ b/pkg/services/sqlstore/migrator/dialect.go
@@ -44,6 +44,8 @@ type Dialect interface {
CleanDB() error
NoOpSql() string
+
+ IsUniqueConstraintViolation(err error) bool
}
func NewDialect(engine *xorm.Engine) Dialect {
diff --git a/pkg/services/sqlstore/migrator/mysql_dialect.go b/pkg/services/sqlstore/migrator/mysql_dialect.go
index 1ed16871c15..7daa4597430 100644
--- a/pkg/services/sqlstore/migrator/mysql_dialect.go
+++ b/pkg/services/sqlstore/migrator/mysql_dialect.go
@@ -5,6 +5,8 @@ import (
"strconv"
"strings"
+ "github.com/VividCortex/mysqlerr"
+ "github.com/go-sql-driver/mysql"
"github.com/go-xorm/xorm"
)
@@ -125,3 +127,13 @@ func (db *Mysql) CleanDB() error {
return nil
}
+
+func (db *Mysql) IsUniqueConstraintViolation(err error) bool {
+ if driverErr, ok := err.(*mysql.MySQLError); ok {
+ if driverErr.Number == mysqlerr.ER_DUP_ENTRY {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/pkg/services/sqlstore/migrator/postgres_dialect.go b/pkg/services/sqlstore/migrator/postgres_dialect.go
index eae9ad3ca3f..ab8812a1e26 100644
--- a/pkg/services/sqlstore/migrator/postgres_dialect.go
+++ b/pkg/services/sqlstore/migrator/postgres_dialect.go
@@ -6,6 +6,7 @@ import (
"strings"
"github.com/go-xorm/xorm"
+ "github.com/lib/pq"
)
type Postgres struct {
@@ -136,3 +137,13 @@ func (db *Postgres) CleanDB() error {
return nil
}
+
+func (db *Postgres) IsUniqueConstraintViolation(err error) bool {
+ if driverErr, ok := err.(*pq.Error); ok {
+ if driverErr.Code == "23505" {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/pkg/services/sqlstore/migrator/sqlite_dialect.go b/pkg/services/sqlstore/migrator/sqlite_dialect.go
index 01082b95c88..446e3fcef12 100644
--- a/pkg/services/sqlstore/migrator/sqlite_dialect.go
+++ b/pkg/services/sqlstore/migrator/sqlite_dialect.go
@@ -4,6 +4,7 @@ import (
"fmt"
"github.com/go-xorm/xorm"
+ sqlite3 "github.com/mattn/go-sqlite3"
)
type Sqlite3 struct {
@@ -82,3 +83,13 @@ func (db *Sqlite3) DropIndexSql(tableName string, index *Index) string {
func (db *Sqlite3) CleanDB() error {
return nil
}
+
+func (db *Sqlite3) IsUniqueConstraintViolation(err error) bool {
+ if driverErr, ok := err.(sqlite3.Error); ok {
+ if driverErr.ExtendedCode == sqlite3.ErrConstraintUnique {
+ return true
+ }
+ }
+
+ return false
+}
diff --git a/pkg/services/sqlstore/org.go b/pkg/services/sqlstore/org.go
index 8931f1cf0f5..e36a80322d8 100644
--- a/pkg/services/sqlstore/org.go
+++ b/pkg/services/sqlstore/org.go
@@ -133,7 +133,7 @@ func UpdateOrg(cmd *m.UpdateOrgCommand) error {
Updated: time.Now(),
}
- affectedRows, err := sess.Id(cmd.OrgId).Update(&org)
+ affectedRows, err := sess.ID(cmd.OrgId).Update(&org)
if err != nil {
return err
@@ -166,7 +166,7 @@ func UpdateOrgAddress(cmd *m.UpdateOrgAddressCommand) error {
Updated: time.Now(),
}
- if _, err := sess.Id(cmd.OrgId).Update(&org); err != nil {
+ if _, err := sess.ID(cmd.OrgId).Update(&org); err != nil {
return err
}
diff --git a/pkg/services/sqlstore/org_test.go b/pkg/services/sqlstore/org_test.go
index af8500707d5..c02686c24ba 100644
--- a/pkg/services/sqlstore/org_test.go
+++ b/pkg/services/sqlstore/org_test.go
@@ -182,6 +182,21 @@ func TestAccountDataAccess(t *testing.T) {
})
})
+ Convey("Removing user from org should delete user completely if in no other org", func() {
+ // make sure ac2 has no org
+ err := DeleteOrg(&m.DeleteOrgCommand{Id: ac2.OrgId})
+ So(err, ShouldBeNil)
+
+ // remove ac2 from ac1 org
+ remCmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac2.Id, ShouldDeleteOrphanedUser: true}
+ err = RemoveOrgUser(&remCmd)
+ So(err, ShouldBeNil)
+ So(remCmd.UserWasDeleted, ShouldBeTrue)
+
+ err = GetSignedInUser(&m.GetSignedInUserQuery{UserId: ac2.Id})
+ So(err, ShouldEqual, m.ErrUserNotFound)
+ })
+
Convey("Cannot delete last admin org user", func() {
cmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac1.Id}
err := RemoveOrgUser(&cmd)
diff --git a/pkg/services/sqlstore/org_users.go b/pkg/services/sqlstore/org_users.go
index aad72cdacb4..abbc320020e 100644
--- a/pkg/services/sqlstore/org_users.go
+++ b/pkg/services/sqlstore/org_users.go
@@ -21,7 +21,7 @@ func AddOrgUser(cmd *m.AddOrgUserCommand) error {
return inTransaction(func(sess *DBSession) error {
// check if user exists
var user m.User
- if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil {
+ if exists, err := sess.ID(cmd.UserId).Get(&user); err != nil {
return err
} else if !exists {
return m.ErrUserNotFound
@@ -85,7 +85,7 @@ func UpdateOrgUser(cmd *m.UpdateOrgUserCommand) error {
orgUser.Role = cmd.Role
orgUser.Updated = time.Now()
- _, err = sess.Id(orgUser.Id).Update(&orgUser)
+ _, err = sess.ID(orgUser.Id).Update(&orgUser)
if err != nil {
return err
}
@@ -138,7 +138,7 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
return inTransaction(func(sess *DBSession) error {
// check if user exists
var user m.User
- if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil {
+ if exists, err := sess.ID(cmd.UserId).Get(&user); err != nil {
return err
} else if !exists {
return m.ErrUserNotFound
@@ -157,6 +157,12 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
}
}
+ // validate that after delete there is at least one user with admin role in org
+ if err := validateOneAdminLeftInOrg(cmd.OrgId, sess); err != nil {
+ return err
+ }
+
+ // check user other orgs and update user current org
var userOrgs []*m.UserOrgDTO
sess.Table("org_user")
sess.Join("INNER", "org", "org_user.org_id=org.id")
@@ -168,22 +174,31 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
return err
}
- hasCurrentOrgSet := false
- for _, userOrg := range userOrgs {
- if user.OrgId == userOrg.OrgId {
- hasCurrentOrgSet = true
- break
+ if len(userOrgs) > 0 {
+ hasCurrentOrgSet := false
+ for _, userOrg := range userOrgs {
+ if user.OrgId == userOrg.OrgId {
+ hasCurrentOrgSet = true
+ break
+ }
}
- }
- if !hasCurrentOrgSet && len(userOrgs) > 0 {
- err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId)
- if err != nil {
+ if !hasCurrentOrgSet {
+ err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId)
+ if err != nil {
+ return err
+ }
+ }
+ } else if cmd.ShouldDeleteOrphanedUser {
+ // no other orgs, delete the full user
+ if err := deleteUserInTransaction(sess, &m.DeleteUserCommand{UserId: user.Id}); err != nil {
return err
}
+
+ cmd.UserWasDeleted = true
}
- return validateOneAdminLeftInOrg(cmd.OrgId, sess)
+ return nil
})
}
diff --git a/pkg/services/sqlstore/plugin_setting.go b/pkg/services/sqlstore/plugin_setting.go
index 676d26fad56..8fbf1b6be1c 100644
--- a/pkg/services/sqlstore/plugin_setting.go
+++ b/pkg/services/sqlstore/plugin_setting.go
@@ -26,7 +26,7 @@ func GetPluginSettings(query *m.GetPluginSettingsQuery) error {
params = append(params, query.OrgId)
}
- sess := x.Sql(sql, params...)
+ sess := x.SQL(sql, params...)
query.Result = make([]*m.PluginSettingInfoDTO, 0)
return sess.Find(&query.Result)
}
@@ -100,7 +100,7 @@ func UpdatePluginSetting(cmd *m.UpdatePluginSettingCmd) error {
pluginSetting.Pinned = cmd.Pinned
pluginSetting.PluginVersion = cmd.PluginVersion
- _, err = sess.Id(pluginSetting.Id).Update(&pluginSetting)
+ _, err = sess.ID(pluginSetting.Id).Update(&pluginSetting)
return err
})
}
diff --git a/pkg/services/sqlstore/preferences.go b/pkg/services/sqlstore/preferences.go
index 885837764fc..04e787971d9 100644
--- a/pkg/services/sqlstore/preferences.go
+++ b/pkg/services/sqlstore/preferences.go
@@ -94,7 +94,7 @@ func SavePreferences(cmd *m.SavePreferencesCommand) error {
prefs.Theme = cmd.Theme
prefs.Updated = time.Now()
prefs.Version += 1
- _, err = sess.Id(prefs.Id).AllCols().Update(&prefs)
+ _, err = sess.ID(prefs.Id).AllCols().Update(&prefs)
return err
})
}
diff --git a/pkg/services/sqlstore/quota.go b/pkg/services/sqlstore/quota.go
index 539555ddc50..7005b341268 100644
--- a/pkg/services/sqlstore/quota.go
+++ b/pkg/services/sqlstore/quota.go
@@ -38,7 +38,7 @@ func GetOrgQuotaByTarget(query *m.GetOrgQuotaByTargetQuery) error {
//get quota used.
rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where org_id=?", dialect.Quote(query.Target))
resp := make([]*targetCount, 0)
- if err := x.Sql(rawSql, query.OrgId).Find(&resp); err != nil {
+ if err := x.SQL(rawSql, query.OrgId).Find(&resp); err != nil {
return err
}
@@ -81,7 +81,7 @@ func GetOrgQuotas(query *m.GetOrgQuotasQuery) error {
//get quota used.
rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where org_id=?", dialect.Quote(q.Target))
resp := make([]*targetCount, 0)
- if err := x.Sql(rawSql, q.OrgId).Find(&resp); err != nil {
+ if err := x.SQL(rawSql, q.OrgId).Find(&resp); err != nil {
return err
}
result[i] = &m.OrgQuotaDTO{
@@ -116,7 +116,7 @@ func UpdateOrgQuota(cmd *m.UpdateOrgQuotaCmd) error {
}
} else {
//update existing quota entry in the DB.
- if _, err := sess.Id(quota.Id).Update(&quota); err != nil {
+ if _, err := sess.ID(quota.Id).Update(&quota); err != nil {
return err
}
}
@@ -140,7 +140,7 @@ func GetUserQuotaByTarget(query *m.GetUserQuotaByTargetQuery) error {
//get quota used.
rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where user_id=?", dialect.Quote(query.Target))
resp := make([]*targetCount, 0)
- if err := x.Sql(rawSql, query.UserId).Find(&resp); err != nil {
+ if err := x.SQL(rawSql, query.UserId).Find(&resp); err != nil {
return err
}
@@ -183,7 +183,7 @@ func GetUserQuotas(query *m.GetUserQuotasQuery) error {
//get quota used.
rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s where user_id=?", dialect.Quote(q.Target))
resp := make([]*targetCount, 0)
- if err := x.Sql(rawSql, q.UserId).Find(&resp); err != nil {
+ if err := x.SQL(rawSql, q.UserId).Find(&resp); err != nil {
return err
}
result[i] = &m.UserQuotaDTO{
@@ -218,7 +218,7 @@ func UpdateUserQuota(cmd *m.UpdateUserQuotaCmd) error {
}
} else {
//update existing quota entry in the DB.
- if _, err := sess.Id(quota.Id).Update(&quota); err != nil {
+ if _, err := sess.ID(quota.Id).Update(&quota); err != nil {
return err
}
}
@@ -231,7 +231,7 @@ func GetGlobalQuotaByTarget(query *m.GetGlobalQuotaByTargetQuery) error {
//get quota used.
rawSql := fmt.Sprintf("SELECT COUNT(*) as count from %s", dialect.Quote(query.Target))
resp := make([]*targetCount, 0)
- if err := x.Sql(rawSql).Find(&resp); err != nil {
+ if err := x.SQL(rawSql).Find(&resp); err != nil {
return err
}
diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go
index 5477bc7b2d1..67f1bd7f75a 100644
--- a/pkg/services/sqlstore/sqlstore.go
+++ b/pkg/services/sqlstore/sqlstore.go
@@ -233,7 +233,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) {
case migrator.SQLITE:
// special case for tests
if !filepath.IsAbs(ss.dbCfg.Path) {
- ss.dbCfg.Path = filepath.Join(setting.DataPath, ss.dbCfg.Path)
+ ss.dbCfg.Path = filepath.Join(ss.Cfg.DataPath, ss.dbCfg.Path)
}
os.MkdirAll(path.Dir(ss.dbCfg.Path), os.ModePerm)
cnnstr = "file:" + ss.dbCfg.Path + "?cache=shared&mode=rwc"
diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go
index 72955df9a6a..a3010a086e5 100644
--- a/pkg/services/sqlstore/team.go
+++ b/pkg/services/sqlstore/team.go
@@ -74,7 +74,7 @@ func UpdateTeam(cmd *m.UpdateTeamCommand) error {
sess.MustCols("email")
- affectedRows, err := sess.Id(cmd.Id).Update(&team)
+ affectedRows, err := sess.ID(cmd.Id).Update(&team)
if err != nil {
return err
@@ -169,7 +169,7 @@ func SearchTeams(query *m.SearchTeamsQuery) error {
sql.WriteString(dialect.LimitOffset(int64(query.Limit), int64(offset)))
}
- if err := x.Sql(sql.String(), params...).Find(&query.Result.Teams); err != nil {
+ if err := x.SQL(sql.String(), params...).Find(&query.Result.Teams); err != nil {
return err
}
@@ -196,7 +196,7 @@ func GetTeamById(query *m.GetTeamByIdQuery) error {
sql.WriteString(` WHERE team.org_id = ? and team.id = ?`)
var team m.TeamDTO
- exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team)
+ exists, err := x.SQL(sql.String(), query.OrgId, query.Id).Get(&team)
if err != nil {
return err
@@ -220,7 +220,7 @@ func GetTeamsByUser(query *m.GetTeamsByUserQuery) error {
sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`)
sql.WriteString(` WHERE team.org_id = ? and team_member.user_id = ?`)
- err := x.Sql(sql.String(), query.OrgId, query.UserId).Find(&query.Result)
+ err := x.SQL(sql.String(), query.OrgId, query.UserId).Find(&query.Result)
return err
}
@@ -240,11 +240,12 @@ func AddTeamMember(cmd *m.AddTeamMemberCommand) error {
}
entity := m.TeamMember{
- OrgId: cmd.OrgId,
- TeamId: cmd.TeamId,
- UserId: cmd.UserId,
- Created: time.Now(),
- Updated: time.Now(),
+ OrgId: cmd.OrgId,
+ TeamId: cmd.TeamId,
+ UserId: cmd.UserId,
+ External: cmd.External,
+ Created: time.Now(),
+ Updated: time.Now(),
}
_, err := sess.Insert(&entity)
@@ -289,7 +290,10 @@ func GetTeamMembers(query *m.GetTeamMembersQuery) error {
if query.UserId != 0 {
sess.Where("team_member.user_id=?", query.UserId)
}
- sess.Cols("user.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login")
+ if query.External {
+ sess.Where("team_member.external=?", dialect.BooleanStr(true))
+ }
+ sess.Cols("team_member.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login", "team_member.external")
sess.Asc("user.login", "user.email")
err := sess.Find(&query.Result)
diff --git a/pkg/services/sqlstore/team_test.go b/pkg/services/sqlstore/team_test.go
index abaa973957d..8f243617262 100644
--- a/pkg/services/sqlstore/team_test.go
+++ b/pkg/services/sqlstore/team_test.go
@@ -50,13 +50,29 @@ func TestTeamCommandsAndQueries(t *testing.T) {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
+ err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[1], External: true})
+ So(err, ShouldBeNil)
q1 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id}
err = GetTeamMembers(q1)
So(err, ShouldBeNil)
+ So(q1.Result, ShouldHaveLength, 2)
So(q1.Result[0].TeamId, ShouldEqual, team1.Id)
So(q1.Result[0].Login, ShouldEqual, "loginuser0")
So(q1.Result[0].OrgId, ShouldEqual, testOrgId)
+ So(q1.Result[1].TeamId, ShouldEqual, team1.Id)
+ So(q1.Result[1].Login, ShouldEqual, "loginuser1")
+ So(q1.Result[1].OrgId, ShouldEqual, testOrgId)
+ So(q1.Result[1].External, ShouldEqual, true)
+
+ q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id, External: true}
+ err = GetTeamMembers(q2)
+ So(err, ShouldBeNil)
+ So(q2.Result, ShouldHaveLength, 1)
+ So(q2.Result[0].TeamId, ShouldEqual, team1.Id)
+ So(q2.Result[0].Login, ShouldEqual, "loginuser1")
+ So(q2.Result[0].OrgId, ShouldEqual, testOrgId)
+ So(q2.Result[0].External, ShouldEqual, true)
})
Convey("Should be able to search for teams", func() {
diff --git a/pkg/services/sqlstore/temp_user.go b/pkg/services/sqlstore/temp_user.go
index e93ba2fd641..f13752f8038 100644
--- a/pkg/services/sqlstore/temp_user.go
+++ b/pkg/services/sqlstore/temp_user.go
@@ -96,7 +96,7 @@ func GetTempUsersQuery(query *m.GetTempUsersQuery) error {
rawSql += " ORDER BY tu.created desc"
query.Result = make([]*m.TempUserDTO, 0)
- sess := x.Sql(rawSql, params...)
+ sess := x.SQL(rawSql, params...)
err := sess.Find(&query.Result)
return err
}
@@ -121,7 +121,7 @@ func GetTempUserByCode(query *m.GetTempUserByCodeQuery) error {
WHERE tu.code=?`
var tempUser m.TempUserDTO
- sess := x.Sql(rawSql, query.Code)
+ sess := x.SQL(rawSql, query.Code)
has, err := sess.Get(&tempUser)
if err != nil {
diff --git a/pkg/services/sqlstore/transactions_test.go b/pkg/services/sqlstore/transactions_test.go
index 937649921ba..041359cf1d3 100644
--- a/pkg/services/sqlstore/transactions_test.go
+++ b/pkg/services/sqlstore/transactions_test.go
@@ -10,10 +10,6 @@ import (
. "github.com/smartystreets/goconvey/convey"
)
-type testQuery struct {
- result bool
-}
-
var ProvokedError = errors.New("testing error.")
func TestTransaction(t *testing.T) {
@@ -39,7 +35,7 @@ func TestTransaction(t *testing.T) {
So(err, ShouldEqual, models.ErrInvalidApiKey)
})
- Convey("wont update if one handler fails", func() {
+ Convey("won't update if one handler fails", func() {
err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
err := DeleteApiKeyCtx(ctx, deleteApiKeyCmd)
if err != nil {
diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go
index 5d1b827e79f..72d5654a777 100644
--- a/pkg/services/sqlstore/user.go
+++ b/pkg/services/sqlstore/user.go
@@ -240,7 +240,7 @@ func UpdateUser(cmd *m.UpdateUserCommand) error {
Updated: time.Now(),
}
- if _, err := sess.Id(cmd.UserId).Update(&user); err != nil {
+ if _, err := sess.ID(cmd.UserId).Update(&user); err != nil {
return err
}
@@ -264,22 +264,19 @@ func ChangeUserPassword(cmd *m.ChangeUserPasswordCommand) error {
Updated: time.Now(),
}
- _, err := sess.Id(cmd.UserId).Update(&user)
+ _, err := sess.ID(cmd.UserId).Update(&user)
return err
})
}
func UpdateUserLastSeenAt(cmd *m.UpdateUserLastSeenAtCommand) error {
return inTransaction(func(sess *DBSession) error {
- if cmd.UserId <= 0 {
- }
-
user := m.User{
Id: cmd.UserId,
LastSeenAt: time.Now(),
}
- _, err := sess.Id(cmd.UserId).Update(&user)
+ _, err := sess.ID(cmd.UserId).Update(&user)
return err
})
}
@@ -310,7 +307,7 @@ func setUsingOrgInTransaction(sess *DBSession, userID int64, orgID int64) error
OrgId: orgID,
}
- _, err := sess.Id(userID).Update(&user)
+ _, err := sess.ID(userID).Update(&user)
return err
}
@@ -372,11 +369,11 @@ func GetSignedInUser(query *m.GetSignedInUserQuery) error {
sess := x.Table("user")
if query.UserId > 0 {
- sess.Sql(rawSql+"WHERE u.id=?", query.UserId)
+ sess.SQL(rawSql+"WHERE u.id=?", query.UserId)
} else if query.Login != "" {
- sess.Sql(rawSql+"WHERE u.login=?", query.Login)
+ sess.SQL(rawSql+"WHERE u.login=?", query.Login)
} else if query.Email != "" {
- sess.Sql(rawSql+"WHERE u.email=?", query.Email)
+ sess.SQL(rawSql+"WHERE u.email=?", query.Email)
}
var user m.SignedInUser
@@ -448,35 +445,39 @@ func SearchUsers(query *m.SearchUsersQuery) error {
func DeleteUser(cmd *m.DeleteUserCommand) error {
return inTransaction(func(sess *DBSession) error {
- deletes := []string{
- "DELETE FROM star WHERE user_id = ?",
- "DELETE FROM " + dialect.Quote("user") + " WHERE id = ?",
- "DELETE FROM org_user WHERE user_id = ?",
- "DELETE FROM dashboard_acl WHERE user_id = ?",
- "DELETE FROM preferences WHERE user_id = ?",
- "DELETE FROM team_member WHERE user_id = ?",
- "DELETE FROM user_auth WHERE user_id = ?",
- }
-
- for _, sql := range deletes {
- _, err := sess.Exec(sql, cmd.UserId)
- if err != nil {
- return err
- }
- }
-
- return nil
+ return deleteUserInTransaction(sess, cmd)
})
}
+func deleteUserInTransaction(sess *DBSession, cmd *m.DeleteUserCommand) error {
+ deletes := []string{
+ "DELETE FROM star WHERE user_id = ?",
+ "DELETE FROM " + dialect.Quote("user") + " WHERE id = ?",
+ "DELETE FROM org_user WHERE user_id = ?",
+ "DELETE FROM dashboard_acl WHERE user_id = ?",
+ "DELETE FROM preferences WHERE user_id = ?",
+ "DELETE FROM team_member WHERE user_id = ?",
+ "DELETE FROM user_auth WHERE user_id = ?",
+ }
+
+ for _, sql := range deletes {
+ _, err := sess.Exec(sql, cmd.UserId)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
func UpdateUserPermissions(cmd *m.UpdateUserPermissionsCommand) error {
return inTransaction(func(sess *DBSession) error {
user := m.User{}
- sess.Id(cmd.UserId).Get(&user)
+ sess.ID(cmd.UserId).Get(&user)
user.IsAdmin = cmd.IsGrafanaAdmin
sess.UseBool("is_admin")
- _, err := sess.Id(user.Id).Update(&user)
+ _, err := sess.ID(user.Id).Update(&user)
return err
})
}
@@ -490,7 +491,7 @@ func SetUserHelpFlag(cmd *m.SetUserHelpFlagCommand) error {
Updated: time.Now(),
}
- _, err := sess.Id(cmd.UserId).Cols("help_flags1").Update(&user)
+ _, err := sess.ID(cmd.UserId).Cols("help_flags1").Update(&user)
return err
})
}
diff --git a/pkg/services/sqlstore/user_auth_test.go b/pkg/services/sqlstore/user_auth_test.go
index 5ad93dc7a3b..a0dd714fe6f 100644
--- a/pkg/services/sqlstore/user_auth_test.go
+++ b/pkg/services/sqlstore/user_auth_test.go
@@ -16,7 +16,6 @@ func TestUserAuth(t *testing.T) {
Convey("Given 5 users", t, func() {
var err error
var cmd *m.CreateUserCommand
- users := []m.User{}
for i := 0; i < 5; i++ {
cmd = &m.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
@@ -25,7 +24,6 @@ func TestUserAuth(t *testing.T) {
}
err = CreateUser(context.Background(), cmd)
So(err, ShouldBeNil)
- users = append(users, cmd.Result)
}
Reset(func() {
diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go
index fb23a192a85..58901e55c6b 100644
--- a/pkg/setting/setting.go
+++ b/pkg/setting/setting.go
@@ -54,14 +54,11 @@ var (
ApplicationName string
// Paths
- LogsPath string
HomePath string
- DataPath string
PluginsPath string
CustomInitPath = "conf/custom.ini"
// Log settings.
- LogModes []string
LogConfigs []util.DynMap
// Http server options
@@ -166,6 +163,7 @@ var (
// Alerting
AlertingEnabled bool
ExecuteAlerts bool
+ AlertingRenderLimit int
AlertingErrorOrTimeout string
AlertingNoDataOrNullValues string
@@ -186,23 +184,35 @@ var (
ImageUploadProvider string
)
+// TODO move all global vars to this struct
type Cfg struct {
Raw *ini.File
+ // HTTP Server Settings
+ AppUrl string
+ AppSubUrl string
+
// Paths
ProvisioningPath string
+ DataPath string
+ LogsPath string
// SMTP email settings
Smtp SmtpSettings
// Rendering
- ImagesDir string
- PhantomDir string
- RendererUrl string
- RendererCallbackUrl string
+ ImagesDir string
+ PhantomDir string
+ RendererUrl string
+ RendererCallbackUrl string
+ RendererLimit int
+ RendererLimitAlerting int
+
DisableBruteForceLoginProtection bool
TempDataLifetime time.Duration
+
+ MetricsEndpointEnabled bool
}
type CommandLineArgs struct {
@@ -405,7 +415,7 @@ func loadSpecifedConfigFile(configFile string, masterFile *ini.File) error {
return nil
}
-func loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
+func (cfg *Cfg) loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
var err error
// load config defaults
@@ -436,7 +446,7 @@ func loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
// load specified config file
err = loadSpecifedConfigFile(args.Config, parsedFile)
if err != nil {
- initLogging(parsedFile)
+ cfg.initLogging(parsedFile)
log.Fatal(3, err.Error())
}
@@ -453,8 +463,8 @@ func loadConfiguration(args *CommandLineArgs) (*ini.File, error) {
evalConfigValues(parsedFile)
// update data path and logging config
- DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath)
- initLogging(parsedFile)
+ cfg.DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath)
+ cfg.initLogging(parsedFile)
return parsedFile, err
}
@@ -511,7 +521,7 @@ func NewCfg() *Cfg {
func (cfg *Cfg) Load(args *CommandLineArgs) error {
setHomePath(args)
- iniFile, err := loadConfiguration(args)
+ iniFile, err := cfg.loadConfiguration(args)
if err != nil {
return err
}
@@ -532,6 +542,8 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
cfg.ProvisioningPath = makeAbsolute(iniFile.Section("paths").Key("provisioning").String(), HomePath)
server := iniFile.Section("server")
AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server)
+ cfg.AppUrl = AppUrl
+ cfg.AppSubUrl = AppSubUrl
Protocol = HTTP
if server.Key("protocol").MustString("http") == "https" {
@@ -656,9 +668,10 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
log.Fatal(4, "Invalid callback_url(%s): %s", cfg.RendererCallbackUrl, err)
}
}
- cfg.ImagesDir = filepath.Join(DataPath, "png")
+ cfg.ImagesDir = filepath.Join(cfg.DataPath, "png")
cfg.PhantomDir = filepath.Join(HomePath, "tools/phantomjs")
cfg.TempDataLifetime = iniFile.Section("paths").Key("temp_data_lifetime").MustDuration(time.Second * 3600 * 24)
+ cfg.MetricsEndpointEnabled = iniFile.Section("metrics").Key("enabled").MustBool(true)
analytics := iniFile.Section("analytics")
ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true)
@@ -674,6 +687,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
alerting := iniFile.Section("alerting")
AlertingEnabled = alerting.Key("enabled").MustBool(true)
ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true)
+ AlertingRenderLimit = alerting.Key("concurrent_render_limit").MustInt(5)
AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting")
AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data")
@@ -712,7 +726,7 @@ func (cfg *Cfg) readSessionConfig() {
SessionOptions.IDLength = 16
if SessionOptions.Provider == "file" {
- SessionOptions.ProviderConfig = makeAbsolute(SessionOptions.ProviderConfig, DataPath)
+ SessionOptions.ProviderConfig = makeAbsolute(SessionOptions.ProviderConfig, cfg.DataPath)
os.MkdirAll(path.Dir(SessionOptions.ProviderConfig), os.ModePerm)
}
@@ -723,15 +737,15 @@ func (cfg *Cfg) readSessionConfig() {
SessionConnMaxLifetime = cfg.Raw.Section("session").Key("conn_max_lifetime").MustInt64(14400)
}
-func initLogging(file *ini.File) {
+func (cfg *Cfg) initLogging(file *ini.File) {
// split on comma
- LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), ",")
+ logModes := strings.Split(file.Section("log").Key("mode").MustString("console"), ",")
// also try space
- if len(LogModes) == 1 {
- LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ")
+ if len(logModes) == 1 {
+ logModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ")
}
- LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath)
- log.ReadLoggingConfig(LogModes, LogsPath, file)
+ cfg.LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath)
+ log.ReadLoggingConfig(logModes, cfg.LogsPath, file)
}
func (cfg *Cfg) LogConfigSources() {
@@ -755,8 +769,8 @@ func (cfg *Cfg) LogConfigSources() {
}
logger.Info("Path Home", "path", HomePath)
- logger.Info("Path Data", "path", DataPath)
- logger.Info("Path Logs", "path", LogsPath)
+ logger.Info("Path Data", "path", cfg.DataPath)
+ logger.Info("Path Logs", "path", cfg.LogsPath)
logger.Info("Path Plugins", "path", PluginsPath)
logger.Info("Path Provisioning", "path", cfg.ProvisioningPath)
logger.Info("App mode " + Env)
diff --git a/pkg/setting/setting_oauth.go b/pkg/setting/setting_oauth.go
index ee2e812415b..93b1ab6f101 100644
--- a/pkg/setting/setting_oauth.go
+++ b/pkg/setting/setting_oauth.go
@@ -5,6 +5,7 @@ type OAuthInfo struct {
Scopes []string
AuthUrl, TokenUrl string
Enabled bool
+ EmailAttributeName string
AllowedDomains []string
HostedDomain string
ApiUrl string
diff --git a/pkg/setting/setting_test.go b/pkg/setting/setting_test.go
index affb3c3e7ca..72dbe2378c7 100644
--- a/pkg/setting/setting_test.go
+++ b/pkg/setting/setting_test.go
@@ -30,8 +30,8 @@ func TestLoadingSettings(t *testing.T) {
cfg.Load(&CommandLineArgs{HomePath: "../../"})
So(AdminUser, ShouldEqual, "superduper")
- So(DataPath, ShouldEqual, filepath.Join(HomePath, "data"))
- So(LogsPath, ShouldEqual, filepath.Join(DataPath, "log"))
+ So(cfg.DataPath, ShouldEqual, filepath.Join(HomePath, "data"))
+ So(cfg.LogsPath, ShouldEqual, filepath.Join(cfg.DataPath, "log"))
})
Convey("Should replace password when defined in environment", func() {
@@ -76,8 +76,8 @@ func TestLoadingSettings(t *testing.T) {
HomePath: "../../",
Args: []string{`cfg:paths.data=c:\tmp\data`, `cfg:paths.logs=c:\tmp\logs`},
})
- So(DataPath, ShouldEqual, `c:\tmp\data`)
- So(LogsPath, ShouldEqual, `c:\tmp\logs`)
+ So(cfg.DataPath, ShouldEqual, `c:\tmp\data`)
+ So(cfg.LogsPath, ShouldEqual, `c:\tmp\logs`)
} else {
cfg := NewCfg()
cfg.Load(&CommandLineArgs{
@@ -85,8 +85,8 @@ func TestLoadingSettings(t *testing.T) {
Args: []string{"cfg:paths.data=/tmp/data", "cfg:paths.logs=/tmp/logs"},
})
- So(DataPath, ShouldEqual, "/tmp/data")
- So(LogsPath, ShouldEqual, "/tmp/logs")
+ So(cfg.DataPath, ShouldEqual, "/tmp/data")
+ So(cfg.LogsPath, ShouldEqual, "/tmp/logs")
}
})
@@ -97,7 +97,7 @@ func TestLoadingSettings(t *testing.T) {
Args: []string{
"cfg:default.server.domain=test2",
},
- Config: filepath.Join(HomePath, "tests/config-files/override.ini"),
+ Config: filepath.Join(HomePath, "pkg/setting/testdata/override.ini"),
})
So(Domain, ShouldEqual, "test2")
@@ -108,20 +108,20 @@ func TestLoadingSettings(t *testing.T) {
cfg := NewCfg()
cfg.Load(&CommandLineArgs{
HomePath: "../../",
- Config: filepath.Join(HomePath, "tests/config-files/override_windows.ini"),
+ Config: filepath.Join(HomePath, "pkg/setting/testdata/override_windows.ini"),
Args: []string{`cfg:default.paths.data=c:\tmp\data`},
})
- So(DataPath, ShouldEqual, `c:\tmp\override`)
+ So(cfg.DataPath, ShouldEqual, `c:\tmp\override`)
} else {
cfg := NewCfg()
cfg.Load(&CommandLineArgs{
HomePath: "../../",
- Config: filepath.Join(HomePath, "tests/config-files/override.ini"),
+ Config: filepath.Join(HomePath, "pkg/setting/testdata/override.ini"),
Args: []string{"cfg:default.paths.data=/tmp/data"},
})
- So(DataPath, ShouldEqual, "/tmp/override")
+ So(cfg.DataPath, ShouldEqual, "/tmp/override")
}
})
@@ -130,20 +130,20 @@ func TestLoadingSettings(t *testing.T) {
cfg := NewCfg()
cfg.Load(&CommandLineArgs{
HomePath: "../../",
- Config: filepath.Join(HomePath, "tests/config-files/override_windows.ini"),
+ Config: filepath.Join(HomePath, "pkg/setting/testdata/override_windows.ini"),
Args: []string{`cfg:paths.data=c:\tmp\data`},
})
- So(DataPath, ShouldEqual, `c:\tmp\data`)
+ So(cfg.DataPath, ShouldEqual, `c:\tmp\data`)
} else {
cfg := NewCfg()
cfg.Load(&CommandLineArgs{
HomePath: "../../",
- Config: filepath.Join(HomePath, "tests/config-files/override.ini"),
+ Config: filepath.Join(HomePath, "pkg/setting/testdata/override.ini"),
Args: []string{"cfg:paths.data=/tmp/data"},
})
- So(DataPath, ShouldEqual, "/tmp/data")
+ So(cfg.DataPath, ShouldEqual, "/tmp/data")
}
})
@@ -156,7 +156,7 @@ func TestLoadingSettings(t *testing.T) {
Args: []string{"cfg:paths.data=${GF_DATA_PATH}"},
})
- So(DataPath, ShouldEqual, `c:\tmp\env_override`)
+ So(cfg.DataPath, ShouldEqual, `c:\tmp\env_override`)
} else {
os.Setenv("GF_DATA_PATH", "/tmp/env_override")
cfg := NewCfg()
@@ -165,7 +165,7 @@ func TestLoadingSettings(t *testing.T) {
Args: []string{"cfg:paths.data=${GF_DATA_PATH}"},
})
- So(DataPath, ShouldEqual, "/tmp/env_override")
+ So(cfg.DataPath, ShouldEqual, "/tmp/env_override")
}
})
diff --git a/tests/config-files/override.ini b/pkg/setting/testdata/override.ini
similarity index 100%
rename from tests/config-files/override.ini
rename to pkg/setting/testdata/override.ini
diff --git a/tests/config-files/override_windows.ini b/pkg/setting/testdata/override_windows.ini
similarity index 100%
rename from tests/config-files/override_windows.ini
rename to pkg/setting/testdata/override_windows.ini
diff --git a/pkg/social/generic_oauth.go b/pkg/social/generic_oauth.go
index 8c02076096d..a97d58334c7 100644
--- a/pkg/social/generic_oauth.go
+++ b/pkg/social/generic_oauth.go
@@ -20,6 +20,7 @@ type SocialGenericOAuth struct {
allowedOrganizations []string
apiUrl string
allowSignup bool
+ emailAttributeName string
teamIds []int
}
@@ -264,8 +265,9 @@ func (s *SocialGenericOAuth) extractEmail(data *UserInfoJson) string {
return data.Email
}
- if data.Attributes["email:primary"] != nil {
- return data.Attributes["email:primary"][0]
+ emails, ok := data.Attributes[s.emailAttributeName]
+ if ok && len(emails) != 0 {
+ return emails[0]
}
if data.Upn != "" {
diff --git a/pkg/social/social.go b/pkg/social/social.go
index 2be71514629..8918507f3b9 100644
--- a/pkg/social/social.go
+++ b/pkg/social/social.go
@@ -46,35 +46,39 @@ func (e *Error) Error() string {
return e.s
}
+const (
+ grafanaCom = "grafana_com"
+)
+
var (
SocialBaseUrl = "/login/"
SocialMap = make(map[string]SocialConnector)
+ allOauthes = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", grafanaCom}
)
func NewOAuthService() {
setting.OAuthService = &setting.OAuther{}
setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
- allOauthes := []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"}
-
for _, name := range allOauthes {
sec := setting.Raw.Section("auth." + name)
info := &setting.OAuthInfo{
- ClientId: sec.Key("client_id").String(),
- ClientSecret: sec.Key("client_secret").String(),
- Scopes: util.SplitString(sec.Key("scopes").String()),
- AuthUrl: sec.Key("auth_url").String(),
- TokenUrl: sec.Key("token_url").String(),
- ApiUrl: sec.Key("api_url").String(),
- Enabled: sec.Key("enabled").MustBool(),
- AllowedDomains: util.SplitString(sec.Key("allowed_domains").String()),
- HostedDomain: sec.Key("hosted_domain").String(),
- AllowSignup: sec.Key("allow_sign_up").MustBool(),
- Name: sec.Key("name").MustString(name),
- TlsClientCert: sec.Key("tls_client_cert").String(),
- TlsClientKey: sec.Key("tls_client_key").String(),
- TlsClientCa: sec.Key("tls_client_ca").String(),
- TlsSkipVerify: sec.Key("tls_skip_verify_insecure").MustBool(),
+ ClientId: sec.Key("client_id").String(),
+ ClientSecret: sec.Key("client_secret").String(),
+ Scopes: util.SplitString(sec.Key("scopes").String()),
+ AuthUrl: sec.Key("auth_url").String(),
+ TokenUrl: sec.Key("token_url").String(),
+ ApiUrl: sec.Key("api_url").String(),
+ Enabled: sec.Key("enabled").MustBool(),
+ EmailAttributeName: sec.Key("email_attribute_name").String(),
+ AllowedDomains: util.SplitString(sec.Key("allowed_domains").String()),
+ HostedDomain: sec.Key("hosted_domain").String(),
+ AllowSignup: sec.Key("allow_sign_up").MustBool(),
+ Name: sec.Key("name").MustString(name),
+ TlsClientCert: sec.Key("tls_client_cert").String(),
+ TlsClientKey: sec.Key("tls_client_key").String(),
+ TlsClientCa: sec.Key("tls_client_ca").String(),
+ TlsSkipVerify: sec.Key("tls_skip_verify_insecure").MustBool(),
}
if !info.Enabled {
@@ -82,7 +86,7 @@ func NewOAuthService() {
}
if name == "grafananet" {
- name = "grafana_com"
+ name = grafanaCom
}
setting.OAuthService.OAuthInfos[name] = info
@@ -153,12 +157,13 @@ func NewOAuthService() {
allowedDomains: info.AllowedDomains,
apiUrl: info.ApiUrl,
allowSignup: info.AllowSignup,
+ emailAttributeName: info.EmailAttributeName,
teamIds: sec.Key("team_ids").Ints(","),
allowedOrganizations: util.SplitString(sec.Key("allowed_organizations").String()),
}
}
- if name == "grafana_com" {
+ if name == grafanaCom {
config = oauth2.Config{
ClientID: info.ClientId,
ClientSecret: info.ClientSecret,
@@ -170,7 +175,7 @@ func NewOAuthService() {
Scopes: info.Scopes,
}
- SocialMap["grafana_com"] = &SocialGrafanaCom{
+ SocialMap[grafanaCom] = &SocialGrafanaCom{
SocialBase: &SocialBase{
Config: &config,
log: logger,
@@ -182,3 +187,26 @@ func NewOAuthService() {
}
}
}
+
+// GetOAuthProviders returns available oauth providers and if they're enabled or not
+var GetOAuthProviders = func(cfg *setting.Cfg) map[string]bool {
+ result := map[string]bool{}
+
+ if cfg == nil || cfg.Raw == nil {
+ return result
+ }
+
+ for _, name := range allOauthes {
+ if name == "grafananet" {
+ name = grafanaCom
+ }
+
+ sec := cfg.Raw.Section("auth." + name)
+ if sec == nil {
+ continue
+ }
+ result[name] = sec.Key("enabled").MustBool()
+ }
+
+ return result
+}
diff --git a/pkg/tracing/tracing.go b/pkg/tracing/tracing.go
index 61f45af3635..fd7258b7a0a 100644
--- a/pkg/tracing/tracing.go
+++ b/pkg/tracing/tracing.go
@@ -58,7 +58,8 @@ func (ts *TracingService) parseSettings() {
func (ts *TracingService) initGlobalTracer() error {
cfg := jaegercfg.Configuration{
- Disabled: !ts.enabled,
+ ServiceName: "grafana",
+ Disabled: !ts.enabled,
Sampler: &jaegercfg.SamplerConfig{
Type: ts.samplerType,
Param: ts.samplerParam,
@@ -78,7 +79,7 @@ func (ts *TracingService) initGlobalTracer() error {
options = append(options, jaegercfg.Tag(tag, value))
}
- tracer, closer, err := cfg.New("grafana", options...)
+ tracer, closer, err := cfg.NewTracer(options...)
if err != nil {
return err
}
diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go
index 92352a51315..61bbc04394a 100644
--- a/pkg/tsdb/cloudwatch/cloudwatch.go
+++ b/pkg/tsdb/cloudwatch/cloudwatch.go
@@ -129,10 +129,13 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo
if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" {
return err
}
- result.Results[queryRes.RefId] = queryRes
if err != nil {
- result.Results[queryRes.RefId].Error = err
+ result.Results[query.RefId] = &tsdb.QueryResult{
+ Error: err,
+ }
+ return nil
}
+ result.Results[queryRes.RefId] = queryRes
return nil
})
}
@@ -196,7 +199,7 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatch
params.ExtendedStatistics = query.ExtendedStatistics
}
- // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600
+ // 1 minutes resolution metrics is stored for 15 days, 15 * 24 * 60 = 21600
if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) {
return nil, errors.New("too long query period")
}
@@ -267,9 +270,9 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
ScanBy: aws.String("TimestampAscending"),
}
for _, query := range queries {
- // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600
+ // 1 minutes resolution metrics is stored for 15 days, 15 * 24 * 60 = 21600
if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) {
- return nil, errors.New("too long query period")
+ return queryResponses, errors.New("too long query period")
}
mdq := &cloudwatch.MetricDataQuery{
@@ -362,6 +365,7 @@ func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, regi
}
queryRes.Series = append(queryRes.Series, &series)
+ queryRes.Meta = simplejson.New()
queryResponses = append(queryResponses, queryRes)
}
@@ -565,6 +569,12 @@ func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatch
}
queryRes.Series = append(queryRes.Series, &series)
+ queryRes.Meta = simplejson.New()
+ if len(resp.Datapoints) > 0 && resp.Datapoints[0].Unit != nil {
+ if unit, ok := cloudwatchUnitMappings[*resp.Datapoints[0].Unit]; ok {
+ queryRes.Meta.Set("unit", unit)
+ }
+ }
}
return queryRes, nil
diff --git a/pkg/tsdb/cloudwatch/cloudwatch_test.go b/pkg/tsdb/cloudwatch/cloudwatch_test.go
index 719edba08ba..32b8c910f2b 100644
--- a/pkg/tsdb/cloudwatch/cloudwatch_test.go
+++ b/pkg/tsdb/cloudwatch/cloudwatch_test.go
@@ -71,6 +71,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(30.0),
"p90.00": aws.Float64(40.0),
},
+ Unit: aws.String("Seconds"),
},
},
}
@@ -103,6 +104,7 @@ func TestCloudWatch(t *testing.T) {
So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
+ So(queryRes.Meta.Get("unit").MustString(), ShouldEqual, "s")
})
Convey("terminate gap of data points", func() {
@@ -118,6 +120,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(30.0),
"p90.00": aws.Float64(40.0),
},
+ Unit: aws.String("Seconds"),
},
{
Timestamp: aws.Time(timestamp.Add(60 * time.Second)),
@@ -127,6 +130,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(40.0),
"p90.00": aws.Float64(50.0),
},
+ Unit: aws.String("Seconds"),
},
{
Timestamp: aws.Time(timestamp.Add(180 * time.Second)),
@@ -136,6 +140,7 @@ func TestCloudWatch(t *testing.T) {
"p50.00": aws.Float64(50.0),
"p90.00": aws.Float64(60.0),
},
+ Unit: aws.String("Seconds"),
},
},
}
diff --git a/pkg/tsdb/cloudwatch/constants.go b/pkg/tsdb/cloudwatch/constants.go
new file mode 100644
index 00000000000..23817b1d133
--- /dev/null
+++ b/pkg/tsdb/cloudwatch/constants.go
@@ -0,0 +1,30 @@
+package cloudwatch
+
+var cloudwatchUnitMappings = map[string]string{
+ "Seconds": "s",
+ "Microseconds": "µs",
+ "Milliseconds": "ms",
+ "Bytes": "bytes",
+ "Kilobytes": "kbytes",
+ "Megabytes": "mbytes",
+ "Gigabytes": "gbytes",
+ //"Terabytes": "",
+ "Bits": "bits",
+ //"Kilobits": "",
+ //"Megabits": "",
+ //"Gigabits": "",
+ //"Terabits": "",
+ "Percent": "percent",
+ //"Count": "",
+ "Bytes/Second": "Bps",
+ "Kilobytes/Second": "KBs",
+ "Megabytes/Second": "MBs",
+ "Gigabytes/Second": "GBs",
+ //"Terabytes/Second": "",
+ "Bits/Second": "bps",
+ "Kilobits/Second": "Kbits",
+ "Megabits/Second": "Mbits",
+ "Gigabits/Second": "Gbits",
+ //"Terabits/Second": "",
+ //"Count/Second": "",
+}
diff --git a/pkg/tsdb/cloudwatch/credentials.go b/pkg/tsdb/cloudwatch/credentials.go
index 8b32c76daa3..165f8fdbe97 100644
--- a/pkg/tsdb/cloudwatch/credentials.go
+++ b/pkg/tsdb/cloudwatch/credentials.go
@@ -42,8 +42,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
accessKeyId := ""
secretAccessKey := ""
sessionToken := ""
- var expiration *time.Time
- expiration = nil
+ var expiration *time.Time = nil
if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 {
params := &sts.AssumeRoleInput{
RoleArn: aws.String(dsInfo.AssumeRoleArn),
diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go
index ef1b53eaf1b..ee9d9583c4e 100644
--- a/pkg/tsdb/cloudwatch/metric_find_query.go
+++ b/pkg/tsdb/cloudwatch/metric_find_query.go
@@ -235,7 +235,7 @@ func parseMultiSelectValue(input string) []string {
func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
regions := []string{
"ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1",
- "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2",
+ "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", "us-isob-east-1", "us-iso-east-1",
}
result := make([]suggestData, 0)
@@ -466,6 +466,9 @@ func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context,
return nil, errors.New("invalid attribute path")
}
v = v.FieldByName(key)
+ if !v.IsValid() {
+ return nil, errors.New("invalid attribute path")
+ }
}
if attr, ok := v.Interface().(*string); ok {
data = *attr
diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go
index dff626a79eb..4ebe0db8f89 100644
--- a/pkg/tsdb/elasticsearch/client/client.go
+++ b/pkg/tsdb/elasticsearch/client/client.go
@@ -138,13 +138,13 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte,
}
body := string(reqBody)
- body = strings.Replace(body, "$__interval_ms", strconv.FormatInt(r.interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1)
+ body = strings.Replace(body, "$__interval_ms", strconv.FormatInt(r.interval.Milliseconds(), 10), -1)
body = strings.Replace(body, "$__interval", r.interval.Text, -1)
payload.WriteString(body + "\n")
}
- elapsed := time.Now().Sub(start)
+ elapsed := time.Since(start)
clientLog.Debug("Encoded batch requests to json", "took", elapsed)
return payload.Bytes(), nil
@@ -187,7 +187,7 @@ func (c *baseClientImpl) executeRequest(method, uriPath string, body []byte) (*h
start := time.Now()
defer func() {
- elapsed := time.Now().Sub(start)
+ elapsed := time.Since(start)
clientLog.Debug("Executed request", "took", elapsed)
}()
return ctxhttp.Do(c.ctx, httpClient, req)
@@ -215,7 +215,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch
return nil, err
}
- elapsed := time.Now().Sub(start)
+ elapsed := time.Since(start)
clientLog.Debug("Decoded multisearch json response", "took", elapsed)
msr.Status = res.StatusCode
diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go
index 11d1cdb1d71..540a999688a 100644
--- a/pkg/tsdb/elasticsearch/client/client_test.go
+++ b/pkg/tsdb/elasticsearch/client/client_test.go
@@ -25,7 +25,7 @@ func TestClient(t *testing.T) {
JsonData: simplejson.NewFromAny(make(map[string]interface{})),
}
- _, err := NewClient(nil, ds, nil)
+ _, err := NewClient(context.Background(), ds, nil)
So(err, ShouldNotBeNil)
})
@@ -36,11 +36,11 @@ func TestClient(t *testing.T) {
}),
}
- _, err := NewClient(nil, ds, nil)
+ _, err := NewClient(context.Background(), ds, nil)
So(err, ShouldNotBeNil)
})
- Convey("When unspported version set should return error", func() {
+ Convey("When unsupported version set should return error", func() {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 6,
@@ -48,7 +48,7 @@ func TestClient(t *testing.T) {
}),
}
- _, err := NewClient(nil, ds, nil)
+ _, err := NewClient(context.Background(), ds, nil)
So(err, ShouldNotBeNil)
})
@@ -60,7 +60,7 @@ func TestClient(t *testing.T) {
}),
}
- c, err := NewClient(nil, ds, nil)
+ c, err := NewClient(context.Background(), ds, nil)
So(err, ShouldBeNil)
So(c.GetVersion(), ShouldEqual, 2)
})
@@ -73,7 +73,7 @@ func TestClient(t *testing.T) {
}),
}
- c, err := NewClient(nil, ds, nil)
+ c, err := NewClient(context.Background(), ds, nil)
So(err, ShouldBeNil)
So(c.GetVersion(), ShouldEqual, 5)
})
@@ -86,7 +86,7 @@ func TestClient(t *testing.T) {
}),
}
- c, err := NewClient(nil, ds, nil)
+ c, err := NewClient(context.Background(), ds, nil)
So(err, ShouldBeNil)
So(c.GetVersion(), ShouldEqual, 56)
})
diff --git a/pkg/tsdb/elasticsearch/client/search_request.go b/pkg/tsdb/elasticsearch/client/search_request.go
index 2b833ce78d3..4c577a2c31d 100644
--- a/pkg/tsdb/elasticsearch/client/search_request.go
+++ b/pkg/tsdb/elasticsearch/client/search_request.go
@@ -56,9 +56,7 @@ func (b *SearchRequestBuilder) Build() (*SearchRequest, error) {
if err != nil {
return nil, err
}
- for _, agg := range aggArray {
- sr.Aggs = append(sr.Aggs, agg)
- }
+ sr.Aggs = append(sr.Aggs, aggArray...)
}
}
@@ -112,7 +110,7 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder {
return b.queryBuilder
}
-// Agg initaite and returns a new aggregation builder
+// Agg initiate and returns a new aggregation builder
func (b *SearchRequestBuilder) Agg() AggBuilder {
aggBuilder := newAggBuilder()
b.aggBuilders = append(b.aggBuilders, aggBuilder)
@@ -300,9 +298,7 @@ func (b *aggBuilderImpl) Build() (AggArray, error) {
return nil, err
}
- for _, childAgg := range childAggs {
- agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAgg)
- }
+ agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAggs...)
}
aggs = append(aggs, agg)
diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go
index 7bdab60389c..0837c3dd9d5 100644
--- a/pkg/tsdb/elasticsearch/response_parser.go
+++ b/pkg/tsdb/elasticsearch/response_parser.go
@@ -13,6 +13,19 @@ import (
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)
+const (
+ // Metric types
+ countType = "count"
+ percentilesType = "percentiles"
+ extendedStatsType = "extended_stats"
+ // Bucket types
+ dateHistType = "date_histogram"
+ histogramType = "histogram"
+ filtersType = "filters"
+ termsType = "terms"
+ geohashGridType = "geohash_grid"
+)
+
type responseParser struct {
Responses []*es.SearchResponse
Targets []*Query
@@ -81,7 +94,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
}
if depth == maxDepth {
- if aggDef.Type == "date_histogram" {
+ if aggDef.Type == dateHistType {
err = rp.processMetrics(esAgg, target, series, props)
} else {
err = rp.processAggregationDocs(esAgg, aggDef, target, table, props)
@@ -92,7 +105,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
} else {
for _, b := range esAgg.Get("buckets").MustArray() {
bucket := simplejson.NewFromAny(b)
- newProps := make(map[string]string, 0)
+ newProps := make(map[string]string)
for k, v := range props {
newProps[k] = v
@@ -122,7 +135,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
for _, bucketKey := range bucketKeys {
bucket := simplejson.NewFromAny(buckets[bucketKey])
- newProps := make(map[string]string, 0)
+ newProps := make(map[string]string)
for k, v := range props {
newProps[k] = v
@@ -149,7 +162,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
}
switch metric.Type {
- case "count":
+ case countType:
newSeries := tsdb.TimeSeries{
Tags: make(map[string]string),
}
@@ -164,10 +177,10 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
for k, v := range props {
newSeries.Tags[k] = v
}
- newSeries.Tags["metric"] = "count"
+ newSeries.Tags["metric"] = countType
*series = append(*series, &newSeries)
- case "percentiles":
+ case percentilesType:
buckets := esAgg.Get("buckets").MustArray()
if len(buckets) == 0 {
break
@@ -198,7 +211,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
}
*series = append(*series, &newSeries)
}
- case "extended_stats":
+ case extendedStatsType:
buckets := esAgg.Get("buckets").MustArray()
metaKeys := make([]string, 0)
@@ -312,10 +325,9 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
for _, metric := range target.Metrics {
switch metric.Type {
- case "count":
+ case countType:
addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count")))
- break
- case "extended_stats":
+ case extendedStatsType:
metaKeys := make([]string, 0)
meta := metric.Meta.MustMap()
for k := range meta {
@@ -355,7 +367,6 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
}
addMetricValue(&values, metricName, castToNullFloat(bucket.GetPath(metric.ID, "value")))
- break
}
}
@@ -368,7 +379,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) {
var histogram *BucketAgg
for _, bucketAgg := range target.BucketAggs {
- if bucketAgg.Type == "date_histogram" {
+ if bucketAgg.Type == dateHistType {
histogram = bucketAgg
break
}
diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go
index c9bb05dd09a..869e23e21ce 100644
--- a/pkg/tsdb/elasticsearch/time_series_query.go
+++ b/pkg/tsdb/elasticsearch/time_series_query.go
@@ -75,15 +75,15 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) {
// iterate backwards to create aggregations bottom-down
for _, bucketAgg := range q.BucketAggs {
switch bucketAgg.Type {
- case "date_histogram":
+ case dateHistType:
aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to)
- case "histogram":
+ case histogramType:
aggBuilder = addHistogramAgg(aggBuilder, bucketAgg)
- case "filters":
+ case filtersType:
aggBuilder = addFiltersAgg(aggBuilder, bucketAgg)
- case "terms":
+ case termsType:
aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics)
- case "geohash_grid":
+ case geohashGridType:
aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg)
}
}
@@ -171,6 +171,10 @@ func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*Metr
} else {
a.Size = 500
}
+ if a.Size == 0 {
+ a.Size = 500
+ }
+
if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil {
a.MinDocCount = &minDocCount
}
diff --git a/pkg/tsdb/elasticsearch/time_series_query_test.go b/pkg/tsdb/elasticsearch/time_series_query_test.go
index 49bf5f5bc75..fe8ae0fa8f2 100644
--- a/pkg/tsdb/elasticsearch/time_series_query_test.go
+++ b/pkg/tsdb/elasticsearch/time_series_query_test.go
@@ -60,7 +60,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
- { "type": "terms", "field": "@host", "id": "2" },
+ { "type": "terms", "field": "@host", "id": "2", "settings": { "size": "0", "order": "asc" } },
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [{"type": "count", "id": "1" }]
@@ -69,7 +69,9 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
So(firstLevel.Key, ShouldEqual, "2")
- So(firstLevel.Aggregation.Aggregation.(*es.TermsAggregation).Field, ShouldEqual, "@host")
+ termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
+ So(termsAgg.Field, ShouldEqual, "@host")
+ So(termsAgg.Size, ShouldEqual, 500)
secondLevel := firstLevel.Aggregation.Aggs[0]
So(secondLevel.Key, ShouldEqual, "3")
So(secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp")
diff --git a/pkg/tsdb/influxdb/query.go b/pkg/tsdb/influxdb/query.go
index 0637a5bbb44..7cb8f0ecd82 100644
--- a/pkg/tsdb/influxdb/query.go
+++ b/pkg/tsdb/influxdb/query.go
@@ -4,7 +4,6 @@ import (
"fmt"
"strconv"
"strings"
- "time"
"regexp"
@@ -34,7 +33,7 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
res = strings.Replace(res, "$timeFilter", query.renderTimeFilter(queryContext), -1)
res = strings.Replace(res, "$interval", interval.Text, -1)
- res = strings.Replace(res, "$__interval_ms", strconv.FormatInt(interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1)
+ res = strings.Replace(res, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
res = strings.Replace(res, "$__interval", interval.Text, -1)
return res, nil
}
diff --git a/pkg/tsdb/influxdb/query_test.go b/pkg/tsdb/influxdb/query_test.go
index f1270560269..cc1358a72d7 100644
--- a/pkg/tsdb/influxdb/query_test.go
+++ b/pkg/tsdb/influxdb/query_test.go
@@ -158,7 +158,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" < 10001`)
})
- Convey("can render number greather then condition tags", func() {
+ Convey("can render number greater then condition tags", func() {
query := &Query{Tags: []*Tag{{Operator: ">", Value: "10001", Key: "key"}}}
So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" > 10001`)
diff --git a/pkg/tsdb/interval.go b/pkg/tsdb/interval.go
index 49904f27a37..fd6adee39d7 100644
--- a/pkg/tsdb/interval.go
+++ b/pkg/tsdb/interval.go
@@ -49,6 +49,10 @@ func NewIntervalCalculator(opt *IntervalOptions) *intervalCalculator {
return calc
}
+func (i *Interval) Milliseconds() int64 {
+ return i.Value.Nanoseconds() / int64(time.Millisecond)
+}
+
func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.Duration) Interval {
to := timerange.MustGetTo().UnixNano()
from := timerange.MustGetFrom().UnixNano()
diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go
index caba043e7b6..0a260f7ad70 100644
--- a/pkg/tsdb/mssql/macros.go
+++ b/pkg/tsdb/mssql/macros.go
@@ -13,12 +13,13 @@ const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type msSqlMacroEngine struct {
+ *tsdb.SqlMacroEngineBase
timeRange *tsdb.TimeRange
query *tsdb.Query
}
func newMssqlMacroEngine() tsdb.SqlMacroEngine {
- return &msSqlMacroEngine{}
+ return &msSqlMacroEngine{SqlMacroEngineBase: tsdb.NewSqlMacroEngineBase()}
}
func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
@@ -27,7 +28,7 @@ func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa
rExp, _ := regexp.Compile(sExpr)
var macroError error
- sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
+ sql = m.ReplaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
args := strings.Split(groups[2], ",")
for i, arg := range args {
args[i] = strings.Trim(arg, " ")
@@ -47,23 +48,6 @@ func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa
return sql, nil
}
-func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
- result := ""
- lastIndex := 0
-
- for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
- groups := []string{}
- for i := 0; i < len(v); i += 2 {
- groups = append(groups, str[v[i]:v[i+1]])
- }
-
- result += str[lastIndex:v[0]] + repl(groups)
- lastIndex = v[1]
- }
-
- return result + str[lastIndex:]
-}
-
func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
@@ -82,10 +66,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
}
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
- case "__timeFrom":
- return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
- case "__timeTo":
- return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -112,10 +92,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
- case "__unixEpochFrom":
- return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
- case "__unixEpochTo":
- return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
case "__unixEpochGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go
index 8e0973b750c..7456238efa4 100644
--- a/pkg/tsdb/mssql/macros_test.go
+++ b/pkg/tsdb/mssql/macros_test.go
@@ -111,20 +111,6 @@ func TestMacroEngine(t *testing.T) {
So(fillInterval, ShouldEqual, 5*time.Minute.Seconds())
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
So(err, ShouldBeNil)
@@ -132,20 +118,6 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix()))
})
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
-
Convey("interpolate __unixEpochGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
@@ -171,40 +143,12 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix()))
})
-
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
})
Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() {
@@ -219,40 +163,12 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix()))
})
-
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
})
})
}
diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
index 30d1da3bda1..c3d4470603d 100644
--- a/pkg/tsdb/mssql/mssql_test.go
+++ b/pkg/tsdb/mssql/mssql_test.go
@@ -1,6 +1,7 @@
package mssql
import (
+ "context"
"fmt"
"math/rand"
"strings"
@@ -35,6 +36,11 @@ func TestMSSQL(t *testing.T) {
return x, nil
}
+ origInterpolate := tsdb.Interpolate
+ tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+ return sql, nil
+ }
+
endpoint, err := newMssqlQueryEndpoint(&models.DataSource{
JsonData: simplejson.New(),
SecureJsonData: securejsondata.SecureJsonData{},
@@ -47,6 +53,7 @@ func TestMSSQL(t *testing.T) {
Reset(func() {
sess.Close()
tsdb.NewXormEngine = origXormEngine
+ tsdb.Interpolate = origInterpolate
})
Convey("Given a table with different native data types", func() {
@@ -122,7 +129,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
@@ -212,7 +219,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -259,7 +266,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -295,6 +302,40 @@ func TestMSSQL(t *testing.T) {
})
+ Convey("When doing a metric query using timeGroup and $__interval", func() {
+ mockInterpolate := tsdb.Interpolate
+ tsdb.Interpolate = origInterpolate
+
+ Reset(func() {
+ tsdb.Interpolate = mockInterpolate
+ })
+
+ Convey("Should replace $__interval", func() {
+ query := &tsdb.TsdbQuery{
+ Queries: []*tsdb.Query{
+ {
+ DataSource: &models.DataSource{},
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY $__timeGroup(time, $__interval) ORDER BY 1",
+ "format": "time_series",
+ }),
+ RefId: "A",
+ },
+ },
+ TimeRange: &tsdb.TimeRange{
+ From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+ To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000),
+ },
+ }
+
+ resp, err := endpoint.Query(context.Background(), nil, query)
+ So(err, ShouldBeNil)
+ queryResult := resp.Results["A"]
+ So(queryResult.Error, ShouldBeNil)
+ So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT FLOOR(DATEDIFF(second, '1970-01-01', time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time)/60)*60 ORDER BY 1")
+ })
+ })
+
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
@@ -312,7 +353,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -401,7 +442,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -423,7 +464,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -445,7 +486,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -467,7 +508,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -489,7 +530,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -511,7 +552,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -533,7 +574,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -555,7 +596,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -577,7 +618,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -600,7 +641,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -623,7 +664,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -635,6 +676,30 @@ func TestMSSQL(t *testing.T) {
So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
})
+ Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
+ tsdb.Interpolate = origInterpolate
+ query := &tsdb.TsdbQuery{
+ TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
+ Queries: []*tsdb.Query{
+ {
+ DataSource: &models.DataSource{JsonData: simplejson.New()},
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+ "format": "time_series",
+ }),
+ RefId: "A",
+ },
+ },
+ }
+
+ resp, err := endpoint.Query(context.Background(), nil, query)
+ So(err, ShouldBeNil)
+ queryResult := resp.Results["A"]
+ So(queryResult.Error, ShouldBeNil)
+ So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+
+ })
+
Convey("Given a stored procedure that takes @from and @to in epoch time", func() {
sql := `
IF object_id('sp_test_epoch') IS NOT NULL
@@ -679,9 +744,11 @@ func TestMSSQL(t *testing.T) {
So(err, ShouldBeNil)
Convey("When doing a metric query using stored procedure should return correct result", func() {
+ tsdb.Interpolate = origInterpolate
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
+ DataSource: &models.DataSource{JsonData: simplejson.New()},
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `DECLARE
@from int = $__unixEpochFrom(),
@@ -699,7 +766,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
So(queryResult.Error, ShouldBeNil)
@@ -756,9 +823,11 @@ func TestMSSQL(t *testing.T) {
So(err, ShouldBeNil)
Convey("When doing a metric query using stored procedure should return correct result", func() {
+ tsdb.Interpolate = origInterpolate
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
+ DataSource: &models.DataSource{JsonData: simplejson.New()},
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `DECLARE
@from int = $__unixEpochFrom(),
@@ -776,7 +845,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["A"]
So(err, ShouldBeNil)
So(queryResult.Error, ShouldBeNil)
@@ -852,7 +921,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
@@ -875,7 +944,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
@@ -901,7 +970,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -931,7 +1000,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -961,7 +1030,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -991,7 +1060,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -1019,7 +1088,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -1047,7 +1116,7 @@ func TestMSSQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go
index 0dabdd7c283..a037aa9277a 100644
--- a/pkg/tsdb/mysql/macros.go
+++ b/pkg/tsdb/mysql/macros.go
@@ -9,17 +9,17 @@ import (
"github.com/grafana/grafana/pkg/tsdb"
)
-//const rsString = `(?:"([^"]*)")`;
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type mySqlMacroEngine struct {
+ *tsdb.SqlMacroEngineBase
timeRange *tsdb.TimeRange
query *tsdb.Query
}
func newMysqlMacroEngine() tsdb.SqlMacroEngine {
- return &mySqlMacroEngine{}
+ return &mySqlMacroEngine{SqlMacroEngineBase: tsdb.NewSqlMacroEngineBase()}
}
func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
@@ -28,7 +28,7 @@ func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa
rExp, _ := regexp.Compile(sExpr)
var macroError error
- sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
+ sql = m.ReplaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
args := strings.Split(groups[2], ",")
for i, arg := range args {
args[i] = strings.Trim(arg, " ")
@@ -48,23 +48,6 @@ func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa
return sql, nil
}
-func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
- result := ""
- lastIndex := 0
-
- for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
- groups := []string{}
- for i := 0; i < len(v); i += 2 {
- groups = append(groups, str[v[i]:v[i+1]])
- }
-
- result += str[lastIndex:v[0]] + repl(groups)
- lastIndex = v[1]
- }
-
- return result + str[lastIndex:]
-}
-
func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__timeEpoch", "__time":
@@ -78,10 +61,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
}
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
- case "__timeFrom":
- return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
- case "__timeTo":
- return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -108,10 +87,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
- case "__unixEpochFrom":
- return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
- case "__unixEpochTo":
- return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
case "__unixEpochGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go
index fe153ca3e2d..3c9a5a26c94 100644
--- a/pkg/tsdb/mysql/macros_test.go
+++ b/pkg/tsdb/mysql/macros_test.go
@@ -63,20 +63,6 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
So(err, ShouldBeNil)
@@ -84,20 +70,6 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
-
Convey("interpolate __unixEpochGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
@@ -123,40 +95,12 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
-
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
})
Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() {
@@ -171,40 +115,12 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
-
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
})
})
}
diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go
index ca6df8e360e..476e3ba6586 100644
--- a/pkg/tsdb/mysql/mysql_test.go
+++ b/pkg/tsdb/mysql/mysql_test.go
@@ -1,6 +1,7 @@
package mysql
import (
+ "context"
"fmt"
"math/rand"
"strings"
@@ -42,6 +43,11 @@ func TestMySQL(t *testing.T) {
return x, nil
}
+ origInterpolate := tsdb.Interpolate
+ tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+ return sql, nil
+ }
+
endpoint, err := newMysqlQueryEndpoint(&models.DataSource{
JsonData: simplejson.New(),
SecureJsonData: securejsondata.SecureJsonData{},
@@ -54,6 +60,7 @@ func TestMySQL(t *testing.T) {
Reset(func() {
sess.Close()
tsdb.NewXormEngine = origXormEngine
+ tsdb.Interpolate = origInterpolate
})
Convey("Given a table with different native data types", func() {
@@ -123,7 +130,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -211,7 +218,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -258,7 +265,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -295,6 +302,40 @@ func TestMySQL(t *testing.T) {
})
+ Convey("When doing a metric query using timeGroup and $__interval", func() {
+ mockInterpolate := tsdb.Interpolate
+ tsdb.Interpolate = origInterpolate
+
+ Reset(func() {
+ tsdb.Interpolate = mockInterpolate
+ })
+
+ Convey("Should replace $__interval", func() {
+ query := &tsdb.TsdbQuery{
+ Queries: []*tsdb.Query{
+ {
+ DataSource: &models.DataSource{JsonData: simplejson.New()},
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+ "format": "time_series",
+ }),
+ RefId: "A",
+ },
+ },
+ TimeRange: &tsdb.TimeRange{
+ From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+ To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000),
+ },
+ }
+
+ resp, err := endpoint.Query(context.Background(), nil, query)
+ So(err, ShouldBeNil)
+ queryResult := resp.Results["A"]
+ So(queryResult.Error, ShouldBeNil)
+ So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT UNIX_TIMESTAMP(time) DIV 60 * 60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1")
+ })
+ })
+
Convey("When doing a metric query using timeGroup with value fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
@@ -312,7 +353,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -338,7 +379,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -433,7 +474,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -455,7 +496,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -477,7 +518,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -499,7 +540,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -521,7 +562,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -543,7 +584,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -565,7 +606,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -587,7 +628,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -609,7 +650,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -631,7 +672,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -653,7 +694,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -676,7 +717,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -701,7 +742,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -712,6 +753,30 @@ func TestMySQL(t *testing.T) {
})
})
+ Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
+ tsdb.Interpolate = origInterpolate
+ query := &tsdb.TsdbQuery{
+ TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
+ Queries: []*tsdb.Query{
+ {
+ DataSource: &models.DataSource{JsonData: simplejson.New()},
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+ "format": "time_series",
+ }),
+ RefId: "A",
+ },
+ },
+ }
+
+ resp, err := endpoint.Query(context.Background(), nil, query)
+ So(err, ShouldBeNil)
+ queryResult := resp.Results["A"]
+ So(queryResult.Error, ShouldBeNil)
+ So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+
+ })
+
Convey("Given a table with event data", func() {
type event struct {
TimeSec int64
@@ -762,7 +827,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
@@ -785,7 +850,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
@@ -811,7 +876,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -841,7 +906,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -871,7 +936,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -901,7 +966,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -929,7 +994,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -957,7 +1022,7 @@ func TestMySQL(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go
index 0a2ea1d2af6..0fa5d8077e1 100644
--- a/pkg/tsdb/postgres/macros.go
+++ b/pkg/tsdb/postgres/macros.go
@@ -9,18 +9,21 @@ import (
"github.com/grafana/grafana/pkg/tsdb"
)
-//const rsString = `(?:"([^"]*)")`;
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type postgresMacroEngine struct {
+ *tsdb.SqlMacroEngineBase
timeRange *tsdb.TimeRange
query *tsdb.Query
timescaledb bool
}
func newPostgresMacroEngine(timescaledb bool) tsdb.SqlMacroEngine {
- return &postgresMacroEngine{timescaledb: timescaledb}
+ return &postgresMacroEngine{
+ SqlMacroEngineBase: tsdb.NewSqlMacroEngineBase(),
+ timescaledb: timescaledb,
+ }
}
func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
@@ -29,7 +32,7 @@ func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.Tim
rExp, _ := regexp.Compile(sExpr)
var macroError error
- sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
+ sql = m.ReplaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
// detect if $__timeGroup is supposed to add AS time for pre 5.3 compatibility
// if there is a ',' directly after the macro call $__timeGroup is probably used
@@ -66,23 +69,6 @@ func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.Tim
return sql, nil
}
-func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
- result := ""
- lastIndex := 0
-
- for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
- groups := []string{}
- for i := 0; i < len(v); i += 2 {
- groups = append(groups, str[v[i]:v[i+1]])
- }
-
- result += str[lastIndex:v[0]] + repl(groups)
- lastIndex = v[1]
- }
-
- return result + str[lastIndex:]
-}
-
func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
@@ -101,10 +87,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
}
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
- case "__timeFrom":
- return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
- case "__timeTo":
- return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
@@ -136,10 +118,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil
- case "__unixEpochFrom":
- return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
- case "__unixEpochTo":
- return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
case "__unixEpochGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go
index b0b7a28ddd4..8a3699f82b2 100644
--- a/pkg/tsdb/postgres/macros_test.go
+++ b/pkg/tsdb/postgres/macros_test.go
@@ -44,13 +44,6 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
Convey("interpolate __timeGroup function pre 5.3 compatibility", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value")
@@ -102,13 +95,6 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
})
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
So(err, ShouldBeNil)
@@ -116,20 +102,6 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
-
Convey("interpolate __unixEpochGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
@@ -155,40 +127,12 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
-
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
})
Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() {
@@ -203,40 +147,12 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
- Convey("interpolate __timeFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
- })
-
- Convey("interpolate __timeTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
- })
-
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix()))
})
-
- Convey("interpolate __unixEpochFrom function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
- })
-
- Convey("interpolate __unixEpochTo function", func() {
- sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()")
- So(err, ShouldBeNil)
-
- So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
- })
})
})
}
diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go
index 4e05f676682..c381938aead 100644
--- a/pkg/tsdb/postgres/postgres_test.go
+++ b/pkg/tsdb/postgres/postgres_test.go
@@ -1,6 +1,7 @@
package postgres
import (
+ "context"
"fmt"
"math/rand"
"strings"
@@ -43,6 +44,11 @@ func TestPostgres(t *testing.T) {
return x, nil
}
+ origInterpolate := tsdb.Interpolate
+ tsdb.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+ return sql, nil
+ }
+
endpoint, err := newPostgresQueryEndpoint(&models.DataSource{
JsonData: simplejson.New(),
SecureJsonData: securejsondata.SecureJsonData{},
@@ -55,6 +61,7 @@ func TestPostgres(t *testing.T) {
Reset(func() {
sess.Close()
tsdb.NewXormEngine = origXormEngine
+ tsdb.Interpolate = origInterpolate
})
Convey("Given a table with different native data types", func() {
@@ -111,7 +118,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -191,7 +198,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -222,6 +229,40 @@ func TestPostgres(t *testing.T) {
}
})
+ Convey("When doing a metric query using timeGroup and $__interval", func() {
+ mockInterpolate := tsdb.Interpolate
+ tsdb.Interpolate = origInterpolate
+
+ Reset(func() {
+ tsdb.Interpolate = mockInterpolate
+ })
+
+ Convey("Should replace $__interval", func() {
+ query := &tsdb.TsdbQuery{
+ Queries: []*tsdb.Query{
+ {
+ DataSource: &models.DataSource{},
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
+ "format": "time_series",
+ }),
+ RefId: "A",
+ },
+ },
+ TimeRange: &tsdb.TimeRange{
+ From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+ To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000),
+ },
+ }
+
+ resp, err := endpoint.Query(context.Background(), nil, query)
+ So(err, ShouldBeNil)
+ queryResult := resp.Results["A"]
+ So(queryResult.Error, ShouldBeNil)
+ So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT floor(extract(epoch from time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1")
+ })
+ })
+
Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
@@ -239,7 +280,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -293,7 +334,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -320,7 +361,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -410,7 +451,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -432,7 +473,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -454,7 +495,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -476,7 +517,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -498,7 +539,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -520,7 +561,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -542,7 +583,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -564,7 +605,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -586,7 +627,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -609,7 +650,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -634,7 +675,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -643,6 +684,30 @@ func TestPostgres(t *testing.T) {
So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
})
+
+ Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() {
+ tsdb.Interpolate = origInterpolate
+ query := &tsdb.TsdbQuery{
+ TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart),
+ Queries: []*tsdb.Query{
+ {
+ DataSource: &models.DataSource{JsonData: simplejson.New()},
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`,
+ "format": "time_series",
+ }),
+ RefId: "A",
+ },
+ },
+ }
+
+ resp, err := endpoint.Query(context.Background(), nil, query)
+ So(err, ShouldBeNil)
+ queryResult := resp.Results["A"]
+ So(queryResult.Error, ShouldBeNil)
+ So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1")
+
+ })
})
Convey("Given a table with event data", func() {
@@ -695,7 +760,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
@@ -718,7 +783,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
@@ -744,7 +809,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -774,7 +839,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -804,7 +869,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -834,7 +899,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -862,7 +927,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
@@ -890,7 +955,7 @@ func TestPostgres(t *testing.T) {
},
}
- resp, err := endpoint.Query(nil, nil, query)
+ resp, err := endpoint.Query(context.Background(), nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
diff --git a/pkg/tsdb/prometheus/prometheus.go b/pkg/tsdb/prometheus/prometheus.go
index bf9fe9f152c..83bb683fccf 100644
--- a/pkg/tsdb/prometheus/prometheus.go
+++ b/pkg/tsdb/prometheus/prometheus.go
@@ -92,12 +92,12 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
return nil, err
}
- querys, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
+ queries, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
if err != nil {
return nil, err
}
- for _, query := range querys {
+ for _, query := range queries {
timeRange := apiv1.Range{
Start: query.Start,
End: query.End,
diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go
index 454853c7cc8..1a4e2bd3943 100644
--- a/pkg/tsdb/sql_engine.go
+++ b/pkg/tsdb/sql_engine.go
@@ -6,6 +6,7 @@ import (
"database/sql"
"fmt"
"math"
+ "regexp"
"strconv"
"strings"
"sync"
@@ -43,6 +44,8 @@ var engineCache = engineCacheType{
versions: make(map[int64]int),
}
+var sqlIntervalCalculator = NewIntervalCalculator(nil)
+
var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
return xorm.NewEngine(driverName, connectionString)
}
@@ -95,8 +98,12 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo
return nil, err
}
- engine.SetMaxOpenConns(10)
- engine.SetMaxIdleConns(10)
+ maxOpenConns := config.Datasource.JsonData.Get("maxOpenConns").MustInt(0)
+ engine.SetMaxOpenConns(maxOpenConns)
+ maxIdleConns := config.Datasource.JsonData.Get("maxIdleConns").MustInt(2)
+ engine.SetMaxIdleConns(maxIdleConns)
+ connMaxLifetime := config.Datasource.JsonData.Get("connMaxLifetime").MustInt(14400)
+ engine.SetConnMaxLifetime(time.Duration(connMaxLifetime) * time.Second)
engineCache.versions[config.Datasource.Id] = config.Datasource.Version
engineCache.cache[config.Datasource.Id] = engine
@@ -113,9 +120,7 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,
Results: make(map[string]*QueryResult),
}
- session := e.engine.NewSession()
- defer session.Close()
- db := session.DB()
+ var wg sync.WaitGroup
for _, query := range tsdbQuery.Queries {
rawSQL := query.Model.Get("rawSql").MustString()
@@ -126,7 +131,15 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,
queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId}
result.Results[query.RefId] = queryResult
- rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
+ // global substitutions
+ rawSQL, err := Interpolate(query, tsdbQuery.TimeRange, rawSQL)
+ if err != nil {
+ queryResult.Error = err
+ continue
+ }
+
+ // datasource specific substitutions
+ rawSQL, err = e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
if err != nil {
queryResult.Error = err
continue
@@ -134,35 +147,63 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,
queryResult.Meta.Set("sql", rawSQL)
- rows, err := db.Query(rawSQL)
- if err != nil {
- queryResult.Error = err
- continue
- }
+ wg.Add(1)
- defer rows.Close()
+ go func(rawSQL string, query *Query, queryResult *QueryResult) {
+ defer wg.Done()
+ session := e.engine.NewSession()
+ defer session.Close()
+ db := session.DB()
- format := query.Model.Get("format").MustString("time_series")
-
- switch format {
- case "time_series":
- err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+ rows, err := db.Query(rawSQL)
if err != nil {
queryResult.Error = err
- continue
+ return
}
- case "table":
- err := e.transformToTable(query, rows, queryResult, tsdbQuery)
- if err != nil {
- queryResult.Error = err
- continue
+
+ defer rows.Close()
+
+ format := query.Model.Get("format").MustString("time_series")
+
+ switch format {
+ case "time_series":
+ err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+ if err != nil {
+ queryResult.Error = err
+ return
+ }
+ case "table":
+ err := e.transformToTable(query, rows, queryResult, tsdbQuery)
+ if err != nil {
+ queryResult.Error = err
+ return
+ }
}
- }
+ }(rawSQL, query, queryResult)
}
+ wg.Wait()
return result, nil
}
+// global macros/substitutions for all sql datasources
+var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string, error) {
+ minInterval, err := GetIntervalFrom(query.DataSource, query.Model, time.Second*60)
+ if err != nil {
+ return sql, nil
+ }
+ interval := sqlIntervalCalculator.Calculate(timeRange, minInterval)
+
+ sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1)
+ sql = strings.Replace(sql, "$__interval", interval.Text, -1)
+ sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1)
+ sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1)
+ sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1)
+ sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1)
+
+ return sql, nil
+}
+
func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
columnNames, err := rows.Columns()
columnCount := len(columnNames)
@@ -589,3 +630,26 @@ func SetupFillmode(query *Query, interval time.Duration, fillmode string) error
return nil
}
+
+type SqlMacroEngineBase struct{}
+
+func NewSqlMacroEngineBase() *SqlMacroEngineBase {
+ return &SqlMacroEngineBase{}
+}
+
+func (m *SqlMacroEngineBase) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
+ result := ""
+ lastIndex := 0
+
+ for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
+ groups := []string{}
+ for i := 0; i < len(v); i += 2 {
+ groups = append(groups, str[v[i]:v[i+1]])
+ }
+
+ result += str[lastIndex:v[0]] + repl(groups)
+ lastIndex = v[1]
+ }
+
+ return result + str[lastIndex:]
+}
diff --git a/pkg/tsdb/sql_engine_test.go b/pkg/tsdb/sql_engine_test.go
index 854734fac31..bfcc82aac47 100644
--- a/pkg/tsdb/sql_engine_test.go
+++ b/pkg/tsdb/sql_engine_test.go
@@ -1,10 +1,13 @@
package tsdb
import (
+ "fmt"
"testing"
"time"
"github.com/grafana/grafana/pkg/components/null"
+ "github.com/grafana/grafana/pkg/components/simplejson"
+ "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
@@ -14,6 +17,63 @@ func TestSqlEngine(t *testing.T) {
dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
+ Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
+ from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC)
+ to := from.Add(5 * time.Minute)
+ timeRange := NewFakeTimeRange("5m", "now", to)
+ query := &Query{DataSource: &models.DataSource{}, Model: simplejson.New()}
+
+ Convey("interpolate $__interval", func() {
+ sql, err := Interpolate(query, timeRange, "select $__interval ")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, "select 1m ")
+ })
+
+ Convey("interpolate $__interval in $__timeGroup", func() {
+ sql, err := Interpolate(query, timeRange, "select $__timeGroupAlias(time,$__interval)")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, "select $__timeGroupAlias(time,1m)")
+ })
+
+ Convey("interpolate $__interval_ms", func() {
+ sql, err := Interpolate(query, timeRange, "select $__interval_ms ")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, "select 60000 ")
+ })
+
+ Convey("interpolate __timeFrom function", func() {
+ sql, err := Interpolate(query, timeRange, "select $__timeFrom()")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
+ })
+
+ Convey("interpolate __timeTo function", func() {
+ sql, err := Interpolate(query, timeRange, "select $__timeTo()")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
+ })
+
+ Convey("interpolate __unixEpochFrom function", func() {
+ sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix()))
+ })
+
+ Convey("interpolate __unixEpochTo function", func() {
+ sql, err := Interpolate(query, timeRange, "select $__unixEpochTo()")
+ So(err, ShouldBeNil)
+
+ So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
+ })
+
+ })
+
Convey("Given row values with time.Time as time columns", func() {
var nilPointer *time.Time
diff --git a/pkg/tsdb/stackdriver/annotation_query.go b/pkg/tsdb/stackdriver/annotation_query.go
new file mode 100644
index 00000000000..db35171ad70
--- /dev/null
+++ b/pkg/tsdb/stackdriver/annotation_query.go
@@ -0,0 +1,120 @@
+package stackdriver
+
+import (
+ "context"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/grafana/grafana/pkg/tsdb"
+)
+
+func (e *StackdriverExecutor) executeAnnotationQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+ result := &tsdb.Response{
+ Results: make(map[string]*tsdb.QueryResult),
+ }
+
+ firstQuery := tsdbQuery.Queries[0]
+
+ queries, err := e.buildQueries(tsdbQuery)
+ if err != nil {
+ return nil, err
+ }
+
+ queryRes, resp, err := e.executeQuery(ctx, queries[0], tsdbQuery)
+ if err != nil {
+ return nil, err
+ }
+ title := firstQuery.Model.Get("title").MustString()
+ text := firstQuery.Model.Get("text").MustString()
+ tags := firstQuery.Model.Get("tags").MustString()
+ err = e.parseToAnnotations(queryRes, resp, queries[0], title, text, tags)
+ result.Results[firstQuery.RefId] = queryRes
+
+ return result, err
+}
+
+func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery, title string, text string, tags string) error {
+ annotations := make([]map[string]string, 0)
+
+ for _, series := range data.TimeSeries {
+ // reverse the order to be ascending
+ for i := len(series.Points) - 1; i >= 0; i-- {
+ point := series.Points[i]
+ value := strconv.FormatFloat(point.Value.DoubleValue, 'f', 6, 64)
+ if series.ValueType == "STRING" {
+ value = point.Value.StringValue
+ }
+ annotation := make(map[string]string)
+ annotation["time"] = point.Interval.EndTime.UTC().Format(time.RFC3339)
+ annotation["title"] = formatAnnotationText(title, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels)
+ annotation["tags"] = tags
+ annotation["text"] = formatAnnotationText(text, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels)
+ annotations = append(annotations, annotation)
+ }
+ }
+
+ transformAnnotationToTable(annotations, queryRes)
+ return nil
+}
+
+func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) {
+ table := &tsdb.Table{
+ Columns: make([]tsdb.TableColumn, 4),
+ Rows: make([]tsdb.RowValues, 0),
+ }
+ table.Columns[0].Text = "time"
+ table.Columns[1].Text = "title"
+ table.Columns[2].Text = "tags"
+ table.Columns[3].Text = "text"
+
+ for _, r := range data {
+ values := make([]interface{}, 4)
+ values[0] = r["time"]
+ values[1] = r["title"]
+ values[2] = r["tags"]
+ values[3] = r["text"]
+ table.Rows = append(table.Rows, values)
+ }
+ result.Tables = append(result.Tables, table)
+ result.Meta.Set("rowCount", len(data))
+ slog.Info("anno", "len", len(data))
+}
+
+func formatAnnotationText(annotationText string, pointValue string, metricType string, metricLabels map[string]string, resourceLabels map[string]string) string {
+ result := legendKeyFormat.ReplaceAllFunc([]byte(annotationText), func(in []byte) []byte {
+ metaPartName := strings.Replace(string(in), "{{", "", 1)
+ metaPartName = strings.Replace(metaPartName, "}}", "", 1)
+ metaPartName = strings.TrimSpace(metaPartName)
+
+ if metaPartName == "metric.type" {
+ return []byte(metricType)
+ }
+
+ metricPart := replaceWithMetricPart(metaPartName, metricType)
+
+ if metricPart != nil {
+ return metricPart
+ }
+
+ if metaPartName == "metric.value" {
+ return []byte(pointValue)
+ }
+
+ metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1)
+
+ if val, exists := metricLabels[metaPartName]; exists {
+ return []byte(val)
+ }
+
+ metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1)
+
+ if val, exists := resourceLabels[metaPartName]; exists {
+ return []byte(val)
+ }
+
+ return in
+ })
+
+ return string(result)
+}
diff --git a/pkg/tsdb/stackdriver/annotation_query_test.go b/pkg/tsdb/stackdriver/annotation_query_test.go
new file mode 100644
index 00000000000..8229470d665
--- /dev/null
+++ b/pkg/tsdb/stackdriver/annotation_query_test.go
@@ -0,0 +1,33 @@
+package stackdriver
+
+import (
+ "testing"
+
+ "github.com/grafana/grafana/pkg/components/simplejson"
+ "github.com/grafana/grafana/pkg/tsdb"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestStackdriverAnnotationQuery(t *testing.T) {
+ Convey("Stackdriver Annotation Query Executor", t, func() {
+ executor := &StackdriverExecutor{}
+ Convey("When parsing the stackdriver api response", func() {
+ data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
+ So(err, ShouldBeNil)
+ So(len(data.TimeSeries), ShouldEqual, 3)
+
+ res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"}
+ query := &StackdriverQuery{}
+ err = executor.parseToAnnotations(res, data, query, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag")
+ So(err, ShouldBeNil)
+
+ Convey("Should return annotations table", func() {
+ So(len(res.Tables), ShouldEqual, 1)
+ So(len(res.Tables[0].Rows), ShouldEqual, 9)
+ So(res.Tables[0].Rows[0][1], ShouldEqual, "atitle collector-asia-east-1 9.856650")
+ So(res.Tables[0].Rows[0][3], ShouldEqual, "atext asia-east1-a")
+ })
+ })
+ })
+}
diff --git a/pkg/tsdb/stackdriver/stackdriver.go b/pkg/tsdb/stackdriver/stackdriver.go
new file mode 100644
index 00000000000..96242dfdec4
--- /dev/null
+++ b/pkg/tsdb/stackdriver/stackdriver.go
@@ -0,0 +1,567 @@
+package stackdriver
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "math"
+ "net/http"
+ "net/url"
+ "path"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ "golang.org/x/net/context/ctxhttp"
+
+ "github.com/grafana/grafana/pkg/api/pluginproxy"
+ "github.com/grafana/grafana/pkg/components/null"
+ "github.com/grafana/grafana/pkg/components/simplejson"
+ "github.com/grafana/grafana/pkg/log"
+ "github.com/grafana/grafana/pkg/models"
+ "github.com/grafana/grafana/pkg/plugins"
+ "github.com/grafana/grafana/pkg/setting"
+ "github.com/grafana/grafana/pkg/tsdb"
+ "github.com/opentracing/opentracing-go"
+)
+
+var (
+ slog log.Logger
+ legendKeyFormat *regexp.Regexp
+ metricNameFormat *regexp.Regexp
+)
+
+// StackdriverExecutor executes queries for the Stackdriver datasource
+type StackdriverExecutor struct {
+ httpClient *http.Client
+ dsInfo *models.DataSource
+}
+
+// NewStackdriverExecutor initializes a http client
+func NewStackdriverExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+ httpClient, err := dsInfo.GetHttpClient()
+ if err != nil {
+ return nil, err
+ }
+
+ return &StackdriverExecutor{
+ httpClient: httpClient,
+ dsInfo: dsInfo,
+ }, nil
+}
+
+func init() {
+ slog = log.New("tsdb.stackdriver")
+ tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewStackdriverExecutor)
+ legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
+ metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.googleapis\.com/(.+)`)
+}
+
+// Query takes in the frontend queries, parses them into the Stackdriver query format
+// executes the queries against the Stackdriver API and parses the response into
+// the time series or table format
+func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+ var result *tsdb.Response
+ var err error
+ queryType := tsdbQuery.Queries[0].Model.Get("type").MustString("")
+
+ switch queryType {
+ case "annotationQuery":
+ result, err = e.executeAnnotationQuery(ctx, tsdbQuery)
+ case "timeSeriesQuery":
+ fallthrough
+ default:
+ result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery)
+ }
+
+ return result, err
+}
+
+func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+ result := &tsdb.Response{
+ Results: make(map[string]*tsdb.QueryResult),
+ }
+
+ queries, err := e.buildQueries(tsdbQuery)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, query := range queries {
+ queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery)
+ if err != nil {
+ return nil, err
+ }
+ err = e.parseResponse(queryRes, resp, query)
+ if err != nil {
+ queryRes.Error = err
+ }
+ result.Results[query.RefID] = queryRes
+ }
+
+ return result, nil
+}
+
+func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) {
+ stackdriverQueries := []*StackdriverQuery{}
+
+ startTime, err := tsdbQuery.TimeRange.ParseFrom()
+ if err != nil {
+ return nil, err
+ }
+
+ endTime, err := tsdbQuery.TimeRange.ParseTo()
+ if err != nil {
+ return nil, err
+ }
+
+ durationSeconds := int(endTime.Sub(startTime).Seconds())
+
+ for _, query := range tsdbQuery.Queries {
+ var target string
+
+ metricType := query.Model.Get("metricType").MustString()
+ filterParts := query.Model.Get("filters").MustArray()
+
+ params := url.Values{}
+ params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339))
+ params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))
+ params.Add("filter", buildFilterString(metricType, filterParts))
+ params.Add("view", query.Model.Get("view").MustString("FULL"))
+ setAggParams(¶ms, query, durationSeconds)
+
+ target = params.Encode()
+
+ if setting.Env == setting.DEV {
+ slog.Debug("Stackdriver request", "params", params)
+ }
+
+ groupBys := query.Model.Get("groupBys").MustArray()
+ groupBysAsStrings := make([]string, 0)
+ for _, groupBy := range groupBys {
+ groupBysAsStrings = append(groupBysAsStrings, groupBy.(string))
+ }
+
+ aliasBy := query.Model.Get("aliasBy").MustString()
+
+ stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{
+ Target: target,
+ Params: params,
+ RefID: query.RefId,
+ GroupBys: groupBysAsStrings,
+ AliasBy: aliasBy,
+ })
+ }
+
+ return stackdriverQueries, nil
+}
+
+func reverse(s string) string {
+ chars := []rune(s)
+ for i, j := 0, len(chars)-1; i < j; i, j = i+1, j-1 {
+ chars[i], chars[j] = chars[j], chars[i]
+ }
+ return string(chars)
+}
+
+func interpolateFilterWildcards(value string) string {
+ re := regexp.MustCompile("[*]")
+ matches := len(re.FindAllStringIndex(value, -1))
+ if matches == 2 && strings.HasSuffix(value, "*") && strings.HasPrefix(value, "*") {
+ value = strings.Replace(value, "*", "", -1)
+ value = fmt.Sprintf(`has_substring("%s")`, value)
+ } else if matches == 1 && strings.HasPrefix(value, "*") {
+ value = strings.Replace(value, "*", "", 1)
+ value = fmt.Sprintf(`ends_with("%s")`, value)
+ } else if matches == 1 && strings.HasSuffix(value, "*") {
+ value = reverse(strings.Replace(reverse(value), "*", "", 1))
+ value = fmt.Sprintf(`starts_with("%s")`, value)
+ } else if matches != 0 {
+ re := regexp.MustCompile(`[-\/^$+?.()|[\]{}]`)
+ value = string(re.ReplaceAllFunc([]byte(value), func(in []byte) []byte {
+ return []byte(strings.Replace(string(in), string(in), `\\`+string(in), 1))
+ }))
+ value = strings.Replace(value, "*", ".*", -1)
+ value = strings.Replace(value, `"`, `\\"`, -1)
+ value = fmt.Sprintf(`monitoring.regex.full_match("^%s$")`, value)
+ }
+
+ return value
+}
+
+func buildFilterString(metricType string, filterParts []interface{}) string {
+ filterString := ""
+ for i, part := range filterParts {
+ mod := i % 4
+ if part == "AND" {
+ filterString += " "
+ } else if mod == 2 {
+ operator := filterParts[i-1]
+ if operator == "=~" || operator == "!=~" {
+ filterString = reverse(strings.Replace(reverse(filterString), "~", "", 1))
+ filterString += fmt.Sprintf(`monitoring.regex.full_match("%s")`, part)
+ } else if strings.Contains(part.(string), "*") {
+ filterString += interpolateFilterWildcards(part.(string))
+ } else {
+ filterString += fmt.Sprintf(`"%s"`, part)
+ }
+ } else {
+ filterString += part.(string)
+ }
+ }
+ return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ")
+}
+
+func setAggParams(params *url.Values, query *tsdb.Query, durationSeconds int) {
+ primaryAggregation := query.Model.Get("primaryAggregation").MustString()
+ perSeriesAligner := query.Model.Get("perSeriesAligner").MustString()
+ alignmentPeriod := query.Model.Get("alignmentPeriod").MustString()
+
+ if primaryAggregation == "" {
+ primaryAggregation = "REDUCE_NONE"
+ }
+
+ if perSeriesAligner == "" {
+ perSeriesAligner = "ALIGN_MEAN"
+ }
+
+ if alignmentPeriod == "grafana-auto" || alignmentPeriod == "" {
+ alignmentPeriodValue := int(math.Max(float64(query.IntervalMs)/1000, 60.0))
+ alignmentPeriod = "+" + strconv.Itoa(alignmentPeriodValue) + "s"
+ }
+
+ if alignmentPeriod == "stackdriver-auto" {
+ alignmentPeriodValue := int(math.Max(float64(durationSeconds), 60.0))
+ if alignmentPeriodValue < 60*60*23 {
+ alignmentPeriod = "+60s"
+ } else if alignmentPeriodValue < 60*60*24*6 {
+ alignmentPeriod = "+300s"
+ } else {
+ alignmentPeriod = "+3600s"
+ }
+ }
+
+ re := regexp.MustCompile("[0-9]+")
+ seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod), 10, 64)
+ if err != nil || seconds > 3600 {
+ alignmentPeriod = "+3600s"
+ }
+
+ params.Add("aggregation.crossSeriesReducer", primaryAggregation)
+ params.Add("aggregation.perSeriesAligner", perSeriesAligner)
+ params.Add("aggregation.alignmentPeriod", alignmentPeriod)
+
+ groupBys := query.Model.Get("groupBys").MustArray()
+ if len(groupBys) > 0 {
+ for i := 0; i < len(groupBys); i++ {
+ params.Add("aggregation.groupByFields", groupBys[i].(string))
+ }
+ }
+}
+
+func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, StackdriverResponse, error) {
+ queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}
+
+ req, err := e.createRequest(ctx, e.dsInfo)
+ if err != nil {
+ queryResult.Error = err
+ return queryResult, StackdriverResponse{}, nil
+ }
+
+ req.URL.RawQuery = query.Params.Encode()
+ queryResult.Meta.Set("rawQuery", req.URL.RawQuery)
+ alignmentPeriod, ok := req.URL.Query()["aggregation.alignmentPeriod"]
+
+ if ok {
+ re := regexp.MustCompile("[0-9]+")
+ seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod[0]), 10, 64)
+ if err == nil {
+ queryResult.Meta.Set("alignmentPeriod", seconds)
+ }
+ }
+
+ span, ctx := opentracing.StartSpanFromContext(ctx, "stackdriver query")
+ span.SetTag("target", query.Target)
+ span.SetTag("from", tsdbQuery.TimeRange.From)
+ span.SetTag("until", tsdbQuery.TimeRange.To)
+ span.SetTag("datasource_id", e.dsInfo.Id)
+ span.SetTag("org_id", e.dsInfo.OrgId)
+
+ defer span.Finish()
+
+ opentracing.GlobalTracer().Inject(
+ span.Context(),
+ opentracing.HTTPHeaders,
+ opentracing.HTTPHeadersCarrier(req.Header))
+
+ res, err := ctxhttp.Do(ctx, e.httpClient, req)
+ if err != nil {
+ queryResult.Error = err
+ return queryResult, StackdriverResponse{}, nil
+ }
+
+ data, err := e.unmarshalResponse(res)
+ if err != nil {
+ queryResult.Error = err
+ return queryResult, StackdriverResponse{}, nil
+ }
+
+ return queryResult, data, nil
+}
+
+func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (StackdriverResponse, error) {
+ body, err := ioutil.ReadAll(res.Body)
+ defer res.Body.Close()
+ if err != nil {
+ return StackdriverResponse{}, err
+ }
+
+ if res.StatusCode/100 != 2 {
+ slog.Error("Request failed", "status", res.Status, "body", string(body))
+ return StackdriverResponse{}, fmt.Errorf(string(body))
+ }
+
+ var data StackdriverResponse
+ err = json.Unmarshal(body, &data)
+ if err != nil {
+ slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body))
+ return StackdriverResponse{}, err
+ }
+
+ return data, nil
+}
+
+func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error {
+ metricLabels := make(map[string][]string)
+ resourceLabels := make(map[string][]string)
+
+ for _, series := range data.TimeSeries {
+ points := make([]tsdb.TimePoint, 0)
+
+ defaultMetricName := series.Metric.Type
+
+ for key, value := range series.Metric.Labels {
+ if !containsLabel(metricLabels[key], value) {
+ metricLabels[key] = append(metricLabels[key], value)
+ }
+ if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) {
+ defaultMetricName += " " + value
+ }
+ }
+
+ for key, value := range series.Resource.Labels {
+ if !containsLabel(resourceLabels[key], value) {
+ resourceLabels[key] = append(resourceLabels[key], value)
+ }
+ if containsLabel(query.GroupBys, "resource.label."+key) {
+ defaultMetricName += " " + value
+ }
+ }
+
+ // reverse the order to be ascending
+ if series.ValueType != "DISTRIBUTION" {
+ for i := len(series.Points) - 1; i >= 0; i-- {
+ point := series.Points[i]
+ value := point.Value.DoubleValue
+
+ if series.ValueType == "INT64" {
+ parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
+ if err == nil {
+ value = parsedValue
+ }
+ }
+
+ if series.ValueType == "BOOL" {
+ if point.Value.BoolValue {
+ value = 1
+ } else {
+ value = 0
+ }
+ }
+
+ points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
+ }
+
+ metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
+
+ queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
+ Name: metricName,
+ Points: points,
+ })
+ } else {
+ buckets := make(map[int]*tsdb.TimeSeries)
+
+ for i := len(series.Points) - 1; i >= 0; i-- {
+ point := series.Points[i]
+ if len(point.Value.DistributionValue.BucketCounts) == 0 {
+ continue
+ }
+ maxKey := 0
+ for i := 0; i < len(point.Value.DistributionValue.BucketCounts); i++ {
+ value, err := strconv.ParseFloat(point.Value.DistributionValue.BucketCounts[i], 64)
+ if err != nil {
+ continue
+ }
+ if _, ok := buckets[i]; !ok {
+ // set lower bounds
+ // https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries#Distribution
+ bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
+ additionalLabels := map[string]string{"bucket": bucketBound}
+ buckets[i] = &tsdb.TimeSeries{
+ Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
+ Points: make([]tsdb.TimePoint, 0),
+ }
+ if maxKey < i {
+ maxKey = i
+ }
+ }
+ buckets[i].Points = append(buckets[i].Points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
+ }
+
+ // fill empty bucket
+ for i := 0; i < maxKey; i++ {
+ if _, ok := buckets[i]; !ok {
+ bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
+ additionalLabels := map[string]string{"bucket": bucketBound}
+ buckets[i] = &tsdb.TimeSeries{
+ Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
+ Points: make([]tsdb.TimePoint, 0),
+ }
+ }
+ }
+ }
+ for i := 0; i < len(buckets); i++ {
+ queryRes.Series = append(queryRes.Series, buckets[i])
+ }
+ }
+ }
+
+ queryRes.Meta.Set("resourceLabels", resourceLabels)
+ queryRes.Meta.Set("metricLabels", metricLabels)
+ queryRes.Meta.Set("groupBys", query.GroupBys)
+
+ return nil
+}
+
+func containsLabel(labels []string, newLabel string) bool {
+ for _, val := range labels {
+ if val == newLabel {
+ return true
+ }
+ }
+ return false
+}
+
+func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
+ if query.AliasBy == "" {
+ return defaultMetricName
+ }
+
+ result := legendKeyFormat.ReplaceAllFunc([]byte(query.AliasBy), func(in []byte) []byte {
+ metaPartName := strings.Replace(string(in), "{{", "", 1)
+ metaPartName = strings.Replace(metaPartName, "}}", "", 1)
+ metaPartName = strings.TrimSpace(metaPartName)
+
+ if metaPartName == "metric.type" {
+ return []byte(metricType)
+ }
+
+ metricPart := replaceWithMetricPart(metaPartName, metricType)
+
+ if metricPart != nil {
+ return metricPart
+ }
+
+ metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1)
+
+ if val, exists := metricLabels[metaPartName]; exists {
+ return []byte(val)
+ }
+
+ metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1)
+
+ if val, exists := resourceLabels[metaPartName]; exists {
+ return []byte(val)
+ }
+
+ if val, exists := additionalLabels[metaPartName]; exists {
+ return []byte(val)
+ }
+
+ return in
+ })
+
+ return string(result)
+}
+
+func replaceWithMetricPart(metaPartName string, metricType string) []byte {
+ // https://cloud.google.com/monitoring/api/v3/metrics-details#label_names
+ shortMatches := metricNameFormat.FindStringSubmatch(metricType)
+
+ if metaPartName == "metric.name" {
+ if len(shortMatches) > 0 {
+ return []byte(shortMatches[2])
+ }
+ }
+
+ if metaPartName == "metric.service" {
+ if len(shortMatches) > 0 {
+ return []byte(shortMatches[1])
+ }
+ }
+
+ return nil
+}
+
+func calcBucketBound(bucketOptions StackdriverBucketOptions, n int) string {
+ bucketBound := "0"
+ if n == 0 {
+ return bucketBound
+ }
+
+ if bucketOptions.LinearBuckets != nil {
+ bucketBound = strconv.FormatInt(bucketOptions.LinearBuckets.Offset+(bucketOptions.LinearBuckets.Width*int64(n-1)), 10)
+ } else if bucketOptions.ExponentialBuckets != nil {
+ bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10)
+ } else if bucketOptions.ExplicitBuckets != nil {
+ bucketBound = strconv.FormatInt(bucketOptions.ExplicitBuckets.Bounds[(n-1)], 10)
+ }
+ return bucketBound
+}
+
+func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
+ u, _ := url.Parse(dsInfo.Url)
+ u.Path = path.Join(u.Path, "render")
+
+ req, err := http.NewRequest(http.MethodGet, "https://monitoring.googleapis.com/", nil)
+ if err != nil {
+ slog.Error("Failed to create request", "error", err)
+ return nil, fmt.Errorf("Failed to create request. error: %v", err)
+ }
+
+ req.Header.Set("Content-Type", "application/json")
+ req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
+
+ // find plugin
+ plugin, ok := plugins.DataSources[dsInfo.Type]
+ if !ok {
+ return nil, errors.New("Unable to find datasource plugin Stackdriver")
+ }
+ projectName := dsInfo.JsonData.Get("defaultProject").MustString()
+ proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries")
+
+ var stackdriverRoute *plugins.AppPluginRoute
+ for _, route := range plugin.Routes {
+ if route.Path == "stackdriver" {
+ stackdriverRoute = route
+ break
+ }
+ }
+
+ pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo)
+
+ return req, nil
+}
diff --git a/pkg/tsdb/stackdriver/stackdriver_test.go b/pkg/tsdb/stackdriver/stackdriver_test.go
new file mode 100644
index 00000000000..784bf4a7fbb
--- /dev/null
+++ b/pkg/tsdb/stackdriver/stackdriver_test.go
@@ -0,0 +1,490 @@
+package stackdriver
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "math"
+ "strconv"
+ "testing"
+ "time"
+
+ "github.com/grafana/grafana/pkg/components/simplejson"
+ "github.com/grafana/grafana/pkg/tsdb"
+
+ . "github.com/smartystreets/goconvey/convey"
+)
+
+func TestStackdriver(t *testing.T) {
+ Convey("Stackdriver", t, func() {
+ executor := &StackdriverExecutor{}
+
+ Convey("Parse queries from frontend and build Stackdriver API queries", func() {
+ fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
+ tsdbQuery := &tsdb.TsdbQuery{
+ TimeRange: &tsdb.TimeRange{
+ From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+ To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
+ },
+ Queries: []*tsdb.Query{
+ {
+ Model: simplejson.NewFromAny(map[string]interface{}{
+ "metricType": "a/metric/type",
+ "view": "FULL",
+ "aliasBy": "testalias",
+ "type": "timeSeriesQuery",
+ }),
+ RefId: "A",
+ },
+ },
+ }
+
+ Convey("and query has no aggregation set", func() {
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+
+ So(len(queries), ShouldEqual, 1)
+ So(queries[0].RefID, ShouldEqual, "A")
+ So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
+ So(len(queries[0].Params), ShouldEqual, 7)
+ So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
+ So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
+ So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
+ So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
+ So(queries[0].Params["view"][0], ShouldEqual, "FULL")
+ So(queries[0].AliasBy, ShouldEqual, "testalias")
+ })
+
+ Convey("and query has filters", func() {
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "metricType": "a/metric/type",
+ "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"},
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(len(queries), ShouldEqual, 1)
+ So(queries[0].Params["filter"][0], ShouldEqual, `metric.type="a/metric/type" key="value" key2="value2"`)
+ })
+
+ Convey("and alignmentPeriod is set to grafana-auto", func() {
+ Convey("and IntervalMs is larger than 60000", func() {
+ tsdbQuery.Queries[0].IntervalMs = 1000000
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "alignmentPeriod": "grafana-auto",
+ "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"},
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+1000s`)
+ })
+ Convey("and IntervalMs is less than 60000", func() {
+ tsdbQuery.Queries[0].IntervalMs = 30000
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "alignmentPeriod": "grafana-auto",
+ "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"},
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
+ })
+ })
+
+ Convey("and alignmentPeriod is set to stackdriver-auto", func() {
+ Convey("and range is two hours", func() {
+ tsdbQuery.TimeRange.From = "1538033322461"
+ tsdbQuery.TimeRange.To = "1538040522461"
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "target": "target",
+ "alignmentPeriod": "stackdriver-auto",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
+ })
+
+ Convey("and range is 22 hours", func() {
+ tsdbQuery.TimeRange.From = "1538034524922"
+ tsdbQuery.TimeRange.To = "1538113724922"
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "target": "target",
+ "alignmentPeriod": "stackdriver-auto",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
+ })
+
+ Convey("and range is 23 hours", func() {
+ tsdbQuery.TimeRange.From = "1538034567985"
+ tsdbQuery.TimeRange.To = "1538117367985"
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "target": "target",
+ "alignmentPeriod": "stackdriver-auto",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`)
+ })
+
+ Convey("and range is 7 days", func() {
+ tsdbQuery.TimeRange.From = "1538036324073"
+ tsdbQuery.TimeRange.To = "1538641124073"
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "target": "target",
+ "alignmentPeriod": "stackdriver-auto",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`)
+ })
+ })
+
+ Convey("and alignmentPeriod is set in frontend", func() {
+ Convey("and alignment period is too big", func() {
+ tsdbQuery.Queries[0].IntervalMs = 1000
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "alignmentPeriod": "+360000s",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`)
+ })
+
+ Convey("and alignment period is within accepted range", func() {
+ tsdbQuery.Queries[0].IntervalMs = 1000
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "alignmentPeriod": "+600s",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+600s`)
+ })
+ })
+
+ Convey("and query has aggregation mean set", func() {
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "metricType": "a/metric/type",
+ "primaryAggregation": "REDUCE_MEAN",
+ "view": "FULL",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+
+ So(len(queries), ShouldEqual, 1)
+ So(queries[0].RefID, ShouldEqual, "A")
+ So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_MEAN&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
+ So(len(queries[0].Params), ShouldEqual, 7)
+ So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
+ So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
+ So(queries[0].Params["aggregation.crossSeriesReducer"][0], ShouldEqual, "REDUCE_MEAN")
+ So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
+ So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, "+60s")
+ So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
+ So(queries[0].Params["view"][0], ShouldEqual, "FULL")
+ })
+
+ Convey("and query has group bys", func() {
+ tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
+ "metricType": "a/metric/type",
+ "primaryAggregation": "REDUCE_NONE",
+ "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"},
+ "view": "FULL",
+ })
+
+ queries, err := executor.buildQueries(tsdbQuery)
+ So(err, ShouldBeNil)
+
+ So(len(queries), ShouldEqual, 1)
+ So(queries[0].RefID, ShouldEqual, "A")
+ So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
+ So(len(queries[0].Params), ShouldEqual, 8)
+ So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
+ So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
+ So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
+ So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1")
+ So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2")
+ So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
+ So(queries[0].Params["view"][0], ShouldEqual, "FULL")
+ })
+
+ })
+
+ Convey("Parse stackdriver response in the time series format", func() {
+ Convey("when data from query aggregated to one time series", func() {
+ data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
+ So(err, ShouldBeNil)
+ So(len(data.TimeSeries), ShouldEqual, 1)
+
+ res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+ query := &StackdriverQuery{}
+ err = executor.parseResponse(res, data, query)
+ So(err, ShouldBeNil)
+
+ So(len(res.Series), ShouldEqual, 1)
+ So(res.Series[0].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count")
+ So(len(res.Series[0].Points), ShouldEqual, 3)
+
+ Convey("timestamps should be in ascending order", func() {
+ So(res.Series[0].Points[0][0].Float64, ShouldEqual, 0.05)
+ So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536670020000)
+
+ So(res.Series[0].Points[1][0].Float64, ShouldEqual, 1.05)
+ So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536670080000)
+
+ So(res.Series[0].Points[2][0].Float64, ShouldEqual, 1.0666666666667)
+ So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536670260000)
+ })
+ })
+
+ Convey("when data from query with no aggregation", func() {
+ data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
+ So(err, ShouldBeNil)
+ So(len(data.TimeSeries), ShouldEqual, 3)
+
+ res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+ query := &StackdriverQuery{}
+ err = executor.parseResponse(res, data, query)
+ So(err, ShouldBeNil)
+
+ Convey("Should add labels to metric name", func() {
+ So(len(res.Series), ShouldEqual, 3)
+ So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1")
+ So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1")
+ So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1")
+ })
+
+ Convey("Should parse to time series", func() {
+ So(len(res.Series[0].Points), ShouldEqual, 3)
+ So(res.Series[0].Points[0][0].Float64, ShouldEqual, 9.8566497180145)
+ So(res.Series[0].Points[1][0].Float64, ShouldEqual, 9.7323568146676)
+ So(res.Series[0].Points[2][0].Float64, ShouldEqual, 9.7730520330369)
+ })
+
+ Convey("Should add meta for labels to the response", func() {
+ metricLabels := res.Meta.Get("metricLabels").Interface().(map[string][]string)
+ So(metricLabels, ShouldNotBeNil)
+ So(len(metricLabels["instance_name"]), ShouldEqual, 3)
+ So(metricLabels["instance_name"][0], ShouldEqual, "collector-asia-east-1")
+ So(metricLabels["instance_name"][1], ShouldEqual, "collector-europe-west-1")
+ So(metricLabels["instance_name"][2], ShouldEqual, "collector-us-east-1")
+
+ resourceLabels := res.Meta.Get("resourceLabels").Interface().(map[string][]string)
+ So(resourceLabels, ShouldNotBeNil)
+ So(len(resourceLabels["zone"]), ShouldEqual, 3)
+ So(resourceLabels["zone"][0], ShouldEqual, "asia-east1-a")
+ So(resourceLabels["zone"][1], ShouldEqual, "europe-west1-b")
+ So(resourceLabels["zone"][2], ShouldEqual, "us-east1-b")
+
+ So(len(resourceLabels["project_id"]), ShouldEqual, 1)
+ So(resourceLabels["project_id"][0], ShouldEqual, "grafana-prod")
+ })
+ })
+
+ Convey("when data from query with no aggregation and group bys", func() {
+ data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
+ So(err, ShouldBeNil)
+ So(len(data.TimeSeries), ShouldEqual, 3)
+
+ res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+ query := &StackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
+ err = executor.parseResponse(res, data, query)
+ So(err, ShouldBeNil)
+
+ Convey("Should add instance name and zone labels to metric name", func() {
+ So(len(res.Series), ShouldEqual, 3)
+ So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a")
+ So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b")
+ So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b")
+ })
+ })
+
+ Convey("when data from query with no aggregation and alias by", func() {
+ data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
+ So(err, ShouldBeNil)
+ So(len(data.TimeSeries), ShouldEqual, 3)
+
+ res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+
+ Convey("and the alias pattern is for metric type, a metric label and a resource label", func() {
+
+ query := &StackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
+ err = executor.parseResponse(res, data, query)
+ So(err, ShouldBeNil)
+
+ Convey("Should use alias by formatting and only show instance name", func() {
+ So(len(res.Series), ShouldEqual, 3)
+ So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a")
+ So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b")
+ So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b")
+ })
+ })
+
+ Convey("and the alias pattern is for metric name", func() {
+
+ query := &StackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
+ err = executor.parseResponse(res, data, query)
+ So(err, ShouldBeNil)
+
+ Convey("Should use alias by formatting and only show instance name", func() {
+ So(len(res.Series), ShouldEqual, 3)
+ So(res.Series[0].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
+ So(res.Series[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
+ So(res.Series[2].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
+ })
+ })
+ })
+
+ Convey("when data from query is distribution", func() {
+ data, err := loadTestFile("./test-data/3-series-response-distribution.json")
+ So(err, ShouldBeNil)
+ So(len(data.TimeSeries), ShouldEqual, 1)
+
+ res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
+ query := &StackdriverQuery{AliasBy: "{{bucket}}"}
+ err = executor.parseResponse(res, data, query)
+ So(err, ShouldBeNil)
+
+ So(len(res.Series), ShouldEqual, 11)
+ for i := 0; i < 11; i++ {
+ if i == 0 {
+ So(res.Series[i].Name, ShouldEqual, "0")
+ } else {
+ So(res.Series[i].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10))
+ }
+ So(len(res.Series[i].Points), ShouldEqual, 3)
+ }
+
+ Convey("timestamps should be in ascending order", func() {
+ So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536668940000)
+ So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536669000000)
+ So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536669060000)
+ })
+
+ Convey("value should be correct", func() {
+ So(res.Series[8].Points[0][0].Float64, ShouldEqual, 1)
+ So(res.Series[9].Points[0][0].Float64, ShouldEqual, 1)
+ So(res.Series[10].Points[0][0].Float64, ShouldEqual, 1)
+ So(res.Series[8].Points[1][0].Float64, ShouldEqual, 0)
+ So(res.Series[9].Points[1][0].Float64, ShouldEqual, 0)
+ So(res.Series[10].Points[1][0].Float64, ShouldEqual, 1)
+ So(res.Series[8].Points[2][0].Float64, ShouldEqual, 0)
+ So(res.Series[9].Points[2][0].Float64, ShouldEqual, 1)
+ So(res.Series[10].Points[2][0].Float64, ShouldEqual, 0)
+ })
+ })
+
+ })
+
+ Convey("when interpolating filter wildcards", func() {
+ Convey("and wildcard is used in the beginning and the end of the word", func() {
+ Convey("and there's no wildcard in the middle of the word", func() {
+ value := interpolateFilterWildcards("*-central1*")
+ So(value, ShouldEqual, `has_substring("-central1")`)
+ })
+ Convey("and there is a wildcard in the middle of the word", func() {
+ value := interpolateFilterWildcards("*-cent*ral1*")
+ So(value, ShouldNotStartWith, `has_substring`)
+ })
+ })
+
+ Convey("and wildcard is used in the beginning of the word", func() {
+ Convey("and there is not a wildcard elsewhere in the word", func() {
+ value := interpolateFilterWildcards("*-central1")
+ So(value, ShouldEqual, `ends_with("-central1")`)
+ })
+ Convey("and there is a wildcard elsewhere in the word", func() {
+ value := interpolateFilterWildcards("*-cent*al1")
+ So(value, ShouldNotStartWith, `ends_with`)
+ })
+ })
+
+ Convey("and wildcard is used at the end of the word", func() {
+ Convey("and there is not a wildcard elsewhere in the word", func() {
+ value := interpolateFilterWildcards("us-central*")
+ So(value, ShouldEqual, `starts_with("us-central")`)
+ })
+ Convey("and there is a wildcard elsewhere in the word", func() {
+ value := interpolateFilterWildcards("*us-central*")
+ So(value, ShouldNotStartWith, `starts_with`)
+ })
+ })
+
+ Convey("and wildcard is used in the middle of the word", func() {
+ Convey("and there is only one wildcard", func() {
+ value := interpolateFilterWildcards("us-ce*tral1-b")
+ So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")`)
+ })
+
+ Convey("and there is more than one wildcard", func() {
+ value := interpolateFilterWildcards("us-ce*tra*1-b")
+ So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tra.*1\\-b$")`)
+ })
+ })
+
+ Convey("and wildcard is used in the middle of the word and in the beginning of the word", func() {
+ value := interpolateFilterWildcards("*s-ce*tral1-b")
+ So(value, ShouldEqual, `monitoring.regex.full_match("^.*s\\-ce.*tral1\\-b$")`)
+ })
+
+ Convey("and wildcard is used in the middle of the word and in the ending of the word", func() {
+ value := interpolateFilterWildcards("us-ce*tral1-*")
+ So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-.*$")`)
+ })
+
+ Convey("and no wildcard is used", func() {
+ value := interpolateFilterWildcards("us-central1-a}")
+ So(value, ShouldEqual, `us-central1-a}`)
+ })
+ })
+
+ Convey("when building filter string", func() {
+ Convey("and there's no regex operator", func() {
+ Convey("and there are wildcards in a filter value", func() {
+ filterParts := []interface{}{"zone", "=", "*-central1*"}
+ value := buildFilterString("somemetrictype", filterParts)
+ So(value, ShouldEqual, `metric.type="somemetrictype" zone=has_substring("-central1")`)
+ })
+
+ Convey("and there are no wildcards in any filter value", func() {
+ filterParts := []interface{}{"zone", "!=", "us-central1-a"}
+ value := buildFilterString("somemetrictype", filterParts)
+ So(value, ShouldEqual, `metric.type="somemetrictype" zone!="us-central1-a"`)
+ })
+ })
+
+ Convey("and there is a regex operator", func() {
+ filterParts := []interface{}{"zone", "=~", "us-central1-a~"}
+ value := buildFilterString("somemetrictype", filterParts)
+ Convey("it should remove the ~ character from the operator that belongs to the value", func() {
+ So(value, ShouldNotContainSubstring, `=~`)
+ So(value, ShouldContainSubstring, `zone=`)
+ })
+
+ Convey("it should insert monitoring.regex.full_match before filter value", func() {
+ So(value, ShouldContainSubstring, `zone=monitoring.regex.full_match("us-central1-a~")`)
+ })
+ })
+ })
+ })
+}
+
+func loadTestFile(path string) (StackdriverResponse, error) {
+ var data StackdriverResponse
+
+ jsonBody, err := ioutil.ReadFile(path)
+ if err != nil {
+ return data, err
+ }
+ err = json.Unmarshal(jsonBody, &data)
+ return data, err
+}
diff --git a/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json
new file mode 100644
index 00000000000..e1a84583cc4
--- /dev/null
+++ b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json
@@ -0,0 +1,46 @@
+{
+ "timeSeries": [
+ {
+ "metric": {
+ "type": "serviceruntime.googleapis.com\/api\/request_count"
+ },
+ "resource": {
+ "type": "consumed_api",
+ "labels": {
+ "project_id": "grafana-prod"
+ }
+ },
+ "metricKind": "GAUGE",
+ "valueType": "DOUBLE",
+ "points": [
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:51:00Z",
+ "endTime": "2018-09-11T12:51:00Z"
+ },
+ "value": {
+ "doubleValue": 1.0666666666667
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:48:00Z",
+ "endTime": "2018-09-11T12:48:00Z"
+ },
+ "value": {
+ "doubleValue": 1.05
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:47:00Z",
+ "endTime": "2018-09-11T12:47:00Z"
+ },
+ "value": {
+ "doubleValue": 0.05
+ }
+ }
+ ]
+ }
+ ]
+}
diff --git a/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json
new file mode 100644
index 00000000000..da615a168bf
--- /dev/null
+++ b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json
@@ -0,0 +1,145 @@
+{
+ "timeSeries": [
+ {
+ "metric": {
+ "labels": {
+ "instance_name": "collector-asia-east-1"
+ },
+ "type": "compute.googleapis.com\/instance\/cpu\/usage_time"
+ },
+ "resource": {
+ "type": "gce_instance",
+ "labels": {
+ "instance_id": "1119268429530133111",
+ "zone": "asia-east1-a",
+ "project_id": "grafana-prod"
+ }
+ },
+ "metricKind": "DELTA",
+ "valueType": "DOUBLE",
+ "points": [
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:30:00Z",
+ "endTime": "2018-09-11T12:31:00Z"
+ },
+ "value": {
+ "doubleValue": 9.7730520330369
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:29:00Z",
+ "endTime": "2018-09-11T12:30:00Z"
+ },
+ "value": {
+ "doubleValue": 9.7323568146676
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:28:00Z",
+ "endTime": "2018-09-11T12:29:00Z"
+ },
+ "value": {
+ "doubleValue": 9.8566497180145
+ }
+ }
+ ]
+ },
+ {
+ "metric": {
+ "labels": {
+ "instance_name": "collector-europe-west-1"
+ },
+ "type": "compute.googleapis.com\/instance\/cpu\/usage_time"
+ },
+ "resource": {
+ "type": "gce_instance",
+ "labels": {
+ "instance_id": "22241654114540837222",
+ "zone": "europe-west1-b",
+ "project_id": "grafana-prod"
+ }
+ },
+ "metricKind": "DELTA",
+ "valueType": "DOUBLE",
+ "points": [
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:30:00Z",
+ "endTime": "2018-09-11T12:31:00Z"
+ },
+ "value": {
+ "doubleValue": 8.8210971239023
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:29:00Z",
+ "endTime": "2018-09-11T12:30:00Z"
+ },
+ "value": {
+ "doubleValue": 8.9689492364414
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:28:00Z",
+ "endTime": "2018-09-11T12:29:00Z"
+ },
+ "value": {
+ "doubleValue": 9.0238475054502
+ }
+ }
+ ]
+ },
+ {
+ "metric": {
+ "labels": {
+ "instance_name": "collector-us-east-1"
+ },
+ "type": "compute.googleapis.com\/instance\/cpu\/usage_time"
+ },
+ "resource": {
+ "type": "gce_instance",
+ "labels": {
+ "instance_id": "3332264424035095333",
+ "zone": "us-east1-b",
+ "project_id": "grafana-prod"
+ }
+ },
+ "metricKind": "DELTA",
+ "valueType": "DOUBLE",
+ "points": [
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:30:00Z",
+ "endTime": "2018-09-11T12:31:00Z"
+ },
+ "value": {
+ "doubleValue": 30.807846801355
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:29:00Z",
+ "endTime": "2018-09-11T12:30:00Z"
+ },
+ "value": {
+ "doubleValue": 30.903974115849
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:28:00Z",
+ "endTime": "2018-09-11T12:29:00Z"
+ },
+ "value": {
+ "doubleValue": 30.829426143318
+ }
+ }
+ ]
+ }
+ ]
+}
diff --git a/pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json b/pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json
new file mode 100644
index 00000000000..8603f78eab4
--- /dev/null
+++ b/pkg/tsdb/stackdriver/test-data/3-series-response-distribution.json
@@ -0,0 +1,112 @@
+{
+ "timeSeries": [
+ {
+ "metric": {
+ "type": "loadbalancing.googleapis.com\/https\/backend_latencies"
+ },
+ "resource": {
+ "type": "https_lb_rule",
+ "labels": {
+ "project_id": "grafana-prod"
+ }
+ },
+ "metricKind": "DELTA",
+ "valueType": "DISTRIBUTION",
+ "points": [
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:30:00Z",
+ "endTime": "2018-09-11T12:31:00Z"
+ },
+ "value": {
+ "distributionValue": {
+ "count": "1",
+ "bucketOptions": {
+ "exponentialBuckets": {
+ "numFiniteBuckets": 10,
+ "growthFactor": 2,
+ "scale": 1
+ }
+ },
+ "bucketCounts": [
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "1",
+ "0"
+ ]
+ }
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:29:00Z",
+ "endTime": "2018-09-11T12:30:00Z"
+ },
+ "value": {
+ "distributionValue": {
+ "count": "1",
+ "bucketOptions": {
+ "exponentialBuckets": {
+ "numFiniteBuckets": 10,
+ "growthFactor": 2,
+ "scale": 1
+ }
+ },
+ "bucketCounts": [
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "1"
+ ]
+ }
+ }
+ },
+ {
+ "interval": {
+ "startTime": "2018-09-11T12:28:00Z",
+ "endTime": "2018-09-11T12:29:00Z"
+ },
+ "value": {
+ "distributionValue": {
+ "count": "3",
+ "bucketOptions": {
+ "exponentialBuckets": {
+ "numFiniteBuckets": 10,
+ "growthFactor": 2,
+ "scale": 1
+ }
+ },
+ "bucketCounts": [
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "0",
+ "1",
+ "1",
+ "1"
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+}
diff --git a/pkg/tsdb/stackdriver/types.go b/pkg/tsdb/stackdriver/types.go
new file mode 100644
index 00000000000..3821ce7ceda
--- /dev/null
+++ b/pkg/tsdb/stackdriver/types.go
@@ -0,0 +1,75 @@
+package stackdriver
+
+import (
+ "net/url"
+ "time"
+)
+
+// StackdriverQuery is the query that Grafana sends from the frontend
+type StackdriverQuery struct {
+ Target string
+ Params url.Values
+ RefID string
+ GroupBys []string
+ AliasBy string
+}
+
+type StackdriverBucketOptions struct {
+ LinearBuckets *struct {
+ NumFiniteBuckets int64 `json:"numFiniteBuckets"`
+ Width int64 `json:"width"`
+ Offset int64 `json:"offset"`
+ } `json:"linearBuckets"`
+ ExponentialBuckets *struct {
+ NumFiniteBuckets int64 `json:"numFiniteBuckets"`
+ GrowthFactor float64 `json:"growthFactor"`
+ Scale float64 `json:"scale"`
+ } `json:"exponentialBuckets"`
+ ExplicitBuckets *struct {
+ Bounds []int64 `json:"bounds"`
+ } `json:"explicitBuckets"`
+}
+
+// StackdriverResponse is the data returned from the external Google Stackdriver API
+type StackdriverResponse struct {
+ TimeSeries []struct {
+ Metric struct {
+ Labels map[string]string `json:"labels"`
+ Type string `json:"type"`
+ } `json:"metric"`
+ Resource struct {
+ Type string `json:"type"`
+ Labels map[string]string `json:"labels"`
+ } `json:"resource"`
+ MetricKind string `json:"metricKind"`
+ ValueType string `json:"valueType"`
+ Points []struct {
+ Interval struct {
+ StartTime time.Time `json:"startTime"`
+ EndTime time.Time `json:"endTime"`
+ } `json:"interval"`
+ Value struct {
+ DoubleValue float64 `json:"doubleValue"`
+ StringValue string `json:"stringValue"`
+ BoolValue bool `json:"boolValue"`
+ IntValue string `json:"int64Value"`
+ DistributionValue struct {
+ Count string `json:"count"`
+ Mean float64 `json:"mean"`
+ SumOfSquaredDeviation float64 `json:"sumOfSquaredDeviation"`
+ Range struct {
+ Min int `json:"min"`
+ Max int `json:"max"`
+ } `json:"range"`
+ BucketOptions StackdriverBucketOptions `json:"bucketOptions"`
+ BucketCounts []string `json:"bucketCounts"`
+ Exemplars []struct {
+ Value float64 `json:"value"`
+ Timestamp string `json:"timestamp"`
+ // attachments
+ } `json:"exemplars"`
+ } `json:"distributionValue"`
+ } `json:"value"`
+ } `json:"points"`
+ } `json:"timeSeries"`
+}
diff --git a/pkg/tsdb/testdata/scenarios.go b/pkg/tsdb/testdata/scenarios.go
index e907fa8aae0..421a907b5e9 100644
--- a/pkg/tsdb/testdata/scenarios.go
+++ b/pkg/tsdb/testdata/scenarios.go
@@ -95,27 +95,20 @@ func init() {
Id: "random_walk",
Name: "Random Walk",
- Handler: func(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult {
- timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch()
- to := tsdbQuery.TimeRange.GetToAsMsEpoch()
+ Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
+ return getRandomWalk(query, context)
+ },
+ })
- series := newSeriesForQuery(query)
-
- points := make(tsdb.TimeSeriesPoints, 0)
- walker := rand.Float64() * 100
-
- for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
- points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs)))
-
- walker += rand.Float64() - 0.5
- timeWalkerMs += query.IntervalMs
- }
-
- series.Points = points
-
- queryRes := tsdb.NewQueryResult()
- queryRes.Series = append(queryRes.Series, series)
- return queryRes
+ registerScenario(&Scenario{
+ Id: "slow_query",
+ Name: "Slow Query",
+ StringInput: "5s",
+ Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
+ stringInput := query.Model.Get("stringInput").MustString()
+ parsedInterval, _ := time.ParseDuration(stringInput)
+ time.Sleep(parsedInterval)
+ return getRandomWalk(query, context)
},
})
@@ -221,6 +214,57 @@ func init() {
return queryRes
},
})
+
+ registerScenario(&Scenario{
+ Id: "table_static",
+ Name: "Table Static",
+
+ Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
+ timeWalkerMs := context.TimeRange.GetFromAsMsEpoch()
+ to := context.TimeRange.GetToAsMsEpoch()
+
+ table := tsdb.Table{
+ Columns: []tsdb.TableColumn{
+ {Text: "Time"},
+ {Text: "Message"},
+ {Text: "Description"},
+ {Text: "Value"},
+ },
+ Rows: []tsdb.RowValues{},
+ }
+ for i := int64(0); i < 10 && timeWalkerMs < to; i++ {
+ table.Rows = append(table.Rows, tsdb.RowValues{float64(timeWalkerMs), "This is a message", "Description", 23.1})
+ timeWalkerMs += query.IntervalMs
+ }
+
+ queryRes := tsdb.NewQueryResult()
+ queryRes.Tables = append(queryRes.Tables, &table)
+ return queryRes
+ },
+ })
+}
+
+func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult {
+ timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch()
+ to := tsdbQuery.TimeRange.GetToAsMsEpoch()
+
+ series := newSeriesForQuery(query)
+
+ points := make(tsdb.TimeSeriesPoints, 0)
+ walker := rand.Float64() * 100
+
+ for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
+ points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs)))
+
+ walker += rand.Float64() - 0.5
+ timeWalkerMs += query.IntervalMs
+ }
+
+ series.Points = points
+
+ queryRes := tsdb.NewQueryResult()
+ queryRes.Series = append(queryRes.Series, series)
+ return queryRes
}
func registerScenario(scenario *Scenario) {
diff --git a/pkg/util/md5_test.go b/pkg/util/md5_test.go
index 1338d42bb51..43c685b8763 100644
--- a/pkg/util/md5_test.go
+++ b/pkg/util/md5_test.go
@@ -3,14 +3,14 @@ package util
import "testing"
func TestMd5Sum(t *testing.T) {
- input := "dont hash passwords with md5"
+ input := "don't hash passwords with md5"
have, err := Md5SumString(input)
if err != nil {
t.Fatal("expected err to be nil")
}
- want := "2d6a56c82d09d374643b926d3417afba"
+ want := "dd1f7fdb3466c0d09c2e839d1f1530f8"
if have != want {
t.Fatalf("expected: %s got: %s", want, have)
}
diff --git a/public/app/app.ts b/public/app/app.ts
index 8e30747072e..298bf5609cd 100644
--- a/public/app/app.ts
+++ b/public/app/app.ts
@@ -29,7 +29,11 @@ _.move = (array, fromIndex, toIndex) => {
import { coreModule, registerAngularDirectives } from './core/core';
import { setupAngularRoutes } from './routes/routes';
-declare var System: any;
+// import symlinked extensions
+const extensionsIndex = (require as any).context('.', true, /extensions\/index.ts/);
+extensionsIndex.keys().forEach(key => {
+ extensionsIndex(key);
+});
export class GrafanaApp {
registerFunctions: any;
@@ -119,7 +123,7 @@ export class GrafanaApp {
coreModule.config(setupAngularRoutes);
registerAngularDirectives();
- const preBootRequires = [System.import('app/features/all')];
+ const preBootRequires = [import('app/features/all')];
Promise.all(preBootRequires)
.then(() => {
diff --git a/public/app/containers/AlertRuleList/AlertRuleList.test.tsx b/public/app/containers/AlertRuleList/AlertRuleList.test.tsx
deleted file mode 100644
index f88ff4522d4..00000000000
--- a/public/app/containers/AlertRuleList/AlertRuleList.test.tsx
+++ /dev/null
@@ -1,69 +0,0 @@
-import React from 'react';
-import moment from 'moment';
-import { AlertRuleList } from './AlertRuleList';
-import { RootStore } from 'app/stores/RootStore/RootStore';
-import { backendSrv, createNavTree } from 'test/mocks/common';
-import { mount } from 'enzyme';
-import toJson from 'enzyme-to-json';
-
-describe('AlertRuleList', () => {
- let page, store;
-
- beforeAll(() => {
- backendSrv.get.mockReturnValue(
- Promise.resolve([
- {
- id: 11,
- dashboardId: 58,
- panelId: 3,
- name: 'Panel Title alert',
- state: 'ok',
- newStateDate: moment()
- .subtract(5, 'minutes')
- .format(),
- evalData: {},
- executionError: '',
- url: 'd/ufkcofof/my-goal',
- canEdit: true,
- },
- ])
- );
-
- store = RootStore.create(
- {},
- {
- backendSrv: backendSrv,
- navTree: createNavTree('alerting', 'alert-list'),
- }
- );
-
- page = mount( );
- });
-
- it('should call api to get rules', () => {
- expect(backendSrv.get.mock.calls[0][0]).toEqual('/api/alerts');
- });
-
- it('should render 1 rule', () => {
- page.update();
- const ruleNode = page.find('.alert-rule-item');
- expect(toJson(ruleNode)).toMatchSnapshot();
- });
-
- it('toggle state should change pause rule if not paused', async () => {
- backendSrv.post.mockReturnValue(
- Promise.resolve({
- state: 'paused',
- })
- );
-
- page.find('.fa-pause').simulate('click');
-
- // wait for api call to resolve
- await Promise.resolve();
- page.update();
-
- expect(store.alertList.rules[0].state).toBe('paused');
- expect(page.find('.fa-play')).toHaveLength(1);
- });
-});
diff --git a/public/app/containers/AlertRuleList/AlertRuleList.tsx b/public/app/containers/AlertRuleList/AlertRuleList.tsx
deleted file mode 100644
index 668136dee6f..00000000000
--- a/public/app/containers/AlertRuleList/AlertRuleList.tsx
+++ /dev/null
@@ -1,178 +0,0 @@
-import React from 'react';
-import { hot } from 'react-hot-loader';
-import classNames from 'classnames';
-import { inject, observer } from 'mobx-react';
-import PageHeader from 'app/core/components/PageHeader/PageHeader';
-import { AlertRule } from 'app/stores/AlertListStore/AlertListStore';
-import appEvents from 'app/core/app_events';
-import ContainerProps from 'app/containers/ContainerProps';
-import Highlighter from 'react-highlight-words';
-
-@inject('view', 'nav', 'alertList')
-@observer
-export class AlertRuleList extends React.Component {
- stateFilters = [
- { text: 'All', value: 'all' },
- { text: 'OK', value: 'ok' },
- { text: 'Not OK', value: 'not_ok' },
- { text: 'Alerting', value: 'alerting' },
- { text: 'No Data', value: 'no_data' },
- { text: 'Paused', value: 'paused' },
- ];
-
- constructor(props) {
- super(props);
-
- this.props.nav.load('alerting', 'alert-list');
- this.fetchRules();
- }
-
- onStateFilterChanged = evt => {
- this.props.view.updateQuery({ state: evt.target.value });
- this.fetchRules();
- };
-
- fetchRules() {
- this.props.alertList.loadRules({
- state: this.props.view.query.get('state') || 'all',
- });
- }
-
- onOpenHowTo = () => {
- appEvents.emit('show-modal', {
- src: 'public/app/features/alerting/partials/alert_howto.html',
- modalClass: 'confirm-modal',
- model: {},
- });
- };
-
- onSearchQueryChange = evt => {
- this.props.alertList.setSearchQuery(evt.target.value);
- };
-
- render() {
- const { nav, alertList } = this.props;
-
- return (
-
-
-
-
-
-
-
-
-
-
-
-
States
-
-
-
- {this.stateFilters.map(AlertStateFilterOption)}
-
-
-
-
-
-
-
- How to add an alert
-
-
-
-
-
- {alertList.filteredRules.map(rule => (
-
- ))}
-
-
-
-
- );
- }
-}
-
-function AlertStateFilterOption({ text, value }) {
- return (
-
- {text}
-
- );
-}
-
-export interface AlertRuleItemProps {
- rule: AlertRule;
- search: string;
-}
-
-@observer
-export class AlertRuleItem extends React.Component {
- toggleState = () => {
- this.props.rule.togglePaused();
- };
-
- renderText(text: string) {
- return (
-
- );
- }
-
- render() {
- const { rule } = this.props;
-
- const stateClass = classNames({
- fa: true,
- 'fa-play': rule.isPaused,
- 'fa-pause': !rule.isPaused,
- });
-
- const ruleUrl = `${rule.url}?panelId=${rule.panelId}&fullscreen=true&edit=true&tab=alert`;
-
- return (
-
-
-
-
-
-
-
-
- {this.renderText(rule.stateText)}
- for {rule.stateAge}
-
-
- {rule.info &&
{this.renderText(rule.info)}
}
-
-
-
-
- );
- }
-}
-
-export default hot(module)(AlertRuleList);
diff --git a/public/app/containers/ContainerProps.ts b/public/app/containers/ContainerProps.ts
deleted file mode 100644
index 97889278fdc..00000000000
--- a/public/app/containers/ContainerProps.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import { SearchStore } from './../stores/SearchStore/SearchStore';
-import { ServerStatsStore } from './../stores/ServerStatsStore/ServerStatsStore';
-import { NavStore } from './../stores/NavStore/NavStore';
-import { PermissionsStore } from './../stores/PermissionsStore/PermissionsStore';
-import { AlertListStore } from './../stores/AlertListStore/AlertListStore';
-import { ViewStore } from './../stores/ViewStore/ViewStore';
-import { FolderStore } from './../stores/FolderStore/FolderStore';
-
-interface ContainerProps {
- search: typeof SearchStore.Type;
- serverStats: typeof ServerStatsStore.Type;
- nav: typeof NavStore.Type;
- alertList: typeof AlertListStore.Type;
- permissions: typeof PermissionsStore.Type;
- view: typeof ViewStore.Type;
- folder: typeof FolderStore.Type;
- backendSrv: any;
-}
-
-export default ContainerProps;
diff --git a/public/app/containers/Explore/Wrapper.tsx b/public/app/containers/Explore/Wrapper.tsx
deleted file mode 100644
index 6bdbd7cc42f..00000000000
--- a/public/app/containers/Explore/Wrapper.tsx
+++ /dev/null
@@ -1,33 +0,0 @@
-import React, { PureComponent } from 'react';
-
-import Explore from './Explore';
-
-export default class Wrapper extends PureComponent {
- state = {
- initialState: null,
- split: false,
- };
-
- handleChangeSplit = (split, initialState) => {
- this.setState({ split, initialState });
- };
-
- render() {
- // State overrides for props from first Explore
- const { initialState, split } = this.state;
- return (
-
-
- {split ? (
-
- ) : null}
-
- );
- }
-}
diff --git a/public/app/containers/Explore/slate-plugins/prism/index.tsx b/public/app/containers/Explore/slate-plugins/prism/index.tsx
deleted file mode 100644
index d185518790f..00000000000
--- a/public/app/containers/Explore/slate-plugins/prism/index.tsx
+++ /dev/null
@@ -1,123 +0,0 @@
-import React from 'react';
-import Prism from 'prismjs';
-
-const TOKEN_MARK = 'prism-token';
-
-export function setPrismTokens(language, field, values, alias = 'variable') {
- Prism.languages[language][field] = {
- alias,
- pattern: new RegExp(`(?:^|\\s)(${values.join('|')})(?:$|\\s)`),
- };
-}
-
-/**
- * Code-highlighting plugin based on Prism and
- * https://github.com/ianstormtaylor/slate/blob/master/examples/code-highlighting/index.js
- *
- * (Adapted to handle nested grammar definitions.)
- */
-
-export default function PrismPlugin({ definition, language }) {
- if (definition) {
- // Don't override exising modified definitions
- Prism.languages[language] = Prism.languages[language] || definition;
- }
-
- return {
- /**
- * Render a Slate mark with appropiate CSS class names
- *
- * @param {Object} props
- * @return {Element}
- */
-
- renderMark(props) {
- const { children, mark } = props;
- // Only apply spans to marks identified by this plugin
- if (mark.type !== TOKEN_MARK) {
- return undefined;
- }
- const className = `token ${mark.data.get('types')}`;
- return {children} ;
- },
-
- /**
- * Decorate code blocks with Prism.js highlighting.
- *
- * @param {Node} node
- * @return {Array}
- */
-
- decorateNode(node) {
- if (node.type !== 'paragraph') {
- return [];
- }
-
- const texts = node.getTexts().toArray();
- const tstring = texts.map(t => t.text).join('\n');
- const grammar = Prism.languages[language];
- const tokens = Prism.tokenize(tstring, grammar);
- const decorations = [];
- let startText = texts.shift();
- let endText = startText;
- let startOffset = 0;
- let endOffset = 0;
- let start = 0;
-
- function processToken(token, acc?) {
- // Accumulate token types down the tree
- const types = `${acc || ''} ${token.type || ''} ${token.alias || ''}`;
-
- // Add mark for token node
- if (typeof token === 'string' || typeof token.content === 'string') {
- startText = endText;
- startOffset = endOffset;
-
- const content = typeof token === 'string' ? token : token.content;
- const newlines = content.split('\n').length - 1;
- const length = content.length - newlines;
- const end = start + length;
-
- let available = startText.text.length - startOffset;
- let remaining = length;
-
- endOffset = startOffset + remaining;
-
- while (available < remaining) {
- endText = texts.shift();
- remaining = length - available;
- available = endText.text.length;
- endOffset = remaining;
- }
-
- // Inject marks from up the tree (acc) as well
- if (typeof token !== 'string' || acc) {
- const range = {
- anchorKey: startText.key,
- anchorOffset: startOffset,
- focusKey: endText.key,
- focusOffset: endOffset,
- marks: [{ type: TOKEN_MARK, data: { types } }],
- };
-
- decorations.push(range);
- }
-
- start = end;
- } else if (token.content && token.content.length) {
- // Tokens can be nested
- for (const subToken of token.content) {
- processToken(subToken, types);
- }
- }
- }
-
- // Process top-level tokens
- for (const token of tokens) {
- processToken(token);
- }
-
- return decorations;
- },
- };
-}
diff --git a/public/app/containers/Explore/utils/query.ts b/public/app/containers/Explore/utils/query.ts
deleted file mode 100644
index d774f619a30..00000000000
--- a/public/app/containers/Explore/utils/query.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-export function generateQueryKey(index = 0) {
- return `Q-${Date.now()}-${Math.random()}-${index}`;
-}
-
-export function ensureQueries(queries?) {
- if (queries && typeof queries === 'object' && queries.length > 0 && typeof queries[0] === 'string') {
- return queries.map((query, i) => ({ key: generateQueryKey(i), query }));
- }
- return [{ key: generateQueryKey(), query: '' }];
-}
-
-export function hasQuery(queries) {
- return queries.some(q => q.query);
-}
diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx
deleted file mode 100644
index 072908d2b8e..00000000000
--- a/public/app/containers/ManageDashboards/FolderPermissions.tsx
+++ /dev/null
@@ -1,80 +0,0 @@
-import React, { Component } from 'react';
-import { hot } from 'react-hot-loader';
-import { inject, observer } from 'mobx-react';
-import { toJS } from 'mobx';
-import ContainerProps from 'app/containers/ContainerProps';
-import PageHeader from 'app/core/components/PageHeader/PageHeader';
-import Permissions from 'app/core/components/Permissions/Permissions';
-import Tooltip from 'app/core/components/Tooltip/Tooltip';
-import PermissionsInfo from 'app/core/components/Permissions/PermissionsInfo';
-import AddPermissions from 'app/core/components/Permissions/AddPermissions';
-import SlideDown from 'app/core/components/Animations/SlideDown';
-
-@inject('nav', 'folder', 'view', 'permissions')
-@observer
-export class FolderPermissions extends Component {
- constructor(props) {
- super(props);
- this.handleAddPermission = this.handleAddPermission.bind(this);
- }
-
- componentDidMount() {
- this.loadStore();
- }
-
- componentWillUnmount() {
- const { permissions } = this.props;
- permissions.hideAddPermissions();
- }
-
- loadStore() {
- const { nav, folder, view } = this.props;
- return folder.load(view.routeParams.get('uid') as string).then(res => {
- view.updatePathAndQuery(`${res.url}/permissions`, {}, {});
- return nav.initFolderNav(toJS(folder.folder), 'manage-folder-permissions');
- });
- }
-
- handleAddPermission() {
- const { permissions } = this.props;
- permissions.toggleAddPermissions();
- }
-
- render() {
- const { nav, folder, permissions, backendSrv } = this.props;
-
- if (!folder.folder || !nav.main) {
- return Loading ;
- }
-
- const dashboardId = folder.folder.id;
-
- return (
-
-
-
-
-
Folder Permissions
-
-
-
-
-
- Add Permission
-
-
-
-
-
-
-
-
- );
- }
-}
-
-export default hot(module)(FolderPermissions);
diff --git a/public/app/containers/ManageDashboards/FolderSettings.test.tsx b/public/app/containers/ManageDashboards/FolderSettings.test.tsx
deleted file mode 100644
index bed3d569bcc..00000000000
--- a/public/app/containers/ManageDashboards/FolderSettings.test.tsx
+++ /dev/null
@@ -1,84 +0,0 @@
-import React from 'react';
-import { FolderSettings } from './FolderSettings';
-import { RootStore } from 'app/stores/RootStore/RootStore';
-import { backendSrv } from 'test/mocks/common';
-import { shallow } from 'enzyme';
-
-describe('FolderSettings', () => {
- let wrapper;
- let page;
-
- beforeAll(() => {
- backendSrv.getFolderByUid.mockReturnValue(
- Promise.resolve({
- id: 1,
- uid: 'uid',
- title: 'Folder Name',
- url: '/dashboards/f/uid/folder-name',
- canSave: true,
- version: 1,
- })
- );
-
- const store = RootStore.create(
- {
- view: {
- path: 'asd',
- query: {},
- routeParams: {
- uid: 'uid-str',
- },
- },
- },
- {
- backendSrv: backendSrv,
- }
- );
-
- wrapper = shallow( );
- page = wrapper.dive();
- return page
- .instance()
- .loadStore()
- .then(() => {
- page.update();
- });
- });
-
- it('should set the title input field', () => {
- const titleInput = page.find('.gf-form-input');
- expect(titleInput).toHaveLength(1);
- expect(titleInput.prop('value')).toBe('Folder Name');
- });
-
- it('should update title and enable save button when changed', () => {
- const titleInput = page.find('.gf-form-input');
- const disabledSubmitButton = page.find('button[type="submit"]');
- expect(disabledSubmitButton.prop('disabled')).toBe(true);
-
- titleInput.simulate('change', { target: { value: 'New Title' } });
-
- const updatedTitleInput = page.find('.gf-form-input');
- expect(updatedTitleInput.prop('value')).toBe('New Title');
- const enabledSubmitButton = page.find('button[type="submit"]');
- expect(enabledSubmitButton.prop('disabled')).toBe(false);
- });
-
- it('should disable save button if title is changed back to old title', () => {
- const titleInput = page.find('.gf-form-input');
-
- titleInput.simulate('change', { target: { value: 'Folder Name' } });
-
- const enabledSubmitButton = page.find('button[type="submit"]');
- expect(enabledSubmitButton.prop('disabled')).toBe(true);
- });
-
- it('should disable save button if title is changed to empty string', () => {
- const titleInput = page.find('.gf-form-input');
-
- titleInput.simulate('change', { target: { value: '' } });
-
- const enabledSubmitButton = page.find('button[type="submit"]');
- expect(enabledSubmitButton.prop('disabled')).toBe(true);
- });
-});
diff --git a/public/app/containers/ManageDashboards/FolderSettings.tsx b/public/app/containers/ManageDashboards/FolderSettings.tsx
deleted file mode 100644
index 88830356563..00000000000
--- a/public/app/containers/ManageDashboards/FolderSettings.tsx
+++ /dev/null
@@ -1,160 +0,0 @@
-import React from 'react';
-import { hot } from 'react-hot-loader';
-import { inject, observer } from 'mobx-react';
-import { toJS } from 'mobx';
-import PageHeader from 'app/core/components/PageHeader/PageHeader';
-import ContainerProps from 'app/containers/ContainerProps';
-import { getSnapshot } from 'mobx-state-tree';
-import appEvents from 'app/core/app_events';
-
-@inject('nav', 'folder', 'view')
-@observer
-export class FolderSettings extends React.Component {
- formSnapshot: any;
-
- componentDidMount() {
- this.loadStore();
- }
-
- loadStore() {
- const { nav, folder, view } = this.props;
-
- return folder.load(view.routeParams.get('uid') as string).then(res => {
- this.formSnapshot = getSnapshot(folder);
- view.updatePathAndQuery(`${res.url}/settings`, {}, {});
-
- return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings');
- });
- }
-
- onTitleChange(evt) {
- this.props.folder.setTitle(this.getFormSnapshot().folder.title, evt.target.value);
- }
-
- getFormSnapshot() {
- if (!this.formSnapshot) {
- this.formSnapshot = getSnapshot(this.props.folder);
- }
-
- return this.formSnapshot;
- }
-
- save(evt) {
- if (evt) {
- evt.stopPropagation();
- evt.preventDefault();
- }
-
- const { nav, folder, view } = this.props;
-
- folder
- .saveFolder({ overwrite: false })
- .then(newUrl => {
- view.updatePathAndQuery(newUrl, {}, {});
-
- appEvents.emit('dashboard-saved');
- appEvents.emit('alert-success', ['Folder saved']);
- })
- .then(() => {
- return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings');
- })
- .catch(this.handleSaveFolderError.bind(this));
- }
-
- delete(evt) {
- if (evt) {
- evt.stopPropagation();
- evt.preventDefault();
- }
-
- const { folder, view } = this.props;
- const title = folder.folder.title;
-
- appEvents.emit('confirm-modal', {
- title: 'Delete',
- text: `Do you want to delete this folder and all its dashboards?`,
- icon: 'fa-trash',
- yesText: 'Delete',
- onConfirm: () => {
- return folder.deleteFolder().then(() => {
- appEvents.emit('alert-success', ['Folder Deleted', `${title} has been deleted`]);
- view.updatePathAndQuery('dashboards', '', '');
- });
- },
- });
- }
-
- handleSaveFolderError(err) {
- if (err.data && err.data.status === 'version-mismatch') {
- err.isHandled = true;
-
- const { nav, folder, view } = this.props;
-
- appEvents.emit('confirm-modal', {
- title: 'Conflict',
- text: 'Someone else has updated this folder.',
- text2: 'Would you still like to save this folder?',
- yesText: 'Save & Overwrite',
- icon: 'fa-warning',
- onConfirm: () => {
- folder
- .saveFolder({ overwrite: true })
- .then(newUrl => {
- view.updatePathAndQuery(newUrl, {}, {});
-
- appEvents.emit('dashboard-saved');
- appEvents.emit('alert-success', ['Folder saved']);
- })
- .then(() => {
- return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings');
- });
- },
- });
- }
- }
-
- render() {
- const { nav, folder } = this.props;
-
- if (!folder.folder || !nav.main) {
- return Loading ;
- }
-
- return (
-
- );
- }
-}
-
-export default hot(module)(FolderSettings);
diff --git a/public/app/containers/ServerStats/ServerStats.test.tsx b/public/app/containers/ServerStats/ServerStats.test.tsx
deleted file mode 100644
index a329a47527d..00000000000
--- a/public/app/containers/ServerStats/ServerStats.test.tsx
+++ /dev/null
@@ -1,30 +0,0 @@
-import React from 'react';
-import renderer from 'react-test-renderer';
-import { ServerStats } from './ServerStats';
-import { RootStore } from 'app/stores/RootStore/RootStore';
-import { backendSrv, createNavTree } from 'test/mocks/common';
-
-describe('ServerStats', () => {
- it('Should render table with stats', done => {
- backendSrv.get.mockReturnValue(
- Promise.resolve({
- dashboards: 10,
- })
- );
-
- const store = RootStore.create(
- {},
- {
- backendSrv: backendSrv,
- navTree: createNavTree('cfg', 'admin', 'server-stats'),
- }
- );
-
- const page = renderer.create( );
-
- setTimeout(() => {
- expect(page.toJSON()).toMatchSnapshot();
- done();
- });
- });
-});
diff --git a/public/app/containers/ServerStats/ServerStats.tsx b/public/app/containers/ServerStats/ServerStats.tsx
deleted file mode 100644
index 63e78996041..00000000000
--- a/public/app/containers/ServerStats/ServerStats.tsx
+++ /dev/null
@@ -1,48 +0,0 @@
-import React from 'react';
-import { hot } from 'react-hot-loader';
-import { inject, observer } from 'mobx-react';
-import PageHeader from 'app/core/components/PageHeader/PageHeader';
-import ContainerProps from 'app/containers/ContainerProps';
-
-@inject('nav', 'serverStats')
-@observer
-export class ServerStats extends React.Component {
- constructor(props) {
- super(props);
- const { nav, serverStats } = this.props;
-
- nav.load('cfg', 'admin', 'server-stats');
- serverStats.load();
- }
-
- render() {
- const { nav, serverStats } = this.props;
- return (
-
-
-
-
-
-
- Name
- Value
-
-
- {serverStats.stats.map(StatItem)}
-
-
-
- );
- }
-}
-
-function StatItem(stat) {
- return (
-
- {stat.name}
- {stat.value}
-
- );
-}
-
-export default hot(module)(ServerStats);
diff --git a/public/app/containers/Teams/TeamPages.tsx b/public/app/containers/Teams/TeamPages.tsx
deleted file mode 100644
index 2abc9c51535..00000000000
--- a/public/app/containers/Teams/TeamPages.tsx
+++ /dev/null
@@ -1,77 +0,0 @@
-import React from 'react';
-import _ from 'lodash';
-import { hot } from 'react-hot-loader';
-import { inject, observer } from 'mobx-react';
-import config from 'app/core/config';
-import PageHeader from 'app/core/components/PageHeader/PageHeader';
-import { NavStore } from 'app/stores/NavStore/NavStore';
-import { TeamsStore, Team } from 'app/stores/TeamsStore/TeamsStore';
-import { ViewStore } from 'app/stores/ViewStore/ViewStore';
-import TeamMembers from './TeamMembers';
-import TeamSettings from './TeamSettings';
-import TeamGroupSync from './TeamGroupSync';
-
-interface Props {
- nav: typeof NavStore.Type;
- teams: typeof TeamsStore.Type;
- view: typeof ViewStore.Type;
-}
-
-@inject('nav', 'teams', 'view')
-@observer
-export class TeamPages extends React.Component {
- isSyncEnabled: boolean;
- currentPage: string;
-
- constructor(props) {
- super(props);
-
- this.isSyncEnabled = config.buildInfo.isEnterprise;
- this.currentPage = this.getCurrentPage();
-
- this.loadTeam();
- }
-
- async loadTeam() {
- const { teams, nav, view } = this.props;
-
- await teams.loadById(view.routeParams.get('id'));
-
- nav.initTeamPage(this.getCurrentTeam(), this.currentPage, this.isSyncEnabled);
- }
-
- getCurrentTeam(): Team {
- const { teams, view } = this.props;
- return teams.map.get(view.routeParams.get('id'));
- }
-
- getCurrentPage() {
- const pages = ['members', 'settings', 'groupsync'];
- const currentPage = this.props.view.routeParams.get('page');
- return _.includes(pages, currentPage) ? currentPage : pages[0];
- }
-
- render() {
- const { nav } = this.props;
- const currentTeam = this.getCurrentTeam();
-
- if (!nav.main) {
- return null;
- }
-
- return (
-
-
- {currentTeam && (
-
- {this.currentPage === 'members' && }
- {this.currentPage === 'settings' && }
- {this.currentPage === 'groupsync' && this.isSyncEnabled && }
-
- )}
-
- );
- }
-}
-
-export default hot(module)(TeamPages);
diff --git a/public/app/containers/Teams/TeamSettings.tsx b/public/app/containers/Teams/TeamSettings.tsx
deleted file mode 100644
index 0de60a0b16c..00000000000
--- a/public/app/containers/Teams/TeamSettings.tsx
+++ /dev/null
@@ -1,69 +0,0 @@
-import React from 'react';
-import { hot } from 'react-hot-loader';
-import { observer } from 'mobx-react';
-import { Team } from 'app/stores/TeamsStore/TeamsStore';
-import { Label } from 'app/core/components/Forms/Forms';
-
-interface Props {
- team: Team;
-}
-
-@observer
-export class TeamSettings extends React.Component {
- constructor(props) {
- super(props);
- }
-
- onChangeName = evt => {
- this.props.team.setName(evt.target.value);
- };
-
- onChangeEmail = evt => {
- this.props.team.setEmail(evt.target.value);
- };
-
- onUpdate = evt => {
- evt.preventDefault();
- this.props.team.update();
- };
-
- render() {
- return (
-
- );
- }
-}
-
-export default hot(module)(TeamSettings);
diff --git a/public/app/core/actions/index.ts b/public/app/core/actions/index.ts
new file mode 100644
index 00000000000..451a13dae99
--- /dev/null
+++ b/public/app/core/actions/index.ts
@@ -0,0 +1,4 @@
+import { updateLocation } from './location';
+import { updateNavIndex, UpdateNavIndexAction } from './navModel';
+
+export { updateLocation, updateNavIndex, UpdateNavIndexAction };
diff --git a/public/app/core/actions/location.ts b/public/app/core/actions/location.ts
new file mode 100644
index 00000000000..6f7ac67363e
--- /dev/null
+++ b/public/app/core/actions/location.ts
@@ -0,0 +1,13 @@
+import { LocationUpdate } from 'app/types';
+
+export type Action = UpdateLocationAction;
+
+export interface UpdateLocationAction {
+ type: 'UPDATE_LOCATION';
+ payload: LocationUpdate;
+}
+
+export const updateLocation = (location: LocationUpdate): UpdateLocationAction => ({
+ type: 'UPDATE_LOCATION',
+ payload: location,
+});
diff --git a/public/app/core/actions/navModel.ts b/public/app/core/actions/navModel.ts
new file mode 100644
index 00000000000..a40a0e880ee
--- /dev/null
+++ b/public/app/core/actions/navModel.ts
@@ -0,0 +1,17 @@
+import { NavModelItem } from '../../types';
+
+export enum ActionTypes {
+ UpdateNavIndex = 'UPDATE_NAV_INDEX',
+}
+
+export type Action = UpdateNavIndexAction;
+
+export interface UpdateNavIndexAction {
+ type: ActionTypes.UpdateNavIndex;
+ payload: NavModelItem;
+}
+
+export const updateNavIndex = (item: NavModelItem): UpdateNavIndexAction => ({
+ type: ActionTypes.UpdateNavIndex,
+ payload: item,
+});
diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts
index 57fc3780715..7e72f53204e 100644
--- a/public/app/core/angular_wrappers.ts
+++ b/public/app/core/angular_wrappers.ts
@@ -5,7 +5,6 @@ import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA';
import { SearchResult } from './components/search/SearchResult';
import { TagFilter } from './components/TagFilter/TagFilter';
import { SideMenu } from './components/sidemenu/SideMenu';
-import DashboardPermissions from './components/Permissions/DashboardPermissions';
import { GraphLegend } from 'app/plugins/panel/graph/Legend';
export function registerAngularDirectives() {
@@ -19,6 +18,5 @@ export function registerAngularDirectives() {
['onSelect', { watchDepth: 'reference' }],
['tagOptions', { watchDepth: 'reference' }],
]);
- react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']);
react2AngularDirective('graphLegendReact', GraphLegend, ['seriesList', 'className']);
}
diff --git a/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx b/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx
index 8be65249808..9b9a9c4d02a 100644
--- a/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx
+++ b/public/app/core/components/CustomScrollbar/CustomScrollbar.tsx
@@ -13,7 +13,6 @@ interface Props {
* Wraps component into component from `react-custom-scrollbars`
*/
class CustomScrollbar extends PureComponent {
-
static defaultProps: Partial = {
customClassName: 'custom-scrollbars',
autoHide: true,
diff --git a/public/app/core/components/LayoutSelector/LayoutSelector.tsx b/public/app/core/components/LayoutSelector/LayoutSelector.tsx
new file mode 100644
index 00000000000..d9e00102438
--- /dev/null
+++ b/public/app/core/components/LayoutSelector/LayoutSelector.tsx
@@ -0,0 +1,39 @@
+import React, { SFC } from 'react';
+
+export type LayoutMode = LayoutModes.Grid | LayoutModes.List;
+
+export enum LayoutModes {
+ Grid = 'grid',
+ List = 'list',
+}
+
+interface Props {
+ mode: LayoutMode;
+ onLayoutModeChanged: (mode: LayoutMode) => {};
+}
+
+const LayoutSelector: SFC = props => {
+ const { mode, onLayoutModeChanged } = props;
+ return (
+
+ {
+ onLayoutModeChanged(LayoutModes.List);
+ }}
+ className={mode === LayoutModes.List ? 'active' : ''}
+ >
+
+
+ {
+ onLayoutModeChanged(LayoutModes.Grid);
+ }}
+ className={mode === LayoutModes.Grid ? 'active' : ''}
+ >
+
+
+
+ );
+};
+
+export default LayoutSelector;
diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx
new file mode 100644
index 00000000000..9faf07f18d1
--- /dev/null
+++ b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx
@@ -0,0 +1,24 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import OrgActionBar, { Props } from './OrgActionBar';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ searchQuery: '',
+ setSearchQuery: jest.fn(),
+ target: '_blank',
+ linkButton: { href: 'some/url', title: 'test' },
+ };
+
+ Object.assign(props, propOverrides);
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.tsx
new file mode 100644
index 00000000000..8fc34a018e1
--- /dev/null
+++ b/public/app/core/components/OrgActionBar/OrgActionBar.tsx
@@ -0,0 +1,44 @@
+import React, { PureComponent } from 'react';
+import LayoutSelector, { LayoutMode } from '../LayoutSelector/LayoutSelector';
+
+export interface Props {
+ searchQuery: string;
+ layoutMode?: LayoutMode;
+ onSetLayoutMode?: (mode: LayoutMode) => {};
+ setSearchQuery: (value: string) => {};
+ linkButton: { href: string; title: string };
+ target?: string;
+}
+
+export default class OrgActionBar extends PureComponent {
+ render() {
+ const { searchQuery, layoutMode, onSetLayoutMode, linkButton, setSearchQuery, target } = this.props;
+ const linkProps = { href: linkButton.href, target: undefined };
+
+ if (target) {
+ linkProps.target = target;
+ }
+
+ return (
+
+
+
+ setSearchQuery(event.target.value)}
+ placeholder="Filter by name or type"
+ />
+
+
+ onSetLayoutMode(mode)} />
+
+
+
+ {linkButton.title}
+
+
+ );
+ }
+}
diff --git a/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap
new file mode 100644
index 00000000000..dc53e7863ea
--- /dev/null
+++ b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap
@@ -0,0 +1,39 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
diff --git a/public/app/core/components/PageHeader/PageHeader.tsx b/public/app/core/components/PageHeader/PageHeader.tsx
index b7bef2495bb..c176095afa4 100644
--- a/public/app/core/components/PageHeader/PageHeader.tsx
+++ b/public/app/core/components/PageHeader/PageHeader.tsx
@@ -1,9 +1,7 @@
import React from 'react';
-import { observer } from 'mobx-react';
-import { NavModel, NavModelItem } from '../../nav_model_srv';
+import { NavModel, NavModelItem } from 'app/types';
import classNames from 'classnames';
import appEvents from 'app/core/app_events';
-import { toJS } from 'mobx';
export interface Props {
model: NavModel;
@@ -81,7 +79,6 @@ const Navigation = ({ main }: { main: NavModelItem }) => {
);
};
-@observer
export default class PageHeader extends React.Component {
constructor(props) {
super(props);
@@ -148,7 +145,7 @@ export default class PageHeader extends React.Component {
return null;
}
- const main = toJS(model.main); // Convert to JS if its a mobx observable
+ const main = model.main;
return (
diff --git a/public/app/core/components/PageLoader/PageLoader.tsx b/public/app/core/components/PageLoader/PageLoader.tsx
new file mode 100644
index 00000000000..dcb67dde220
--- /dev/null
+++ b/public/app/core/components/PageLoader/PageLoader.tsx
@@ -0,0 +1,17 @@
+import React, { SFC } from 'react';
+
+interface Props {
+ pageName: string;
+}
+
+const PageLoader: SFC
= ({ pageName }) => {
+ const loadingText = `Loading ${pageName}...`;
+ return (
+
+ );
+};
+
+export default PageLoader;
diff --git a/public/app/core/components/PermissionList/AddPermission.tsx b/public/app/core/components/PermissionList/AddPermission.tsx
new file mode 100644
index 00000000000..a60a7dd4af6
--- /dev/null
+++ b/public/app/core/components/PermissionList/AddPermission.tsx
@@ -0,0 +1,140 @@
+import React, { Component } from 'react';
+import { UserPicker } from 'app/core/components/Picker/UserPicker';
+import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker';
+import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker';
+import { User } from 'app/types';
+import {
+ dashboardPermissionLevels,
+ dashboardAclTargets,
+ AclTarget,
+ PermissionLevel,
+ NewDashboardAclItem,
+ OrgRole,
+} from 'app/types/acl';
+
+export interface Props {
+ onAddPermission: (item: NewDashboardAclItem) => void;
+ onCancel: () => void;
+}
+
+class AddPermissions extends Component {
+ constructor(props) {
+ super(props);
+ this.state = this.getCleanState();
+ }
+
+ getCleanState() {
+ return {
+ userId: 0,
+ teamId: 0,
+ type: AclTarget.Team,
+ permission: PermissionLevel.View,
+ };
+ }
+
+ onTypeChanged = evt => {
+ const type = evt.target.value as AclTarget;
+
+ switch (type) {
+ case AclTarget.User:
+ case AclTarget.Team:
+ this.setState({ type: type, userId: 0, teamId: 0, role: undefined });
+ break;
+ case AclTarget.Editor:
+ this.setState({ type: type, userId: 0, teamId: 0, role: OrgRole.Editor });
+ break;
+ case AclTarget.Viewer:
+ this.setState({ type: type, userId: 0, teamId: 0, role: OrgRole.Viewer });
+ break;
+ }
+ };
+
+ onUserSelected = (user: User) => {
+ this.setState({ userId: user && !Array.isArray(user) ? user.id : 0 });
+ };
+
+ onTeamSelected = (team: Team) => {
+ this.setState({ teamId: team && !Array.isArray(team) ? team.id : 0 });
+ };
+
+ onPermissionChanged = (permission: OptionWithDescription) => {
+ this.setState({ permission: permission.value });
+ };
+
+ onSubmit = async evt => {
+ evt.preventDefault();
+ await this.props.onAddPermission(this.state);
+ this.setState(this.getCleanState());
+ };
+
+ isValid() {
+ switch (this.state.type) {
+ case AclTarget.Team:
+ return this.state.teamId > 0;
+ case AclTarget.User:
+ return this.state.userId > 0;
+ }
+ return true;
+ }
+
+ render() {
+ const { onCancel } = this.props;
+ const newItem = this.state;
+ const pickerClassName = 'width-20';
+ const isValid = this.isValid();
+ return (
+
+ );
+ }
+}
+
+export default AddPermissions;
diff --git a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx b/public/app/core/components/PermissionList/DisabledPermissionListItem.tsx
similarity index 84%
rename from public/app/core/components/Permissions/DisabledPermissionsListItem.tsx
rename to public/app/core/components/PermissionList/DisabledPermissionListItem.tsx
index d65595dae66..ff679f67ae2 100644
--- a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx
+++ b/public/app/core/components/PermissionList/DisabledPermissionListItem.tsx
@@ -1,6 +1,6 @@
import React, { Component } from 'react';
import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker';
-import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore';
+import { dashboardPermissionLevels } from 'app/types/acl';
export interface Props {
item: any;
@@ -24,11 +24,11 @@ export default class DisabledPermissionListItem extends Component {
{}}
- value={item.permission}
disabled={true}
- className={'gf-form-input--form-dropdown-right'}
+ className={'gf-form-select-box__control--menu-right'}
+ value={item.permission}
/>
diff --git a/public/app/core/components/Permissions/PermissionsList.tsx b/public/app/core/components/PermissionList/PermissionList.tsx
similarity index 55%
rename from public/app/core/components/Permissions/PermissionsList.tsx
rename to public/app/core/components/PermissionList/PermissionList.tsx
index 7e64de012e4..772baa0c274 100644
--- a/public/app/core/components/Permissions/PermissionsList.tsx
+++ b/public/app/core/components/PermissionList/PermissionList.tsx
@@ -1,21 +1,20 @@
-import React, { Component } from 'react';
-import PermissionsListItem from './PermissionsListItem';
-import DisabledPermissionsListItem from './DisabledPermissionsListItem';
-import { observer } from 'mobx-react';
-import { FolderInfo } from './FolderInfo';
+import React, { PureComponent } from 'react';
+import PermissionsListItem from './PermissionListItem';
+import DisabledPermissionsListItem from './DisabledPermissionListItem';
+import { FolderInfo } from 'app/types';
+import { DashboardAcl } from 'app/types/acl';
export interface Props {
- permissions: any[];
- removeItem: any;
- permissionChanged: any;
- fetching: boolean;
+ items: DashboardAcl[];
+ onRemoveItem: (item: DashboardAcl) => void;
+ onPermissionChanged: any;
+ isFetching: boolean;
folderInfo?: FolderInfo;
}
-@observer
-class PermissionsList extends Component {
+class PermissionList extends PureComponent {
render() {
- const { permissions, removeItem, permissionChanged, fetching, folderInfo } = this.props;
+ const { items, onRemoveItem, onPermissionChanged, isFetching, folderInfo } = this.props;
return (
@@ -28,19 +27,18 @@ class PermissionsList extends Component {
icon: 'fa fa-fw fa-street-view',
}}
/>
- {permissions.map((item, idx) => {
+ {items.map((item, idx) => {
return (
);
})}
- {fetching === true && permissions.length < 1 ? (
+ {isFetching === true && items.length < 1 ? (
Loading permissions...
@@ -48,7 +46,7 @@ class PermissionsList extends Component {
) : null}
- {fetching === false && permissions.length < 1 ? (
+ {isFetching === false && items.length < 1 ? (
No permissions are set. Will only be accessible by admins.
@@ -61,4 +59,4 @@ class PermissionsList extends Component {
}
}
-export default PermissionsList;
+export default PermissionList;
diff --git a/public/app/core/components/PermissionList/PermissionListItem.tsx b/public/app/core/components/PermissionList/PermissionListItem.tsx
new file mode 100644
index 00000000000..56b6114d236
--- /dev/null
+++ b/public/app/core/components/PermissionList/PermissionListItem.tsx
@@ -0,0 +1,100 @@
+import React, { PureComponent } from 'react';
+import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker';
+import { dashboardPermissionLevels, DashboardAcl, PermissionLevel } from 'app/types/acl';
+import { FolderInfo } from 'app/types';
+
+const setClassNameHelper = inherited => {
+ return inherited ? 'gf-form-disabled' : '';
+};
+
+function ItemAvatar({ item }) {
+ if (item.userAvatarUrl) {
+ return ;
+ }
+ if (item.teamAvatarUrl) {
+ return ;
+ }
+ if (item.role === 'Editor') {
+ return ;
+ }
+
+ return ;
+}
+
+function ItemDescription({ item }) {
+ if (item.userId) {
+ return (User) ;
+ }
+ if (item.teamId) {
+ return (Team) ;
+ }
+ return (Role) ;
+}
+
+interface Props {
+ item: DashboardAcl;
+ onRemoveItem: (item: DashboardAcl) => void;
+ onPermissionChanged: (item: DashboardAcl, level: PermissionLevel) => void;
+ folderInfo?: FolderInfo;
+}
+
+export default class PermissionsListItem extends PureComponent {
+ onPermissionChanged = option => {
+ this.props.onPermissionChanged(this.props.item, option.value as PermissionLevel);
+ };
+
+ onRemoveItem = () => {
+ this.props.onRemoveItem(this.props.item);
+ };
+
+ render() {
+ const { item, folderInfo } = this.props;
+ const inheritedFromRoot = item.dashboardId === -1 && !item.inherited;
+
+ return (
+
+
+
+
+
+ {item.name}
+
+
+ {item.inherited &&
+ folderInfo && (
+
+ Inherited from folder{' '}
+
+ {folderInfo.title}
+ {' '}
+
+ )}
+ {inheritedFromRoot && Default Permission }
+
+ Can
+
+
+
+
+
+
+ {!item.inherited ? (
+
+
+
+ ) : (
+
+
+
+ )}
+
+
+ );
+ }
+}
diff --git a/public/app/core/components/Permissions/PermissionsInfo.tsx b/public/app/core/components/PermissionList/PermissionsInfo.tsx
similarity index 100%
rename from public/app/core/components/Permissions/PermissionsInfo.tsx
rename to public/app/core/components/PermissionList/PermissionsInfo.tsx
diff --git a/public/app/core/components/Permissions/AddPermissions.test.tsx b/public/app/core/components/Permissions/AddPermissions.test.tsx
deleted file mode 100644
index c6d1ab381b8..00000000000
--- a/public/app/core/components/Permissions/AddPermissions.test.tsx
+++ /dev/null
@@ -1,90 +0,0 @@
-import React from 'react';
-import { shallow } from 'enzyme';
-import AddPermissions from './AddPermissions';
-import { RootStore } from 'app/stores/RootStore/RootStore';
-import { getBackendSrv } from 'app/core/services/backend_srv';
-
-jest.mock('app/core/services/backend_srv', () => ({
- getBackendSrv: () => {
- return {
- get: () => {
- return Promise.resolve([
- { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' },
- { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' },
- ]);
- },
- post: jest.fn(() => Promise.resolve({})),
- };
- },
-}));
-
-describe('AddPermissions', () => {
- let wrapper;
- let store;
- let instance;
- const backendSrv: any = getBackendSrv();
-
- beforeAll(() => {
- store = RootStore.create({}, { backendSrv: backendSrv });
- wrapper = shallow( );
- instance = wrapper.instance();
- return store.permissions.load(1, true, false);
- });
-
- describe('when permission for a user is added', () => {
- it('should save permission to db', () => {
- const evt = {
- target: {
- value: 'User',
- },
- };
- const userItem = {
- id: 2,
- login: 'user2',
- };
-
- instance.onTypeChanged(evt);
- instance.onUserSelected(userItem);
-
- wrapper.update();
-
- expect(wrapper.find('[data-save-permission]').prop('disabled')).toBe(false);
-
- wrapper.find('form').simulate('submit', { preventDefault() {} });
-
- expect(backendSrv.post.mock.calls.length).toBe(1);
- expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions');
- });
- });
-
- describe('when permission for team is added', () => {
- it('should save permission to db', () => {
- const evt = {
- target: {
- value: 'Group',
- },
- };
-
- const teamItem = {
- id: 2,
- name: 'ug1',
- };
-
- instance.onTypeChanged(evt);
- instance.onTeamSelected(teamItem);
-
- wrapper.update();
-
- expect(wrapper.find('[data-save-permission]').prop('disabled')).toBe(false);
-
- wrapper.find('form').simulate('submit', { preventDefault() {} });
-
- expect(backendSrv.post.mock.calls.length).toBe(1);
- expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions');
- });
- });
-
- afterEach(() => {
- backendSrv.post.mockClear();
- });
-});
diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx
deleted file mode 100644
index 289e27aa731..00000000000
--- a/public/app/core/components/Permissions/AddPermissions.tsx
+++ /dev/null
@@ -1,128 +0,0 @@
-import React, { Component } from 'react';
-import { observer } from 'mobx-react';
-import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore';
-import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
-import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker';
-import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker';
-import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore';
-
-export interface Props {
- permissions: any;
-}
-
-@observer
-class AddPermissions extends Component {
- constructor(props) {
- super(props);
- }
-
- componentWillMount() {
- const { permissions } = this.props;
- permissions.resetNewType();
- }
-
- onTypeChanged = evt => {
- const { value } = evt.target;
- const { permissions } = this.props;
-
- permissions.setNewType(value);
- };
-
- onUserSelected = (user: User) => {
- const { permissions } = this.props;
- if (!user) {
- permissions.newItem.setUser(null, null);
- return;
- }
- return permissions.newItem.setUser(user.id, user.login, user.avatarUrl);
- };
-
- onTeamSelected = (team: Team) => {
- const { permissions } = this.props;
- if (!team) {
- permissions.newItem.setTeam(null, null);
- return;
- }
- return permissions.newItem.setTeam(team.id, team.name, team.avatarUrl);
- };
-
- onPermissionChanged = (permission: OptionWithDescription) => {
- const { permissions } = this.props;
- return permissions.newItem.setPermission(permission.value);
- };
-
- resetNewType() {
- const { permissions } = this.props;
- return permissions.resetNewType();
- }
-
- onSubmit = evt => {
- evt.preventDefault();
- const { permissions } = this.props;
- permissions.addStoreItem();
- };
-
- render() {
- const { permissions } = this.props;
- const newItem = permissions.newItem;
- const pickerClassName = 'width-20';
-
- const isValid = newItem.isValid();
-
- return (
-
- );
- }
-}
-
-export default AddPermissions;
diff --git a/public/app/core/components/Permissions/DashboardPermissions.tsx b/public/app/core/components/Permissions/DashboardPermissions.tsx
deleted file mode 100644
index 38a646b2473..00000000000
--- a/public/app/core/components/Permissions/DashboardPermissions.tsx
+++ /dev/null
@@ -1,71 +0,0 @@
-import React, { Component } from 'react';
-import { observer } from 'mobx-react';
-import { store } from 'app/stores/store';
-import Permissions from 'app/core/components/Permissions/Permissions';
-import Tooltip from 'app/core/components/Tooltip/Tooltip';
-import PermissionsInfo from 'app/core/components/Permissions/PermissionsInfo';
-import AddPermissions from 'app/core/components/Permissions/AddPermissions';
-import SlideDown from 'app/core/components/Animations/SlideDown';
-import { FolderInfo } from './FolderInfo';
-
-export interface Props {
- dashboardId: number;
- folder?: FolderInfo;
- backendSrv: any;
-}
-
-@observer
-class DashboardPermissions extends Component {
- permissions: any;
-
- constructor(props) {
- super(props);
- this.handleAddPermission = this.handleAddPermission.bind(this);
- this.permissions = store.permissions;
- }
-
- handleAddPermission() {
- this.permissions.toggleAddPermissions();
- }
-
- componentWillUnmount() {
- this.permissions.hideAddPermissions();
- }
-
- render() {
- const { dashboardId, folder, backendSrv } = this.props;
-
- return (
-
-
-
-
Permissions
-
-
-
-
-
- Add Permission
-
-
-
-
-
-
-
-
- );
- }
-}
-
-export default DashboardPermissions;
diff --git a/public/app/core/components/Permissions/FolderInfo.ts b/public/app/core/components/Permissions/FolderInfo.ts
deleted file mode 100644
index d4a6020bb71..00000000000
--- a/public/app/core/components/Permissions/FolderInfo.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export interface FolderInfo {
- id: number;
- title: string;
- url: string;
-}
diff --git a/public/app/core/components/Permissions/Permissions.tsx b/public/app/core/components/Permissions/Permissions.tsx
deleted file mode 100644
index d17899c891f..00000000000
--- a/public/app/core/components/Permissions/Permissions.tsx
+++ /dev/null
@@ -1,91 +0,0 @@
-import React, { Component } from 'react';
-import PermissionsList from './PermissionsList';
-import { observer } from 'mobx-react';
-import { FolderInfo } from './FolderInfo';
-
-export interface DashboardAcl {
- id?: number;
- dashboardId?: number;
- userId?: number;
- userLogin?: string;
- userEmail?: string;
- teamId?: number;
- team?: string;
- permission?: number;
- permissionName?: string;
- role?: string;
- icon?: string;
- name?: string;
- inherited?: boolean;
- sortRank?: number;
-}
-
-export interface Props {
- dashboardId: number;
- folderInfo?: FolderInfo;
- permissions?: any;
- isFolder: boolean;
- backendSrv: any;
-}
-
-@observer
-class Permissions extends Component {
- constructor(props) {
- super(props);
- const { dashboardId, isFolder, folderInfo } = this.props;
- this.permissionChanged = this.permissionChanged.bind(this);
- this.typeChanged = this.typeChanged.bind(this);
- this.removeItem = this.removeItem.bind(this);
- this.loadStore(dashboardId, isFolder, folderInfo && folderInfo.id === 0);
- }
-
- loadStore(dashboardId, isFolder, isInRoot = false) {
- return this.props.permissions.load(dashboardId, isFolder, isInRoot);
- }
-
- permissionChanged(index: number, permission: number, permissionName: string) {
- const { permissions } = this.props;
- permissions.updatePermissionOnIndex(index, permission, permissionName);
- }
-
- removeItem(index: number) {
- const { permissions } = this.props;
- permissions.removeStoreItem(index);
- }
-
- resetNewType() {
- const { permissions } = this.props;
- permissions.resetNewType();
- }
-
- typeChanged(evt) {
- const { value } = evt.target;
- const { permissions, dashboardId } = this.props;
-
- if (value === 'Viewer' || value === 'Editor') {
- permissions.addStoreItem({ permission: 1, role: value, dashboardId: dashboardId }, dashboardId);
- this.resetNewType();
- return;
- }
-
- permissions.setNewType(value);
- }
-
- render() {
- const { permissions, folderInfo } = this.props;
-
- return (
-
- );
- }
-}
-
-export default Permissions;
diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx
deleted file mode 100644
index a17aa8c04df..00000000000
--- a/public/app/core/components/Permissions/PermissionsListItem.tsx
+++ /dev/null
@@ -1,91 +0,0 @@
-import React from 'react';
-import { observer } from 'mobx-react';
-import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker';
-import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore';
-
-const setClassNameHelper = inherited => {
- return inherited ? 'gf-form-disabled' : '';
-};
-
-function ItemAvatar({ item }) {
- if (item.userAvatarUrl) {
- return ;
- }
- if (item.teamAvatarUrl) {
- return ;
- }
- if (item.role === 'Editor') {
- return ;
- }
-
- return ;
-}
-
-function ItemDescription({ item }) {
- if (item.userId) {
- return (User) ;
- }
- if (item.teamId) {
- return (Team) ;
- }
- return (Role) ;
-}
-
-export default observer(({ item, removeItem, permissionChanged, itemIndex, folderInfo }) => {
- const handleRemoveItem = evt => {
- evt.preventDefault();
- removeItem(itemIndex);
- };
-
- const handleChangePermission = permissionOption => {
- permissionChanged(itemIndex, permissionOption.value, permissionOption.label);
- };
-
- const inheritedFromRoot = item.dashboardId === -1 && !item.inherited;
-
- return (
-
-
-
-
-
- {item.name}
-
-
- {item.inherited &&
- folderInfo && (
-
- Inherited from folder{' '}
-
- {folderInfo.title}
- {' '}
-
- )}
- {inheritedFromRoot && Default Permission }
-
- Can
-
-
-
-
-
-
- {!item.inherited ? (
-
-
-
- ) : (
-
-
-
- )}
-
-
- );
-});
diff --git a/public/app/core/components/Picker/DescriptionOption.tsx b/public/app/core/components/Picker/DescriptionOption.tsx
index 1bcb7100489..9ddf13f7532 100644
--- a/public/app/core/components/Picker/DescriptionOption.tsx
+++ b/public/app/core/components/Picker/DescriptionOption.tsx
@@ -1,56 +1,25 @@
-import React, { Component } from 'react';
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
-export interface Props {
- onSelect: any;
- onFocus: any;
- option: any;
- isFocused: any;
- className: any;
+// https://github.com/JedWatson/react-select/issues/3038
+interface ExtendedOptionProps extends OptionProps {
+ data: any;
}
-class DescriptionOption extends Component {
- constructor(props) {
- super(props);
- this.handleMouseDown = this.handleMouseDown.bind(this);
- this.handleMouseEnter = this.handleMouseEnter.bind(this);
- this.handleMouseMove = this.handleMouseMove.bind(this);
- }
-
- handleMouseDown(event) {
- event.preventDefault();
- event.stopPropagation();
- this.props.onSelect(this.props.option, event);
- }
-
- handleMouseEnter(event) {
- this.props.onFocus(this.props.option, event);
- }
-
- handleMouseMove(event) {
- if (this.props.isFocused) {
- return;
- }
- this.props.onFocus(this.props.option, event);
- }
-
- render() {
- const { option, children, className } = this.props;
- return (
-
+export const Option = (props: ExtendedOptionProps) => {
+ const { children, isSelected, data, className } = props;
+ return (
+
+
+ {isSelected &&
}
{children}
-
{option.description}
- {className.indexOf('is-selected') > -1 &&
}
+
{data.description}
-
- );
- }
-}
+
+
+ );
+};
-export default DescriptionOption;
+export default Option;
diff --git a/public/app/core/components/Picker/DescriptionPicker.tsx b/public/app/core/components/Picker/DescriptionPicker.tsx
index 2e53d096e08..4c39dcd8a79 100644
--- a/public/app/core/components/Picker/DescriptionPicker.tsx
+++ b/public/app/core/components/Picker/DescriptionPicker.tsx
@@ -1,14 +1,9 @@
import React, { Component } from 'react';
import Select from 'react-select';
import DescriptionOption from './DescriptionOption';
-
-export interface Props {
- optionsWithDesc: OptionWithDescription[];
- onSelected: (permission) => void;
- value: number;
- disabled: boolean;
- className?: string;
-}
+import IndicatorsContainer from './IndicatorsContainer';
+import ResetStyles from './ResetStyles';
+import NoOptionsMessage from './NoOptionsMessage';
export interface OptionWithDescription {
value: any;
@@ -16,29 +11,42 @@ export interface OptionWithDescription {
description: string;
}
+export interface Props {
+ optionsWithDesc: OptionWithDescription[];
+ onSelected: (permission) => void;
+ disabled: boolean;
+ className?: string;
+ value?: any;
+}
+
+const getSelectedOption = (optionsWithDesc, value) => optionsWithDesc.find(option => option.value === value);
+
class DescriptionPicker extends Component {
constructor(props) {
super(props);
- this.state = {};
}
render() {
- const { optionsWithDesc, onSelected, value, disabled, className } = this.props;
-
+ const { optionsWithDesc, onSelected, disabled, className, value } = this.props;
+ const selectedOption = getSelectedOption(optionsWithDesc, value);
return (
i.value}
+ getOptionLabel={i => i.label}
+ value={selectedOption}
/>
);
diff --git a/public/app/core/components/Picker/IndicatorsContainer.tsx b/public/app/core/components/Picker/IndicatorsContainer.tsx
new file mode 100644
index 00000000000..a0998fe95b8
--- /dev/null
+++ b/public/app/core/components/Picker/IndicatorsContainer.tsx
@@ -0,0 +1,15 @@
+import React from 'react';
+import { components } from 'react-select';
+
+export const IndicatorsContainer = props => {
+ const isOpen = props.selectProps.menuIsOpen;
+ return (
+
+
+
+ );
+};
+
+export default IndicatorsContainer;
diff --git a/public/app/core/components/Picker/NoOptionsMessage.tsx b/public/app/core/components/Picker/NoOptionsMessage.tsx
new file mode 100644
index 00000000000..1d2ad4a179e
--- /dev/null
+++ b/public/app/core/components/Picker/NoOptionsMessage.tsx
@@ -0,0 +1,18 @@
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
+
+export interface Props {
+ children: Element;
+}
+
+export const PickerOption = (props: OptionProps) => {
+ const { children, className } = props;
+ return (
+
+ {children}
+
+ );
+};
+
+export default PickerOption;
diff --git a/public/app/core/components/Picker/PickerOption.test.tsx b/public/app/core/components/Picker/PickerOption.test.tsx
index 2f46f3197c5..7ebcac354e8 100644
--- a/public/app/core/components/Picker/PickerOption.test.tsx
+++ b/public/app/core/components/Picker/PickerOption.test.tsx
@@ -3,10 +3,26 @@ import renderer from 'react-test-renderer';
import PickerOption from './PickerOption';
const model = {
- onSelect: () => {},
- onFocus: () => {},
- isFocused: () => {},
- option: {
+ cx: jest.fn(),
+ clearValue: jest.fn(),
+ onSelect: jest.fn(),
+ getStyles: jest.fn(),
+ getValue: jest.fn(),
+ hasValue: true,
+ isMulti: false,
+ options: [],
+ selectOption: jest.fn(),
+ selectProps: {},
+ setValue: jest.fn(),
+ isDisabled: false,
+ isFocused: false,
+ isSelected: false,
+ innerRef: null,
+ innerProps: null,
+ label: 'Option label',
+ type: null,
+ children: 'Model title',
+ data: {
title: 'Model title',
avatarUrl: 'url/to/avatar',
label: 'User picker label',
diff --git a/public/app/core/components/Picker/PickerOption.tsx b/public/app/core/components/Picker/PickerOption.tsx
index f30a7c06d10..98bb5dc566e 100644
--- a/public/app/core/components/Picker/PickerOption.tsx
+++ b/public/app/core/components/Picker/PickerOption.tsx
@@ -1,54 +1,22 @@
-import React, { Component } from 'react';
+import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
-export interface Props {
- onSelect: any;
- onFocus: any;
- option: any;
- isFocused: any;
- className: any;
+// https://github.com/JedWatson/react-select/issues/3038
+interface ExtendedOptionProps extends OptionProps {
+ data: any;
}
-class UserPickerOption extends Component {
- constructor(props) {
- super(props);
- this.handleMouseDown = this.handleMouseDown.bind(this);
- this.handleMouseEnter = this.handleMouseEnter.bind(this);
- this.handleMouseMove = this.handleMouseMove.bind(this);
- }
-
- handleMouseDown(event) {
- event.preventDefault();
- event.stopPropagation();
- this.props.onSelect(this.props.option, event);
- }
-
- handleMouseEnter(event) {
- this.props.onFocus(this.props.option, event);
- }
-
- handleMouseMove(event) {
- if (this.props.isFocused) {
- return;
- }
- this.props.onFocus(this.props.option, event);
- }
-
- render() {
- const { option, children, className } = this.props;
-
- return (
-
-
+export const PickerOption = (props: ExtendedOptionProps) => {
+ const { children, data, className } = props;
+ return (
+
+
+ {data.avatarUrl &&
}
{children}
-
- );
- }
-}
+
+
+ );
+};
-export default UserPickerOption;
+export default PickerOption;
diff --git a/public/app/core/components/Picker/ResetStyles.tsx b/public/app/core/components/Picker/ResetStyles.tsx
new file mode 100644
index 00000000000..2a235b2892a
--- /dev/null
+++ b/public/app/core/components/Picker/ResetStyles.tsx
@@ -0,0 +1,23 @@
+export default {
+ clearIndicator: () => ({}),
+ container: () => ({}),
+ control: () => ({}),
+ dropdownIndicator: () => ({}),
+ group: () => ({}),
+ groupHeading: () => ({}),
+ indicatorsContainer: () => ({}),
+ indicatorSeparator: () => ({}),
+ input: () => ({}),
+ loadingIndicator: () => ({}),
+ loadingMessage: () => ({}),
+ menu: () => ({}),
+ menuList: () => ({}),
+ multiValue: () => ({}),
+ multiValueLabel: () => ({}),
+ multiValueRemove: () => ({}),
+ noOptionsMessage: () => ({}),
+ option: () => ({}),
+ placeholder: () => ({}),
+ singleValue: () => ({}),
+ valueContainer: () => ({}),
+};
diff --git a/public/app/core/components/Picker/TeamPicker.tsx b/public/app/core/components/Picker/TeamPicker.tsx
index 04f108ff8da..f44717acd28 100644
--- a/public/app/core/components/Picker/TeamPicker.tsx
+++ b/public/app/core/components/Picker/TeamPicker.tsx
@@ -1,18 +1,11 @@
import React, { Component } from 'react';
-import Select from 'react-select';
+import AsyncSelect from 'react-select/lib/Async';
import PickerOption from './PickerOption';
import { debounce } from 'lodash';
import { getBackendSrv } from 'app/core/services/backend_srv';
-
-export interface Props {
- onSelected: (team: Team) => void;
- value?: string;
- className?: string;
-}
-
-export interface State {
- isLoading;
-}
+import ResetStyles from './ResetStyles';
+import IndicatorsContainer from './IndicatorsContainer';
+import NoOptionsMessage from './NoOptionsMessage';
export interface Team {
id: number;
@@ -21,6 +14,15 @@ export interface Team {
avatarUrl: string;
}
+export interface Props {
+ onSelected: (team: Team) => void;
+ className?: string;
+}
+
+export interface State {
+ isLoading: boolean;
+}
+
export class TeamPicker extends Component {
debouncedSearch: any;
@@ -31,7 +33,7 @@ export class TeamPicker extends Component {
this.debouncedSearch = debounce(this.search, 300, {
leading: true,
- trailing: false,
+ trailing: true,
});
}
@@ -50,31 +52,34 @@ export class TeamPicker extends Component {
});
this.setState({ isLoading: false });
- return { options: teams };
+ return teams;
});
}
render() {
- const { onSelected, value, className } = this.props;
+ const { onSelected, className } = this.props;
const { isLoading } = this.state;
-
return (
- 'Loading...'}
+ noOptionsMessage={() => 'No teams found'}
+ getOptionValue={i => i.id}
+ getOptionLabel={i => i.label}
/>
);
diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx
index e50513c44e1..f78cf69bf5e 100644
--- a/public/app/core/components/Picker/UserPicker.tsx
+++ b/public/app/core/components/Picker/UserPicker.tsx
@@ -1,12 +1,15 @@
import React, { Component } from 'react';
-import Select from 'react-select';
+import AsyncSelect from 'react-select/lib/Async';
import PickerOption from './PickerOption';
import { debounce } from 'lodash';
import { getBackendSrv } from 'app/core/services/backend_srv';
+import { User } from 'app/types';
+import ResetStyles from './ResetStyles';
+import IndicatorsContainer from './IndicatorsContainer';
+import NoOptionsMessage from './NoOptionsMessage';
export interface Props {
onSelected: (user: User) => void;
- value?: string;
className?: string;
}
@@ -14,13 +17,6 @@ export interface State {
isLoading: boolean;
}
-export interface User {
- id: number;
- label: string;
- avatarUrl: string;
- login: string;
-}
-
export class UserPicker extends Component {
debouncedSearch: any;
@@ -37,20 +33,17 @@ export class UserPicker extends Component {
search(query?: string) {
const backendSrv = getBackendSrv();
-
this.setState({ isLoading: true });
return backendSrv
.get(`/api/org/users?query=${query}&limit=10`)
.then(result => {
- return {
- options: result.map(user => ({
- id: user.userId,
- label: `${user.login} - ${user.email}`,
- avatarUrl: user.avatarUrl,
- login: user.login,
- })),
- };
+ return result.map(user => ({
+ id: user.userId,
+ label: `${user.login} - ${user.email}`,
+ avatarUrl: user.avatarUrl,
+ login: user.login,
+ }));
})
.finally(() => {
this.setState({ isLoading: false });
@@ -58,26 +51,30 @@ export class UserPicker extends Component {
}
render() {
- const { value, className } = this.props;
+ const { className, onSelected } = this.props;
const { isLoading } = this.state;
return (
- 'Loading...'}
+ noOptionsMessage={() => 'No users found'}
+ getOptionValue={i => i.id}
+ getOptionLabel={i => i.label}
/>
);
diff --git a/public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap b/public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap
index 764f7844cb7..748fcbee4aa 100644
--- a/public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap
+++ b/public/app/core/components/Picker/__snapshots__/PickerOption.test.tsx.snap
@@ -1,17 +1,16 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`PickerOption renders correctly 1`] = `
-
-
-
+
+
+
+ Model title
+
+
`;
diff --git a/public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap b/public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap
index 67232d0ea5b..011811f21ad 100644
--- a/public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap
+++ b/public/app/core/components/Picker/__snapshots__/TeamPicker.test.tsx.snap
@@ -5,93 +5,115 @@ exports[`TeamPicker renders correctly 1`] = `
className="user-picker"
>
-
- Loading...
+ Select a team
-
-
+
-
-
-
-
+
diff --git a/public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap b/public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap
index 3262dc10efe..3e23bd435b1 100644
--- a/public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap
+++ b/public/app/core/components/Picker/__snapshots__/UserPicker.test.tsx.snap
@@ -5,93 +5,115 @@ exports[`UserPicker renders correctly 1`] = `
className="user-picker"
>
-
- Loading...
+ Select user
-
-
+
-
-
-
-
+
diff --git a/public/app/core/components/TagFilter/TagBadge.tsx b/public/app/core/components/TagFilter/TagBadge.tsx
index d93b5fd1e74..b00b95b70cb 100644
--- a/public/app/core/components/TagFilter/TagBadge.tsx
+++ b/public/app/core/components/TagFilter/TagBadge.tsx
@@ -5,17 +5,12 @@ export interface Props {
label: string;
removeIcon: boolean;
count: number;
- onClick: any;
+ onClick?: any;
}
export class TagBadge extends React.Component {
constructor(props) {
super(props);
- this.onClick = this.onClick.bind(this);
- }
-
- onClick(event) {
- this.props.onClick(event);
}
render() {
@@ -28,7 +23,7 @@ export class TagBadge extends React.Component {
const countLabel = count !== 0 && {`(${count})`} ;
return (
-
+
{removeIcon && }
{label} {countLabel}
diff --git a/public/app/core/components/TagFilter/TagFilter.tsx b/public/app/core/components/TagFilter/TagFilter.tsx
index a879f544da0..14b6ef4932b 100644
--- a/public/app/core/components/TagFilter/TagFilter.tsx
+++ b/public/app/core/components/TagFilter/TagFilter.tsx
@@ -1,8 +1,11 @@
-import _ from 'lodash';
import React from 'react';
-import { Async } from 'react-select';
-import { TagValue } from './TagValue';
+import AsyncSelect from 'react-select/lib/Async';
import { TagOption } from './TagOption';
+import { TagBadge } from './TagBadge';
+import IndicatorsContainer from 'app/core/components/Picker/IndicatorsContainer';
+import NoOptionsMessage from 'app/core/components/Picker/NoOptionsMessage';
+import { components } from 'react-select';
+import ResetStyles from 'app/core/components/Picker/ResetStyles';
export interface Props {
tags: string[];
@@ -18,15 +21,15 @@ export class TagFilter extends React.Component {
this.searchTags = this.searchTags.bind(this);
this.onChange = this.onChange.bind(this);
- this.onTagRemove = this.onTagRemove.bind(this);
}
searchTags(query) {
return this.props.tagOptions().then(options => {
- const tags = _.map(options, tagOption => {
- return { value: tagOption.term, label: tagOption.term, count: tagOption.count };
- });
- return { options: tags };
+ return options.map(option => ({
+ value: option.term,
+ label: option.term,
+ count: option.count,
+ }));
});
}
@@ -34,33 +37,44 @@ export class TagFilter extends React.Component {
this.props.onSelect(newTags);
}
- onTagRemove(tag) {
- let newTags = _.without(this.props.tags, tag.label);
- newTags = _.map(newTags, tag => {
- return { value: tag };
- });
- this.props.onSelect(newTags);
- }
-
render() {
const selectOptions = {
+ classNamePrefix: 'gf-form-select-box',
+ isMulti: true,
+ defaultOptions: true,
loadOptions: this.searchTags,
onChange: this.onChange,
- value: this.props.tags,
- multi: true,
className: 'gf-form-input gf-form-input--form-dropdown',
placeholder: 'Tags',
- loadingPlaceholder: 'Loading...',
- noResultsText: 'No tags found',
- optionComponent: TagOption,
- };
+ loadingMessage: () => 'Loading...',
+ noOptionsMessage: () => 'No tags found',
+ getOptionValue: i => i.value,
+ getOptionLabel: i => i.label,
+ value: this.props.tags,
+ styles: ResetStyles,
+ components: {
+ Option: TagOption,
+ IndicatorsContainer,
+ NoOptionsMessage,
+ MultiValueLabel: () => {
+ return null; // We want the whole tag to be clickable so we use MultiValueRemove instead
+ },
+ MultiValueRemove: props => {
+ const { data } = props;
- selectOptions['valueComponent'] = TagValue;
+ return (
+
+
+
+ );
+ },
+ },
+ };
return (
diff --git a/public/app/core/components/TagFilter/TagOption.tsx b/public/app/core/components/TagFilter/TagOption.tsx
index 5938c98f870..e4e91f93c00 100644
--- a/public/app/core/components/TagFilter/TagOption.tsx
+++ b/public/app/core/components/TagFilter/TagOption.tsx
@@ -1,52 +1,22 @@
import React from 'react';
+import { components } from 'react-select';
+import { OptionProps } from 'react-select/lib/components/Option';
import { TagBadge } from './TagBadge';
-export interface Props {
- onSelect: any;
- onFocus: any;
- option: any;
- isFocused: any;
- className: any;
+// https://github.com/JedWatson/react-select/issues/3038
+interface ExtendedOptionProps extends OptionProps {
+ data: any;
}
-export class TagOption extends React.Component {
- constructor(props) {
- super(props);
- this.handleMouseDown = this.handleMouseDown.bind(this);
- this.handleMouseEnter = this.handleMouseEnter.bind(this);
- this.handleMouseMove = this.handleMouseMove.bind(this);
- }
+export const TagOption = (props: ExtendedOptionProps) => {
+ const { data, className, label } = props;
+ return (
+
+
+
+
+
+ );
+};
- handleMouseDown(event) {
- event.preventDefault();
- event.stopPropagation();
- this.props.onSelect(this.props.option, event);
- }
-
- handleMouseEnter(event) {
- this.props.onFocus(this.props.option, event);
- }
-
- handleMouseMove(event) {
- if (this.props.isFocused) {
- return;
- }
- this.props.onFocus(this.props.option, event);
- }
-
- render() {
- const { option, className } = this.props;
-
- return (
-
-
-
- );
- }
-}
+export default TagOption;
diff --git a/public/app/core/components/TagFilter/TagValue.tsx b/public/app/core/components/TagFilter/TagValue.tsx
index ca8ca9e4fba..43e41c7fdc2 100644
--- a/public/app/core/components/TagFilter/TagValue.tsx
+++ b/public/app/core/components/TagFilter/TagValue.tsx
@@ -21,6 +21,6 @@ export class TagValue extends React.Component {
render() {
const { value } = this.props;
- return ;
+ return ;
}
}
diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts
index 6c7b8cf3bf7..2774ab99426 100644
--- a/public/app/core/components/grafana_app.ts
+++ b/public/app/core/components/grafana_app.ts
@@ -6,10 +6,11 @@ import coreModule from 'app/core/core_module';
import { profiler } from 'app/core/profiler';
import appEvents from 'app/core/app_events';
import Drop from 'tether-drop';
-import { createStore } from 'app/stores/store';
import colors from 'app/core/utils/colors';
import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv';
import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
+import { configureStore } from 'app/store/configureStore';
+import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader';
export class GrafanaCtrl {
/** @ngInject */
@@ -22,11 +23,13 @@ export class GrafanaCtrl {
contextSrv,
bridgeSrv,
backendSrv: BackendSrv,
- datasourceSrv: DatasourceSrv
+ datasourceSrv: DatasourceSrv,
+ angularLoader: AngularLoader
) {
// sets singleston instances for angular services so react components can access them
+ setAngularLoader(angularLoader);
setBackendSrv(backendSrv);
- createStore({ backendSrv, datasourceSrv });
+ configureStore();
$scope.init = () => {
$scope.contextSrv = contextSrv;
@@ -245,6 +248,9 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop
return;
}
+ // ensure dropdown menu doesn't impact on z-index
+ body.find('.dropdown-menu-open').removeClass('dropdown-menu-open');
+
// for stuff that animates, slides out etc, clicking it needs to
// hide it right away
const clickAutoHide = target.closest('[data-click-hide]');
diff --git a/public/app/core/components/manage_dashboards/manage_dashboards.ts b/public/app/core/components/manage_dashboards/manage_dashboards.ts
index da3fa2f8ab8..25a69b1f5e4 100644
--- a/public/app/core/components/manage_dashboards/manage_dashboards.ts
+++ b/public/app/core/components/manage_dashboards/manage_dashboards.ts
@@ -207,7 +207,7 @@ export class ManageDashboardsCtrl {
const template =
'' +
- ' `';
+ '';
appEvents.emit('show-modal', {
templateHtml: template,
modalClass: 'modal--narrow',
diff --git a/public/app/core/components/search/SearchResult.tsx b/public/app/core/components/search/SearchResult.tsx
index 3141d29ac7f..13333c168f9 100644
--- a/public/app/core/components/search/SearchResult.tsx
+++ b/public/app/core/components/search/SearchResult.tsx
@@ -1,22 +1,13 @@
import React from 'react';
import classNames from 'classnames';
-import { observer } from 'mobx-react';
-import { store } from 'app/stores/store';
-export interface SearchResultProps {
- search: any;
-}
-
-@observer
-export class SearchResult extends React.Component {
+export class SearchResult extends React.Component {
constructor(props) {
super(props);
this.state = {
- search: store.search,
+ search: '',
};
-
- store.search.query();
}
render() {
@@ -30,7 +21,6 @@ export interface SectionProps {
section: any;
}
-@observer
export class SearchResultSection extends React.Component {
constructor(props) {
super(props);
diff --git a/public/app/core/components/search/search.ts b/public/app/core/components/search/search.ts
index e347fcd829a..322dd2bdf10 100644
--- a/public/app/core/components/search/search.ts
+++ b/public/app/core/components/search/search.ts
@@ -160,8 +160,12 @@ export class SearchCtrl {
searchDashboards() {
this.currentSearchId = this.currentSearchId + 1;
const localSearchId = this.currentSearchId;
+ const query = {
+ ...this.query,
+ tag: this.query.tag.map(i => i.value),
+ };
- return this.searchSrv.search(this.query).then(results => {
+ return this.searchSrv.search(query).then(results => {
if (localSearchId < this.currentSearchId) {
return;
}
@@ -196,7 +200,7 @@ export class SearchCtrl {
}
onTagSelect(newTags) {
- this.query.tag = _.map(newTags, tag => tag.value);
+ this.query.tag = newTags;
this.search();
}
diff --git a/public/app/core/components/search/search_results.html b/public/app/core/components/search/search_results.html
index 7435f8d0b7e..45258ded652 100644
--- a/public/app/core/components/search/search_results.html
+++ b/public/app/core/components/search/search_results.html
@@ -34,6 +34,7 @@
{{::item.title}}
+ {{::item.folderTitle}}
diff --git a/public/app/core/components/sidemenu/SideMenu.tsx b/public/app/core/components/sidemenu/SideMenu.tsx
index 0092c1e3842..fd3e0d95564 100644
--- a/public/app/core/components/sidemenu/SideMenu.tsx
+++ b/public/app/core/components/sidemenu/SideMenu.tsx
@@ -17,7 +17,7 @@ export class SideMenu extends PureComponent {
render() {
return [
-
+
,
diff --git a/public/app/core/components/sidemenu/__snapshots__/SideMenu.test.tsx.snap b/public/app/core/components/sidemenu/__snapshots__/SideMenu.test.tsx.snap
index 78e64209749..ec2fa845c6d 100644
--- a/public/app/core/components/sidemenu/__snapshots__/SideMenu.test.tsx.snap
+++ b/public/app/core/components/sidemenu/__snapshots__/SideMenu.test.tsx.snap
@@ -6,12 +6,29 @@ Array [
className="sidemenu__logo"
key="logo"
onClick={[Function]}
- />,
+ >
+
+
,
,
+ >
+
+
+
+ Close
+
+ ,
,
diff --git a/public/app/core/components/sidemenu/__snapshots__/SignIn.test.tsx.snap b/public/app/core/components/sidemenu/__snapshots__/SignIn.test.tsx.snap
index ba21be63b51..15de108cbcf 100644
--- a/public/app/core/components/sidemenu/__snapshots__/SignIn.test.tsx.snap
+++ b/public/app/core/components/sidemenu/__snapshots__/SignIn.test.tsx.snap
@@ -6,7 +6,7 @@ exports[`Render should render component 1`] = `
>
{
+ switch (action.type) {
+ case 'UPDATE_LOCATION': {
+ const { path, query, routeParams } = action.payload;
+ return {
+ url: renderUrl(path || state.path, query),
+ path: path || state.path,
+ query: query || state.query,
+ routeParams: routeParams || state.routeParams,
+ };
+ }
+ }
+
+ return state;
+};
diff --git a/public/app/core/reducers/navModel.ts b/public/app/core/reducers/navModel.ts
new file mode 100644
index 00000000000..ac0e51854e7
--- /dev/null
+++ b/public/app/core/reducers/navModel.ts
@@ -0,0 +1,43 @@
+import { Action, ActionTypes } from 'app/core/actions/navModel';
+import { NavIndex, NavModelItem } from 'app/types';
+import config from 'app/core/config';
+
+export function buildInitialState(): NavIndex {
+ const navIndex: NavIndex = {};
+ const rootNodes = config.bootData.navTree as NavModelItem[];
+ buildNavIndex(navIndex, rootNodes);
+ return navIndex;
+}
+
+function buildNavIndex(navIndex: NavIndex, children: NavModelItem[], parentItem?: NavModelItem) {
+ for (const node of children) {
+ navIndex[node.id] = {
+ ...node,
+ parentItem: parentItem,
+ };
+
+ if (node.children) {
+ buildNavIndex(navIndex, node.children, node);
+ }
+ }
+}
+
+export const initialState: NavIndex = buildInitialState();
+
+export const navIndexReducer = (state = initialState, action: Action): NavIndex => {
+ switch (action.type) {
+ case ActionTypes.UpdateNavIndex:
+ const newPages = {};
+ const payload = action.payload;
+
+ for (const node of payload.children) {
+ newPages[node.id] = {
+ ...node,
+ parentItem: payload,
+ };
+ }
+
+ return { ...state, ...newPages };
+ }
+ return state;
+};
diff --git a/public/app/core/reducers/processsAclItems.ts b/public/app/core/reducers/processsAclItems.ts
new file mode 100644
index 00000000000..57578d6b2d8
--- /dev/null
+++ b/public/app/core/reducers/processsAclItems.ts
@@ -0,0 +1,31 @@
+import { DashboardAcl, DashboardAclDTO } from 'app/types/acl';
+
+export function processAclItems(items: DashboardAclDTO[]): DashboardAcl[] {
+ return items.map(processAclItem).sort((a, b) => b.sortRank - a.sortRank || a.name.localeCompare(b.name));
+}
+
+function processAclItem(dto: DashboardAclDTO): DashboardAcl {
+ const item = dto as DashboardAcl;
+
+ item.sortRank = 0;
+ if (item.userId > 0) {
+ item.name = item.userLogin;
+ item.sortRank = 10;
+ } else if (item.teamId > 0) {
+ item.name = item.team;
+ item.sortRank = 20;
+ } else if (item.role) {
+ item.icon = 'fa fa-fw fa-street-view';
+ item.name = item.role;
+ item.sortRank = 30;
+ if (item.role === 'Editor') {
+ item.sortRank += 1;
+ }
+ }
+
+ if (item.inherited) {
+ item.sortRank += 100;
+ }
+
+ return item;
+}
diff --git a/public/app/core/selectors/location.ts b/public/app/core/selectors/location.ts
new file mode 100644
index 00000000000..adc31f47e89
--- /dev/null
+++ b/public/app/core/selectors/location.ts
@@ -0,0 +1,3 @@
+export const getRouteParamsId = state => state.routeParams.id;
+
+export const getRouteParamsPage = state => state.routeParams.page;
diff --git a/public/app/core/selectors/navModel.ts b/public/app/core/selectors/navModel.ts
new file mode 100644
index 00000000000..aa508616962
--- /dev/null
+++ b/public/app/core/selectors/navModel.ts
@@ -0,0 +1,43 @@
+import { NavModel, NavModelItem, NavIndex } from 'app/types';
+
+function getNotFoundModel(): NavModel {
+ const node: NavModelItem = {
+ id: 'not-found',
+ text: 'Page not found',
+ icon: 'fa fa-fw fa-warning',
+ subTitle: '404 Error',
+ url: 'not-found',
+ };
+
+ return {
+ node: node,
+ main: node,
+ };
+}
+
+export function getNavModel(navIndex: NavIndex, id: string, fallback?: NavModel): NavModel {
+ if (navIndex[id]) {
+ const node = navIndex[id];
+ const main = {
+ ...node.parentItem,
+ };
+
+ main.children = main.children.map(item => {
+ return {
+ ...item,
+ active: item.url === node.url,
+ };
+ });
+
+ return {
+ node: node,
+ main: main,
+ };
+ }
+
+ if (fallback) {
+ return fallback;
+ }
+
+ return getNotFoundModel();
+}
diff --git a/public/app/core/services/AngularLoader.ts b/public/app/core/services/AngularLoader.ts
new file mode 100644
index 00000000000..e3a7dec4351
--- /dev/null
+++ b/public/app/core/services/AngularLoader.ts
@@ -0,0 +1,42 @@
+import angular from 'angular';
+import coreModule from 'app/core/core_module';
+import _ from 'lodash';
+
+export interface AngularComponent {
+ destroy();
+}
+
+export class AngularLoader {
+ /** @ngInject */
+ constructor(private $compile, private $rootScope) {}
+
+ load(elem, scopeProps, template): AngularComponent {
+ const scope = this.$rootScope.$new();
+
+ _.assign(scope, scopeProps);
+
+ const compiledElem = this.$compile(template)(scope);
+ const rootNode = angular.element(elem);
+ rootNode.append(compiledElem);
+
+ return {
+ destroy: () => {
+ scope.$destroy();
+ compiledElem.remove();
+ },
+ };
+ }
+}
+
+coreModule.service('angularLoader', AngularLoader);
+
+let angularLoaderInstance: AngularLoader;
+
+export function setAngularLoader(pl: AngularLoader) {
+ angularLoaderInstance = pl;
+}
+
+// a way to access it from react
+export function getAngularLoader(): AngularLoader {
+ return angularLoaderInstance;
+}
diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts
index 2a50a1b1f12..3e8132a695b 100644
--- a/public/app/core/services/backend_srv.ts
+++ b/public/app/core/services/backend_srv.ts
@@ -252,16 +252,6 @@ export class BackendSrv {
return this.post('/api/folders', payload);
}
- updateFolder(folder, options) {
- options = options || {};
-
- return this.put(`/api/folders/${folder.uid}`, {
- title: folder.title,
- version: folder.version,
- overwrite: options.overwrite === true,
- });
- }
-
deleteFolder(uid: string, showSuccessAlert) {
return this.request({ method: 'DELETE', url: `/api/folders/${uid}`, showSuccessAlert: showSuccessAlert === true });
}
diff --git a/public/app/core/services/bridge_srv.ts b/public/app/core/services/bridge_srv.ts
index bdc2976a94c..ee184c243ac 100644
--- a/public/app/core/services/bridge_srv.ts
+++ b/public/app/core/services/bridge_srv.ts
@@ -1,8 +1,8 @@
import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
-import { store } from 'app/stores/store';
-import { reaction } from 'mobx';
+import { store } from 'app/store/configureStore';
import locationUtil from 'app/core/utils/location_util';
+import { updateLocation } from 'app/core/actions';
// Services that handles angular -> mobx store sync & other react <-> angular sync
export class BridgeSrv {
@@ -16,28 +16,40 @@ export class BridgeSrv {
init() {
this.$rootScope.$on('$routeUpdate', (evt, data) => {
const angularUrl = this.$location.url();
- if (store.view.currentUrl !== angularUrl) {
- store.view.updatePathAndQuery(this.$location.path(), this.$location.search(), this.$route.current.params);
+ const state = store.getState();
+ if (state.location.url !== angularUrl) {
+ store.dispatch(
+ updateLocation({
+ path: this.$location.path(),
+ query: this.$location.search(),
+ routeParams: this.$route.current.params,
+ })
+ );
}
});
this.$rootScope.$on('$routeChangeSuccess', (evt, data) => {
- store.view.updatePathAndQuery(this.$location.path(), this.$location.search(), this.$route.current.params);
+ store.dispatch(
+ updateLocation({
+ path: this.$location.path(),
+ query: this.$location.search(),
+ routeParams: this.$route.current.params,
+ })
+ );
});
- reaction(
- () => store.view.currentUrl,
- currentUrl => {
- const angularUrl = this.$location.url();
- const url = locationUtil.stripBaseFromUrl(currentUrl);
- if (angularUrl !== url) {
- this.$timeout(() => {
- this.$location.url(url);
- });
- console.log('store updating angular $location.url', url);
- }
+ // Listen for changes in redux location -> update angular location
+ store.subscribe(() => {
+ const state = store.getState();
+ const angularUrl = this.$location.url();
+ const url = locationUtil.stripBaseFromUrl(state.location.url);
+ if (angularUrl !== url) {
+ this.$timeout(() => {
+ this.$location.url(url);
+ });
+ console.log('store updating angular $location.url', url);
}
- );
+ });
appEvents.on('location-change', payload => {
const urlWithoutBase = locationUtil.stripBaseFromUrl(payload.href);
diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts
index d05e9b0c21c..d8dfc958dd4 100644
--- a/public/app/core/services/keybindingSrv.ts
+++ b/public/app/core/services/keybindingSrv.ts
@@ -4,7 +4,7 @@ import _ from 'lodash';
import config from 'app/core/config';
import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
-import { encodePathComponent } from 'app/core/utils/location_util';
+import { getExploreUrl } from 'app/core/utils/explore';
import Mousetrap from 'mousetrap';
import 'mousetrap-global-bind';
@@ -15,7 +15,14 @@ export class KeybindingSrv {
timepickerOpen = false;
/** @ngInject */
- constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) {
+ constructor(
+ private $rootScope,
+ private $location,
+ private $timeout,
+ private datasourceSrv,
+ private timeSrv,
+ private contextSrv
+ ) {
// clear out all shortcuts on route change
$rootScope.$on('$routeChangeSuccess', () => {
Mousetrap.reset();
@@ -194,14 +201,9 @@ export class KeybindingSrv {
if (dashboard.meta.focusPanelId) {
const panel = dashboard.getPanelById(dashboard.meta.focusPanelId);
const datasource = await this.datasourceSrv.get(panel.datasource);
- if (datasource && datasource.supportsExplore) {
- const range = this.timeSrv.timeRangeForUrl();
- const state = {
- ...datasource.getExploreState(panel),
- range,
- };
- const exploreState = encodePathComponent(JSON.stringify(state));
- this.$location.url(`/explore?state=${exploreState}`);
+ const url = await getExploreUrl(panel, panel.targets, datasource, this.datasourceSrv, this.timeSrv);
+ if (url) {
+ this.$timeout(() => this.$location.url(url));
}
}
});
diff --git a/public/app/core/specs/kbn.test.ts b/public/app/core/specs/kbn.test.ts
index dfa665e3205..e621cdef632 100644
--- a/public/app/core/specs/kbn.test.ts
+++ b/public/app/core/specs/kbn.test.ts
@@ -399,6 +399,77 @@ describe('duration', () => {
});
});
+describe('clock', () => {
+ it('null', () => {
+ const str = kbn.toClock(null, 0);
+ expect(str).toBe('');
+ });
+ it('size less than 1 second', () => {
+ const str = kbn.toClock(999, 0);
+ expect(str).toBe('999ms');
+ });
+ describe('size less than 1 minute', () => {
+ it('default', () => {
+ const str = kbn.toClock(59999);
+ expect(str).toBe('59s:999ms');
+ });
+ it('decimals equals 0', () => {
+ const str = kbn.toClock(59999, 0);
+ expect(str).toBe('59s');
+ });
+ });
+ describe('size less than 1 hour', () => {
+ it('default', () => {
+ const str = kbn.toClock(3599999);
+ expect(str).toBe('59m:59s:999ms');
+ });
+ it('decimals equals 0', () => {
+ const str = kbn.toClock(3599999, 0);
+ expect(str).toBe('59m');
+ });
+ it('decimals equals 1', () => {
+ const str = kbn.toClock(3599999, 1);
+ expect(str).toBe('59m:59s');
+ });
+ });
+ describe('size greater than or equal 1 hour', () => {
+ it('default', () => {
+ const str = kbn.toClock(7199999);
+ expect(str).toBe('01h:59m:59s:999ms');
+ });
+ it('decimals equals 0', () => {
+ const str = kbn.toClock(7199999, 0);
+ expect(str).toBe('01h');
+ });
+ it('decimals equals 1', () => {
+ const str = kbn.toClock(7199999, 1);
+ expect(str).toBe('01h:59m');
+ });
+ it('decimals equals 2', () => {
+ const str = kbn.toClock(7199999, 2);
+ expect(str).toBe('01h:59m:59s');
+ });
+ });
+ describe('size greater than or equal 1 day', () => {
+ it('default', () => {
+ const str = kbn.toClock(89999999);
+ expect(str).toBe('24h:59m:59s:999ms');
+ });
+ it('decimals equals 0', () => {
+ const str = kbn.toClock(89999999, 0);
+ expect(str).toBe('24h');
+ });
+ it('decimals equals 1', () => {
+ const str = kbn.toClock(89999999, 1);
+ expect(str).toBe('24h:59m');
+ });
+ it('decimals equals 2', () => {
+ const str = kbn.toClock(89999999, 2);
+ expect(str).toBe('24h:59m:59s');
+ });
+ });
+});
+
describe('volume', () => {
it('1000m3', () => {
const str = kbn.valueFormats['m3'](1000, 1, null);
diff --git a/public/app/core/specs/url.test.ts b/public/app/core/specs/url.test.ts
new file mode 100644
index 00000000000..b5994488128
--- /dev/null
+++ b/public/app/core/specs/url.test.ts
@@ -0,0 +1,16 @@
+import { toUrlParams } from '../utils/url';
+
+describe('toUrlParams', () => {
+ it('should encode object properties as url parameters', () => {
+ const url = toUrlParams({
+ server: 'backend-01',
+ hasSpace: 'has space',
+ many: ['1', '2', '3'],
+ true: true,
+ number: 20,
+ isNull: null,
+ isUndefined: undefined,
+ });
+ expect(url).toBe('server=backend-01&hasSpace=has%20space&many=1&many=2&many=3&true&number=20&isNull=&isUndefined=');
+ });
+});
diff --git a/public/app/core/utils/acl.ts b/public/app/core/utils/acl.ts
new file mode 100644
index 00000000000..57578d6b2d8
--- /dev/null
+++ b/public/app/core/utils/acl.ts
@@ -0,0 +1,31 @@
+import { DashboardAcl, DashboardAclDTO } from 'app/types/acl';
+
+export function processAclItems(items: DashboardAclDTO[]): DashboardAcl[] {
+ return items.map(processAclItem).sort((a, b) => b.sortRank - a.sortRank || a.name.localeCompare(b.name));
+}
+
+function processAclItem(dto: DashboardAclDTO): DashboardAcl {
+ const item = dto as DashboardAcl;
+
+ item.sortRank = 0;
+ if (item.userId > 0) {
+ item.name = item.userLogin;
+ item.sortRank = 10;
+ } else if (item.teamId > 0) {
+ item.name = item.team;
+ item.sortRank = 20;
+ } else if (item.role) {
+ item.icon = 'fa fa-fw fa-street-view';
+ item.name = item.role;
+ item.sortRank = 30;
+ if (item.role === 'Editor') {
+ item.sortRank += 1;
+ }
+ }
+
+ if (item.inherited) {
+ item.sortRank += 100;
+ }
+
+ return item;
+}
diff --git a/public/app/core/utils/dag.test.ts b/public/app/core/utils/dag.test.ts
index 064da13806b..4ee0dd7134b 100644
--- a/public/app/core/utils/dag.test.ts
+++ b/public/app/core/utils/dag.test.ts
@@ -104,5 +104,17 @@ describe('Directed acyclic graph', () => {
const actual = nodeH.getOptimizedInputEdges();
expect(actual).toHaveLength(0);
});
+
+ it('when linking non-existing input node with existing output node should throw error', () => {
+ expect(() => {
+ dag.link('non-existing', 'A');
+ }).toThrowError("cannot link input node named non-existing since it doesn't exist in graph");
+ });
+
+ it('when linking existing input node with non-existing output node should throw error', () => {
+ expect(() => {
+ dag.link('A', 'non-existing');
+ }).toThrowError("cannot link output node named non-existing since it doesn't exist in graph");
+ });
});
});
diff --git a/public/app/core/utils/dag.ts b/public/app/core/utils/dag.ts
index eb7ff1c3b1a..48c00a4c8c3 100644
--- a/public/app/core/utils/dag.ts
+++ b/public/app/core/utils/dag.ts
@@ -15,6 +15,14 @@ export class Edge {
}
link(inputNode: Node, outputNode: Node) {
+ if (!inputNode) {
+ throw Error('inputNode is required');
+ }
+
+ if (!outputNode) {
+ throw Error('outputNode is required');
+ }
+
this.unlink();
this.inputNode = inputNode;
this.outputNode = outputNode;
@@ -152,7 +160,11 @@ export class Graph {
for (let n = 0; n < inputArr.length; n++) {
const i = inputArr[n];
if (typeof i === 'string') {
- inputNodes.push(this.getNode(i));
+      const node = this.getNode(i);
+      if (!node) {
+        throw Error(`cannot link input node named ${i} since it doesn't exist in graph`);
+      }
+      inputNodes.push(node);
} else {
inputNodes.push(i);
}
@@ -161,7 +173,11 @@ export class Graph {
for (let n = 0; n < outputArr.length; n++) {
const i = outputArr[n];
if (typeof i === 'string') {
- outputNodes.push(this.getNode(i));
+      const node = this.getNode(i);
+      if (!node) {
+        throw Error(`cannot link output node named ${i} since it doesn't exist in graph`);
+      }
+      outputNodes.push(node);
} else {
outputNodes.push(i);
}
diff --git a/public/app/core/utils/explore.test.ts b/public/app/core/utils/explore.test.ts
new file mode 100644
index 00000000000..915b47e14e2
--- /dev/null
+++ b/public/app/core/utils/explore.test.ts
@@ -0,0 +1,145 @@
+import { DEFAULT_RANGE, serializeStateToUrlParam, parseUrlState } from './explore';
+import { ExploreState } from 'app/types/explore';
+
+const DEFAULT_EXPLORE_STATE: ExploreState = {
+ datasource: null,
+ datasourceError: null,
+ datasourceLoading: null,
+ datasourceMissing: false,
+ datasourceName: '',
+ exploreDatasources: [],
+ graphResult: null,
+ history: [],
+ latency: 0,
+ loading: false,
+ logsResult: null,
+ queries: [],
+ queryErrors: [],
+ queryHints: [],
+ range: DEFAULT_RANGE,
+ requestOptions: null,
+ showingGraph: true,
+ showingLogs: true,
+ showingTable: true,
+ supportsGraph: null,
+ supportsLogs: null,
+ supportsTable: null,
+ tableResult: null,
+};
+
+describe('state functions', () => {
+ describe('parseUrlState', () => {
+ it('returns default state on empty string', () => {
+ expect(parseUrlState('')).toMatchObject({
+ datasource: null,
+ queries: [],
+ range: DEFAULT_RANGE,
+ });
+ });
+
+ it('returns a valid Explore state from URL parameter', () => {
+ const paramValue =
+ '%7B"datasource":"Local","queries":%5B%7B"query":"metric"%7D%5D,"range":%7B"from":"now-1h","to":"now"%7D%7D';
+ expect(parseUrlState(paramValue)).toMatchObject({
+ datasource: 'Local',
+ queries: [{ query: 'metric' }],
+ range: {
+ from: 'now-1h',
+ to: 'now',
+ },
+ });
+ });
+
+ it('returns a valid Explore state from a compact URL parameter', () => {
+ const paramValue = '%5B"now-1h","now","Local","metric"%5D';
+ expect(parseUrlState(paramValue)).toMatchObject({
+ datasource: 'Local',
+ queries: [{ query: 'metric' }],
+ range: {
+ from: 'now-1h',
+ to: 'now',
+ },
+ });
+ });
+ });
+
+ describe('serializeStateToUrlParam', () => {
+ it('returns url parameter value for a state object', () => {
+ const state = {
+ ...DEFAULT_EXPLORE_STATE,
+ datasourceName: 'foo',
+ range: {
+ from: 'now-5h',
+ to: 'now',
+ },
+ queries: [
+ {
+ query: 'metric{test="a/b"}',
+ },
+ {
+ query: 'super{foo="x/z"}',
+ },
+ ],
+ };
+ expect(serializeStateToUrlParam(state)).toBe(
+ '{"datasource":"foo","queries":[{"query":"metric{test=\\"a/b\\"}"},' +
+ '{"query":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"}}'
+ );
+ });
+
+ it('returns url parameter value for a state object', () => {
+ const state = {
+ ...DEFAULT_EXPLORE_STATE,
+ datasourceName: 'foo',
+ range: {
+ from: 'now-5h',
+ to: 'now',
+ },
+ queries: [
+ {
+ query: 'metric{test="a/b"}',
+ },
+ {
+ query: 'super{foo="x/z"}',
+ },
+ ],
+ };
+ expect(serializeStateToUrlParam(state, true)).toBe(
+ '["now-5h","now","foo","metric{test=\\"a/b\\"}","super{foo=\\"x/z\\"}"]'
+ );
+ });
+ });
+
+ describe('interplay', () => {
+ it('can parse the serialized state into the original state', () => {
+ const state = {
+ ...DEFAULT_EXPLORE_STATE,
+ datasourceName: 'foo',
+ range: {
+ from: 'now - 5h',
+ to: 'now',
+ },
+ queries: [
+ {
+ query: 'metric{test="a/b"}',
+ },
+ {
+ query: 'super{foo="x/z"}',
+ },
+ ],
+ };
+ const serialized = serializeStateToUrlParam(state);
+ const parsed = parseUrlState(serialized);
+
+ // Account for datasource vs datasourceName
+ const { datasource, ...rest } = parsed;
+ const sameState = {
+ ...rest,
+ datasource: DEFAULT_EXPLORE_STATE.datasource,
+ datasourceName: datasource,
+ };
+
+ expect(state).toMatchObject(sameState);
+ });
+ });
+});
diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts
new file mode 100644
index 00000000000..ecd11a495ad
--- /dev/null
+++ b/public/app/core/utils/explore.ts
@@ -0,0 +1,99 @@
+import { renderUrl } from 'app/core/utils/url';
+import { ExploreState, ExploreUrlState } from 'app/types/explore';
+
+export const DEFAULT_RANGE = {
+ from: 'now-6h',
+ to: 'now',
+};
+
+/**
+ * Returns an Explore-URL that contains a panel's queries and the dashboard time range.
+ *
+ * @param panel Origin panel of the jump to Explore
+ * @param panelTargets The origin panel's query targets
+ * @param panelDatasource The origin panel's datasource
+ * @param datasourceSrv Datasource service to query other datasources in case the panel datasource is mixed
+ * @param timeSrv Time service to get the current dashboard range from
+ */
+export async function getExploreUrl(
+ panel: any,
+ panelTargets: any[],
+ panelDatasource: any,
+ datasourceSrv: any,
+ timeSrv: any
+) {
+ let exploreDatasource = panelDatasource;
+ let exploreTargets = panelTargets;
+ let url;
+
+ // Mixed datasources need to choose only one datasource
+ if (panelDatasource.meta.id === 'mixed' && panelTargets) {
+ // Find first explore datasource among targets
+ let mixedExploreDatasource;
+    for (const t of panelTargets) {
+ const datasource = await datasourceSrv.get(t.datasource);
+ if (datasource && datasource.meta.explore) {
+ mixedExploreDatasource = datasource;
+ break;
+ }
+ }
+
+ // Add all its targets
+ if (mixedExploreDatasource) {
+ exploreDatasource = mixedExploreDatasource;
+ exploreTargets = panelTargets.filter(t => t.datasource === mixedExploreDatasource.name);
+ }
+ }
+
+ if (exploreDatasource && exploreDatasource.meta.explore) {
+ const range = timeSrv.timeRangeForUrl();
+ const state = {
+ ...exploreDatasource.getExploreState(exploreTargets),
+ range,
+ };
+ const exploreState = JSON.stringify(state);
+ url = renderUrl('/explore', { state: exploreState });
+ }
+ return url;
+}
+
+export function parseUrlState(initial: string | undefined): ExploreUrlState {
+ if (initial) {
+ try {
+ const parsed = JSON.parse(decodeURI(initial));
+ if (Array.isArray(parsed)) {
+ if (parsed.length <= 3) {
+ throw new Error('Error parsing compact URL state for Explore.');
+ }
+ const range = {
+ from: parsed[0],
+ to: parsed[1],
+ };
+ const datasource = parsed[2];
+ const queries = parsed.slice(3).map(query => ({ query }));
+ return { datasource, queries, range };
+ }
+ return parsed;
+ } catch (e) {
+ console.error(e);
+ }
+ }
+ return { datasource: null, queries: [], range: DEFAULT_RANGE };
+}
+
+export function serializeStateToUrlParam(state: ExploreState, compact?: boolean): string {
+ const urlState: ExploreUrlState = {
+ datasource: state.datasourceName,
+ queries: state.queries.map(q => ({ query: q.query })),
+ range: state.range,
+ };
+ if (compact) {
+ return JSON.stringify([
+ urlState.range.from,
+ urlState.range.to,
+ urlState.datasource,
+ ...urlState.queries.map(q => q.query),
+ ]);
+ }
+ return JSON.stringify(urlState);
+}
diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
index bd69f2e89d9..398ad9bb3e7 100644
--- a/public/app/core/utils/kbn.ts
+++ b/public/app/core/utils/kbn.ts
@@ -808,6 +808,51 @@ kbn.toDuration = (size, decimals, timeScale) => {
return strings.join(', ');
};
+kbn.toClock = (size, decimals) => {
+ if (size === null) {
+ return '';
+ }
+
+ // < 1 second
+ if (size < 1000) {
+ return moment.utc(size).format('SSS\\m\\s');
+ }
+
+ // < 1 minute
+ if (size < 60000) {
+ let format = 'ss\\s:SSS\\m\\s';
+ if (decimals === 0) {
+ format = 'ss\\s';
+ }
+ return moment.utc(size).format(format);
+ }
+
+ // < 1 hour
+ if (size < 3600000) {
+ let format = 'mm\\m:ss\\s:SSS\\m\\s';
+ if (decimals === 0) {
+ format = 'mm\\m';
+ } else if (decimals === 1) {
+ format = 'mm\\m:ss\\s';
+ }
+ return moment.utc(size).format(format);
+ }
+
+ let format = 'mm\\m:ss\\s:SSS\\m\\s';
+
+ const hours = `${('0' + Math.floor(moment.duration(size, 'milliseconds').asHours())).slice(-2)}h`;
+
+ if (decimals === 0) {
+ format = '';
+ } else if (decimals === 1) {
+ format = 'mm\\m';
+ } else if (decimals === 2) {
+ format = 'mm\\m:ss\\s';
+ }
+
+ return format ? `${hours}:${moment.utc(size).format(format)}` : hours;
+};
+
kbn.valueFormats.dtdurationms = (size, decimals) => {
return kbn.toDuration(size, decimals, 'millisecond');
};
@@ -824,6 +869,14 @@ kbn.valueFormats.timeticks = (size, decimals, scaledDecimals) => {
return kbn.valueFormats.s(size / 100, decimals, scaledDecimals);
};
+kbn.valueFormats.clockms = (size, decimals) => {
+ return kbn.toClock(size, decimals);
+};
+
+kbn.valueFormats.clocks = (size, decimals) => {
+ return kbn.toClock(size * 1000, decimals);
+};
+
kbn.valueFormats.dateTimeAsIso = (epoch, isUtc) => {
const time = isUtc ? moment.utc(epoch) : moment(epoch);
@@ -901,6 +954,8 @@ kbn.getUnitFormats = () => {
{ text: 'duration (s)', value: 'dtdurations' },
{ text: 'duration (hh:mm:ss)', value: 'dthms' },
{ text: 'Timeticks (s/100)', value: 'timeticks' },
+ { text: 'clock (ms)', value: 'clockms' },
+ { text: 'clock (s)', value: 'clocks' },
],
},
{
diff --git a/public/app/core/utils/location_util.ts b/public/app/core/utils/location_util.ts
index 735272285ff..76f2fc5881f 100644
--- a/public/app/core/utils/location_util.ts
+++ b/public/app/core/utils/location_util.ts
@@ -1,10 +1,5 @@
import config from 'app/core/config';
-// Slash encoding for angular location provider, see https://github.com/angular/angular.js/issues/10479
-const SLASH = '';
-export const decodePathComponent = (pc: string) => decodeURIComponent(pc).replace(new RegExp(SLASH, 'g'), '/');
-export const encodePathComponent = (pc: string) => encodeURIComponent(pc.replace(/\//g, SLASH));
-
export const stripBaseFromUrl = url => {
const appSubUrl = config.appSubUrl;
const stripExtraChars = appSubUrl.endsWith('/') ? 1 : 0;
diff --git a/public/app/core/utils/text.test.ts b/public/app/core/utils/text.test.ts
new file mode 100644
index 00000000000..4f9d8367218
--- /dev/null
+++ b/public/app/core/utils/text.test.ts
@@ -0,0 +1,26 @@
+import { findMatchesInText } from './text';
+
+describe('findMatchesInText()', () => {
+  it('gets no matches for when search and or line are empty', () => {
+    expect(findMatchesInText('', '')).toEqual([]);
+    expect(findMatchesInText('foo', '')).toEqual([]);
+    expect(findMatchesInText('', 'foo')).toEqual([]);
+  });
+
+  it('gets no matches for unmatched search string', () => {
+    expect(findMatchesInText('foo', 'bar')).toEqual([]);
+  });
+
+  it('gets matches for matched search string', () => {
+    expect(findMatchesInText('foo', 'foo')).toEqual([{ length: 3, start: 0, text: 'foo', end: 3 }]);
+    expect(findMatchesInText(' foo ', 'foo')).toEqual([{ length: 3, start: 1, text: 'foo', end: 4 }]);
+  });
+
+  it('gets multiple matches for an alternation pattern', () => {
+    expect(findMatchesInText(' foo foo bar ', 'foo|bar')).toEqual([
+      { length: 3, start: 1, text: 'foo', end: 4 },
+      { length: 3, start: 5, text: 'foo', end: 8 },
+      { length: 3, start: 9, text: 'bar', end: 12 },
+    ]);
+  });
+});
diff --git a/public/app/core/utils/text.ts b/public/app/core/utils/text.ts
new file mode 100644
index 00000000000..5d7591a31e2
--- /dev/null
+++ b/public/app/core/utils/text.ts
@@ -0,0 +1,32 @@
+import { TextMatch } from 'app/types/explore';
+
+/**
+ * Adapt findMatchesInText for react-highlight-words findChunks handler.
+ * See https://github.com/bvaughn/react-highlight-words#props
+ */
+export function findHighlightChunksInText({ searchWords, textToHighlight }) {
+ return findMatchesInText(textToHighlight, searchWords.join(' '));
+}
+
+/**
+ * Returns a list of substring regexp matches.
+ */
+export function findMatchesInText(haystack: string, needle: string): TextMatch[] {
+ // Empty search can send re.exec() into infinite loop, exit early
+ if (!haystack || !needle) {
+ return [];
+ }
+ const regexp = new RegExp(`(?:${needle})`, 'g');
+ const matches = [];
+ let match = regexp.exec(haystack);
+ while (match) {
+ matches.push({
+ text: match[0],
+ start: match.index,
+ length: match[0].length,
+ end: match.index + match[0].length,
+ });
+ match = regexp.exec(haystack);
+ }
+ return matches;
+}
diff --git a/public/app/core/utils/url.ts b/public/app/core/utils/url.ts
index 04c3e9a4308..ab8be8ad222 100644
--- a/public/app/core/utils/url.ts
+++ b/public/app/core/utils/url.ts
@@ -2,6 +2,15 @@
* @preserve jquery-param (c) 2015 KNOWLEDGECODE | MIT
*/
+import { UrlQueryMap } from 'app/types';
+
+export function renderUrl(path: string, query: UrlQueryMap | undefined): string {
+ if (query && Object.keys(query).length > 0) {
+ path += '?' + toUrlParams(query);
+ }
+ return path;
+}
+
export function toUrlParams(a) {
const s = [];
const rbracket = /\[\]$/;
@@ -50,7 +59,5 @@ export function toUrlParams(a) {
return s;
};
- return buildParams('', a)
- .join('&')
- .replace(/%20/g, '+');
+ return buildParams('', a).join('&');
}
diff --git a/public/app/features/admin/admin_edit_org_ctrl.ts b/public/app/features/admin/AdminEditOrgCtrl.ts
similarity index 88%
rename from public/app/features/admin/admin_edit_org_ctrl.ts
rename to public/app/features/admin/AdminEditOrgCtrl.ts
index ec3f8548023..3117c5f0f9b 100644
--- a/public/app/features/admin/admin_edit_org_ctrl.ts
+++ b/public/app/features/admin/AdminEditOrgCtrl.ts
@@ -1,6 +1,5 @@
-import angular from 'angular';
-export class AdminEditOrgCtrl {
+export default class AdminEditOrgCtrl {
/** @ngInject */
constructor($scope, $routeParams, backendSrv, $location, navModelSrv) {
$scope.init = () => {
@@ -48,4 +47,3 @@ export class AdminEditOrgCtrl {
}
}
-angular.module('grafana.controllers').controller('AdminEditOrgCtrl', AdminEditOrgCtrl);
diff --git a/public/app/features/admin/admin_edit_user_ctrl.ts b/public/app/features/admin/AdminEditUserCtrl.ts
similarity index 95%
rename from public/app/features/admin/admin_edit_user_ctrl.ts
rename to public/app/features/admin/AdminEditUserCtrl.ts
index c34ccdc1cad..bf72c1746aa 100644
--- a/public/app/features/admin/admin_edit_user_ctrl.ts
+++ b/public/app/features/admin/AdminEditUserCtrl.ts
@@ -1,7 +1,6 @@
-import angular from 'angular';
import _ from 'lodash';
-export class AdminEditUserCtrl {
+export default class AdminEditUserCtrl {
/** @ngInject */
constructor($scope, $routeParams, backendSrv, $location, navModelSrv) {
$scope.user = {};
@@ -117,5 +116,3 @@ export class AdminEditUserCtrl {
$scope.init();
}
}
-
-angular.module('grafana.controllers').controller('AdminEditUserCtrl', AdminEditUserCtrl);
diff --git a/public/app/features/admin/admin_list_orgs_ctrl.ts b/public/app/features/admin/AdminListOrgsCtrl.ts
similarity index 84%
rename from public/app/features/admin/admin_list_orgs_ctrl.ts
rename to public/app/features/admin/AdminListOrgsCtrl.ts
index 0513752aa3e..9190f7f494e 100644
--- a/public/app/features/admin/admin_list_orgs_ctrl.ts
+++ b/public/app/features/admin/AdminListOrgsCtrl.ts
@@ -1,6 +1,5 @@
-import angular from 'angular';
-export class AdminListOrgsCtrl {
+export default class AdminListOrgsCtrl {
/** @ngInject */
constructor($scope, backendSrv, navModelSrv) {
$scope.init = () => {
@@ -33,4 +32,3 @@ export class AdminListOrgsCtrl {
}
}
-angular.module('grafana.controllers').controller('AdminListOrgsCtrl', AdminListOrgsCtrl);
diff --git a/public/app/features/admin/admin_list_users_ctrl.ts b/public/app/features/admin/AdminListUsersCtrl.ts
similarity index 100%
rename from public/app/features/admin/admin_list_users_ctrl.ts
rename to public/app/features/admin/AdminListUsersCtrl.ts
diff --git a/public/app/features/admin/ServerStats.test.tsx b/public/app/features/admin/ServerStats.test.tsx
new file mode 100644
index 00000000000..cbcc580f612
--- /dev/null
+++ b/public/app/features/admin/ServerStats.test.tsx
@@ -0,0 +1,23 @@
+import React from 'react';
+import renderer from 'react-test-renderer';
+import { ServerStats } from './ServerStats';
+import { createNavModel } from 'test/mocks/common';
+import { ServerStat } from './state/apis';
+
+describe('ServerStats', () => {
+ it('Should render table with stats', done => {
+ const navModel = createNavModel('Admin', 'stats');
+ const stats: ServerStat[] = [{ name: 'Total dashboards', value: 10 }, { name: 'Total Users', value: 1 }];
+
+ const getServerStats = () => {
+ return Promise.resolve(stats);
+ };
+
+    const page = renderer.create(<ServerStats navModel={navModel} getServerStats={getServerStats} />);
+
+ setTimeout(() => {
+ expect(page.toJSON()).toMatchSnapshot();
+ done();
+ });
+ });
+});
diff --git a/public/app/features/admin/ServerStats.tsx b/public/app/features/admin/ServerStats.tsx
new file mode 100644
index 00000000000..40be87ed4d3
--- /dev/null
+++ b/public/app/features/admin/ServerStats.tsx
@@ -0,0 +1,73 @@
+import React, { PureComponent } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+import { NavModel, StoreState } from 'app/types';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { getServerStats, ServerStat } from './state/apis';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+
+interface Props {
+ navModel: NavModel;
+  getServerStats: () => Promise<ServerStat[]>;
+}
+
+interface State {
+ stats: ServerStat[];
+}
+
+export class ServerStats extends PureComponent<Props, State> {
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ stats: [],
+ };
+ }
+
+ async componentDidMount() {
+ try {
+ const stats = await this.props.getServerStats();
+ this.setState({ stats });
+ } catch (error) {
+ console.error(error);
+ }
+ }
+
+ render() {
+ const { navModel } = this.props;
+ const { stats } = this.state;
+
+    return (
+      <div>
+        <PageHeader model={navModel} />
+        <div className="page-container page-body">
+          <table className="filter-table form-inline">
+            <thead>
+              <tr>
+                <th>Name</th>
+                <th>Value</th>
+              </tr>
+            </thead>
+            <tbody>{stats.map(StatItem)}</tbody>
+          </table>
+        </div>
+      </div>
+    );
+ }
+}
+
+function StatItem(stat: ServerStat) {
+  return (
+    <tr key={stat.name}>
+      <td>{stat.name}</td>
+      <td>{stat.value}</td>
+    </tr>
+  );
+}
+
+const mapStateToProps = (state: StoreState) => ({
+ navModel: getNavModel(state.navIndex, 'server-stats'),
+ getServerStats: getServerStats,
+});
+
+export default hot(module)(connect(mapStateToProps)(ServerStats));
diff --git a/public/app/features/styleguide/styleguide.ts b/public/app/features/admin/StyleGuideCtrl.ts
similarity index 85%
rename from public/app/features/styleguide/styleguide.ts
rename to public/app/features/admin/StyleGuideCtrl.ts
index 4aac194d950..7be3009634d 100644
--- a/public/app/features/styleguide/styleguide.ts
+++ b/public/app/features/admin/StyleGuideCtrl.ts
@@ -1,7 +1,6 @@
-import coreModule from 'app/core/core_module';
import config from 'app/core/config';
-class StyleGuideCtrl {
+export default class StyleGuideCtrl {
theme: string;
buttonNames = ['primary', 'secondary', 'inverse', 'success', 'warning', 'danger'];
buttonSizes = ['btn-small', '', 'btn-large'];
@@ -27,4 +26,3 @@ class StyleGuideCtrl {
}
}
-coreModule.controller('StyleGuideCtrl', StyleGuideCtrl);
diff --git a/public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap b/public/app/features/admin/__snapshots__/ServerStats.test.tsx.snap
similarity index 59%
rename from public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap
rename to public/app/features/admin/__snapshots__/ServerStats.test.tsx.snap
index eac793ca2ca..02e8784adc5 100644
--- a/public/app/containers/ServerStats/__snapshots__/ServerStats.test.tsx.snap
+++ b/public/app/features/admin/__snapshots__/ServerStats.test.tsx.snap
@@ -17,8 +17,9 @@ exports[`ServerStats Should render table with stats 1`] = `
-
-
+
- admin-Text
+ Admin
-
+
+ subTitle
+
@@ -36,19 +41,19 @@ exports[`ServerStats Should render table with stats 1`] = `
className="gf-form-select-wrapper width-20 page-header__select-nav"
>
@@ -60,13 +65,12 @@ exports[`ServerStats Should render table with stats 1`] = `
>
- server-stats-Text
+ Admin
@@ -101,66 +105,10 @@ exports[`ServerStats Should render table with stats 1`] = `
- Total users
+ Total Users
- 0
-
-
-
-
- Active users (seen last 30 days)
-
-
- 0
-
-
-
-
- Total orgs
-
-
- 0
-
-
-
-
- Total playlists
-
-
- 0
-
-
-
-
- Total snapshots
-
-
- 0
-
-
-
-
- Total dashboard tags
-
-
- 0
-
-
-
-
- Total starred dashboards
-
-
- 0
-
-
-
-
- Total alerts
-
-
- 0
+ 1
diff --git a/public/app/features/admin/admin.ts b/public/app/features/admin/index.ts
similarity index 57%
rename from public/app/features/admin/admin.ts
rename to public/app/features/admin/index.ts
index 00e98821779..7d06155b6f8 100644
--- a/public/app/features/admin/admin.ts
+++ b/public/app/features/admin/index.ts
@@ -1,7 +1,8 @@
-import AdminListUsersCtrl from './admin_list_users_ctrl';
-import './admin_list_orgs_ctrl';
-import './admin_edit_org_ctrl';
-import './admin_edit_user_ctrl';
+import AdminListUsersCtrl from './AdminListUsersCtrl';
+import AdminEditUserCtrl from './AdminEditUserCtrl';
+import AdminListOrgsCtrl from './AdminListOrgsCtrl';
+import AdminEditOrgCtrl from './AdminEditOrgCtrl';
+import StyleGuideCtrl from './StyleGuideCtrl';
import coreModule from 'app/core/core_module';
@@ -27,21 +28,10 @@ class AdminHomeCtrl {
}
}
-export class AdminStatsCtrl {
- stats: any;
- navModel: any;
-
- /** @ngInject */
- constructor(backendSrv: any, navModelSrv) {
- this.navModel = navModelSrv.getNav('cfg', 'admin', 'server-stats', 1);
-
- backendSrv.get('/api/admin/stats').then(stats => {
- this.stats = stats;
- });
- }
-}
-
+coreModule.controller('AdminListUsersCtrl', AdminListUsersCtrl);
+coreModule.controller('AdminEditUserCtrl', AdminEditUserCtrl);
+coreModule.controller('AdminListOrgsCtrl', AdminListOrgsCtrl);
+coreModule.controller('AdminEditOrgCtrl', AdminEditOrgCtrl);
coreModule.controller('AdminSettingsCtrl', AdminSettingsCtrl);
coreModule.controller('AdminHomeCtrl', AdminHomeCtrl);
-coreModule.controller('AdminStatsCtrl', AdminStatsCtrl);
-coreModule.controller('AdminListUsersCtrl', AdminListUsersCtrl);
+coreModule.controller('StyleGuideCtrl', StyleGuideCtrl);
diff --git a/public/app/features/styleguide/styleguide.html b/public/app/features/admin/partials/styleguide.html
similarity index 100%
rename from public/app/features/styleguide/styleguide.html
rename to public/app/features/admin/partials/styleguide.html
diff --git a/public/app/features/admin/state/apis.ts b/public/app/features/admin/state/apis.ts
new file mode 100644
index 00000000000..d81fd299493
--- /dev/null
+++ b/public/app/features/admin/state/apis.ts
@@ -0,0 +1,26 @@
+import { getBackendSrv } from 'app/core/services/backend_srv';
+
+export interface ServerStat {
+ name: string;
+ value: number;
+}
+
+export const getServerStats = async (): Promise => {
+ try {
+ const res = await getBackendSrv().get('api/admin/stats');
+ return [
+ { name: 'Total users', value: res.users },
+ { name: 'Total dashboards', value: res.dashboards },
+ { name: 'Active users (seen last 30 days)', value: res.activeUsers },
+ { name: 'Total orgs', value: res.orgs },
+ { name: 'Total playlists', value: res.playlists },
+ { name: 'Total snapshots', value: res.snapshots },
+ { name: 'Total dashboard tags', value: res.tags },
+ { name: 'Total starred dashboards', value: res.stars },
+ { name: 'Total alerts', value: res.alerts },
+ ];
+ } catch (error) {
+ console.error(error);
+ throw error;
+ }
+};
diff --git a/public/app/features/alerting/AlertRuleItem.test.tsx b/public/app/features/alerting/AlertRuleItem.test.tsx
new file mode 100644
index 00000000000..bd37e127c39
--- /dev/null
+++ b/public/app/features/alerting/AlertRuleItem.test.tsx
@@ -0,0 +1,38 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import AlertRuleItem, { Props } from './AlertRuleItem';
+
+jest.mock('react-redux', () => ({
+ connect: () => params => params,
+}));
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ rule: {
+ id: 1,
+ dashboardId: 1,
+ panelId: 1,
+ name: 'Some rule',
+ state: 'Open',
+ stateText: 'state text',
+ stateIcon: 'icon',
+ stateClass: 'state class',
+ stateAge: 'age',
+ url: 'https://something.something.darkside',
+ },
+ search: '',
+ onTogglePause: jest.fn(),
+ };
+
+ Object.assign(props, propOverrides);
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/alerting/AlertRuleItem.tsx b/public/app/features/alerting/AlertRuleItem.tsx
new file mode 100644
index 00000000000..f47a6348303
--- /dev/null
+++ b/public/app/features/alerting/AlertRuleItem.tsx
@@ -0,0 +1,69 @@
+import React, { PureComponent } from 'react';
+import Highlighter from 'react-highlight-words';
+import classNames from 'classnames/bind';
+import { AlertRule } from '../../types';
+
+export interface Props {
+ rule: AlertRule;
+ search: string;
+ onTogglePause: () => void;
+}
+
+class AlertRuleItem extends PureComponent {
+ renderText(text: string) {
+ return (
+
+ );
+ }
+
+ render() {
+ const { rule, onTogglePause } = this.props;
+
+ const stateClass = classNames({
+ fa: true,
+ 'fa-play': rule.state === 'paused',
+ 'fa-pause': rule.state !== 'paused',
+ });
+
+ const ruleUrl = `${rule.url}?panelId=${rule.panelId}&fullscreen=true&edit=true&tab=alert`;
+
+ return (
+
+
+
+
+
+
+
+
+ {this.renderText(rule.stateText)}
+ for {rule.stateAge}
+
+
+ {rule.info &&
{this.renderText(rule.info)}
}
+
+
+
+
+ );
+ }
+}
+
+export default AlertRuleItem;
diff --git a/public/app/features/alerting/AlertRuleList.test.tsx b/public/app/features/alerting/AlertRuleList.test.tsx
new file mode 100644
index 00000000000..2d1cf653540
--- /dev/null
+++ b/public/app/features/alerting/AlertRuleList.test.tsx
@@ -0,0 +1,156 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { AlertRuleList, Props } from './AlertRuleList';
+import { AlertRule, NavModel } from '../../types';
+import appEvents from '../../core/app_events';
+
+jest.mock('../../core/app_events', () => ({
+ emit: jest.fn(),
+}));
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ navModel: {} as NavModel,
+ alertRules: [] as AlertRule[],
+ updateLocation: jest.fn(),
+ getAlertRulesAsync: jest.fn(),
+ setSearchQuery: jest.fn(),
+ togglePauseAlertRule: jest.fn(),
+ stateFilter: '',
+ search: '',
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+
+ return {
+ wrapper,
+ instance: wrapper.instance() as AlertRuleList,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render alert rules', () => {
+ const { wrapper } = setup({
+ alertRules: [
+ {
+ id: 1,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 3,
+ name: 'TestData - Always OK',
+ state: 'ok',
+ newStateDate: '2018-09-04T10:01:01+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: {},
+ executionError: '',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ {
+ id: 3,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 3,
+ name: 'TestData - ok',
+ state: 'ok',
+ newStateDate: '2018-09-04T10:01:01+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: {},
+ executionError: 'error',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ ],
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
+describe('Life cycle', () => {
+ describe('component did mount', () => {
+ it('should call fetchrules', () => {
+ const { instance } = setup();
+ instance.fetchRules = jest.fn();
+ instance.componentDidMount();
+ expect(instance.fetchRules).toHaveBeenCalled();
+ });
+ });
+
+ describe('component did update', () => {
+ it('should call fetchrules if props differ', () => {
+ const { instance } = setup();
+ instance.fetchRules = jest.fn();
+
+ instance.componentDidUpdate({ stateFilter: 'ok' } as Props);
+
+ expect(instance.fetchRules).toHaveBeenCalled();
+ });
+ });
+});
+
+describe('Functions', () => {
+ describe('Get state filter', () => {
+ it('should get all if prop is not set', () => {
+ const { instance } = setup();
+
+ const stateFilter = instance.getStateFilter();
+
+ expect(stateFilter).toEqual('all');
+ });
+
+ it('should return state filter if set', () => {
+ const { instance } = setup({
+ stateFilter: 'ok',
+ });
+
+ const stateFilter = instance.getStateFilter();
+
+ expect(stateFilter).toEqual('ok');
+ });
+ });
+
+ describe('State filter changed', () => {
+ it('should update location', () => {
+ const { instance } = setup();
+ const mockEvent = { target: { value: 'alerting' } };
+
+ instance.onStateFilterChanged(mockEvent);
+
+ expect(instance.props.updateLocation).toHaveBeenCalledWith({ query: { state: 'alerting' } });
+ });
+ });
+
+ describe('Open how to', () => {
+ it('should emit show-modal event', () => {
+ const { instance } = setup();
+
+ instance.onOpenHowTo();
+
+ expect(appEvents.emit).toHaveBeenCalledWith('show-modal', {
+ src: 'public/app/features/alerting/partials/alert_howto.html',
+ modalClass: 'confirm-modal',
+ model: {},
+ });
+ });
+ });
+
+ describe('Search query change', () => {
+ it('should set search query', () => {
+ const { instance } = setup();
+ const mockEvent = { target: { value: 'dashboard' } };
+
+ instance.onSearchQueryChange(mockEvent);
+
+ expect(instance.props.setSearchQuery).toHaveBeenCalledWith('dashboard');
+ });
+ });
+});
diff --git a/public/app/features/alerting/AlertRuleList.tsx b/public/app/features/alerting/AlertRuleList.tsx
new file mode 100644
index 00000000000..d25fc659af5
--- /dev/null
+++ b/public/app/features/alerting/AlertRuleList.tsx
@@ -0,0 +1,153 @@
+import React, { PureComponent } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import AlertRuleItem from './AlertRuleItem';
+import appEvents from 'app/core/app_events';
+import { updateLocation } from 'app/core/actions';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { NavModel, StoreState, AlertRule } from 'app/types';
+import { getAlertRulesAsync, setSearchQuery, togglePauseAlertRule } from './state/actions';
+import { getAlertRuleItems, getSearchQuery } from './state/selectors';
+
+export interface Props {
+ navModel: NavModel;
+ alertRules: AlertRule[];
+ updateLocation: typeof updateLocation;
+ getAlertRulesAsync: typeof getAlertRulesAsync;
+ setSearchQuery: typeof setSearchQuery;
+ togglePauseAlertRule: typeof togglePauseAlertRule;
+ stateFilter: string;
+ search: string;
+}
+
+export class AlertRuleList extends PureComponent {
+ stateFilters = [
+ { text: 'All', value: 'all' },
+ { text: 'OK', value: 'ok' },
+ { text: 'Not OK', value: 'not_ok' },
+ { text: 'Alerting', value: 'alerting' },
+ { text: 'No Data', value: 'no_data' },
+ { text: 'Paused', value: 'paused' },
+ ];
+
+ componentDidMount() {
+ this.fetchRules();
+ }
+
+ componentDidUpdate(prevProps: Props) {
+ if (prevProps.stateFilter !== this.props.stateFilter) {
+ this.fetchRules();
+ }
+ }
+
+ async fetchRules() {
+ await this.props.getAlertRulesAsync({ state: this.getStateFilter() });
+ }
+
+ getStateFilter(): string {
+ const { stateFilter } = this.props;
+ if (stateFilter) {
+ return stateFilter.toString();
+ }
+ return 'all';
+ }
+
+ onStateFilterChanged = event => {
+ this.props.updateLocation({
+ query: { state: event.target.value },
+ });
+ };
+
+ onOpenHowTo = () => {
+ appEvents.emit('show-modal', {
+ src: 'public/app/features/alerting/partials/alert_howto.html',
+ modalClass: 'confirm-modal',
+ model: {},
+ });
+ };
+
+ onSearchQueryChange = event => {
+ const { value } = event.target;
+ this.props.setSearchQuery(value);
+ };
+
+ onTogglePause = (rule: AlertRule) => {
+ this.props.togglePauseAlertRule(rule.id, { paused: rule.state !== 'paused' });
+ };
+
+ alertStateFilterOption = ({ text, value }) => {
+ return (
+
+ {text}
+
+ );
+ };
+
+ render() {
+ const { navModel, alertRules, search } = this.props;
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
States
+
+
+
+ {this.stateFilters.map(this.alertStateFilterOption)}
+
+
+
+
+
+ How to add an alert
+
+
+
+
+ {alertRules.map(rule => (
+ this.onTogglePause(rule)}
+ />
+ ))}
+
+
+
+
+ );
+ }
+}
+
+const mapStateToProps = (state: StoreState) => ({
+ navModel: getNavModel(state.navIndex, 'alert-list'),
+ alertRules: getAlertRuleItems(state.alertRules),
+ stateFilter: state.location.query.state,
+ search: getSearchQuery(state.alertRules),
+});
+
+const mapDispatchToProps = {
+ updateLocation,
+ getAlertRulesAsync,
+ setSearchQuery,
+ togglePauseAlertRule,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(AlertRuleList));
diff --git a/public/app/features/alerting/alert_tab_ctrl.ts b/public/app/features/alerting/AlertTabCtrl.ts
similarity index 99%
rename from public/app/features/alerting/alert_tab_ctrl.ts
rename to public/app/features/alerting/AlertTabCtrl.ts
index 53f7e57fd69..c91ff5cd6c3 100644
--- a/public/app/features/alerting/alert_tab_ctrl.ts
+++ b/public/app/features/alerting/AlertTabCtrl.ts
@@ -1,7 +1,7 @@
import _ from 'lodash';
-import { ThresholdMapper } from './threshold_mapper';
+import { ThresholdMapper } from './state/ThresholdMapper';
import { QueryPart } from 'app/core/components/query_part/query_part';
-import alertDef from './alert_def';
+import alertDef from './state/alertDef';
import config from 'app/core/config';
import appEvents from 'app/core/app_events';
diff --git a/public/app/features/alerting/notification_edit_ctrl.ts b/public/app/features/alerting/NotificationsEditCtrl.ts
similarity index 100%
rename from public/app/features/alerting/notification_edit_ctrl.ts
rename to public/app/features/alerting/NotificationsEditCtrl.ts
diff --git a/public/app/features/alerting/notifications_list_ctrl.ts b/public/app/features/alerting/NotificationsListCtrl.ts
similarity index 100%
rename from public/app/features/alerting/notifications_list_ctrl.ts
rename to public/app/features/alerting/NotificationsListCtrl.ts
diff --git a/public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap b/public/app/features/alerting/__snapshots__/AlertRuleItem.test.tsx.snap
similarity index 60%
rename from public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap
rename to public/app/features/alerting/__snapshots__/AlertRuleItem.test.tsx.snap
index f408f6409be..f686127ebf3 100644
--- a/public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.test.tsx.snap
+++ b/public/app/features/alerting/__snapshots__/AlertRuleItem.test.tsx.snap
@@ -1,14 +1,14 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`AlertRuleList should render 1 rule 1`] = `
+exports[`Render should render component 1`] = `
-
-
- OK
-
-
-
+ textToHighlight="state text"
+ />
for
- 5 minutes
+ age
@@ -82,7 +64,7 @@ exports[`AlertRuleList should render 1 rule 1`] = `
>
+
+
+
+
+
+
+
+
+
+
+
+ States
+
+
+
+
+ All
+
+
+ OK
+
+
+ Not OK
+
+
+ Alerting
+
+
+ No Data
+
+
+ Paused
+
+
+
+
+
+
+
+ How to add an alert
+
+
+
+
+
+`;
+
+exports[`Render should render component 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+
+ States
+
+
+
+
+ All
+
+
+ OK
+
+
+ Not OK
+
+
+ Alerting
+
+
+ No Data
+
+
+ Paused
+
+
+
+
+
+
+
+ How to add an alert
+
+
+
+
+
+`;
diff --git a/public/app/features/alerting/all.ts b/public/app/features/alerting/all.ts
deleted file mode 100644
index 91d3a4109e7..00000000000
--- a/public/app/features/alerting/all.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-import './notifications_list_ctrl';
-import './notification_edit_ctrl';
diff --git a/public/app/features/alerting/specs/threshold_mapper.test.ts b/public/app/features/alerting/state/ThresholdMapper.test.ts
similarity index 97%
rename from public/app/features/alerting/specs/threshold_mapper.test.ts
rename to public/app/features/alerting/state/ThresholdMapper.test.ts
index 922d9c8787e..8e91d0b6d0a 100644
--- a/public/app/features/alerting/specs/threshold_mapper.test.ts
+++ b/public/app/features/alerting/state/ThresholdMapper.test.ts
@@ -1,6 +1,6 @@
import { describe, it, expect } from 'test/lib/common';
-import { ThresholdMapper } from '../threshold_mapper';
+import { ThresholdMapper } from './ThresholdMapper';
describe('ThresholdMapper', () => {
describe('with greater than evaluator', () => {
diff --git a/public/app/features/alerting/threshold_mapper.ts b/public/app/features/alerting/state/ThresholdMapper.ts
similarity index 100%
rename from public/app/features/alerting/threshold_mapper.ts
rename to public/app/features/alerting/state/ThresholdMapper.ts
diff --git a/public/app/features/alerting/state/actions.ts b/public/app/features/alerting/state/actions.ts
new file mode 100644
index 00000000000..edd6fbb1da1
--- /dev/null
+++ b/public/app/features/alerting/state/actions.ts
@@ -0,0 +1,47 @@
+import { getBackendSrv } from 'app/core/services/backend_srv';
+import { AlertRuleDTO, StoreState } from 'app/types';
+import { ThunkAction } from 'redux-thunk';
+
+export enum ActionTypes {
+ LoadAlertRules = 'LOAD_ALERT_RULES',
+ SetSearchQuery = 'SET_ALERT_SEARCH_QUERY',
+}
+
+export interface LoadAlertRulesAction {
+ type: ActionTypes.LoadAlertRules;
+ payload: AlertRuleDTO[];
+}
+
+export interface SetSearchQueryAction {
+ type: ActionTypes.SetSearchQuery;
+ payload: string;
+}
+
+export const loadAlertRules = (rules: AlertRuleDTO[]): LoadAlertRulesAction => ({
+ type: ActionTypes.LoadAlertRules,
+ payload: rules,
+});
+
+export const setSearchQuery = (query: string): SetSearchQueryAction => ({
+ type: ActionTypes.SetSearchQuery,
+ payload: query,
+});
+
+export type Action = LoadAlertRulesAction | SetSearchQueryAction;
+
+type ThunkResult = ThunkAction;
+
+export function getAlertRulesAsync(options: { state: string }): ThunkResult {
+ return async dispatch => {
+ const rules = await getBackendSrv().get('/api/alerts', options);
+ dispatch(loadAlertRules(rules));
+ };
+}
+
+export function togglePauseAlertRule(id: number, options: { paused: boolean }): ThunkResult {
+ return async (dispatch, getState) => {
+ await getBackendSrv().post(`/api/alerts/${id}/pause`, options);
+ const stateFilter = getState().location.query.state || 'all';
+ dispatch(getAlertRulesAsync({ state: stateFilter.toString() }));
+ };
+}
diff --git a/public/app/features/alerting/alert_def.ts b/public/app/features/alerting/state/alertDef.ts
similarity index 100%
rename from public/app/features/alerting/alert_def.ts
rename to public/app/features/alerting/state/alertDef.ts
diff --git a/public/app/features/alerting/state/reducers.test.ts b/public/app/features/alerting/state/reducers.test.ts
new file mode 100644
index 00000000000..4f079a090cf
--- /dev/null
+++ b/public/app/features/alerting/state/reducers.test.ts
@@ -0,0 +1,91 @@
+import { ActionTypes, Action } from './actions';
+import { alertRulesReducer, initialState } from './reducers';
+import { AlertRuleDTO } from 'app/types';
+
+describe('Alert rules', () => {
+ const payload: AlertRuleDTO[] = [
+ {
+ id: 2,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 4,
+ name: 'TestData - Always Alerting',
+ state: 'alerting',
+ newStateDate: '2018-09-04T10:00:30+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: { evalMatches: [{ metric: 'A-series', tags: null, value: 215 }] },
+ executionError: '',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ {
+ id: 1,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 3,
+ name: 'TestData - Always OK',
+ state: 'ok',
+ newStateDate: '2018-09-04T10:01:01+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: {},
+ executionError: '',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ {
+ id: 3,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 3,
+ name: 'TestData - ok',
+ state: 'ok',
+ newStateDate: '2018-09-04T10:01:01+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: {},
+ executionError: 'error',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ {
+ id: 4,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 3,
+ name: 'TestData - Paused',
+ state: 'paused',
+ newStateDate: '2018-09-04T10:01:01+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: {},
+ executionError: 'error',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ {
+ id: 5,
+ dashboardId: 7,
+ dashboardUid: 'ggHbN42mk',
+ dashboardSlug: 'alerting-with-testdata',
+ panelId: 3,
+ name: 'TestData - Ok',
+ state: 'ok',
+ newStateDate: '2018-09-04T10:01:01+02:00',
+ evalDate: '0001-01-01T00:00:00Z',
+ evalData: {
+ noData: true,
+ },
+ executionError: 'error',
+ url: '/d/ggHbN42mk/alerting-with-testdata',
+ },
+ ];
+
+ it('should set alert rules', () => {
+ const action: Action = {
+ type: ActionTypes.LoadAlertRules,
+ payload: payload,
+ };
+
+ const result = alertRulesReducer(initialState, action);
+
+ expect(result.items).toEqual(payload);
+ });
+});
diff --git a/public/app/features/alerting/state/reducers.ts b/public/app/features/alerting/state/reducers.ts
new file mode 100644
index 00000000000..c525885bc9c
--- /dev/null
+++ b/public/app/features/alerting/state/reducers.ts
@@ -0,0 +1,50 @@
+import moment from 'moment';
+import { AlertRuleDTO, AlertRule, AlertRulesState } from 'app/types';
+import { Action, ActionTypes } from './actions';
+import alertDef from './alertDef';
+
+export const initialState: AlertRulesState = { items: [], searchQuery: '' };
+
+function convertToAlertRule(rule, state): AlertRule {
+ const stateModel = alertDef.getStateDisplayModel(state);
+ rule.stateText = stateModel.text;
+ rule.stateIcon = stateModel.iconClass;
+ rule.stateClass = stateModel.stateClass;
+ rule.stateAge = moment(rule.newStateDate)
+ .fromNow()
+ .replace(' ago', '');
+
+ if (rule.state !== 'paused') {
+ if (rule.executionError) {
+ rule.info = 'Execution Error: ' + rule.executionError;
+ }
+ if (rule.evalData && rule.evalData.noData) {
+ rule.info = 'Query returned no data';
+ }
+ }
+
+ return rule;
+}
+
+export const alertRulesReducer = (state = initialState, action: Action): AlertRulesState => {
+ switch (action.type) {
+ case ActionTypes.LoadAlertRules: {
+ const alertRules: AlertRuleDTO[] = action.payload;
+
+ const alertRulesViewModel: AlertRule[] = alertRules.map(rule => {
+ return convertToAlertRule(rule, rule.state);
+ });
+
+ return { items: alertRulesViewModel, searchQuery: state.searchQuery };
+ }
+
+ case ActionTypes.SetSearchQuery:
+ return { items: state.items, searchQuery: action.payload };
+ }
+
+ return state;
+};
+
+export default {
+ alertRules: alertRulesReducer,
+};
diff --git a/public/app/features/alerting/state/selectors.test.ts b/public/app/features/alerting/state/selectors.test.ts
new file mode 100644
index 00000000000..e853b146c03
--- /dev/null
+++ b/public/app/features/alerting/state/selectors.test.ts
@@ -0,0 +1,94 @@
+import { getSearchQuery, getAlertRuleItems } from './selectors';
+
+describe('Get search query', () => {
+ it('should get search query', () => {
+ const state = { searchQuery: 'dashboard' };
+ const result = getSearchQuery(state);
+
+ expect(result).toEqual(state.searchQuery);
+ });
+});
+
+describe('Get alert rule items', () => {
+ it('should get alert rule items', () => {
+ const state = {
+ items: [
+ {
+ id: 1,
+ dashboardId: 1,
+ panelId: 1,
+ name: '',
+ state: '',
+ stateText: '',
+ stateIcon: '',
+ stateClass: '',
+ stateAge: '',
+ url: '',
+ },
+ ],
+ searchQuery: '',
+ };
+
+ const result = getAlertRuleItems(state);
+ expect(result.length).toEqual(1);
+ });
+
+ it('should filter rule items based on search query', () => {
+ const state = {
+ items: [
+ {
+ id: 1,
+ dashboardId: 1,
+ panelId: 1,
+ name: 'dashboard',
+ state: '',
+ stateText: '',
+ stateIcon: '',
+ stateClass: '',
+ stateAge: '',
+ url: '',
+ },
+ {
+ id: 2,
+ dashboardId: 3,
+ panelId: 1,
+ name: 'dashboard2',
+ state: '',
+ stateText: '',
+ stateIcon: '',
+ stateClass: '',
+ stateAge: '',
+ url: '',
+ },
+ {
+ id: 3,
+ dashboardId: 5,
+ panelId: 1,
+ name: 'hello',
+ state: '',
+ stateText: '',
+ stateIcon: '',
+ stateClass: '',
+ stateAge: '',
+ url: '',
+ },
+ {
+ id: 4,
+ dashboardId: 7,
+ panelId: 1,
+ name: 'test',
+ state: '',
+ stateText: 'dashboard',
+ stateIcon: '',
+ stateClass: '',
+ stateAge: '',
+ url: '',
+ },
+ ],
+ searchQuery: 'dashboard',
+ };
+
+ const result = getAlertRuleItems(state);
+ expect(result.length).toEqual(3);
+ });
+});
diff --git a/public/app/features/alerting/state/selectors.ts b/public/app/features/alerting/state/selectors.ts
new file mode 100644
index 00000000000..7c72520d773
--- /dev/null
+++ b/public/app/features/alerting/state/selectors.ts
@@ -0,0 +1,9 @@
+export const getSearchQuery = state => state.searchQuery;
+
+export const getAlertRuleItems = state => {
+ const regex = new RegExp(state.searchQuery, 'i');
+
+ return state.items.filter(item => {
+ return regex.test(item.name) || regex.test(item.stateText) || regex.test(item.info);
+ });
+};
diff --git a/public/app/features/all.ts b/public/app/features/all.ts
index df987a8b59b..7f6f84b7676 100644
--- a/public/app/features/all.ts
+++ b/public/app/features/all.ts
@@ -1,13 +1,11 @@
-import './panellinks/module';
-import './dashlinks/module';
import './annotations/all';
import './templating/all';
import './plugins/all';
import './dashboard/all';
import './playlist/all';
-import './snapshot/all';
import './panel/all';
import './org/all';
-import './admin/admin';
-import './alerting/all';
-import './styleguide/styleguide';
+import './admin';
+import './alerting/NotificationsEditCtrl';
+import './alerting/NotificationsListCtrl';
+import './manage-dashboards';
diff --git a/public/app/features/annotations/annotation_tooltip.ts b/public/app/features/annotations/annotation_tooltip.ts
index 7e626bc5860..16c18005204 100644
--- a/public/app/features/annotations/annotation_tooltip.ts
+++ b/public/app/features/annotations/annotation_tooltip.ts
@@ -1,7 +1,7 @@
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module';
-import alertDef from '../alerting/alert_def';
+import alertDef from '../alerting/state/alertDef';
/** @ngInject */
export function annotationTooltipDirective($sanitize, dashboardSrv, contextSrv, $compile) {
diff --git a/public/app/features/annotations/annotations_srv.ts b/public/app/features/annotations/annotations_srv.ts
index f9820fe566d..19850da52d9 100644
--- a/public/app/features/annotations/annotations_srv.ts
+++ b/public/app/features/annotations/annotations_srv.ts
@@ -8,6 +8,7 @@ import { makeRegions, dedupAnnotations } from './events_processing';
export class AnnotationsSrv {
globalAnnotationsPromise: any;
alertStatesPromise: any;
+ datasourcePromises: any;
/** @ngInject */
constructor(private $rootScope, private $q, private datasourceSrv, private backendSrv, private timeSrv) {
@@ -18,6 +19,7 @@ export class AnnotationsSrv {
clearCache() {
this.globalAnnotationsPromise = null;
this.alertStatesPromise = null;
+ this.datasourcePromises = null;
}
getAnnotations(options) {
@@ -90,6 +92,7 @@ export class AnnotationsSrv {
const range = this.timeSrv.timeRange();
const promises = [];
+ const dsPromises = [];
for (const annotation of dashboard.annotations.list) {
if (!annotation.enable) {
@@ -99,10 +102,10 @@ export class AnnotationsSrv {
if (annotation.snapshotData) {
return this.translateQueryResult(annotation, annotation.snapshotData);
}
-
+ const datasourcePromise = this.datasourceSrv.get(annotation.datasource);
+ dsPromises.push(datasourcePromise);
promises.push(
- this.datasourceSrv
- .get(annotation.datasource)
+ datasourcePromise
.then(datasource => {
// issue query against data source
return datasource.annotationQuery({
@@ -122,7 +125,7 @@ export class AnnotationsSrv {
})
);
}
-
+ this.datasourcePromises = this.$q.all(dsPromises);
this.globalAnnotationsPromise = this.$q.all(promises);
return this.globalAnnotationsPromise;
}
diff --git a/public/app/features/api-keys/ApiKeysAddedModal.test.tsx b/public/app/features/api-keys/ApiKeysAddedModal.test.tsx
new file mode 100644
index 00000000000..160418a7ab8
--- /dev/null
+++ b/public/app/features/api-keys/ApiKeysAddedModal.test.tsx
@@ -0,0 +1,25 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { ApiKeysAddedModal, Props } from './ApiKeysAddedModal';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ apiKey: 'api key test',
+ rootPath: 'test/path',
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+
+ return {
+ wrapper,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/api-keys/ApiKeysAddedModal.tsx b/public/app/features/api-keys/ApiKeysAddedModal.tsx
new file mode 100644
index 00000000000..995aa46c773
--- /dev/null
+++ b/public/app/features/api-keys/ApiKeysAddedModal.tsx
@@ -0,0 +1,46 @@
+import React from 'react';
+
+export interface Props {
+ apiKey: string;
+ rootPath: string;
+}
+
+export const ApiKeysAddedModal = (props: Props) => {
+ return (
+
+
+
+
+ API Key Created
+
+
+
+
+
+
+
+
+
+
+ Key
+ {props.apiKey}
+
+
+
+
+ You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now.
+
+
+ You can authenticate request using the Authorization HTTP header, example:
+
+
+
+ curl -H "Authorization: Bearer {props.apiKey}" {props.rootPath}/api/dashboards/home
+
+
+
+
+ );
+};
+
+export default ApiKeysAddedModal;
diff --git a/public/app/features/api-keys/ApiKeysPage.test.tsx b/public/app/features/api-keys/ApiKeysPage.test.tsx
new file mode 100644
index 00000000000..8bc6e9338fc
--- /dev/null
+++ b/public/app/features/api-keys/ApiKeysPage.test.tsx
@@ -0,0 +1,75 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { Props, ApiKeysPage } from './ApiKeysPage';
+import { NavModel, ApiKey } from 'app/types';
+import { getMultipleMockKeys, getMockKey } from './__mocks__/apiKeysMock';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ navModel: {} as NavModel,
+ apiKeys: [] as ApiKey[],
+ searchQuery: '',
+ hasFetched: false,
+ loadApiKeys: jest.fn(),
+ deleteApiKey: jest.fn(),
+ setSearchQuery: jest.fn(),
+ addApiKey: jest.fn(),
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as ApiKeysPage;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render API keys table', () => {
+ const { wrapper } = setup({
+ apiKeys: getMultipleMockKeys(5),
+ hasFetched: true,
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
+describe('Life cycle', () => {
+ it('should call loadApiKeys', () => {
+ const { instance } = setup();
+
+ instance.componentDidMount();
+
+ expect(instance.props.loadApiKeys).toHaveBeenCalled();
+ });
+});
+
+describe('Functions', () => {
+ describe('Delete team', () => {
+ it('should call delete team', () => {
+ const { instance } = setup();
+ instance.onDeleteApiKey(getMockKey());
+ expect(instance.props.deleteApiKey).toHaveBeenCalledWith(1);
+ });
+ });
+
+ describe('on search query change', () => {
+ it('should call setSearchQuery', () => {
+ const { instance } = setup();
+ const mockEvent = { target: { value: 'test' } };
+
+ instance.onSearchQueryChange(mockEvent);
+
+ expect(instance.props.setSearchQuery).toHaveBeenCalledWith('test');
+ });
+ });
+});
diff --git a/public/app/features/api-keys/ApiKeysPage.tsx b/public/app/features/api-keys/ApiKeysPage.tsx
new file mode 100644
index 00000000000..6052b0f4fc8
--- /dev/null
+++ b/public/app/features/api-keys/ApiKeysPage.tsx
@@ -0,0 +1,234 @@
+import React, { PureComponent } from 'react';
+import ReactDOMServer from 'react-dom/server';
+import { connect } from 'react-redux';
+import { hot } from 'react-hot-loader';
+import { NavModel, ApiKey, NewApiKey, OrgRole } from 'app/types';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { getApiKeys } from './state/selectors';
+import { loadApiKeys, deleteApiKey, setSearchQuery, addApiKey } from './state/actions';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import SlideDown from 'app/core/components/Animations/SlideDown';
+import PageLoader from 'app/core/components/PageLoader/PageLoader';
+import ApiKeysAddedModal from './ApiKeysAddedModal';
+import config from 'app/core/config';
+import appEvents from 'app/core/app_events';
+
+export interface Props {
+ navModel: NavModel;
+ apiKeys: ApiKey[];
+ searchQuery: string;
+ hasFetched: boolean;
+ loadApiKeys: typeof loadApiKeys;
+ deleteApiKey: typeof deleteApiKey;
+ setSearchQuery: typeof setSearchQuery;
+ addApiKey: typeof addApiKey;
+}
+
+export interface State {
+ isAdding: boolean;
+ newApiKey: NewApiKey;
+}
+
+enum ApiKeyStateProps {
+ Name = 'name',
+ Role = 'role',
+}
+
+const initialApiKeyState = {
+ name: '',
+ role: OrgRole.Viewer,
+};
+
+export class ApiKeysPage extends PureComponent {
+ constructor(props) {
+ super(props);
+ this.state = { isAdding: false, newApiKey: initialApiKeyState };
+ }
+
+ componentDidMount() {
+ this.fetchApiKeys();
+ }
+
+ async fetchApiKeys() {
+ await this.props.loadApiKeys();
+ }
+
+ onDeleteApiKey(key: ApiKey) {
+ this.props.deleteApiKey(key.id);
+ }
+
+ onSearchQueryChange = evt => {
+ this.props.setSearchQuery(evt.target.value);
+ };
+
+ onToggleAdding = () => {
+ this.setState({ isAdding: !this.state.isAdding });
+ };
+
+ onAddApiKey = async evt => {
+ evt.preventDefault();
+
+ const openModal = (apiKey: string) => {
+ const rootPath = window.location.origin + config.appSubUrl;
+ const modalTemplate = ReactDOMServer.renderToString( );
+
+ appEvents.emit('show-modal', {
+ templateHtml: modalTemplate,
+ });
+ };
+
+ this.props.addApiKey(this.state.newApiKey, openModal);
+ this.setState((prevState: State) => {
+ return {
+ ...prevState,
+ newApiKey: initialApiKeyState,
+ };
+ });
+ };
+
+ onApiKeyStateUpdate = (evt, prop: string) => {
+ const value = evt.currentTarget.value;
+ this.setState((prevState: State) => {
+ const newApiKey = {
+ ...prevState.newApiKey,
+ };
+ newApiKey[prop] = value;
+
+ return {
+ ...prevState,
+ newApiKey: newApiKey,
+ };
+ });
+ };
+
+ renderTable() {
+ const { apiKeys } = this.props;
+
+ return [
+
+ Existing Keys
+ ,
+ ,
+ ];
+ }
+
+ render() {
+ const { newApiKey, isAdding } = this.state;
+ const { hasFetched, navModel, searchQuery } = this.props;
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Add API Key
+
+
+
+
+
+
+ {hasFetched ? this.renderTable() :
}
+
+
+ );
+ }
+}
+
+function mapStateToProps(state) {
+ return {
+ navModel: getNavModel(state.navIndex, 'apikeys'),
+ apiKeys: getApiKeys(state.apiKeys),
+ searchQuery: state.apiKeys.searchQuery,
+ hasFetched: state.apiKeys.hasFetched,
+ };
+}
+
+const mapDispatchToProps = {
+ loadApiKeys,
+ deleteApiKey,
+ setSearchQuery,
+ addApiKey,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(ApiKeysPage));
diff --git a/public/app/features/api-keys/__mocks__/apiKeysMock.ts b/public/app/features/api-keys/__mocks__/apiKeysMock.ts
new file mode 100644
index 00000000000..117f0d6d0c6
--- /dev/null
+++ b/public/app/features/api-keys/__mocks__/apiKeysMock.ts
@@ -0,0 +1,22 @@
+import { ApiKey, OrgRole } from 'app/types';
+
+export const getMultipleMockKeys = (numberOfKeys: number): ApiKey[] => {
+ const keys: ApiKey[] = [];
+ for (let i = 1; i <= numberOfKeys; i++) {
+ keys.push({
+ id: i,
+ name: `test-${i}`,
+ role: OrgRole.Viewer,
+ });
+ }
+
+ return keys;
+};
+
+export const getMockKey = (): ApiKey => {
+ return {
+ id: 1,
+ name: 'test',
+ role: OrgRole.Admin,
+ };
+};
diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap
new file mode 100644
index 00000000000..0fcb13308eb
--- /dev/null
+++ b/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap
@@ -0,0 +1,78 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+
+
+
+
+ API Key Created
+
+
+
+
+
+
+
+
+
+
+ Key
+
+
+ api key test
+
+
+
+
+ You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now.
+
+
+ You can authenticate request using the Authorization HTTP header, example:
+
+
+
+ curl -H "Authorization: Bearer
+ api key test
+ "
+ test/path
+ /api/dashboards/home
+
+
+
+
+`;
diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap
new file mode 100644
index 00000000000..b1cac8469be
--- /dev/null
+++ b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap
@@ -0,0 +1,414 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render API keys table 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Add API Key
+
+
+
+
+
+
+
+
+ Add API Key
+
+
+
+
+
+ Existing Keys
+
+
+
+
+
+ Name
+
+
+ Role
+
+
+
+
+
+
+
+ test-1
+
+
+ Viewer
+
+
+
+
+
+
+
+
+
+ test-2
+
+
+ Viewer
+
+
+
+
+
+
+
+
+
+ test-3
+
+
+ Viewer
+
+
+
+
+
+
+
+
+
+ test-4
+
+
+ Viewer
+
+
+
+
+
+
+
+
+
+ test-5
+
+
+ Viewer
+
+
+
+
+
+
+
+
+
+
+
+`;
+
+exports[`Render should render component 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Add API Key
+
+
+
+
+
+
+
+
+ Add API Key
+
+
+
+
+
+
+
+`;
diff --git a/public/app/features/api-keys/state/actions.ts b/public/app/features/api-keys/state/actions.ts
new file mode 100644
index 00000000000..63e91088476
--- /dev/null
+++ b/public/app/features/api-keys/state/actions.ts
@@ -0,0 +1,56 @@
+import { ThunkAction } from 'redux-thunk';
+import { getBackendSrv } from 'app/core/services/backend_srv';
+import { StoreState, ApiKey } from 'app/types';
+
+export enum ActionTypes {
+ LoadApiKeys = 'LOAD_API_KEYS',
+ SetApiKeysSearchQuery = 'SET_API_KEYS_SEARCH_QUERY',
+}
+
+export interface LoadApiKeysAction {
+ type: ActionTypes.LoadApiKeys;
+ payload: ApiKey[];
+}
+
+export interface SetSearchQueryAction {
+ type: ActionTypes.SetApiKeysSearchQuery;
+ payload: string;
+}
+
+export type Action = LoadApiKeysAction | SetSearchQueryAction;
+
+type ThunkResult = ThunkAction;
+
+const apiKeysLoaded = (apiKeys: ApiKey[]): LoadApiKeysAction => ({
+ type: ActionTypes.LoadApiKeys,
+ payload: apiKeys,
+});
+
+export function addApiKey(apiKey: ApiKey, openModal: (key: string) => void): ThunkResult {
+ return async dispatch => {
+ const result = await getBackendSrv().post('/api/auth/keys', apiKey);
+ dispatch(setSearchQuery(''));
+ dispatch(loadApiKeys());
+ openModal(result.key);
+ };
+}
+
+export function loadApiKeys(): ThunkResult {
+ return async dispatch => {
+ const response = await getBackendSrv().get('/api/auth/keys');
+ dispatch(apiKeysLoaded(response));
+ };
+}
+
+export function deleteApiKey(id: number): ThunkResult {
+ return async dispatch => {
+ getBackendSrv()
+ .delete('/api/auth/keys/' + id)
+ .then(dispatch(loadApiKeys()));
+ };
+}
+
+export const setSearchQuery = (searchQuery: string): SetSearchQueryAction => ({
+ type: ActionTypes.SetApiKeysSearchQuery,
+ payload: searchQuery,
+});
diff --git a/public/app/features/api-keys/state/reducers.test.ts b/public/app/features/api-keys/state/reducers.test.ts
new file mode 100644
index 00000000000..3b2c831a5a3
--- /dev/null
+++ b/public/app/features/api-keys/state/reducers.test.ts
@@ -0,0 +1,31 @@
+import { Action, ActionTypes } from './actions';
+import { initialApiKeysState, apiKeysReducer } from './reducers';
+import { getMultipleMockKeys } from '../__mocks__/apiKeysMock';
+
+describe('API Keys reducer', () => {
+ it('should set keys', () => {
+ const payload = getMultipleMockKeys(4);
+
+ const action: Action = {
+ type: ActionTypes.LoadApiKeys,
+ payload,
+ };
+
+ const result = apiKeysReducer(initialApiKeysState, action);
+
+ expect(result.keys).toEqual(payload);
+ });
+
+ it('should set search query', () => {
+ const payload = 'test query';
+
+ const action: Action = {
+ type: ActionTypes.SetApiKeysSearchQuery,
+ payload,
+ };
+
+ const result = apiKeysReducer(initialApiKeysState, action);
+
+ expect(result.searchQuery).toEqual('test query');
+ });
+});
diff --git a/public/app/features/api-keys/state/reducers.ts b/public/app/features/api-keys/state/reducers.ts
new file mode 100644
index 00000000000..57849b20d4f
--- /dev/null
+++ b/public/app/features/api-keys/state/reducers.ts
@@ -0,0 +1,22 @@
+import { ApiKeysState } from 'app/types';
+import { Action, ActionTypes } from './actions';
+
+export const initialApiKeysState: ApiKeysState = {
+ keys: [],
+ searchQuery: '',
+ hasFetched: false,
+};
+
+export const apiKeysReducer = (state = initialApiKeysState, action: Action): ApiKeysState => {
+ switch (action.type) {
+ case ActionTypes.LoadApiKeys:
+ return { ...state, hasFetched: true, keys: action.payload };
+ case ActionTypes.SetApiKeysSearchQuery:
+ return { ...state, searchQuery: action.payload };
+ }
+ return state;
+};
+
+export default {
+ apiKeys: apiKeysReducer,
+};
diff --git a/public/app/features/api-keys/state/selectors.test.ts b/public/app/features/api-keys/state/selectors.test.ts
new file mode 100644
index 00000000000..5e9ba51462f
--- /dev/null
+++ b/public/app/features/api-keys/state/selectors.test.ts
@@ -0,0 +1,25 @@
+import { getApiKeys } from './selectors';
+import { getMultipleMockKeys } from '../__mocks__/apiKeysMock';
+import { ApiKeysState } from 'app/types';
+
+describe('API Keys selectors', () => {
+ describe('Get API Keys', () => {
+ const mockKeys = getMultipleMockKeys(5);
+
+ it('should return all keys if no search query', () => {
+ const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '', hasFetched: false };
+
+ const keys = getApiKeys(mockState);
+
+ expect(keys).toEqual(mockKeys);
+ });
+
+ it('should filter keys if search query exists', () => {
+ const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '5', hasFetched: false };
+
+ const keys = getApiKeys(mockState);
+
+ expect(keys.length).toEqual(1);
+ });
+ });
+});
diff --git a/public/app/features/api-keys/state/selectors.ts b/public/app/features/api-keys/state/selectors.ts
new file mode 100644
index 00000000000..8065c252e85
--- /dev/null
+++ b/public/app/features/api-keys/state/selectors.ts
@@ -0,0 +1,9 @@
+import { ApiKeysState } from 'app/types';
+
+export const getApiKeys = (state: ApiKeysState) => {
+ const regex = RegExp(state.searchQuery, 'i');
+
+ return state.keys.filter(key => {
+ return regex.test(key.name) || regex.test(key.role);
+ });
+};
diff --git a/public/app/features/dashboard/ad_hoc_filters.ts b/public/app/features/dashboard/ad_hoc_filters.ts
index 68b068152b5..0ceac9ddbba 100644
--- a/public/app/features/dashboard/ad_hoc_filters.ts
+++ b/public/app/features/dashboard/ad_hoc_filters.ts
@@ -59,10 +59,10 @@ export class AdHocFiltersCtrl {
let promise = null;
if (segment.type !== 'value') {
- promise = ds.getTagKeys();
+ promise = ds.getTagKeys ? ds.getTagKeys() : Promise.resolve([]);
} else {
options.key = this.segments[index - 2].value;
- promise = ds.getTagValues(options);
+ promise = ds.getTagValues ? ds.getTagValues(options) : Promise.resolve([]);
}
return promise.then(results => {
@@ -99,7 +99,7 @@ export class AdHocFiltersCtrl {
this.segments.splice(index, 0, this.uiSegmentSrv.newCondition('AND'));
}
this.segments.push(this.uiSegmentSrv.newOperator('='));
- this.segments.push(this.uiSegmentSrv.newFake('select tag value', 'value', 'query-segment-value'));
+ this.segments.push(this.uiSegmentSrv.newFake('select value', 'value', 'query-segment-value'));
segment.type = 'key';
segment.cssClass = 'query-segment-key';
}
diff --git a/public/app/features/dashboard/all.ts b/public/app/features/dashboard/all.ts
index a8f491f3ddd..f75743513f1 100644
--- a/public/app/features/dashboard/all.ts
+++ b/public/app/features/dashboard/all.ts
@@ -27,16 +27,20 @@ import './dashgrid/RowOptions';
import './folder_picker/folder_picker';
import './move_to_folder_modal/move_to_folder';
import './settings/settings';
+import './panellinks/module';
+import './dashlinks/module';
+
+// angular wrappers
+import { react2AngularDirective } from 'app/core/utils/react2angular';
+import DashboardPermissions from './permissions/DashboardPermissions';
+
+react2AngularDirective('dashboardPermissions', DashboardPermissions, ['dashboardId', 'folder']);
import coreModule from 'app/core/core_module';
-import { DashboardListCtrl } from './dashboard_list_ctrl';
import { FolderDashboardsCtrl } from './folder_dashboards_ctrl';
-import { FolderSettingsCtrl } from './folder_settings_ctrl';
import { DashboardImportCtrl } from './dashboard_import_ctrl';
import { CreateFolderCtrl } from './create_folder_ctrl';
-coreModule.controller('DashboardListCtrl', DashboardListCtrl);
coreModule.controller('FolderDashboardsCtrl', FolderDashboardsCtrl);
-coreModule.controller('FolderSettingsCtrl', FolderSettingsCtrl);
coreModule.controller('DashboardImportCtrl', DashboardImportCtrl);
coreModule.controller('CreateFolderCtrl', CreateFolderCtrl);
diff --git a/public/app/features/dashboard/dashgrid/DashboardGrid.tsx b/public/app/features/dashboard/dashgrid/DashboardGrid.tsx
index 132e9b65a55..ab0d4969668 100644
--- a/public/app/features/dashboard/dashgrid/DashboardGrid.tsx
+++ b/public/app/features/dashboard/dashgrid/DashboardGrid.tsx
@@ -177,7 +177,8 @@ export class DashboardGrid extends React.Component {
for (const panel of this.dashboard.panels) {
const panelClasses = classNames({ panel: true, 'panel--fullscreen': panel.fullscreen });
panelElements.push(
-
+ /** panel-id is set for html bookmarks */
+
);
diff --git a/public/app/features/dashboard/dashgrid/DashboardRow.tsx b/public/app/features/dashboard/dashgrid/DashboardRow.tsx
index 74630ac8f47..378cf4c2c7c 100644
--- a/public/app/features/dashboard/dashgrid/DashboardRow.tsx
+++ b/public/app/features/dashboard/dashgrid/DashboardRow.tsx
@@ -87,6 +87,7 @@ export class DashboardRow extends React.Component
{
const title = templateSrv.replaceWithText(this.props.panel.title, this.props.panel.scopedVars);
const count = this.props.panel.panels ? this.props.panel.panels.length : 0;
const panels = count === 1 ? 'panel' : 'panels';
+ const canEdit = this.dashboard.meta.canEdit === true;
return (
@@ -97,7 +98,7 @@ export class DashboardRow extends React.Component
{
({count} {panels})
- {this.dashboard.meta.canEdit === true && (
+ {canEdit && (
)}
-
+ {canEdit &&
}
);
}
diff --git a/public/app/features/dashlinks/editor.html b/public/app/features/dashboard/dashlinks/editor.html
similarity index 100%
rename from public/app/features/dashlinks/editor.html
rename to public/app/features/dashboard/dashlinks/editor.html
diff --git a/public/app/features/dashlinks/editor.ts b/public/app/features/dashboard/dashlinks/editor.ts
similarity index 95%
rename from public/app/features/dashlinks/editor.ts
rename to public/app/features/dashboard/dashlinks/editor.ts
index a4ba9ea209a..482052469db 100644
--- a/public/app/features/dashlinks/editor.ts
+++ b/public/app/features/dashboard/dashlinks/editor.ts
@@ -66,7 +66,7 @@ function dashLinksEditor() {
return {
restrict: 'E',
controller: DashLinkEditorCtrl,
- templateUrl: 'public/app/features/dashlinks/editor.html',
+ templateUrl: 'public/app/features/dashboard/dashlinks/editor.html',
bindToController: true,
controllerAs: 'ctrl',
scope: {
diff --git a/public/app/features/dashlinks/module.ts b/public/app/features/dashboard/dashlinks/module.ts
similarity index 100%
rename from public/app/features/dashlinks/module.ts
rename to public/app/features/dashboard/dashlinks/module.ts
diff --git a/public/app/features/dashboard/folder_picker/folder_picker.ts b/public/app/features/dashboard/folder_picker/folder_picker.ts
index 352b29d27a0..80651fecb7e 100644
--- a/public/app/features/dashboard/folder_picker/folder_picker.ts
+++ b/public/app/features/dashboard/folder_picker/folder_picker.ts
@@ -131,6 +131,7 @@ export class FolderPickerCtrl {
private loadInitialValue() {
const resetFolder = { text: this.initialTitle, value: null };
const rootFolder = { text: this.rootName, value: 0 };
+
this.getOptions('').then(result => {
let folder;
if (this.initialFolderId) {
@@ -150,7 +151,7 @@ export class FolderPickerCtrl {
this.folder = folder;
// if this is not the same as our initial value notify parent
- if (this.folder.id !== this.initialFolderId) {
+ if (this.folder.value !== this.initialFolderId) {
this.onChange({ $folder: { id: this.folder.value, title: this.folder.text } });
}
});
diff --git a/public/app/features/dashboard/folder_settings_ctrl.ts b/public/app/features/dashboard/folder_settings_ctrl.ts
deleted file mode 100644
index a847c29ac56..00000000000
--- a/public/app/features/dashboard/folder_settings_ctrl.ts
+++ /dev/null
@@ -1,94 +0,0 @@
-import { FolderPageLoader } from './folder_page_loader';
-import appEvents from 'app/core/app_events';
-
-export class FolderSettingsCtrl {
- folderPageLoader: FolderPageLoader;
- navModel: any;
- folderId: number;
- uid: string;
- canSave = false;
- folder: any;
- title: string;
- hasChanged: boolean;
-
- /** @ngInject */
- constructor(private backendSrv, navModelSrv, private $routeParams, private $location) {
- if (this.$routeParams.uid) {
- this.uid = $routeParams.uid;
-
- this.folderPageLoader = new FolderPageLoader(this.backendSrv);
- this.folderPageLoader.load(this, this.uid, 'manage-folder-settings').then(folder => {
- if ($location.path() !== folder.meta.url) {
- $location.path(`${folder.meta.url}/settings`).replace();
- }
-
- this.folder = folder;
- this.canSave = this.folder.canSave;
- this.title = this.folder.title;
- });
- }
- }
-
- save() {
- this.titleChanged();
-
- if (!this.hasChanged) {
- return;
- }
-
- this.folder.title = this.title.trim();
-
- return this.backendSrv
- .updateFolder(this.folder)
- .then(result => {
- if (result.url !== this.$location.path()) {
- this.$location.url(result.url + '/settings');
- }
-
- appEvents.emit('dashboard-saved');
- appEvents.emit('alert-success', ['Folder saved']);
- })
- .catch(this.handleSaveFolderError);
- }
-
- titleChanged() {
- this.hasChanged = this.folder.title.toLowerCase() !== this.title.trim().toLowerCase();
- }
-
- delete(evt) {
- if (evt) {
- evt.stopPropagation();
- evt.preventDefault();
- }
-
- appEvents.emit('confirm-modal', {
- title: 'Delete',
- text: `Do you want to delete this folder and all its dashboards?`,
- icon: 'fa-trash',
- yesText: 'Delete',
- onConfirm: () => {
- return this.backendSrv.deleteFolder(this.uid).then(() => {
- appEvents.emit('alert-success', ['Folder Deleted', `${this.folder.title} has been deleted`]);
- this.$location.url('dashboards');
- });
- },
- });
- }
-
- handleSaveFolderError(err) {
- if (err.data && err.data.status === 'version-mismatch') {
- err.isHandled = true;
-
- appEvents.emit('confirm-modal', {
- title: 'Conflict',
- text: 'Someone else has updated this folder.',
- text2: 'Would you still like to save this folder?',
- yesText: 'Save & Overwrite',
- icon: 'fa-warning',
- onConfirm: () => {
- this.backendSrv.updateFolder(this.folder, { overwrite: true });
- },
- });
- }
- }
-}
diff --git a/public/app/features/panellinks/link_srv.ts b/public/app/features/dashboard/panellinks/link_srv.ts
similarity index 100%
rename from public/app/features/panellinks/link_srv.ts
rename to public/app/features/dashboard/panellinks/link_srv.ts
diff --git a/public/app/features/panellinks/module.html b/public/app/features/dashboard/panellinks/module.html
similarity index 100%
rename from public/app/features/panellinks/module.html
rename to public/app/features/dashboard/panellinks/module.html
diff --git a/public/app/features/panellinks/module.ts b/public/app/features/dashboard/panellinks/module.ts
similarity index 95%
rename from public/app/features/panellinks/module.ts
rename to public/app/features/dashboard/panellinks/module.ts
index a5605c1d641..c10e4ab224e 100644
--- a/public/app/features/panellinks/module.ts
+++ b/public/app/features/dashboard/panellinks/module.ts
@@ -9,7 +9,7 @@ function panelLinksEditor() {
},
restrict: 'E',
controller: 'PanelLinksEditorCtrl',
- templateUrl: 'public/app/features/panellinks/module.html',
+ templateUrl: 'public/app/features/dashboard/panellinks/module.html',
link: () => {},
};
}
diff --git a/public/app/features/panellinks/specs/link_srv.test.ts b/public/app/features/dashboard/panellinks/specs/link_srv.test.ts
similarity index 100%
rename from public/app/features/panellinks/specs/link_srv.test.ts
rename to public/app/features/dashboard/panellinks/specs/link_srv.test.ts
diff --git a/public/app/features/dashboard/permissions/DashboardPermissions.tsx b/public/app/features/dashboard/permissions/DashboardPermissions.tsx
new file mode 100644
index 00000000000..5651242a485
--- /dev/null
+++ b/public/app/features/dashboard/permissions/DashboardPermissions.tsx
@@ -0,0 +1,116 @@
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
+import Tooltip from 'app/core/components/Tooltip/Tooltip';
+import SlideDown from 'app/core/components/Animations/SlideDown';
+import { StoreState, FolderInfo } from 'app/types';
+import { DashboardAcl, PermissionLevel, NewDashboardAclItem } from 'app/types/acl';
+import {
+ getDashboardPermissions,
+ addDashboardPermission,
+ removeDashboardPermission,
+ updateDashboardPermission,
+} from '../state/actions';
+import PermissionList from 'app/core/components/PermissionList/PermissionList';
+import AddPermission from 'app/core/components/PermissionList/AddPermission';
+import PermissionsInfo from 'app/core/components/PermissionList/PermissionsInfo';
+import { store } from 'app/store/configureStore';
+
+export interface Props {
+ dashboardId: number;
+ folder?: FolderInfo;
+ permissions: DashboardAcl[];
+ getDashboardPermissions: typeof getDashboardPermissions;
+ updateDashboardPermission: typeof updateDashboardPermission;
+ removeDashboardPermission: typeof removeDashboardPermission;
+ addDashboardPermission: typeof addDashboardPermission;
+}
+
+export interface State {
+ isAdding: boolean;
+}
+
+export class DashboardPermissions extends PureComponent {
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ isAdding: false,
+ };
+ }
+
+ componentDidMount() {
+ this.props.getDashboardPermissions(this.props.dashboardId);
+ }
+
+ onOpenAddPermissions = () => {
+ this.setState({ isAdding: true });
+ };
+
+ onRemoveItem = (item: DashboardAcl) => {
+ this.props.removeDashboardPermission(this.props.dashboardId, item);
+ };
+
+ onPermissionChanged = (item: DashboardAcl, level: PermissionLevel) => {
+ this.props.updateDashboardPermission(this.props.dashboardId, item, level);
+ };
+
+ onAddPermission = (newItem: NewDashboardAclItem) => {
+ return this.props.addDashboardPermission(this.props.dashboardId, newItem);
+ };
+
+ onCancelAddPermission = () => {
+ this.setState({ isAdding: false });
+ };
+
+ render() {
+ const { permissions, folder } = this.props;
+ const { isAdding } = this.state;
+
+ return (
+
+
+
+
Permissions
+
+
+
+
+
+ Add Permission
+
+
+
+
+
+
+
+
+ );
+ }
+}
+
+function connectWithStore(WrappedComponent, ...args) {
+ const ConnectedWrappedComponent = connect(...args)(WrappedComponent);
+ return props => {
+ return ;
+ };
+}
+
+const mapStateToProps = (state: StoreState) => ({
+ permissions: state.dashboard.permissions,
+});
+
+const mapDispatchToProps = {
+ getDashboardPermissions,
+ addDashboardPermission,
+ removeDashboardPermission,
+ updateDashboardPermission,
+};
+
+export default connectWithStore(DashboardPermissions, mapStateToProps, mapDispatchToProps);
diff --git a/public/app/features/dashboard/specs/DashboardRow.test.tsx b/public/app/features/dashboard/specs/DashboardRow.test.tsx
index 8424346b0c5..3d89c22f962 100644
--- a/public/app/features/dashboard/specs/DashboardRow.test.tsx
+++ b/public/app/features/dashboard/specs/DashboardRow.test.tsx
@@ -39,6 +39,12 @@ describe('DashboardRow', () => {
expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(2);
});
+ it('should not show row drag handle when cannot edit', () => {
+ dashboardMock.meta.canEdit = false;
+ wrapper = shallow( );
+ expect(wrapper.find('.dashboard-row__drag')).toHaveLength(0);
+ });
+
it('should have zero actions when cannot edit', () => {
dashboardMock.meta.canEdit = false;
panel = new PanelModel({ collapsed: false });
diff --git a/public/app/features/dashboard/specs/share_modal_ctrl.test.ts b/public/app/features/dashboard/specs/share_modal_ctrl.test.ts
index 8a8d94fdddb..70d301ed5ff 100644
--- a/public/app/features/dashboard/specs/share_modal_ctrl.test.ts
+++ b/public/app/features/dashboard/specs/share_modal_ctrl.test.ts
@@ -1,7 +1,7 @@
import '../shareModalCtrl';
import { ShareModalCtrl } from '../shareModalCtrl';
import config from 'app/core/config';
-import { LinkSrv } from 'app/features/panellinks/link_srv';
+import { LinkSrv } from 'app/features/dashboard/panellinks/link_srv';
describe('ShareModalCtrl', () => {
const ctx = {
diff --git a/public/app/features/dashboard/specs/time_srv.test.ts b/public/app/features/dashboard/specs/time_srv.test.ts
index 514e0b90792..db0d11f2ebe 100644
--- a/public/app/features/dashboard/specs/time_srv.test.ts
+++ b/public/app/features/dashboard/specs/time_srv.test.ts
@@ -29,6 +29,7 @@ describe('timeSrv', () => {
beforeEach(() => {
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
timeSrv.init(_dashboard);
+ _dashboard.refresh = false;
});
describe('timeRange', () => {
@@ -79,6 +80,23 @@ describe('timeSrv', () => {
expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime());
});
+ it('should ignore refresh if time absolute', () => {
+ location = {
+ search: jest.fn(() => ({
+ from: '20140410T052010',
+ to: '20140520T031022',
+ })),
+ };
+
+ timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
+
+ // dashboard saved with refresh on
+ _dashboard.refresh = true;
+ timeSrv.init(_dashboard);
+
+ expect(timeSrv.refresh).toBe(false);
+ });
+
it('should handle formatted dates without time', () => {
location = {
search: jest.fn(() => ({
diff --git a/public/app/features/dashboard/state/actions.ts b/public/app/features/dashboard/state/actions.ts
new file mode 100644
index 00000000000..9e923f6bcb7
--- /dev/null
+++ b/public/app/features/dashboard/state/actions.ts
@@ -0,0 +1,115 @@
+import { StoreState } from 'app/types';
+import { ThunkAction } from 'redux-thunk';
+import { getBackendSrv } from 'app/core/services/backend_srv';
+
+import {
+ DashboardAcl,
+ DashboardAclDTO,
+ PermissionLevel,
+ DashboardAclUpdateDTO,
+ NewDashboardAclItem,
+} from 'app/types/acl';
+
+export enum ActionTypes {
+ LoadDashboardPermissions = 'LOAD_DASHBOARD_PERMISSIONS',
+}
+
+export interface LoadDashboardPermissionsAction {
+ type: ActionTypes.LoadDashboardPermissions;
+ payload: DashboardAcl[];
+}
+
+export type Action = LoadDashboardPermissionsAction;
+
+type ThunkResult = ThunkAction;
+
+export const loadDashboardPermissions = (items: DashboardAclDTO[]): LoadDashboardPermissionsAction => ({
+ type: ActionTypes.LoadDashboardPermissions,
+ payload: items,
+});
+
+export function getDashboardPermissions(id: number): ThunkResult {
+ return async dispatch => {
+ const permissions = await getBackendSrv().get(`/api/dashboards/id/${id}/permissions`);
+ dispatch(loadDashboardPermissions(permissions));
+ };
+}
+
+function toUpdateItem(item: DashboardAcl): DashboardAclUpdateDTO {
+ return {
+ userId: item.userId,
+ teamId: item.teamId,
+ role: item.role,
+ permission: item.permission,
+ };
+}
+
+export function updateDashboardPermission(
+ dashboardId: number,
+ itemToUpdate: DashboardAcl,
+ level: PermissionLevel
+): ThunkResult {
+ return async (dispatch, getStore) => {
+ const { dashboard } = getStore();
+ const itemsToUpdate = [];
+
+ for (const item of dashboard.permissions) {
+ if (item.inherited) {
+ continue;
+ }
+
+ const updated = toUpdateItem(item);
+
+ // if this is the item we want to update, update it's permisssion
+ if (itemToUpdate === item) {
+ updated.permission = level;
+ }
+
+ itemsToUpdate.push(updated);
+ }
+
+ await getBackendSrv().post(`/api/dashboards/id/${dashboardId}/permissions`, { items: itemsToUpdate });
+ await dispatch(getDashboardPermissions(dashboardId));
+ };
+}
+
+export function removeDashboardPermission(dashboardId: number, itemToDelete: DashboardAcl): ThunkResult {
+ return async (dispatch, getStore) => {
+ const dashboard = getStore().dashboard;
+ const itemsToUpdate = [];
+
+ for (const item of dashboard.permissions) {
+ if (item.inherited || item === itemToDelete) {
+ continue;
+ }
+ itemsToUpdate.push(toUpdateItem(item));
+ }
+
+ await getBackendSrv().post(`/api/dashboards/id/${dashboardId}/permissions`, { items: itemsToUpdate });
+ await dispatch(getDashboardPermissions(dashboardId));
+ };
+}
+
+export function addDashboardPermission(dashboardId: number, newItem: NewDashboardAclItem): ThunkResult {
+ return async (dispatch, getStore) => {
+ const { dashboard } = getStore();
+ const itemsToUpdate = [];
+
+ for (const item of dashboard.permissions) {
+ if (item.inherited) {
+ continue;
+ }
+ itemsToUpdate.push(toUpdateItem(item));
+ }
+
+ itemsToUpdate.push({
+ userId: newItem.userId,
+ teamId: newItem.teamId,
+ role: newItem.role,
+ permission: newItem.permission,
+ });
+
+ await getBackendSrv().post(`/api/dashboards/id/${dashboardId}/permissions`, { items: itemsToUpdate });
+ await dispatch(getDashboardPermissions(dashboardId));
+ };
+}
diff --git a/public/app/features/dashboard/state/reducers.test.ts b/public/app/features/dashboard/state/reducers.test.ts
new file mode 100644
index 00000000000..c5b67f58ac9
--- /dev/null
+++ b/public/app/features/dashboard/state/reducers.test.ts
@@ -0,0 +1,24 @@
+import { Action, ActionTypes } from './actions';
+import { OrgRole, PermissionLevel, DashboardState } from 'app/types';
+import { inititalState, dashboardReducer } from './reducers';
+
+describe('dashboard reducer', () => {
+ describe('loadDashboardPermissions', () => {
+ let state: DashboardState;
+
+ beforeEach(() => {
+ const action: Action = {
+ type: ActionTypes.LoadDashboardPermissions,
+ payload: [
+ { id: 2, dashboardId: 1, role: OrgRole.Viewer, permission: PermissionLevel.View },
+ { id: 3, dashboardId: 1, role: OrgRole.Editor, permission: PermissionLevel.Edit },
+ ],
+ };
+ state = dashboardReducer(inititalState, action);
+ });
+
+ it('should add permissions to state', async () => {
+ expect(state.permissions.length).toBe(2);
+ });
+ });
+});
diff --git a/public/app/features/dashboard/state/reducers.ts b/public/app/features/dashboard/state/reducers.ts
new file mode 100644
index 00000000000..5100529d973
--- /dev/null
+++ b/public/app/features/dashboard/state/reducers.ts
@@ -0,0 +1,22 @@
+import { DashboardState } from 'app/types';
+import { Action, ActionTypes } from './actions';
+import { processAclItems } from 'app/core/utils/acl';
+
+export const inititalState: DashboardState = {
+ permissions: [],
+};
+
+export const dashboardReducer = (state = inititalState, action: Action): DashboardState => {
+ switch (action.type) {
+ case ActionTypes.LoadDashboardPermissions:
+ return {
+ ...state,
+ permissions: processAclItems(action.payload),
+ };
+ }
+ return state;
+};
+
+export default {
+ dashboard: dashboardReducer,
+};
diff --git a/public/app/features/dashboard/submenu/submenu.html b/public/app/features/dashboard/submenu/submenu.html
index f240a86efba..d7cee33e6c3 100644
--- a/public/app/features/dashboard/submenu/submenu.html
+++ b/public/app/features/dashboard/submenu/submenu.html
@@ -4,7 +4,8 @@
{{variable.label || variable.name}}
-
+
+
diff --git a/public/app/features/dashboard/time_srv.ts b/public/app/features/dashboard/time_srv.ts
index dd5a0ba758f..a96bc89daa7 100644
--- a/public/app/features/dashboard/time_srv.ts
+++ b/public/app/features/dashboard/time_srv.ts
@@ -85,6 +85,12 @@ export class TimeSrv {
if (params.to) {
this.time.to = this.parseUrlParam(params.to) || this.time.to;
}
+ // if the time range in the URL is absolute, ignore the refresh option saved to the dashboard
+ if (params.to && params.to.indexOf('now') === -1) {
+ this.refresh = false;
+ this.dashboard.refresh = false;
+ }
+ // but if refresh explicitly set then use that
if (params.refresh) {
this.refresh = params.refresh || this.refresh;
}
@@ -107,7 +113,7 @@ export class TimeSrv {
}
private timeHasChangedSinceLoad() {
- return this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to;
+ return this.timeAtLoad && (this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to);
}
setAutoRefresh(interval) {
diff --git a/public/app/features/dashboard/upload.ts b/public/app/features/dashboard/upload.ts
index 974a0c35cd2..42871327eb6 100644
--- a/public/app/features/dashboard/upload.ts
+++ b/public/app/features/dashboard/upload.ts
@@ -1,10 +1,12 @@
import coreModule from 'app/core/core_module';
+import appEvents from 'app/core/app_events';
+import angular from 'angular';
const template = `
-
+
- Upload .json File
+ {{btnText}}
`;
@@ -15,8 +17,11 @@ function uploadDashboardDirective(timer, alertSrv, $location) {
template: template,
scope: {
onUpload: '&',
+ btnText: '@?',
},
- link: scope => {
+ link: (scope, elem) => {
+ scope.btnText = angular.isDefined(scope.btnText) ? scope.btnText : 'Upload .json File';
+
function file_selected(evt) {
const files = evt.target.files; // FileList object
const readerOnload = () => {
@@ -26,7 +31,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) {
dash = JSON.parse(e.target.result);
} catch (err) {
console.log(err);
- scope.appEvent('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]);
+ appEvents.emit('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]);
return;
}
@@ -52,7 +57,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) {
// Check for the various File API support.
if (wnd.File && wnd.FileReader && wnd.FileList && wnd.Blob) {
// Something
- document.getElementById('dashupload').addEventListener('change', file_selected, false);
+ elem[0].addEventListener('change', file_selected, false);
} else {
alertSrv.set('Oops', 'Sorry, the HTML5 File APIs are not fully supported in this browser.', 'error');
}
diff --git a/public/app/features/datasources/DataSourceList.test.tsx b/public/app/features/datasources/DataSourceList.test.tsx
new file mode 100644
index 00000000000..6e097da2c53
--- /dev/null
+++ b/public/app/features/datasources/DataSourceList.test.tsx
@@ -0,0 +1,22 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import DataSourcesList from './DataSourcesList';
+import { getMockDataSources } from './__mocks__/dataSourcesMocks';
+import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+
+const setup = () => {
+ const props = {
+ dataSources: getMockDataSources(3),
+ layoutMode: LayoutModes.Grid,
+ };
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/datasources/DataSourcesList.tsx b/public/app/features/datasources/DataSourcesList.tsx
new file mode 100644
index 00000000000..904ed0cf679
--- /dev/null
+++ b/public/app/features/datasources/DataSourcesList.tsx
@@ -0,0 +1,34 @@
+import React, { PureComponent } from 'react';
+import classNames from 'classnames/bind';
+import DataSourcesListItem from './DataSourcesListItem';
+import { DataSource } from 'app/types';
+import { LayoutMode, LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+
+export interface Props {
+ dataSources: DataSource[];
+ layoutMode: LayoutMode;
+}
+
+export class DataSourcesList extends PureComponent {
+ render() {
+ const { dataSources, layoutMode } = this.props;
+
+ const listStyle = classNames({
+ 'card-section': true,
+ 'card-list-layout-grid': layoutMode === LayoutModes.Grid,
+ 'card-list-layout-list': layoutMode === LayoutModes.List,
+ });
+
+ return (
+
+
+ {dataSources.map((dataSource, index) => {
+ return ;
+ })}
+
+
+ );
+ }
+}
+
+export default DataSourcesList;
diff --git a/public/app/features/datasources/DataSourcesListItem.test.tsx b/public/app/features/datasources/DataSourcesListItem.test.tsx
new file mode 100644
index 00000000000..138c71cb46a
--- /dev/null
+++ b/public/app/features/datasources/DataSourcesListItem.test.tsx
@@ -0,0 +1,20 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import DataSourcesListItem from './DataSourcesListItem';
+import { getMockDataSource } from './__mocks__/dataSourcesMocks';
+
+const setup = () => {
+ const props = {
+ dataSource: getMockDataSource(),
+ };
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/datasources/DataSourcesListItem.tsx b/public/app/features/datasources/DataSourcesListItem.tsx
new file mode 100644
index 00000000000..a4fedb893fb
--- /dev/null
+++ b/public/app/features/datasources/DataSourcesListItem.tsx
@@ -0,0 +1,35 @@
+import React, { PureComponent } from 'react';
+import { DataSource } from 'app/types';
+
+export interface Props {
+ dataSource: DataSource;
+}
+
+export class DataSourcesListItem extends PureComponent {
+ render() {
+ const { dataSource } = this.props;
+ return (
+
+
+
+
+
+
+
+
+
+ {dataSource.name}
+ {dataSource.isDefault && default }
+
+
{dataSource.url}
+
+
+
+
+ );
+ }
+}
+
+export default DataSourcesListItem;
diff --git a/public/app/features/datasources/DataSourcesListPage.test.tsx b/public/app/features/datasources/DataSourcesListPage.test.tsx
new file mode 100644
index 00000000000..0ea716d62c9
--- /dev/null
+++ b/public/app/features/datasources/DataSourcesListPage.test.tsx
@@ -0,0 +1,42 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { DataSourcesListPage, Props } from './DataSourcesListPage';
+import { DataSource, NavModel } from 'app/types';
+import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+import { getMockDataSources } from './__mocks__/dataSourcesMocks';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ dataSources: [] as DataSource[],
+ layoutMode: LayoutModes.Grid,
+ loadDataSources: jest.fn(),
+ navModel: {} as NavModel,
+ dataSourcesCount: 0,
+ searchQuery: '',
+ setDataSourcesSearchQuery: jest.fn(),
+ setDataSourcesLayoutMode: jest.fn(),
+ hasFetched: false,
+ };
+
+ Object.assign(props, propOverrides);
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render action bar and datasources', () => {
+ const wrapper = setup({
+ dataSources: getMockDataSources(5),
+ dataSourcesCount: 5,
+ hasFetched: true,
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/datasources/DataSourcesListPage.tsx b/public/app/features/datasources/DataSourcesListPage.tsx
new file mode 100644
index 00000000000..6a292d63e53
--- /dev/null
+++ b/public/app/features/datasources/DataSourcesListPage.tsx
@@ -0,0 +1,110 @@
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
+import { hot } from 'react-hot-loader';
+import PageHeader from '../../core/components/PageHeader/PageHeader';
+import PageLoader from 'app/core/components/PageLoader/PageLoader';
+import OrgActionBar from '../../core/components/OrgActionBar/OrgActionBar';
+import EmptyListCTA from '../../core/components/EmptyListCTA/EmptyListCTA';
+import DataSourcesList from './DataSourcesList';
+import { DataSource, NavModel } from 'app/types';
+import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector';
+import { loadDataSources, setDataSourcesLayoutMode, setDataSourcesSearchQuery } from './state/actions';
+import { getNavModel } from '../../core/selectors/navModel';
+import {
+ getDataSources,
+ getDataSourcesCount,
+ getDataSourcesLayoutMode,
+ getDataSourcesSearchQuery,
+} from './state/selectors';
+
+export interface Props {
+ navModel: NavModel;
+ dataSources: DataSource[];
+ dataSourcesCount: number;
+ layoutMode: LayoutMode;
+ searchQuery: string;
+ hasFetched: boolean;
+ loadDataSources: typeof loadDataSources;
+ setDataSourcesLayoutMode: typeof setDataSourcesLayoutMode;
+ setDataSourcesSearchQuery: typeof setDataSourcesSearchQuery;
+}
+
+const emptyListModel = {
+ title: 'There are no data sources defined yet',
+ buttonIcon: 'gicon gicon-add-datasources',
+ buttonLink: 'datasources/new',
+ buttonTitle: 'Add data source',
+ proTip: 'You can also define data sources through configuration files.',
+ proTipLink: 'http://docs.grafana.org/administration/provisioning/#datasources?utm_source=grafana_ds_list',
+ proTipLinkTitle: 'Learn more',
+ proTipTarget: '_blank',
+};
+
+export class DataSourcesListPage extends PureComponent {
+ componentDidMount() {
+ this.fetchDataSources();
+ }
+
+ async fetchDataSources() {
+ return await this.props.loadDataSources();
+ }
+
+ render() {
+ const {
+ dataSources,
+ dataSourcesCount,
+ navModel,
+ layoutMode,
+ searchQuery,
+ setDataSourcesSearchQuery,
+ setDataSourcesLayoutMode,
+ hasFetched,
+ } = this.props;
+
+ const linkButton = {
+ href: 'datasources/new',
+ title: 'Add data source',
+ };
+
+ return (
+
+
+
+ {!hasFetched &&
}
+ {hasFetched && dataSourcesCount === 0 &&
}
+ {hasFetched &&
+ dataSourcesCount > 0 && [
+
setDataSourcesLayoutMode(mode)}
+ setSearchQuery={query => setDataSourcesSearchQuery(query)}
+ linkButton={linkButton}
+ key="action-bar"
+ />,
+ ,
+ ]}
+
+
+ );
+ }
+}
+
+function mapStateToProps(state) {
+ return {
+ navModel: getNavModel(state.navIndex, 'datasources'),
+ dataSources: getDataSources(state.dataSources),
+ layoutMode: getDataSourcesLayoutMode(state.dataSources),
+ dataSourcesCount: getDataSourcesCount(state.dataSources),
+ searchQuery: getDataSourcesSearchQuery(state.dataSources),
+ hasFetched: state.dataSources.hasFetched,
+ };
+}
+
+const mapDispatchToProps = {
+ loadDataSources,
+ setDataSourcesSearchQuery,
+ setDataSourcesLayoutMode,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(DataSourcesListPage));
diff --git a/public/app/features/datasources/NewDataSourcePage.tsx b/public/app/features/datasources/NewDataSourcePage.tsx
new file mode 100644
index 00000000000..527ecf6db83
--- /dev/null
+++ b/public/app/features/datasources/NewDataSourcePage.tsx
@@ -0,0 +1,88 @@
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
+import { hot } from 'react-hot-loader';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import { NavModel, Plugin } from 'app/types';
+import { addDataSource, loadDataSourceTypes, setDataSourceTypeSearchQuery } from './state/actions';
+import { updateLocation } from '../../core/actions';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { getDataSourceTypes } from './state/selectors';
+
+export interface Props {
+ navModel: NavModel;
+ dataSourceTypes: Plugin[];
+ addDataSource: typeof addDataSource;
+ loadDataSourceTypes: typeof loadDataSourceTypes;
+ updateLocation: typeof updateLocation;
+ dataSourceTypeSearchQuery: string;
+ setDataSourceTypeSearchQuery: typeof setDataSourceTypeSearchQuery;
+}
+
+class NewDataSourcePage extends PureComponent {
+ componentDidMount() {
+ this.props.loadDataSourceTypes();
+ }
+
+ onDataSourceTypeClicked = type => {
+ this.props.addDataSource(type);
+ };
+
+ onSearchQueryChange = event => {
+ this.props.setDataSourceTypeSearchQuery(event.target.value);
+ };
+
+ render() {
+ const { navModel, dataSourceTypes, dataSourceTypeSearchQuery } = this.props;
+
+ return (
+
+
+
+
Choose data source type
+
+
+
+
+
+
+
+ {dataSourceTypes.map((type, index) => {
+ return (
+
this.onDataSourceTypeClicked(type)}
+ className="add-data-source-grid-item"
+ key={`${type.id}-${index}`}
+ >
+
+
{type.name}
+
+ );
+ })}
+
+
+
+ );
+ }
+}
+
+function mapStateToProps(state) {
+ return {
+ navModel: getNavModel(state.navIndex, 'datasources'),
+ dataSourceTypes: getDataSourceTypes(state.dataSources),
+ };
+}
+
+const mapDispatchToProps = {
+ addDataSource,
+ loadDataSourceTypes,
+ updateLocation,
+ setDataSourceTypeSearchQuery,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(NewDataSourcePage));
diff --git a/public/app/features/datasources/__mocks__/dataSourcesMocks.ts b/public/app/features/datasources/__mocks__/dataSourcesMocks.ts
new file mode 100644
index 00000000000..97819a18c82
--- /dev/null
+++ b/public/app/features/datasources/__mocks__/dataSourcesMocks.ts
@@ -0,0 +1,45 @@
+import { DataSource } from 'app/types';
+
+export const getMockDataSources = (amount: number): DataSource[] => {
+ const dataSources = [];
+
+ for (let i = 0; i <= amount; i++) {
+ dataSources.push({
+ access: '',
+ basicAuth: false,
+ database: `database-${i}`,
+ id: i,
+ isDefault: false,
+ jsonData: { authType: 'credentials', defaultRegion: 'eu-west-2' },
+ name: `dataSource-${i}`,
+ orgId: 1,
+ password: '',
+ readOnly: false,
+ type: 'cloudwatch',
+ typeLogoUrl: 'public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png',
+ url: '',
+ user: '',
+ });
+ }
+
+ return dataSources;
+};
+
+export const getMockDataSource = (): DataSource => {
+ return {
+ access: '',
+ basicAuth: false,
+ database: '',
+ id: 13,
+ isDefault: false,
+ jsonData: { authType: 'credentials', defaultRegion: 'eu-west-2' },
+ name: 'gdev-cloudwatch',
+ orgId: 1,
+ password: '',
+ readOnly: false,
+ type: 'cloudwatch',
+ typeLogoUrl: 'public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png',
+ url: '',
+ user: '',
+ };
+};
diff --git a/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap
new file mode 100644
index 00000000000..7167f59b048
--- /dev/null
+++ b/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap
@@ -0,0 +1,108 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
diff --git a/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap
new file mode 100644
index 00000000000..a424276cf32
--- /dev/null
+++ b/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap
@@ -0,0 +1,45 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+
+
+
+
+
+
+
+
+ gdev-cloudwatch
+
+
+
+
+
+
+`;
diff --git a/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap
new file mode 100644
index 00000000000..c26ac50fed8
--- /dev/null
+++ b/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap
@@ -0,0 +1,163 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render action bar and datasources 1`] = `
+
+`;
+
+exports[`Render should render component 1`] = `
+
+`;
diff --git a/public/app/features/datasources/state/actions.test.ts b/public/app/features/datasources/state/actions.test.ts
new file mode 100644
index 00000000000..d0a8caad9a7
--- /dev/null
+++ b/public/app/features/datasources/state/actions.test.ts
@@ -0,0 +1,44 @@
+import { findNewName, nameExits } from './actions';
+import { getMockPlugin, getMockPlugins } from '../../plugins/__mocks__/pluginMocks';
+
+describe('Name exists', () => {
+ const plugins = getMockPlugins(5);
+
+ it('should be true', () => {
+ const name = 'pretty cool plugin-1';
+
+ expect(nameExits(plugins, name)).toEqual(true);
+ });
+
+ it('should be false', () => {
+ const name = 'pretty cool plugin-6';
+
+ expect(nameExits(plugins, name));
+ });
+});
+
+describe('Find new name', () => {
+ it('should create a new name', () => {
+ const plugins = getMockPlugins(5);
+ const name = 'pretty cool plugin-1';
+
+ expect(findNewName(plugins, name)).toEqual('pretty cool plugin-6');
+ });
+
+ it('should create new name without suffix', () => {
+ const plugin = getMockPlugin();
+ plugin.name = 'prometheus';
+ const plugins = [plugin];
+ const name = 'prometheus';
+
+ expect(findNewName(plugins, name)).toEqual('prometheus-1');
+ });
+
+ it('should handle names that end with -', () => {
+ const plugin = getMockPlugin();
+ const plugins = [plugin];
+ const name = 'pretty cool plugin-';
+
+ expect(findNewName(plugins, name)).toEqual('pretty cool plugin-');
+ });
+});
diff --git a/public/app/features/datasources/state/actions.ts b/public/app/features/datasources/state/actions.ts
new file mode 100644
index 00000000000..33d6b79c5df
--- /dev/null
+++ b/public/app/features/datasources/state/actions.ts
@@ -0,0 +1,154 @@
+import { ThunkAction } from 'redux-thunk';
+import { DataSource, Plugin, StoreState } from 'app/types';
+import { getBackendSrv } from '../../../core/services/backend_srv';
+import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector';
+import { updateLocation } from '../../../core/actions';
+import { UpdateLocationAction } from '../../../core/actions/location';
+
+export enum ActionTypes {
+ LoadDataSources = 'LOAD_DATA_SOURCES',
+ LoadDataSourceTypes = 'LOAD_DATA_SOURCE_TYPES',
+ SetDataSourcesSearchQuery = 'SET_DATA_SOURCES_SEARCH_QUERY',
+ SetDataSourcesLayoutMode = 'SET_DATA_SOURCES_LAYOUT_MODE',
+ SetDataSourceTypeSearchQuery = 'SET_DATA_SOURCE_TYPE_SEARCH_QUERY',
+}
+
+export interface LoadDataSourcesAction {
+ type: ActionTypes.LoadDataSources;
+ payload: DataSource[];
+}
+
+export interface SetDataSourcesSearchQueryAction {
+ type: ActionTypes.SetDataSourcesSearchQuery;
+ payload: string;
+}
+
+export interface SetDataSourcesLayoutModeAction {
+ type: ActionTypes.SetDataSourcesLayoutMode;
+ payload: LayoutMode;
+}
+
+export interface LoadDataSourceTypesAction {
+ type: ActionTypes.LoadDataSourceTypes;
+ payload: Plugin[];
+}
+
+export interface SetDataSourceTypeSearchQueryAction {
+ type: ActionTypes.SetDataSourceTypeSearchQuery;
+ payload: string;
+}
+
+const dataSourcesLoaded = (dataSources: DataSource[]): LoadDataSourcesAction => ({
+ type: ActionTypes.LoadDataSources,
+ payload: dataSources,
+});
+
+const dataSourceTypesLoaded = (dataSourceTypes: Plugin[]): LoadDataSourceTypesAction => ({
+ type: ActionTypes.LoadDataSourceTypes,
+ payload: dataSourceTypes,
+});
+
+export const setDataSourcesSearchQuery = (searchQuery: string): SetDataSourcesSearchQueryAction => ({
+ type: ActionTypes.SetDataSourcesSearchQuery,
+ payload: searchQuery,
+});
+
+export const setDataSourcesLayoutMode = (layoutMode: LayoutMode): SetDataSourcesLayoutModeAction => ({
+ type: ActionTypes.SetDataSourcesLayoutMode,
+ payload: layoutMode,
+});
+
+export const setDataSourceTypeSearchQuery = (query: string): SetDataSourceTypeSearchQueryAction => ({
+ type: ActionTypes.SetDataSourceTypeSearchQuery,
+ payload: query,
+});
+
+export type Action =
+ | LoadDataSourcesAction
+ | SetDataSourcesSearchQueryAction
+ | SetDataSourcesLayoutModeAction
+ | UpdateLocationAction
+ | LoadDataSourceTypesAction
+ | SetDataSourceTypeSearchQueryAction;
+
+type ThunkResult = ThunkAction;
+
+export function loadDataSources(): ThunkResult {
+ return async dispatch => {
+ const response = await getBackendSrv().get('/api/datasources');
+ dispatch(dataSourcesLoaded(response));
+ };
+}
+
+export function addDataSource(plugin: Plugin): ThunkResult {
+ return async (dispatch, getStore) => {
+ await dispatch(loadDataSources());
+
+ const dataSources = getStore().dataSources.dataSources;
+
+ const newInstance = {
+ name: plugin.name,
+ type: plugin.id,
+ access: 'proxy',
+ isDefault: dataSources.length === 0,
+ };
+
+ if (nameExits(dataSources, newInstance.name)) {
+ newInstance.name = findNewName(dataSources, newInstance.name);
+ }
+
+ const result = await getBackendSrv().post('/api/datasources', newInstance);
+ dispatch(updateLocation({ path: `/datasources/edit/${result.id}` }));
+ };
+}
+
+export function loadDataSourceTypes(): ThunkResult {
+ return async dispatch => {
+ const result = await getBackendSrv().get('/api/plugins', { enabled: 1, type: 'datasource' });
+ dispatch(dataSourceTypesLoaded(result));
+ };
+}
+
+export function nameExits(dataSources, name) {
+ return (
+ dataSources.filter(dataSource => {
+ return dataSource.name === name;
+ }).length > 0
+ );
+}
+
+export function findNewName(dataSources, name) {
+ // Need to loop through current data sources to make sure
+ // the name doesn't exist
+ while (nameExits(dataSources, name)) {
+ // If there's a duplicate name that doesn't end with '-x'
+ // we can add -1 to the name and be done.
+ if (!nameHasSuffix(name)) {
+ name = `${name}-1`;
+ } else {
+ // if there's a duplicate name that ends with '-x'
+ // we can try to increment the last digit until the name is unique
+
+ // remove the 'x' part and replace it with the new number
+ name = `${getNewName(name)}${incrementLastDigit(getLastDigit(name))}`;
+ }
+ }
+
+ return name;
+}
+
+function nameHasSuffix(name) {
+ return name.endsWith('-', name.length - 1);
+}
+
+function getLastDigit(name) {
+ return parseInt(name.slice(-1), 10);
+}
+
+function incrementLastDigit(digit) {
+ return isNaN(digit) ? 1 : digit + 1;
+}
+
+function getNewName(name) {
+ return name.slice(0, name.length - 1);
+}
diff --git a/public/app/features/datasources/state/reducers.ts b/public/app/features/datasources/state/reducers.ts
new file mode 100644
index 00000000000..9b84799dcea
--- /dev/null
+++ b/public/app/features/datasources/state/reducers.ts
@@ -0,0 +1,38 @@
+import { DataSource, DataSourcesState, Plugin } from 'app/types';
+import { Action, ActionTypes } from './actions';
+import { LayoutModes } from '../../../core/components/LayoutSelector/LayoutSelector';
+
+const initialState: DataSourcesState = {
+ dataSources: [] as DataSource[],
+ layoutMode: LayoutModes.Grid,
+ searchQuery: '',
+ dataSourcesCount: 0,
+ dataSourceTypes: [] as Plugin[],
+ dataSourceTypeSearchQuery: '',
+ hasFetched: false,
+};
+
+export const dataSourcesReducer = (state = initialState, action: Action): DataSourcesState => {
+ switch (action.type) {
+ case ActionTypes.LoadDataSources:
+ return { ...state, hasFetched: true, dataSources: action.payload, dataSourcesCount: action.payload.length };
+
+ case ActionTypes.SetDataSourcesSearchQuery:
+ return { ...state, searchQuery: action.payload };
+
+ case ActionTypes.SetDataSourcesLayoutMode:
+ return { ...state, layoutMode: action.payload };
+
+ case ActionTypes.LoadDataSourceTypes:
+ return { ...state, dataSourceTypes: action.payload };
+
+ case ActionTypes.SetDataSourceTypeSearchQuery:
+ return { ...state, dataSourceTypeSearchQuery: action.payload };
+ }
+
+ return state;
+};
+
+export default {
+ dataSources: dataSourcesReducer,
+};
diff --git a/public/app/features/datasources/state/selectors.ts b/public/app/features/datasources/state/selectors.ts
new file mode 100644
index 00000000000..80e1400114f
--- /dev/null
+++ b/public/app/features/datasources/state/selectors.ts
@@ -0,0 +1,19 @@
+export const getDataSources = state => {
+ const regex = new RegExp(state.searchQuery, 'i');
+
+ return state.dataSources.filter(dataSource => {
+ return regex.test(dataSource.name) || regex.test(dataSource.database);
+ });
+};
+
+export const getDataSourceTypes = state => {
+ const regex = new RegExp(state.dataSourceTypeSearchQuery, 'i');
+
+ return state.dataSourceTypes.filter(type => {
+ return regex.test(type.name);
+ });
+};
+
+export const getDataSourcesSearchQuery = state => state.searchQuery;
+export const getDataSourcesLayoutMode = state => state.layoutMode;
+export const getDataSourcesCount = state => state.dataSourcesCount;
diff --git a/public/app/containers/Explore/ElapsedTime.tsx b/public/app/features/explore/ElapsedTime.tsx
similarity index 100%
rename from public/app/containers/Explore/ElapsedTime.tsx
rename to public/app/features/explore/ElapsedTime.tsx
diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/features/explore/Explore.tsx
similarity index 68%
rename from public/app/containers/Explore/Explore.tsx
rename to public/app/features/explore/Explore.tsx
index 16175747a06..5a496ea910b 100644
--- a/public/app/containers/Explore/Explore.tsx
+++ b/public/app/features/explore/Explore.tsx
@@ -2,19 +2,24 @@ import React from 'react';
import { hot } from 'react-hot-loader';
import Select from 'react-select';
+import { ExploreState, ExploreUrlState, Query } from 'app/types/explore';
import kbn from 'app/core/utils/kbn';
import colors from 'app/core/utils/colors';
import store from 'app/core/store';
import TimeSeries from 'app/core/time_series2';
-import { decodePathComponent } from 'app/core/utils/location_util';
import { parse as parseDate } from 'app/core/utils/datemath';
+import { DEFAULT_RANGE } from 'app/core/utils/explore';
+import ResetStyles from 'app/core/components/Picker/ResetStyles';
+import PickerOption from 'app/core/components/Picker/PickerOption';
+import IndicatorsContainer from 'app/core/components/Picker/IndicatorsContainer';
+import NoOptionsMessage from 'app/core/components/Picker/NoOptionsMessage';
import ElapsedTime from './ElapsedTime';
import QueryRows from './QueryRows';
import Graph from './Graph';
import Logs from './Logs';
import Table from './Table';
-import TimePicker, { DEFAULT_RANGE } from './TimePicker';
+import TimePicker from './TimePicker';
import { ensureQueries, generateQueryKey, hasQuery } from './utils/query';
const MAX_HISTORY_ITEMS = 100;
@@ -47,101 +52,90 @@ function makeTimeSeriesList(dataList, options) {
});
}
-function parseUrlState(initial: string | undefined) {
- if (initial) {
- try {
- const parsed = JSON.parse(decodePathComponent(initial));
- return {
- datasource: parsed.datasource,
- queries: parsed.queries.map(q => q.query),
- range: parsed.range,
- };
- } catch (e) {
- console.error(e);
- }
- }
- return { datasource: null, queries: [], range: DEFAULT_RANGE };
+interface ExploreProps {
+ datasourceSrv: any;
+ onChangeSplit: (split: boolean, state?: ExploreState) => void;
+ onSaveState: (key: string, state: ExploreState) => void;
+ position: string;
+ split: boolean;
+ splitState?: ExploreState;
+ stateKey: string;
+ urlState: ExploreUrlState;
}
-interface ExploreState {
- datasource: any;
- datasourceError: any;
- datasourceLoading: boolean | null;
- datasourceMissing: boolean;
- graphResult: any;
- history: any[];
- initialDatasource?: string;
- latency: number;
- loading: any;
- logsResult: any;
- queries: any[];
- queryErrors: any[];
- queryHints: any[];
- range: any;
- requestOptions: any;
- showingGraph: boolean;
- showingLogs: boolean;
- showingTable: boolean;
- supportsGraph: boolean | null;
- supportsLogs: boolean | null;
- supportsTable: boolean | null;
- tableResult: any;
-}
-
-export class Explore extends React.Component {
+export class Explore extends React.PureComponent {
el: any;
+ /**
+ * Current query expressions of the rows including their modifications, used for running queries.
+ * Not kept in component state to prevent edit-render roundtrips.
+ */
+ queryExpressions: string[];
constructor(props) {
super(props);
- const initialState: ExploreState = props.initialState;
- const { datasource, queries, range } = parseUrlState(props.routeParams.state);
- this.state = {
- datasource: null,
- datasourceError: null,
- datasourceLoading: null,
- datasourceMissing: false,
- graphResult: null,
- initialDatasource: datasource,
- history: [],
- latency: 0,
- loading: false,
- logsResult: null,
- queries: ensureQueries(queries),
- queryErrors: [],
- queryHints: [],
- range: range || { ...DEFAULT_RANGE },
- requestOptions: null,
- showingGraph: true,
- showingLogs: true,
- showingTable: true,
- supportsGraph: null,
- supportsLogs: null,
- supportsTable: null,
- tableResult: null,
- ...initialState,
- };
+ const splitState: ExploreState = props.splitState;
+ let initialQueries: Query[];
+ if (splitState) {
+ // Split state overrides everything
+ this.state = splitState;
+ initialQueries = splitState.queries;
+ } else {
+ const { datasource, queries, range } = props.urlState as ExploreUrlState;
+ initialQueries = ensureQueries(queries);
+ this.state = {
+ datasource: null,
+ datasourceError: null,
+ datasourceLoading: null,
+ datasourceMissing: false,
+ datasourceName: datasource,
+ exploreDatasources: [],
+ graphResult: null,
+ history: [],
+ latency: 0,
+ loading: false,
+ logsResult: null,
+ queries: initialQueries,
+ queryErrors: [],
+ queryHints: [],
+ range: range || { ...DEFAULT_RANGE },
+ requestOptions: null,
+ showingGraph: true,
+ showingLogs: true,
+ showingTable: true,
+ supportsGraph: null,
+ supportsLogs: null,
+ supportsTable: null,
+ tableResult: null,
+ };
+ }
+ this.queryExpressions = initialQueries.map(q => q.query);
}
async componentDidMount() {
const { datasourceSrv } = this.props;
- const { initialDatasource } = this.state;
+ const { datasourceName } = this.state;
if (!datasourceSrv) {
throw new Error('No datasource service passed as props.');
}
const datasources = datasourceSrv.getExploreSources();
+ const exploreDatasources = datasources.map(ds => ({
+ value: ds.name,
+ label: ds.name,
+ }));
+
if (datasources.length > 0) {
- this.setState({ datasourceLoading: true });
+ this.setState({ datasourceLoading: true, exploreDatasources });
// Priority: datasource in url, default datasource, first explore datasource
let datasource;
- if (initialDatasource) {
- datasource = await datasourceSrv.get(initialDatasource);
+ if (datasourceName) {
+ datasource = await datasourceSrv.get(datasourceName);
} else {
datasource = await datasourceSrv.get();
}
if (!datasource.meta.explore) {
datasource = await datasourceSrv.get(datasources[0].name);
}
- this.setDatasource(datasource);
+ await this.setDatasource(datasource);
} else {
this.setState({ datasourceMissing: true });
}
@@ -174,9 +168,10 @@ export class Explore extends React.Component {
}
// Keep queries but reset edit state
- const nextQueries = this.state.queries.map(q => ({
+ const nextQueries = this.state.queries.map((q, i) => ({
...q,
- edited: false,
+ key: generateQueryKey(i),
+ query: this.queryExpressions[i],
}));
this.setState(
@@ -188,9 +183,14 @@ export class Explore extends React.Component {
supportsLogs,
supportsTable,
datasourceLoading: false,
+ datasourceName: datasource.name,
queries: nextQueries,
},
- () => datasourceError === null && this.onSubmit()
+ () => {
+ if (datasourceError === null) {
+ this.onSubmit();
+ }
+ }
);
}
@@ -200,6 +200,7 @@ export class Explore extends React.Component {
onAddQueryRow = index => {
const { queries } = this.state;
+ this.queryExpressions[index + 1] = '';
const nextQueries = [
...queries.slice(0, index + 1),
{ query: '', key: generateQueryKey() },
@@ -220,34 +221,34 @@ export class Explore extends React.Component {
queryHints: [],
tableResult: null,
});
- const datasource = await this.props.datasourceSrv.get(option.value);
+ const datasourceName = option.value;
+ const datasource = await this.props.datasourceSrv.get(datasourceName);
this.setDatasource(datasource);
};
onChangeQuery = (value: string, index: number, override?: boolean) => {
- const { queries } = this.state;
- let { queryErrors, queryHints } = this.state;
- const prevQuery = queries[index];
- const edited = override ? false : prevQuery.query !== value;
- const nextQuery = {
- ...queries[index],
- edited,
- query: value,
- };
- const nextQueries = [...queries];
- nextQueries[index] = nextQuery;
+ // Keep current value in local cache
+ this.queryExpressions[index] = value;
+
+ // Replace query row on override
if (override) {
- queryErrors = [];
- queryHints = [];
+ const { queries } = this.state;
+ const nextQuery: Query = {
+ key: generateQueryKey(index),
+ query: value,
+ };
+ const nextQueries = [...queries];
+ nextQueries[index] = nextQuery;
+
+ this.setState(
+ {
+ queryErrors: [],
+ queryHints: [],
+ queries: nextQueries,
+ },
+ this.onSubmit
+ );
}
- this.setState(
- {
- queryErrors,
- queryHints,
- queries: nextQueries,
- },
- override ? () => this.onSubmit() : undefined
- );
};
onChangeTime = nextRange => {
@@ -259,15 +260,19 @@ export class Explore extends React.Component {
};
onClickClear = () => {
- this.setState({
- graphResult: null,
- logsResult: null,
- latency: 0,
- queries: ensureQueries(),
- queryErrors: [],
- queryHints: [],
- tableResult: null,
- });
+ this.queryExpressions = [''];
+ this.setState(
+ {
+ graphResult: null,
+ logsResult: null,
+ latency: 0,
+ queries: ensureQueries(),
+ queryErrors: [],
+ queryHints: [],
+ tableResult: null,
+ },
+ this.saveState
+ );
};
onClickCloseSplit = () => {
@@ -287,9 +292,8 @@ export class Explore extends React.Component {
onClickSplit = () => {
const { onChangeSplit } = this.props;
- const state = { ...this.state };
- state.queries = state.queries.map(({ edited, ...rest }) => rest);
if (onChangeSplit) {
+ const state = this.cloneState();
onChangeSplit(true, state);
}
};
@@ -308,23 +312,22 @@ export class Explore extends React.Component {
let nextQueries;
if (index === undefined) {
// Modify all queries
- nextQueries = queries.map(q => ({
- ...q,
- edited: false,
- query: datasource.modifyQuery(q.query, action),
+ nextQueries = queries.map((q, i) => ({
+ key: generateQueryKey(i),
+ query: datasource.modifyQuery(this.queryExpressions[i], action),
}));
} else {
// Modify query only at index
nextQueries = [
...queries.slice(0, index),
{
- ...queries[index],
- edited: false,
- query: datasource.modifyQuery(queries[index].query, action),
+ key: generateQueryKey(index),
+ query: datasource.modifyQuery(this.queryExpressions[index], action),
},
...queries.slice(index + 1),
];
}
+ this.queryExpressions = nextQueries.map(q => q.query);
this.setState({ queries: nextQueries }, () => this.onSubmit());
}
};
@@ -335,6 +338,7 @@ export class Explore extends React.Component {
return;
}
const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)];
+ this.queryExpressions = nextQueries.map(q => q.query);
this.setState({ queries: nextQueries }, () => this.onSubmit());
};
@@ -349,9 +353,10 @@ export class Explore extends React.Component {
if (showingLogs && supportsLogs) {
this.runLogsQuery();
}
+ this.saveState();
};
- onQuerySuccess(datasourceId: string, queries: any[]): void {
+ onQuerySuccess(datasourceId: string, queries: string[]): void {
// save queries to history
let { history } = this.state;
const { datasource } = this.state;
@@ -362,8 +367,7 @@ export class Explore extends React.Component {
}
const ts = Date.now();
- queries.forEach(q => {
- const { query } = q;
+ queries.forEach(query => {
history = [{ query, ts }, ...history];
});
@@ -378,16 +382,16 @@ export class Explore extends React.Component {
}
buildQueryOptions(targetOptions: { format: string; hinting?: boolean; instant?: boolean }) {
- const { datasource, queries, range } = this.state;
+ const { datasource, range } = this.state;
const resolution = this.el.offsetWidth;
const absoluteRange = {
from: parseDate(range.from, false),
to: parseDate(range.to, true),
};
const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval);
- const targets = queries.map(q => ({
+ const targets = this.queryExpressions.map(q => ({
...targetOptions,
- expr: q.query,
+ expr: q,
}));
return {
interval,
@@ -397,7 +401,8 @@ export class Explore extends React.Component {
}
async runGraphQuery() {
- const { datasource, queries } = this.state;
+ const { datasource } = this.state;
+ const queries = [...this.queryExpressions];
if (!hasQuery(queries)) {
return;
}
@@ -419,7 +424,8 @@ export class Explore extends React.Component {
}
async runTableQuery() {
- const { datasource, queries } = this.state;
+ const queries = [...this.queryExpressions];
+ const { datasource } = this.state;
if (!hasQuery(queries)) {
return;
}
@@ -443,7 +449,8 @@ export class Explore extends React.Component {
}
async runLogsQuery() {
- const { datasource, queries } = this.state;
+ const queries = [...this.queryExpressions];
+ const { datasource } = this.state;
if (!hasQuery(queries)) {
return;
}
@@ -471,13 +478,27 @@ export class Explore extends React.Component {
return datasource.metadataRequest(url);
};
+ cloneState(): ExploreState {
+ // Copy state, but copy queries including modifications
+ return {
+ ...this.state,
+ queries: ensureQueries(this.queryExpressions.map(query => ({ query }))),
+ };
+ }
+
+ saveState = () => {
+ const { stateKey, onSaveState } = this.props;
+ onSaveState(stateKey, this.cloneState());
+ };
+
render() {
- const { datasourceSrv, position, split } = this.props;
+ const { position, split } = this.props;
const {
datasource,
datasourceError,
datasourceLoading,
datasourceMissing,
+ exploreDatasources,
graphResult,
history,
latency,
@@ -502,11 +523,7 @@ export class Explore extends React.Component {
const logsButtonActive = showingLogs ? 'active' : '';
const tableButtonActive = showingBoth || showingTable ? 'active' : '';
const exploreClass = split ? 'explore explore-split' : 'explore';
- const datasources = datasourceSrv.getExploreSources().map(ds => ({
- value: ds.name,
- label: ds.name,
- }));
- const selectedDatasource = datasource ? datasource.name : undefined;
+ const selectedDatasource = datasource ? exploreDatasources.find(d => d.label === datasource.name) : undefined;
return (
@@ -528,12 +545,23 @@ export class Explore extends React.Component
{
{!datasourceMissing ? (
'Loading datasources...'}
+ noOptionsMessage={() => 'No datasources found'}
value={selectedDatasource}
+ components={{
+ Option: PickerOption,
+ IndicatorsContainer,
+ NoOptionsMessage,
+ }}
/>
) : null}
@@ -582,36 +610,39 @@ export class Explore extends React.Component {
onClickHintFix={this.onModifyQueries}
onExecuteQuery={this.onSubmit}
onRemoveQueryRow={this.onRemoveQueryRow}
+ supportsLogs={supportsLogs}
/>
{supportsGraph ? (
-
+
Graph
) : null}
{supportsTable ? (
-
+
Table
) : null}
{supportsLogs ? (
-
+
Logs
) : null}
- {supportsGraph && showingGraph ? (
-
- ) : null}
+ {supportsGraph &&
+ showingGraph &&
+ graphResult && (
+
+ )}
{supportsTable && showingTable ? (
) : null}
diff --git a/public/app/features/explore/Graph.test.tsx b/public/app/features/explore/Graph.test.tsx
new file mode 100644
index 00000000000..043b843f0a5
--- /dev/null
+++ b/public/app/features/explore/Graph.test.tsx
@@ -0,0 +1,60 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { Graph } from './Graph';
+import { mockData } from './__mocks__/mockData';
+
+const setup = (propOverrides?: object) => {
+ const props = Object.assign(
+ {
+ data: mockData().slice(0, 19),
+ options: {
+ interval: '20s',
+ range: { from: 'now-6h', to: 'now' },
+ targets: [
+ {
+ format: 'time_series',
+ instant: false,
+ hinting: true,
+ expr: 'prometheus_http_request_duration_seconds_bucket',
+ },
+ ],
+ },
+ },
+ propOverrides
+ );
+
+ // Enzyme.shallow did not work well with jquery.flop. Mocking the draw function.
+ Graph.prototype.draw = jest.fn();
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as Graph;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render component with disclaimer', () => {
+ const { wrapper } = setup({
+ data: mockData(),
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should show query return no time series', () => {
+ const { wrapper } = setup({
+ data: [],
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/containers/Explore/Graph.tsx b/public/app/features/explore/Graph.tsx
similarity index 57%
rename from public/app/containers/Explore/Graph.tsx
rename to public/app/features/explore/Graph.tsx
index 9243f612466..cb5def07fab 100644
--- a/public/app/containers/Explore/Graph.tsx
+++ b/public/app/features/explore/Graph.tsx
@@ -1,6 +1,7 @@
import $ from 'jquery';
-import React, { Component } from 'react';
+import React, { PureComponent } from 'react';
import moment from 'moment';
+import { withSize } from 'react-sizeme';
import 'vendor/flot/jquery.flot';
import 'vendor/flot/jquery.flot.time';
@@ -9,6 +10,8 @@ import TimeSeries from 'app/core/time_series2';
import Legend from './Legend';
+const MAX_NUMBER_OF_TIME_SERIES = 20;
+
// Copied from graph.ts
function time_format(ticks, min, max) {
if (min && max && ticks) {
@@ -66,24 +69,60 @@ const FLOT_OPTIONS = {
// },
};
-class Graph extends Component {
+interface GraphProps {
+ data: any[];
+ height?: string; // e.g., '200px'
+ id?: string;
+ loading?: boolean;
+ options: any;
+ split?: boolean;
+ size?: { width: number; height: number };
+}
+
+interface GraphState {
+ showAllTimeSeries: boolean;
+}
+
+export class Graph extends PureComponent {
+ state = {
+ showAllTimeSeries: false,
+ };
+
+ getGraphData() {
+ const { data } = this.props;
+
+ return this.state.showAllTimeSeries ? data : data.slice(0, MAX_NUMBER_OF_TIME_SERIES);
+ }
+
componentDidMount() {
this.draw();
}
- componentDidUpdate(prevProps) {
+ componentDidUpdate(prevProps: GraphProps) {
if (
prevProps.data !== this.props.data ||
prevProps.options !== this.props.options ||
prevProps.split !== this.props.split ||
- prevProps.height !== this.props.height
+ prevProps.height !== this.props.height ||
+ (prevProps.size && prevProps.size.width !== this.props.size.width)
) {
this.draw();
}
}
+ onShowAllTimeSeries = () => {
+ this.setState(
+ {
+ showAllTimeSeries: true,
+ },
+ this.draw
+ );
+ };
+
draw() {
- const { data, options: userOptions } = this.props;
+ const { options: userOptions, size } = this.props;
+ const data = this.getGraphData();
+
const $el = $(`#${this.props.id}`);
if (!data) {
$el.empty();
@@ -95,7 +134,7 @@ class Graph extends Component {
data: ts.getFlotPairs('null'),
}));
- const ticks = $el.width() / 100;
+ const ticks = (size.width || 0) / 100;
let { from, to } = userOptions.range;
if (!moment.isMoment(from)) {
from = dateMath.parse(from, false);
@@ -124,8 +163,10 @@ class Graph extends Component {
}
render() {
- const { data, height, loading } = this.props;
- if (!loading && data && data.length === 0) {
+ const { height = '100px', id = 'graph', loading = false } = this.props;
+ const data = this.getGraphData();
+
+ if (!loading && data.length === 0) {
return (
The queries returned no time series to graph.
@@ -133,12 +174,24 @@ class Graph extends Component
{
);
}
return (
-
-
-
+
+ {this.props.data.length > MAX_NUMBER_OF_TIME_SERIES &&
+ !this.state.showAllTimeSeries && (
+
+
+ {`Showing only ${MAX_NUMBER_OF_TIME_SERIES} time series. `}
+ {`Show all ${
+ this.props.data.length
+ }`}
+
+ )}
+
);
}
}
-export default Graph;
+export default withSize()(Graph);
diff --git a/public/app/containers/Explore/JSONViewer.tsx b/public/app/features/explore/JSONViewer.tsx
similarity index 100%
rename from public/app/containers/Explore/JSONViewer.tsx
rename to public/app/features/explore/JSONViewer.tsx
diff --git a/public/app/containers/Explore/Legend.tsx b/public/app/features/explore/Legend.tsx
similarity index 85%
rename from public/app/containers/Explore/Legend.tsx
rename to public/app/features/explore/Legend.tsx
index e00932fe566..439b6c3e54f 100644
--- a/public/app/containers/Explore/Legend.tsx
+++ b/public/app/features/explore/Legend.tsx
@@ -5,7 +5,9 @@ const LegendItem = ({ series }) => (
-
{series.alias}
+
+ {series.alias}
+
);
diff --git a/public/app/containers/Explore/Logs.tsx b/public/app/features/explore/Logs.tsx
similarity index 50%
rename from public/app/containers/Explore/Logs.tsx
rename to public/app/features/explore/Logs.tsx
index ae2d5e2daa6..cc8f9be48fd 100644
--- a/public/app/containers/Explore/Logs.tsx
+++ b/public/app/features/explore/Logs.tsx
@@ -1,6 +1,8 @@
import React, { Fragment, PureComponent } from 'react';
+import Highlighter from 'react-highlight-words';
-import { LogsModel, LogRow } from 'app/core/logs_model';
+import { LogsModel } from 'app/core/logs_model';
+import { findHighlightChunksInText } from 'app/core/utils/text';
interface LogsProps {
className?: string;
@@ -10,34 +12,7 @@ interface LogsProps {
const EXAMPLE_QUERY = '{job="default/prometheus"}';
-const Entry: React.SFC = props => {
- const { entry, searchMatches } = props;
- if (searchMatches && searchMatches.length > 0) {
- let lastMatchEnd = 0;
- const spans = searchMatches.reduce((acc, match, i) => {
- // Insert non-match
- if (match.start !== lastMatchEnd) {
- acc.push(<>{entry.slice(lastMatchEnd, match.start)}>);
- }
- // Match
- acc.push(
-
- {entry.substr(match.start, match.length)}
-
- );
- lastMatchEnd = match.start + match.length;
- // Non-matching end
- if (i === searchMatches.length - 1) {
- acc.push(<>{entry.slice(lastMatchEnd)}>);
- }
- return acc;
- }, []);
- return <>{spans}>;
- }
- return <>{props.entry}>;
-};
-
-export default class Logs extends PureComponent {
+export default class Logs extends PureComponent {
render() {
const { className = '', data } = this.props;
const hasData = data && data.rows && data.rows.length > 0;
@@ -50,7 +25,12 @@ export default class Logs extends PureComponent {
{row.timeLocal}
-
+
))}
diff --git a/public/app/containers/Explore/PromQueryField.test.tsx b/public/app/features/explore/PromQueryField.test.tsx
similarity index 100%
rename from public/app/containers/Explore/PromQueryField.test.tsx
rename to public/app/features/explore/PromQueryField.test.tsx
diff --git a/public/app/containers/Explore/PromQueryField.tsx b/public/app/features/explore/PromQueryField.tsx
similarity index 77%
rename from public/app/containers/Explore/PromQueryField.tsx
rename to public/app/features/explore/PromQueryField.tsx
index 0991f08429a..889666c5e35 100644
--- a/public/app/containers/Explore/PromQueryField.tsx
+++ b/public/app/features/explore/PromQueryField.tsx
@@ -3,10 +3,11 @@ import moment from 'moment';
import React from 'react';
import { Value } from 'slate';
import Cascader from 'rc-cascader';
+import PluginPrism from 'slate-prism';
+import Prism from 'prismjs';
// dom also includes Element polyfills
import { getNextCharacter, getPreviousCousin } from './utils/dom';
-import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index';
import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql';
import BracesPlugin from './slate-plugins/braces';
import RunnerPlugin from './slate-plugins/runner';
@@ -27,7 +28,7 @@ const HISTOGRAM_SELECTOR = '{le!=""}'; // Returns all timeseries for histograms
const HISTORY_ITEM_COUNT = 5;
const HISTORY_COUNT_CUTOFF = 1000 * 60 * 60 * 24; // 24h
const METRIC_MARK = 'metric';
-const PRISM_LANGUAGE = 'promql';
+const PRISM_SYNTAX = 'promql';
export const RECORDING_RULES_GROUP = '__recording_rules__';
export const wrapLabel = (label: string) => ({ label });
@@ -36,6 +37,15 @@ export const setFunctionMove = (suggestion: Suggestion): Suggestion => {
return suggestion;
};
+// Syntax highlighting
+Prism.languages[PRISM_SYNTAX] = PrismPromql;
+function setPrismTokens(language, field, values, alias = 'variable') {
+ Prism.languages[language][field] = {
+ alias,
+ pattern: new RegExp(`(?:^|\\s)(${values.join('|')})(?:$|\\s)`),
+ };
+}
+
export function addHistoryMetadata(item: Suggestion, history: any[]): Suggestion {
const cutoffTs = Date.now() - HISTORY_COUNT_CUTOFF;
const historyForItem = history.filter(h => h.ts > cutoffTs && h.query === item.label);
@@ -135,16 +145,20 @@ interface PromQueryFieldProps {
onClickHintFix?: (action: any) => void;
onPressEnter?: () => void;
onQueryChange?: (value: string, override?: boolean) => void;
- portalPrefix?: string;
+ portalOrigin?: string;
request?: (url: string) => any;
+ supportsLogs?: boolean; // To be removed after Logging gets its own query field
}
interface PromQueryFieldState {
histogramMetrics: string[];
labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...]
labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
+ logLabelOptions: any[];
metrics: string[];
+ metricsOptions: any[];
metricsByPrefix: CascaderOption[];
+ syntaxLoaded: boolean;
}
interface PromTypeaheadInput {
@@ -155,7 +169,7 @@ interface PromTypeaheadInput {
value?: Value;
}
-class PromQueryField extends React.Component {
+class PromQueryField extends React.PureComponent {
plugins: any[];
constructor(props: PromQueryFieldProps, context) {
@@ -164,23 +178,53 @@ class PromQueryField extends React.Component node.type === 'code_block',
+ getSyntax: node => 'promql',
+ }),
];
this.state = {
histogramMetrics: props.histogramMetrics || [],
labelKeys: props.labelKeys || {},
labelValues: props.labelValues || {},
+ logLabelOptions: [],
metrics: props.metrics || [],
metricsByPrefix: props.metricsByPrefix || [],
+ metricsOptions: [],
+ syntaxLoaded: false,
};
}
componentDidMount() {
- this.fetchMetricNames();
- this.fetchHistogramMetrics();
+ // Temporarily reused by logging
+ const { supportsLogs } = this.props;
+ if (supportsLogs) {
+ this.fetchLogLabels();
+ } else {
+ // Usual actions
+ this.fetchMetricNames();
+ this.fetchHistogramMetrics();
+ }
}
+ onChangeLogLabels = (values: string[], selectedOptions: CascaderOption[]) => {
+ let query;
+ if (selectedOptions.length === 1) {
+ if (selectedOptions[0].children.length === 0) {
+ query = selectedOptions[0].value;
+ } else {
+ // Ignore click on group
+ return;
+ }
+ } else {
+ const key = selectedOptions[0].value;
+ const value = selectedOptions[1].value;
+ query = `{${key}="${value}"}`;
+ }
+ this.onChangeQuery(query, true);
+ };
+
onChangeMetrics = (values: string[], selectedOptions: CascaderOption[]) => {
let query;
if (selectedOptions.length === 1) {
@@ -218,10 +262,22 @@ class PromQueryField extends React.Component {
- if (!this.state.metrics) {
+ const { histogramMetrics, metrics, metricsByPrefix } = this.state;
+ if (!metrics) {
return;
}
- setPrismTokens(PRISM_LANGUAGE, METRIC_MARK, this.state.metrics);
+
+ // Update global prism config
+ setPrismTokens(PRISM_SYNTAX, METRIC_MARK, metrics);
+
+ // Build metrics tree
+ const histogramOptions = histogramMetrics.map(hm => ({ label: hm, value: hm }));
+ const metricsOptions = [
+ { label: 'Histograms', value: HISTOGRAM_GROUP, children: histogramOptions },
+ ...metricsByPrefix,
+ ];
+
+ this.setState({ metricsOptions, syntaxLoaded: true });
};
onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => {
@@ -242,6 +298,8 @@ class PromQueryField extends React.Component 3;
// Determine candidates by CSS context
if (_.includes(wrapperClasses, 'context-range')) {
// Suggestions for metric[|]
@@ -252,10 +310,10 @@ class PromQueryField extends React.Component this.fetchLabelValues(key)));
@@ -410,11 +469,43 @@ class PromQueryField extends React.Component ({ label: value, value })),
+ });
+ }
+ const labelValues = { [EMPTY_SELECTOR]: labelValuesByKey };
+ this.setState({ labelKeys: labelKeysBySelector, labelValues, logLabelOptions });
+ } catch (e) {
+ console.error(e);
+ }
+ }
+
async fetchLabelValues(key: string) {
const url = `/api/v1/label/${key}/values`;
try {
@@ -469,31 +560,34 @@ class PromQueryField extends React.Component ({ label: hm, value: hm }));
- const metricsOptions = [
- { label: 'Histograms', value: HISTOGRAM_GROUP, children: histogramOptions },
- ...metricsByPrefix,
- ];
+ const { error, hint, initialQuery, supportsLogs } = this.props;
+ const { logLabelOptions, metricsOptions, syntaxLoaded } = this.state;
return (
-
- Metrics
-
+ {supportsLogs ? (
+
+ Log labels
+
+ ) : (
+
+ Metrics
+
+ )}
{error ?
{error}
: null}
diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/features/explore/QueryField.tsx
similarity index 81%
rename from public/app/containers/Explore/QueryField.tsx
rename to public/app/features/explore/QueryField.tsx
index 52bfbc7fed4..c89893b4f28 100644
--- a/public/app/containers/Explore/QueryField.tsx
+++ b/public/app/features/explore/QueryField.tsx
@@ -1,7 +1,7 @@
import _ from 'lodash';
import React from 'react';
import ReactDOM from 'react-dom';
-import { Block, Change, Document, Text, Value } from 'slate';
+import { Change, Value } from 'slate';
import { Editor } from 'slate-react';
import Plain from 'slate-plain-serializer';
@@ -9,28 +9,20 @@ import ClearPlugin from './slate-plugins/clear';
import NewlinePlugin from './slate-plugins/newline';
import Typeahead from './Typeahead';
+import { makeFragment, makeValue } from './Value';
-export const TYPEAHEAD_DEBOUNCE = 300;
+export const TYPEAHEAD_DEBOUNCE = 100;
-function flattenSuggestions(s: any[]): any[] {
- return s ? s.reduce((acc, g) => acc.concat(g.items), []) : [];
+function getSuggestionByIndex(suggestions: SuggestionGroup[], index: number): Suggestion {
+ // Flatten suggestion groups
+ const flattenedSuggestions = suggestions.reduce((acc, g) => acc.concat(g.items), []);
+ const correctedIndex = Math.max(index, 0) % flattenedSuggestions.length;
+ return flattenedSuggestions[correctedIndex];
}
-export const makeFragment = (text: string): Document => {
- const lines = text.split('\n').map(line =>
- Block.create({
- type: 'paragraph',
- nodes: [Text.create(line)],
- })
- );
-
- const fragment = Document.create({
- nodes: lines,
- });
- return fragment;
-};
-
-export const getInitialValue = (value: string): Value => Value.create({ document: makeFragment(value) });
+function hasSuggestions(suggestions: SuggestionGroup[]): boolean {
+ return suggestions && suggestions.length > 0;
+}
export interface Suggestion {
/**
@@ -112,7 +104,9 @@ interface TypeaheadFieldProps {
onValueChanged?: (value: Value) => void;
onWillApplySuggestion?: (suggestion: string, state: TypeaheadFieldState) => string;
placeholder?: string;
- portalPrefix?: string;
+ portalOrigin?: string;
+ syntax?: string;
+ syntaxLoaded?: boolean;
}
export interface TypeaheadFieldState {
@@ -139,7 +133,7 @@ export interface TypeaheadOutput {
suggestions: SuggestionGroup[];
}
-class QueryField extends React.Component
{
+class QueryField extends React.PureComponent {
menuEl: HTMLElement | null;
plugins: any[];
resetTimer: any;
@@ -156,7 +150,7 @@ class QueryField extends React.Component {
- const changed = value.document !== this.state.value.document;
+ const textChanged = value.document !== this.state.value.document;
+
+ // Control editor loop, then pass text change up to parent
this.setState({ value }, () => {
- if (changed) {
+ if (textChanged) {
this.handleChangeValue();
}
});
- if (changed) {
+ // Show suggest menu on text input
+ if (textChanged && value.selection.isCollapsed) {
+ // Need one paint to allow DOM-based typeahead rules to work
window.requestAnimationFrame(this.handleTypeahead);
+ } else {
+ this.resetTypeahead();
}
};
@@ -230,7 +241,7 @@ class QueryField extends React.Component {
if (group.items) {
if (prefix) {
@@ -255,6 +266,11 @@ class QueryField extends React.Component group.items && group.items.length > 0); // Filter out empty groups
+ // Keep same object for equality checking later
+ if (_.isEqual(filteredSuggestions, this.state.suggestions)) {
+ filteredSuggestions = this.state.suggestions;
+ }
+
this.setState(
{
suggestions: filteredSuggestions,
@@ -272,7 +288,7 @@ class QueryField extends React.Component 0;
- if (!hasSuggesstions) {
+ if (!hasSuggestions(suggestions)) {
menu.removeAttribute('style');
return;
}
@@ -449,27 +459,22 @@ class QueryField extends React.Component {
- const { portalPrefix } = this.props;
- const { suggestions } = this.state;
- const hasSuggesstions = suggestions && suggestions.length > 0;
- if (!hasSuggesstions) {
+ const { portalOrigin } = this.props;
+ const { suggestions, typeaheadIndex, typeaheadPrefix } = this.state;
+ if (!hasSuggestions(suggestions)) {
return null;
}
- // Guard selectedIndex to be within the length of the suggestions
- let selectedIndex = Math.max(this.state.typeaheadIndex, 0);
- const flattenedSuggestions = flattenSuggestions(suggestions);
- selectedIndex = selectedIndex % flattenedSuggestions.length || 0;
- const selectedItem: Suggestion | null =
- flattenedSuggestions.length > 0 ? flattenedSuggestions[selectedIndex] : null;
+ const selectedItem = getSuggestionByIndex(suggestions, typeaheadIndex);
// Create typeahead in DOM root so we can later position it absolutely
return (
-
+
@@ -496,14 +501,14 @@ class QueryField extends React.Component {
+class Portal extends React.PureComponent<{ index?: number; origin: string }, {}> {
node: HTMLElement;
constructor(props) {
super(props);
- const { index = 0, prefix = 'query' } = props;
+ const { index = 0, origin = 'query' } = props;
this.node = document.createElement('div');
- this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`);
+ this.node.classList.add(`slate-typeahead`, `slate-typeahead-${origin}-${index}`);
document.body.appendChild(this.node);
}
diff --git a/public/app/containers/Explore/QueryRows.tsx b/public/app/features/explore/QueryRows.tsx
similarity index 89%
rename from public/app/containers/Explore/QueryRows.tsx
rename to public/app/features/explore/QueryRows.tsx
index a7d91d59033..3d71c2f3566 100644
--- a/public/app/containers/Explore/QueryRows.tsx
+++ b/public/app/features/explore/QueryRows.tsx
@@ -44,20 +44,20 @@ class QueryRow extends PureComponent {
};
render() {
- const { edited, history, query, queryError, queryHint, request } = this.props;
+ const { history, query, queryError, queryHint, request, supportsLogs } = this.props;
return (
@@ -78,7 +78,7 @@ class QueryRow extends PureComponent
{
export default class QueryRows extends PureComponent {
render() {
- const { className = '', queries, queryErrors = [], queryHints = [], ...handlers } = this.props;
+ const { className = '', queries, queryErrors, queryHints, ...handlers } = this.props;
return (
{queries.map((q, index) => (
@@ -88,7 +88,6 @@ export default class QueryRows extends PureComponent
{
query={q.query}
queryError={queryErrors[index]}
queryHint={queryHints[index]}
- edited={q.edited}
{...handlers}
/>
))}
diff --git a/public/app/containers/Explore/Table.tsx b/public/app/features/explore/Table.tsx
similarity index 100%
rename from public/app/containers/Explore/Table.tsx
rename to public/app/features/explore/Table.tsx
diff --git a/public/app/containers/Explore/TimePicker.test.tsx b/public/app/features/explore/TimePicker.test.tsx
similarity index 100%
rename from public/app/containers/Explore/TimePicker.test.tsx
rename to public/app/features/explore/TimePicker.test.tsx
diff --git a/public/app/containers/Explore/TimePicker.tsx b/public/app/features/explore/TimePicker.tsx
similarity index 99%
rename from public/app/containers/Explore/TimePicker.tsx
rename to public/app/features/explore/TimePicker.tsx
index 08867f8d0fc..f9c740073d0 100644
--- a/public/app/containers/Explore/TimePicker.tsx
+++ b/public/app/features/explore/TimePicker.tsx
@@ -5,7 +5,6 @@ import * as dateMath from 'app/core/utils/datemath';
import * as rangeUtil from 'app/core/utils/rangeutil';
const DATE_FORMAT = 'YYYY-MM-DD HH:mm:ss';
-
export const DEFAULT_RANGE = {
from: 'now-6h',
to: 'now',
diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/features/explore/Typeahead.tsx
similarity index 72%
rename from public/app/containers/Explore/Typeahead.tsx
rename to public/app/features/explore/Typeahead.tsx
index 9924488035c..0c01cbe01ba 100644
--- a/public/app/containers/Explore/Typeahead.tsx
+++ b/public/app/features/explore/Typeahead.tsx
@@ -1,4 +1,5 @@
import React from 'react';
+import Highlighter from 'react-highlight-words';
import { Suggestion, SuggestionGroup } from './QueryField';
@@ -16,6 +17,7 @@ interface TypeaheadItemProps {
isSelected: boolean;
item: Suggestion;
onClickItem: (Suggestion) => void;
+ prefix?: string;
}
class TypeaheadItem extends React.PureComponent {
@@ -23,7 +25,9 @@ class TypeaheadItem extends React.PureComponent {
componentDidUpdate(prevProps) {
if (this.props.isSelected && !prevProps.isSelected) {
- scrollIntoView(this.el);
+ requestAnimationFrame(() => {
+ scrollIntoView(this.el);
+ });
}
}
@@ -36,11 +40,12 @@ class TypeaheadItem extends React.PureComponent {
};
render() {
- const { isSelected, item } = this.props;
+ const { isSelected, item, prefix } = this.props;
const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item';
+ const { label } = item;
return (
- {item.detail || item.label}
+
{item.documentation && isSelected ? {item.documentation}
: null}
);
@@ -52,18 +57,25 @@ interface TypeaheadGroupProps {
label: string;
onClickItem: (Suggestion) => void;
selected: Suggestion;
+ prefix?: string;
}
class TypeaheadGroup extends React.PureComponent {
render() {
- const { items, label, selected, onClickItem } = this.props;
+ const { items, label, selected, onClickItem, prefix } = this.props;
return (
{label}
{items.map(item => {
return (
-
+
);
})}
@@ -77,14 +89,15 @@ interface TypeaheadProps {
menuRef: any;
selectedItem: Suggestion | null;
onClickItem: (Suggestion) => void;
+ prefix?: string;
}
class Typeahead extends React.PureComponent {
render() {
- const { groupedItems, menuRef, selectedItem, onClickItem } = this.props;
+ const { groupedItems, menuRef, selectedItem, onClickItem, prefix } = this.props;
return (
{groupedItems.map(g => (
-
+
))}
);
diff --git a/public/app/features/explore/Value.ts b/public/app/features/explore/Value.ts
new file mode 100644
index 00000000000..48ee0060a2d
--- /dev/null
+++ b/public/app/features/explore/Value.ts
@@ -0,0 +1,41 @@
+import { Block, Document, Text, Value } from 'slate';
+
+const SCHEMA = {
+ blocks: {
+ paragraph: 'paragraph',
+ codeblock: 'code_block',
+ codeline: 'code_line',
+ },
+ inlines: {},
+ marks: {},
+};
+
+export const makeFragment = (text: string, syntax?: string) => {
+ const lines = text.split('\n').map(line =>
+ Block.create({
+ type: 'code_line',
+ nodes: [Text.create(line)],
+ })
+ );
+
+ const block = Block.create({
+ data: {
+ syntax,
+ },
+ type: 'code_block',
+ nodes: lines,
+ });
+
+ return Document.create({
+ nodes: [block],
+ });
+};
+
+export const makeValue = (text: string, syntax?: string) => {
+ const fragment = makeFragment(text, syntax);
+
+ return Value.create({
+ document: fragment,
+ SCHEMA,
+ });
+};
diff --git a/public/app/features/explore/Wrapper.tsx b/public/app/features/explore/Wrapper.tsx
new file mode 100644
index 00000000000..7e07aafbf6d
--- /dev/null
+++ b/public/app/features/explore/Wrapper.tsx
@@ -0,0 +1,100 @@
+import React, { Component } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+
+import { updateLocation } from 'app/core/actions';
+import { serializeStateToUrlParam, parseUrlState } from 'app/core/utils/explore';
+import { StoreState } from 'app/types';
+import { ExploreState } from 'app/types/explore';
+
+import Explore from './Explore';
+
+interface WrapperProps {
+ backendSrv?: any;
+ datasourceSrv?: any;
+ updateLocation: typeof updateLocation;
+ urlStates: { [key: string]: string };
+}
+
+interface WrapperState {
+ split: boolean;
+ splitState: ExploreState;
+}
+
+const STATE_KEY_LEFT = 'state';
+const STATE_KEY_RIGHT = 'stateRight';
+
+export class Wrapper extends Component {
+ urlStates: { [key: string]: string };
+
+ constructor(props: WrapperProps) {
+ super(props);
+ this.urlStates = props.urlStates;
+ this.state = {
+ split: Boolean(props.urlStates[STATE_KEY_RIGHT]),
+ splitState: undefined,
+ };
+ }
+
+ onChangeSplit = (split: boolean, splitState: ExploreState) => {
+ this.setState({ split, splitState });
+ // When closing split, remove URL state for split part
+ if (!split) {
+ delete this.urlStates[STATE_KEY_RIGHT];
+ this.props.updateLocation({
+ query: this.urlStates,
+ });
+ }
+ };
+
+ onSaveState = (key: string, state: ExploreState) => {
+ const urlState = serializeStateToUrlParam(state, true);
+ this.urlStates[key] = urlState;
+ this.props.updateLocation({
+ query: this.urlStates,
+ });
+ };
+
+ render() {
+ const { datasourceSrv } = this.props;
+ // State overrides for props from first Explore
+ const { split, splitState } = this.state;
+ const urlStateLeft = parseUrlState(this.urlStates[STATE_KEY_LEFT]);
+ const urlStateRight = parseUrlState(this.urlStates[STATE_KEY_RIGHT]);
+ return (
+
+
+ {split && (
+
+ )}
+
+ );
+ }
+}
+
+const mapStateToProps = (state: StoreState) => ({
+ urlStates: state.location.query,
+});
+
+const mapDispatchToProps = {
+ updateLocation,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(Wrapper));
diff --git a/public/app/features/explore/__mocks__/mockData.ts b/public/app/features/explore/__mocks__/mockData.ts
new file mode 100644
index 00000000000..2e89ded29cf
--- /dev/null
+++ b/public/app/features/explore/__mocks__/mockData.ts
@@ -0,0 +1,274 @@
+export const mockData = () => {
+ return [
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '+Inf',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.1',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.2',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.4',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '1',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '120',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '20',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '3',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '60',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/label/:name/values',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '8',
+ },
+ values: [[1537858100, '16'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '+Inf',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.1',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.4',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '1',
+ },
+ values: [[1537847900, '953'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '120',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '20',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '3',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '60',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/metrics',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '8',
+ },
+ values: [[1537858060, '1195'], [1537858080, '1195'], [1537858100, '1195']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '+Inf',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.1',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.2',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '0.4',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '1',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '120',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '20',
+ },
+ values: [[1537858100, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ {
+ metric: {
+ __name__: 'prometheus_http_request_duration_seconds_bucket',
+ handler: '/query',
+ instance: 'localhost:9090',
+ job: 'prometheus',
+ le: '3',
+ },
+ values: [[1537857260, '55'], [1537861960, '1'], [1537861980, '1']],
+ },
+ ];
+};
diff --git a/public/app/features/explore/__snapshots__/Graph.test.tsx.snap b/public/app/features/explore/__snapshots__/Graph.test.tsx.snap
new file mode 100644
index 00000000000..d6760dff59c
--- /dev/null
+++ b/public/app/features/explore/__snapshots__/Graph.test.tsx.snap
@@ -0,0 +1,972 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
+
+exports[`Render should render component with disclaimer 1`] = `
+
+
+
+ Showing only 20 time series.
+
+ Show all 27
+
+
+
+
+`;
+
+exports[`Render should show query return no time series 1`] = `
+
+
+ The queries returned no time series to graph.
+
+
+`;
diff --git a/public/app/containers/Explore/slate-plugins/braces.test.ts b/public/app/features/explore/slate-plugins/braces.test.ts
similarity index 73%
rename from public/app/containers/Explore/slate-plugins/braces.test.ts
rename to public/app/features/explore/slate-plugins/braces.test.ts
index dda805c07f7..410334d020f 100644
--- a/public/app/containers/Explore/slate-plugins/braces.test.ts
+++ b/public/app/features/explore/slate-plugins/braces.test.ts
@@ -53,4 +53,22 @@ describe('braces', () => {
handler(event, change);
expect(Plain.serialize(change.value)).toEqual('sum(rate(metric{namespace="dev", cluster="c1"}[2m]))');
});
+
+ it('removes closing brace when opening brace is removed', () => {
+ const change = Plain.deserialize('time()').change();
+ let event;
+ change.move(5);
+ event = new window.KeyboardEvent('keydown', { key: 'Backspace' });
+ handler(event, change);
+ expect(Plain.serialize(change.value)).toEqual('time');
+ });
+
+ it('keeps closing brace when opening brace is removed and inner values exist', () => {
+ const change = Plain.deserialize('time(value)').change();
+ let event;
+ change.move(5);
+ event = new window.KeyboardEvent('keydown', { key: 'Backspace' });
+ const handled = handler(event, change);
+ expect(handled).toBeFalsy();
+ });
});
diff --git a/public/app/containers/Explore/slate-plugins/braces.ts b/public/app/features/explore/slate-plugins/braces.ts
similarity index 70%
rename from public/app/containers/Explore/slate-plugins/braces.ts
rename to public/app/features/explore/slate-plugins/braces.ts
index 2ea58569ef0..f3a76263ad6 100644
--- a/public/app/containers/Explore/slate-plugins/braces.ts
+++ b/public/app/features/explore/slate-plugins/braces.ts
@@ -43,6 +43,22 @@ export default function BracesPlugin() {
return true;
}
+ case 'Backspace': {
+ const text = value.anchorText.text;
+ const offset = value.anchorOffset;
+ const previousChar = text[offset - 1];
+ const nextChar = text[offset];
+ if (BRACES[previousChar] && BRACES[previousChar] === nextChar) {
+ event.preventDefault();
+ // Remove closing brace if directly following
+ change
+ .deleteBackward()
+ .deleteForward()
+ .focus();
+ return true;
+ }
+ }
+
default: {
break;
}
diff --git a/public/app/containers/Explore/slate-plugins/clear.test.ts b/public/app/features/explore/slate-plugins/clear.test.ts
similarity index 100%
rename from public/app/containers/Explore/slate-plugins/clear.test.ts
rename to public/app/features/explore/slate-plugins/clear.test.ts
diff --git a/public/app/containers/Explore/slate-plugins/clear.ts b/public/app/features/explore/slate-plugins/clear.ts
similarity index 100%
rename from public/app/containers/Explore/slate-plugins/clear.ts
rename to public/app/features/explore/slate-plugins/clear.ts
diff --git a/public/app/containers/Explore/slate-plugins/newline.ts b/public/app/features/explore/slate-plugins/newline.ts
similarity index 100%
rename from public/app/containers/Explore/slate-plugins/newline.ts
rename to public/app/features/explore/slate-plugins/newline.ts
diff --git a/public/app/containers/Explore/slate-plugins/prism/promql.ts b/public/app/features/explore/slate-plugins/prism/promql.ts
similarity index 100%
rename from public/app/containers/Explore/slate-plugins/prism/promql.ts
rename to public/app/features/explore/slate-plugins/prism/promql.ts
diff --git a/public/app/containers/Explore/slate-plugins/runner.ts b/public/app/features/explore/slate-plugins/runner.ts
similarity index 100%
rename from public/app/containers/Explore/slate-plugins/runner.ts
rename to public/app/features/explore/slate-plugins/runner.ts
diff --git a/public/app/containers/Explore/utils/debounce.ts b/public/app/features/explore/utils/debounce.ts
similarity index 100%
rename from public/app/containers/Explore/utils/debounce.ts
rename to public/app/features/explore/utils/debounce.ts
diff --git a/public/app/containers/Explore/utils/dom.ts b/public/app/features/explore/utils/dom.ts
similarity index 100%
rename from public/app/containers/Explore/utils/dom.ts
rename to public/app/features/explore/utils/dom.ts
diff --git a/public/app/containers/Explore/utils/prometheus.test.ts b/public/app/features/explore/utils/prometheus.test.ts
similarity index 94%
rename from public/app/containers/Explore/utils/prometheus.test.ts
rename to public/app/features/explore/utils/prometheus.test.ts
index d12d28c6bc9..4e84deaa7e8 100644
--- a/public/app/containers/Explore/utils/prometheus.test.ts
+++ b/public/app/features/explore/utils/prometheus.test.ts
@@ -57,5 +57,8 @@ describe('parseSelector()', () => {
parsed = parseSelector('baz{foo="bar"}', 12);
expect(parsed.selector).toBe('{__name__="baz",foo="bar"}');
+
+ parsed = parseSelector('bar:metric:1m{}', 14);
+ expect(parsed.selector).toBe('{__name__="bar:metric:1m"}');
});
});
diff --git a/public/app/containers/Explore/utils/prometheus.ts b/public/app/features/explore/utils/prometheus.ts
similarity index 96%
rename from public/app/containers/Explore/utils/prometheus.ts
rename to public/app/features/explore/utils/prometheus.ts
index 19129976282..8c41b94d684 100644
--- a/public/app/containers/Explore/utils/prometheus.ts
+++ b/public/app/features/explore/utils/prometheus.ts
@@ -32,7 +32,7 @@ const labelRegexp = /\b\w+="[^"\n]*?"/g;
export function parseSelector(query: string, cursorOffset = 1): { labelKeys: any[]; selector: string } {
if (!query.match(selectorRegexp)) {
// Special matcher for metrics
- if (query.match(/^\w+$/)) {
+ if (query.match(/^[A-Za-z:][\w:]*$/)) {
return {
selector: `{__name__="${query}"}`,
labelKeys: ['__name__'],
@@ -76,7 +76,7 @@ export function parseSelector(query: string, cursorOffset = 1): { labelKeys: any
// Add metric if there is one before the selector
const metricPrefix = query.slice(0, prefixOpen);
- const metricMatch = metricPrefix.match(/\w+$/);
+ const metricMatch = metricPrefix.match(/[A-Za-z:][\w:]*$/);
if (metricMatch) {
labels['__name__'] = `"${metricMatch[0]}"`;
}
diff --git a/public/app/features/explore/utils/query.ts b/public/app/features/explore/utils/query.ts
new file mode 100644
index 00000000000..193ee2dbc52
--- /dev/null
+++ b/public/app/features/explore/utils/query.ts
@@ -0,0 +1,16 @@
+import { Query } from 'app/types/explore';
+
+export function generateQueryKey(index = 0): string {
+ return `Q-${Date.now()}-${Math.random()}-${index}`;
+}
+
+export function ensureQueries(queries?: Query[]): Query[] {
+ if (queries && typeof queries === 'object' && queries.length > 0 && typeof queries[0].query === 'string') {
+ return queries.map(({ query }, i) => ({ key: generateQueryKey(i), query }));
+ }
+ return [{ key: generateQueryKey(), query: '' }];
+}
+
+export function hasQuery(queries: string[]): boolean {
+ return queries.some(q => Boolean(q));
+}
diff --git a/public/app/features/folders/FolderPermissions.tsx b/public/app/features/folders/FolderPermissions.tsx
new file mode 100644
index 00000000000..176e270038b
--- /dev/null
+++ b/public/app/features/folders/FolderPermissions.tsx
@@ -0,0 +1,128 @@
+import React, { PureComponent } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import Tooltip from 'app/core/components/Tooltip/Tooltip';
+import SlideDown from 'app/core/components/Animations/SlideDown';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { NavModel, StoreState, FolderState } from 'app/types';
+import { DashboardAcl, PermissionLevel, NewDashboardAclItem } from 'app/types/acl';
+import {
+ getFolderByUid,
+ getFolderPermissions,
+ updateFolderPermission,
+ removeFolderPermission,
+ addFolderPermission,
+} from './state/actions';
+import { getLoadingNav } from './state/navModel';
+import PermissionList from 'app/core/components/PermissionList/PermissionList';
+import AddPermission from 'app/core/components/PermissionList/AddPermission';
+import PermissionsInfo from 'app/core/components/PermissionList/PermissionsInfo';
+
+export interface Props {
+ navModel: NavModel;
+ folderUid: string;
+ folder: FolderState;
+ getFolderByUid: typeof getFolderByUid;
+ getFolderPermissions: typeof getFolderPermissions;
+ updateFolderPermission: typeof updateFolderPermission;
+ removeFolderPermission: typeof removeFolderPermission;
+ addFolderPermission: typeof addFolderPermission;
+}
+
+export interface State {
+ isAdding: boolean;
+}
+
+export class FolderPermissions extends PureComponent {
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ isAdding: false,
+ };
+ }
+
+ componentDidMount() {
+ this.props.getFolderByUid(this.props.folderUid);
+ this.props.getFolderPermissions(this.props.folderUid);
+ }
+
+ onOpenAddPermissions = () => {
+ this.setState({ isAdding: true });
+ };
+
+ onRemoveItem = (item: DashboardAcl) => {
+ this.props.removeFolderPermission(item);
+ };
+
+ onPermissionChanged = (item: DashboardAcl, level: PermissionLevel) => {
+ this.props.updateFolderPermission(item, level);
+ };
+
+ onAddPermission = (newItem: NewDashboardAclItem) => {
+ return this.props.addFolderPermission(newItem);
+ };
+
+ onCancelAddPermission = () => {
+ this.setState({ isAdding: false });
+ };
+
+ render() {
+ const { navModel, folder } = this.props;
+ const { isAdding } = this.state;
+
+ if (folder.id === 0) {
+ return ;
+ }
+
+ const folderInfo = { title: folder.title, url: folder.url, id: folder.id };
+
+ return (
+
+
+
+
+
Folder Permissions
+
+
+
+
+
+ Add Permission
+
+
+
+
+
+
+
+
+ );
+ }
+}
+
+const mapStateToProps = (state: StoreState) => {
+ const uid = state.location.routeParams.uid;
+ return {
+ navModel: getNavModel(state.navIndex, `folder-permissions-${uid}`, getLoadingNav(1)),
+ folderUid: uid,
+ folder: state.folder,
+ };
+};
+
+const mapDispatchToProps = {
+ getFolderByUid,
+ getFolderPermissions,
+ updateFolderPermission,
+ removeFolderPermission,
+ addFolderPermission,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(FolderPermissions));
diff --git a/public/app/features/folders/FolderSettingsPage.test.tsx b/public/app/features/folders/FolderSettingsPage.test.tsx
new file mode 100644
index 00000000000..5d889ea59c4
--- /dev/null
+++ b/public/app/features/folders/FolderSettingsPage.test.tsx
@@ -0,0 +1,56 @@
+import React from 'react';
+import { FolderSettingsPage, Props } from './FolderSettingsPage';
+import { NavModel } from 'app/types';
+import { shallow } from 'enzyme';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ navModel: {} as NavModel,
+ folderUid: '1234',
+ folder: {
+ id: 0,
+ uid: '1234',
+ title: 'loading',
+ canSave: true,
+ url: 'url',
+ hasChanged: false,
+ version: 1,
+ permissions: [],
+ },
+ getFolderByUid: jest.fn(),
+ setFolderTitle: jest.fn(),
+ saveFolder: jest.fn(),
+ deleteFolder: jest.fn(),
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as FolderSettingsPage;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should enable save button', () => {
+ const { wrapper } = setup({
+ folder: {
+ id: 1,
+ uid: '1234',
+ title: 'loading',
+ canSave: true,
+ hasChanged: true,
+ version: 1,
+ },
+ });
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/folders/FolderSettingsPage.tsx b/public/app/features/folders/FolderSettingsPage.tsx
new file mode 100644
index 00000000000..1eb7ccafc65
--- /dev/null
+++ b/public/app/features/folders/FolderSettingsPage.tsx
@@ -0,0 +1,105 @@
+import React, { PureComponent } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import appEvents from 'app/core/app_events';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { NavModel, StoreState, FolderState } from 'app/types';
+import { getFolderByUid, setFolderTitle, saveFolder, deleteFolder } from './state/actions';
+import { getLoadingNav } from './state/navModel';
+
+export interface Props {
+ navModel: NavModel;
+ folderUid: string;
+ folder: FolderState;
+ getFolderByUid: typeof getFolderByUid;
+ setFolderTitle: typeof setFolderTitle;
+ saveFolder: typeof saveFolder;
+ deleteFolder: typeof deleteFolder;
+}
+
+export class FolderSettingsPage extends PureComponent {
+ componentDidMount() {
+ this.props.getFolderByUid(this.props.folderUid);
+ }
+
+ onTitleChange = evt => {
+ this.props.setFolderTitle(evt.target.value);
+ };
+
+ onSave = async evt => {
+ evt.preventDefault();
+ evt.stopPropagation();
+
+ await this.props.saveFolder(this.props.folder);
+ };
+
+ onDelete = evt => {
+ evt.stopPropagation();
+ evt.preventDefault();
+
+ appEvents.emit('confirm-modal', {
+ title: 'Delete',
+ text: `Do you want to delete this folder and all its dashboards?`,
+ icon: 'fa-trash',
+ yesText: 'Delete',
+ onConfirm: () => {
+ this.props.deleteFolder(this.props.folder.uid);
+ },
+ });
+ };
+
+ render() {
+ const { navModel, folder } = this.props;
+
+ return (
+
+ );
+ }
+}
+
+const mapStateToProps = (state: StoreState) => {
+ const uid = state.location.routeParams.uid;
+
+ return {
+ navModel: getNavModel(state.navIndex, `folder-settings-${uid}`, getLoadingNav(2)),
+ folderUid: uid,
+ folder: state.folder,
+ };
+};
+
+const mapDispatchToProps = {
+ getFolderByUid,
+ saveFolder,
+ setFolderTitle,
+ deleteFolder,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(FolderSettingsPage));
diff --git a/public/app/features/folders/__snapshots__/FolderSettingsPage.test.tsx.snap b/public/app/features/folders/__snapshots__/FolderSettingsPage.test.tsx.snap
new file mode 100644
index 00000000000..2de0c193d27
--- /dev/null
+++ b/public/app/features/folders/__snapshots__/FolderSettingsPage.test.tsx.snap
@@ -0,0 +1,131 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should enable save button 1`] = `
+
+
+
+
+ Folder Settings
+
+
+
+
+`;
+
+exports[`Render should render component 1`] = `
+
+
+
+
+ Folder Settings
+
+
+
+
+`;
diff --git a/public/app/features/folders/state/actions.ts b/public/app/features/folders/state/actions.ts
new file mode 100644
index 00000000000..cd02915e586
--- /dev/null
+++ b/public/app/features/folders/state/actions.ts
@@ -0,0 +1,167 @@
+import { getBackendSrv } from 'app/core/services/backend_srv';
+import { StoreState } from 'app/types';
+import { ThunkAction } from 'redux-thunk';
+import { FolderDTO, FolderState } from 'app/types';
+import {
+ DashboardAcl,
+ DashboardAclDTO,
+ PermissionLevel,
+ DashboardAclUpdateDTO,
+ NewDashboardAclItem,
+} from 'app/types/acl';
+
+import { updateNavIndex, updateLocation } from 'app/core/actions';
+import { buildNavModel } from './navModel';
+import appEvents from 'app/core/app_events';
+
+export enum ActionTypes {
+ LoadFolder = 'LOAD_FOLDER',
+ SetFolderTitle = 'SET_FOLDER_TITLE',
+ SaveFolder = 'SAVE_FOLDER',
+ LoadFolderPermissions = 'LOAD_FOLDER_PERMISSONS',
+}
+
+export interface LoadFolderAction {
+ type: ActionTypes.LoadFolder;
+ payload: FolderDTO;
+}
+
+export interface SetFolderTitleAction {
+ type: ActionTypes.SetFolderTitle;
+ payload: string;
+}
+
+export interface LoadFolderPermissionsAction {
+ type: ActionTypes.LoadFolderPermissions;
+ payload: DashboardAcl[];
+}
+
+export type Action = LoadFolderAction | SetFolderTitleAction | LoadFolderPermissionsAction;
+
+type ThunkResult = ThunkAction;
+
+export const loadFolder = (folder: FolderDTO): LoadFolderAction => ({
+ type: ActionTypes.LoadFolder,
+ payload: folder,
+});
+
+export const setFolderTitle = (newTitle: string): SetFolderTitleAction => ({
+ type: ActionTypes.SetFolderTitle,
+ payload: newTitle,
+});
+
+export const loadFolderPermissions = (items: DashboardAclDTO[]): LoadFolderPermissionsAction => ({
+ type: ActionTypes.LoadFolderPermissions,
+ payload: items,
+});
+
+export function getFolderByUid(uid: string): ThunkResult {
+ return async dispatch => {
+ const folder = await getBackendSrv().getFolderByUid(uid);
+ dispatch(loadFolder(folder));
+ dispatch(updateNavIndex(buildNavModel(folder)));
+ };
+}
+
+export function saveFolder(folder: FolderState): ThunkResult {
+ return async dispatch => {
+ const res = await getBackendSrv().put(`/api/folders/${folder.uid}`, {
+ title: folder.title,
+ version: folder.version,
+ });
+
+ // this should be redux action at some point
+ appEvents.emit('alert-success', ['Folder saved']);
+
+ dispatch(updateLocation({ path: `${res.url}/settings` }));
+ };
+}
+
+export function deleteFolder(uid: string): ThunkResult {
+ return async dispatch => {
+ await getBackendSrv().deleteFolder(uid, true);
+ dispatch(updateLocation({ path: `dashboards` }));
+ };
+}
+
+export function getFolderPermissions(uid: string): ThunkResult {
+ return async dispatch => {
+ const permissions = await getBackendSrv().get(`/api/folders/${uid}/permissions`);
+ dispatch(loadFolderPermissions(permissions));
+ };
+}
+
+function toUpdateItem(item: DashboardAcl): DashboardAclUpdateDTO {
+ return {
+ userId: item.userId,
+ teamId: item.teamId,
+ role: item.role,
+ permission: item.permission,
+ };
+}
+
+export function updateFolderPermission(itemToUpdate: DashboardAcl, level: PermissionLevel): ThunkResult {
+ return async (dispatch, getStore) => {
+ const folder = getStore().folder;
+ const itemsToUpdate = [];
+
+ for (const item of folder.permissions) {
+ if (item.inherited) {
+ continue;
+ }
+
+ const updated = toUpdateItem(item);
+
+ // if this is the item we want to update, update it's permisssion
+ if (itemToUpdate === item) {
+ updated.permission = level;
+ }
+
+ itemsToUpdate.push(updated);
+ }
+
+ await getBackendSrv().post(`/api/folders/${folder.uid}/permissions`, { items: itemsToUpdate });
+ await dispatch(getFolderPermissions(folder.uid));
+ };
+}
+
+export function removeFolderPermission(itemToDelete: DashboardAcl): ThunkResult {
+ return async (dispatch, getStore) => {
+ const folder = getStore().folder;
+ const itemsToUpdate = [];
+
+ for (const item of folder.permissions) {
+ if (item.inherited || item === itemToDelete) {
+ continue;
+ }
+ itemsToUpdate.push(toUpdateItem(item));
+ }
+
+ await getBackendSrv().post(`/api/folders/${folder.uid}/permissions`, { items: itemsToUpdate });
+ await dispatch(getFolderPermissions(folder.uid));
+ };
+}
+
+export function addFolderPermission(newItem: NewDashboardAclItem): ThunkResult {
+ return async (dispatch, getStore) => {
+ const folder = getStore().folder;
+ const itemsToUpdate = [];
+
+ for (const item of folder.permissions) {
+ if (item.inherited) {
+ continue;
+ }
+ itemsToUpdate.push(toUpdateItem(item));
+ }
+
+ itemsToUpdate.push({
+ userId: newItem.userId,
+ teamId: newItem.teamId,
+ role: newItem.role,
+ permission: newItem.permission,
+ });
+
+ await getBackendSrv().post(`/api/folders/${folder.uid}/permissions`, { items: itemsToUpdate });
+ await dispatch(getFolderPermissions(folder.uid));
+ };
+}
diff --git a/public/app/features/folders/state/navModel.ts b/public/app/features/folders/state/navModel.ts
new file mode 100644
index 00000000000..e6ef763d019
--- /dev/null
+++ b/public/app/features/folders/state/navModel.ts
@@ -0,0 +1,53 @@
+import { FolderDTO, NavModelItem, NavModel } from 'app/types';
+
+export function buildNavModel(folder: FolderDTO): NavModelItem {
+ return {
+ icon: 'fa fa-folder-open',
+ id: 'manage-folder',
+ subTitle: 'Manage folder dashboards & permissions',
+ url: '',
+ text: folder.title,
+ breadcrumbs: [{ title: 'Dashboards', url: 'dashboards' }],
+ children: [
+ {
+ active: false,
+ icon: 'fa fa-fw fa-th-large',
+ id: `folder-dashboards-${folder.uid}`,
+ text: 'Dashboards',
+ url: folder.url,
+ },
+ {
+ active: false,
+ icon: 'fa fa-fw fa-lock',
+ id: `folder-permissions-${folder.uid}`,
+ text: 'Permissions',
+ url: `${folder.url}/permissions`,
+ },
+ {
+ active: false,
+ icon: 'fa fa-fw fa-cog',
+ id: `folder-settings-${folder.uid}`,
+ text: 'Settings',
+ url: `${folder.url}/settings`,
+ },
+ ],
+ };
+}
+
+export function getLoadingNav(tabIndex: number): NavModel {
+ const main = buildNavModel({
+ id: 1,
+ uid: 'loading',
+ title: 'Loading',
+ url: 'url',
+ canSave: false,
+ version: 0,
+ });
+
+ main.children[tabIndex].active = true;
+
+ return {
+ main: main,
+ node: main.children[tabIndex],
+ };
+}
diff --git a/public/app/features/folders/state/reducers.test.ts b/public/app/features/folders/state/reducers.test.ts
new file mode 100644
index 00000000000..72e97f39562
--- /dev/null
+++ b/public/app/features/folders/state/reducers.test.ts
@@ -0,0 +1,97 @@
+import { Action, ActionTypes } from './actions';
+import { FolderDTO, OrgRole, PermissionLevel, FolderState } from 'app/types';
+import { inititalState, folderReducer } from './reducers';
+
+function getTestFolder(): FolderDTO {
+ return {
+ id: 1,
+ title: 'test folder',
+ uid: 'asd',
+ url: 'url',
+ canSave: true,
+ version: 0,
+ };
+}
+
+describe('folder reducer', () => {
+ describe('loadFolder', () => {
+ it('should load folder and set hasChanged to false', () => {
+ const folder = getTestFolder();
+
+ const action: Action = {
+ type: ActionTypes.LoadFolder,
+ payload: folder,
+ };
+
+ const state = folderReducer(inititalState, action);
+
+ expect(state.hasChanged).toEqual(false);
+ expect(state.title).toEqual('test folder');
+ });
+ });
+
+ describe('detFolderTitle', () => {
+ it('should set title', () => {
+ const action: Action = {
+ type: ActionTypes.SetFolderTitle,
+ payload: 'new title',
+ };
+
+ const state = folderReducer(inititalState, action);
+
+ expect(state.hasChanged).toEqual(true);
+ expect(state.title).toEqual('new title');
+ });
+ });
+
+ describe('loadFolderPermissions', () => {
+ let state: FolderState;
+
+ beforeEach(() => {
+ const action: Action = {
+ type: ActionTypes.LoadFolderPermissions,
+ payload: [
+ { id: 2, dashboardId: 1, role: OrgRole.Viewer, permission: PermissionLevel.View },
+ { id: 3, dashboardId: 1, role: OrgRole.Editor, permission: PermissionLevel.Edit },
+ {
+ id: 4,
+ dashboardId: 10,
+ permission: PermissionLevel.View,
+ teamId: 1,
+ team: 'MyTestTeam',
+ inherited: true,
+ },
+ {
+ id: 5,
+ dashboardId: 1,
+ permission: PermissionLevel.View,
+ userId: 1,
+ userLogin: 'MyTestUser',
+ },
+ {
+ id: 6,
+ dashboardId: 1,
+ permission: PermissionLevel.Edit,
+ teamId: 2,
+ team: 'MyTestTeam2',
+ },
+ ],
+ };
+
+ state = folderReducer(inititalState, action);
+ });
+
+ it('should add permissions to state', async () => {
+ expect(state.permissions.length).toBe(5);
+ });
+
+ it('should be sorted by sort rank and alphabetically', async () => {
+ expect(state.permissions[0].name).toBe('MyTestTeam');
+ expect(state.permissions[0].dashboardId).toBe(10);
+ expect(state.permissions[1].name).toBe('Editor');
+ expect(state.permissions[2].name).toBe('Viewer');
+ expect(state.permissions[3].name).toBe('MyTestTeam2');
+ expect(state.permissions[4].name).toBe('MyTestUser');
+ });
+ });
+});
diff --git a/public/app/features/folders/state/reducers.ts b/public/app/features/folders/state/reducers.ts
new file mode 100644
index 00000000000..4560c999659
--- /dev/null
+++ b/public/app/features/folders/state/reducers.ts
@@ -0,0 +1,41 @@
+import { FolderState } from 'app/types';
+import { Action, ActionTypes } from './actions';
+import { processAclItems } from 'app/core/utils/acl';
+
+export const inititalState: FolderState = {
+ id: 0,
+ uid: 'loading',
+ title: 'loading',
+ url: '',
+ canSave: false,
+ hasChanged: false,
+ version: 1,
+ permissions: [],
+};
+
+export const folderReducer = (state = inititalState, action: Action): FolderState => {
+ switch (action.type) {
+ case ActionTypes.LoadFolder:
+ return {
+ ...state,
+ ...action.payload,
+ hasChanged: false,
+ };
+ case ActionTypes.SetFolderTitle:
+ return {
+ ...state,
+ title: action.payload,
+ hasChanged: action.payload.trim().length > 0,
+ };
+ case ActionTypes.LoadFolderPermissions:
+ return {
+ ...state,
+ permissions: processAclItems(action.payload),
+ };
+ }
+ return state;
+};
+
+export default {
+ folder: folderReducer,
+};
diff --git a/public/app/features/dashboard/dashboard_list_ctrl.ts b/public/app/features/manage-dashboards/DashboardListCtrl.ts
similarity index 100%
rename from public/app/features/dashboard/dashboard_list_ctrl.ts
rename to public/app/features/manage-dashboards/DashboardListCtrl.ts
diff --git a/public/app/features/snapshot/snapshot_ctrl.ts b/public/app/features/manage-dashboards/SnapshotListCtrl.ts
similarity index 86%
rename from public/app/features/snapshot/snapshot_ctrl.ts
rename to public/app/features/manage-dashboards/SnapshotListCtrl.ts
index 1dde4876cd5..2ff53e7aed5 100644
--- a/public/app/features/snapshot/snapshot_ctrl.ts
+++ b/public/app/features/manage-dashboards/SnapshotListCtrl.ts
@@ -1,7 +1,6 @@
-import angular from 'angular';
import _ from 'lodash';
-export class SnapshotsCtrl {
+export class SnapshotListCtrl {
navModel: any;
snapshots: any;
@@ -35,5 +34,3 @@ export class SnapshotsCtrl {
});
}
}
-
-angular.module('grafana.controllers').controller('SnapshotsCtrl', SnapshotsCtrl);
diff --git a/public/app/features/manage-dashboards/index.ts b/public/app/features/manage-dashboards/index.ts
new file mode 100644
index 00000000000..046740904e1
--- /dev/null
+++ b/public/app/features/manage-dashboards/index.ts
@@ -0,0 +1,7 @@
+import coreModule from 'app/core/core_module';
+
+import { DashboardListCtrl } from './DashboardListCtrl';
+import { SnapshotListCtrl } from './SnapshotListCtrl';
+
+coreModule.controller('DashboardListCtrl', DashboardListCtrl);
+coreModule.controller('SnapshotListCtrl', SnapshotListCtrl);
diff --git a/public/app/features/dashboard/partials/dashboard_list.html b/public/app/features/manage-dashboards/partials/dashboard_list.html
similarity index 100%
rename from public/app/features/dashboard/partials/dashboard_list.html
rename to public/app/features/manage-dashboards/partials/dashboard_list.html
diff --git a/public/app/features/snapshot/partials/snapshots.html b/public/app/features/manage-dashboards/partials/snapshot_list.html
similarity index 100%
rename from public/app/features/snapshot/partials/snapshots.html
rename to public/app/features/manage-dashboards/partials/snapshot_list.html
diff --git a/public/app/features/org/all.ts b/public/app/features/org/all.ts
index 8872450e3ab..9cbcec8de0d 100644
--- a/public/app/features/org/all.ts
+++ b/public/app/features/org/all.ts
@@ -1,11 +1,8 @@
-import './org_users_ctrl';
import './profile_ctrl';
-import './org_users_ctrl';
import './select_org_ctrl';
import './change_password_ctrl';
import './new_org_ctrl';
import './user_invite_ctrl';
import './create_team_ctrl';
-import './org_api_keys_ctrl';
import './org_details_ctrl';
import './prefs_control';
diff --git a/public/app/features/org/org_api_keys_ctrl.ts b/public/app/features/org/org_api_keys_ctrl.ts
deleted file mode 100644
index 1ead0a350b9..00000000000
--- a/public/app/features/org/org_api_keys_ctrl.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-import angular from 'angular';
-
-export class OrgApiKeysCtrl {
- /** @ngInject */
- constructor($scope, $http, backendSrv, navModelSrv) {
- $scope.navModel = navModelSrv.getNav('cfg', 'apikeys', 0);
-
- $scope.roleTypes = ['Viewer', 'Editor', 'Admin'];
- $scope.token = { role: 'Viewer' };
-
- $scope.init = () => {
- $scope.getTokens();
- };
-
- $scope.getTokens = () => {
- backendSrv.get('/api/auth/keys').then(tokens => {
- $scope.tokens = tokens;
- });
- };
-
- $scope.removeToken = id => {
- backendSrv.delete('/api/auth/keys/' + id).then($scope.getTokens);
- };
-
- $scope.addToken = () => {
- backendSrv.post('/api/auth/keys', $scope.token).then(result => {
- const modalScope = $scope.$new(true);
- modalScope.key = result.key;
- modalScope.rootPath = window.location.origin + $scope.$root.appSubUrl;
-
- $scope.appEvent('show-modal', {
- src: 'public/app/features/org/partials/apikeyModal.html',
- scope: modalScope,
- });
-
- $scope.getTokens();
- });
- };
-
- $scope.init();
- }
-}
-
-angular.module('grafana.controllers').controller('OrgApiKeysCtrl', OrgApiKeysCtrl);
diff --git a/public/app/features/org/org_users_ctrl.ts b/public/app/features/org/org_users_ctrl.ts
deleted file mode 100644
index 625e2749399..00000000000
--- a/public/app/features/org/org_users_ctrl.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-import config from 'app/core/config';
-import coreModule from 'app/core/core_module';
-import Remarkable from 'remarkable';
-import _ from 'lodash';
-
-export class OrgUsersCtrl {
- unfiltered: any;
- users: any;
- pendingInvites: any;
- editor: any;
- navModel: any;
- externalUserMngLinkUrl: string;
- externalUserMngLinkName: string;
- externalUserMngInfo: string;
- canInvite: boolean;
- searchQuery: string;
- showInvites: boolean;
-
- /** @ngInject */
- constructor(private $scope, private backendSrv, navModelSrv, $sce) {
- this.navModel = navModelSrv.getNav('cfg', 'users', 0);
-
- this.get();
- this.externalUserMngLinkUrl = config.externalUserMngLinkUrl;
- this.externalUserMngLinkName = config.externalUserMngLinkName;
- this.canInvite = !config.disableLoginForm && !config.externalUserMngLinkName;
-
- // render external user management info markdown
- if (config.externalUserMngInfo) {
- this.externalUserMngInfo = new Remarkable({
- linkTarget: '__blank',
- }).render(config.externalUserMngInfo);
- }
- }
-
- get() {
- this.backendSrv.get('/api/org/users').then(users => {
- this.users = users;
- this.unfiltered = users;
- });
- this.backendSrv.get('/api/org/invites').then(pendingInvites => {
- this.pendingInvites = pendingInvites;
- });
- }
-
- onQueryUpdated() {
- const regex = new RegExp(this.searchQuery, 'ig');
- this.users = _.filter(this.unfiltered, item => {
- return regex.test(item.email) || regex.test(item.login);
- });
- }
-
- updateOrgUser(user) {
- this.backendSrv.patch('/api/org/users/' + user.userId, user);
- }
-
- removeUser(user) {
- this.$scope.appEvent('confirm-modal', {
- title: 'Delete',
- text: 'Are you sure you want to delete user ' + user.login + '?',
- yesText: 'Delete',
- icon: 'fa-warning',
- onConfirm: () => {
- this.removeUserConfirmed(user);
- },
- });
- }
-
- removeUserConfirmed(user) {
- this.backendSrv.delete('/api/org/users/' + user.userId).then(this.get.bind(this));
- }
-
- revokeInvite(invite, evt) {
- evt.stopPropagation();
- this.backendSrv.patch('/api/org/invites/' + invite.code + '/revoke').then(this.get.bind(this));
- }
-
- copyInviteToClipboard(evt) {
- evt.stopPropagation();
- }
-
- getInviteUrl(invite) {
- return invite.url;
- }
-}
-
-coreModule.controller('OrgUsersCtrl', OrgUsersCtrl);
diff --git a/public/app/features/org/partials/apikeyModal.html b/public/app/features/org/partials/apikeyModal.html
deleted file mode 100644
index eeefcafc634..00000000000
--- a/public/app/features/org/partials/apikeyModal.html
+++ /dev/null
@@ -1,37 +0,0 @@
-
-
-
-
-
-
-
-
- You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now.
-
-
- You can authenticate request using the Authorization HTTP header, example:
-
-
-
-curl -H "Authorization: Bearer {{key}}" {{rootPath}}/api/dashboards/home
-
-
-
-
-
-
-
diff --git a/public/app/features/org/partials/orgApiKeys.html b/public/app/features/org/partials/orgApiKeys.html
deleted file mode 100644
index a2b4ceb6670..00000000000
--- a/public/app/features/org/partials/orgApiKeys.html
+++ /dev/null
@@ -1,49 +0,0 @@
-
-
-
-
-
Add new
-
-
-
-
Existing Keys
-
-
-
- Name
- Role
-
-
-
-
-
- {{t.name}}
- {{t.role}}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/public/app/features/org/partials/orgUsers.html b/public/app/features/org/partials/orgUsers.html
deleted file mode 100644
index 697879c6ac2..00000000000
--- a/public/app/features/org/partials/orgUsers.html
+++ /dev/null
@@ -1,96 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
- Pending Invites ({{ctrl.pendingInvites.length}})
-
-
-
-
- Invite
-
-
-
-
- {{ctrl.externalUserMngLinkName}}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts
index 5eecf6036d8..b42b06f1238 100644
--- a/public/app/features/panel/metrics_panel_ctrl.ts
+++ b/public/app/features/panel/metrics_panel_ctrl.ts
@@ -6,7 +6,7 @@ import kbn from 'app/core/utils/kbn';
import { PanelCtrl } from 'app/features/panel/panel_ctrl';
import * as rangeUtil from 'app/core/utils/rangeutil';
import * as dateMath from 'app/core/utils/datemath';
-import { encodePathComponent } from 'app/core/utils/location_util';
+import { getExploreUrl } from 'app/core/utils/explore';
import { metricsTabDirective } from './metrics_tab';
@@ -314,7 +314,12 @@ class MetricsPanelCtrl extends PanelCtrl {
getAdditionalMenuItems() {
const items = [];
- if (config.exploreEnabled && this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) {
+ if (
+ config.exploreEnabled &&
+ this.contextSrv.isEditor &&
+ this.datasource &&
+ (this.datasource.meta.explore || this.datasource.meta.id === 'mixed')
+ ) {
items.push({
text: 'Explore',
click: 'ctrl.explore();',
@@ -325,14 +330,11 @@ class MetricsPanelCtrl extends PanelCtrl {
return items;
}
- explore() {
- const range = this.timeSrv.timeRangeForUrl();
- const state = {
- ...this.datasource.getExploreState(this.panel),
- range,
- };
- const exploreState = encodePathComponent(JSON.stringify(state));
- this.$location.url(`/explore?state=${exploreState}`);
+ async explore() {
+ const url = await getExploreUrl(this.panel, this.panel.targets, this.datasource, this.datasourceSrv, this.timeSrv);
+ if (url) {
+ this.$timeout(() => this.$location.url(url));
+ }
}
addQuery(target) {
diff --git a/public/app/features/panel/specs/metrics_panel_ctrl.test.ts b/public/app/features/panel/specs/metrics_panel_ctrl.test.ts
index a28bf92e63b..913a2461fd0 100644
--- a/public/app/features/panel/specs/metrics_panel_ctrl.test.ts
+++ b/public/app/features/panel/specs/metrics_panel_ctrl.test.ts
@@ -38,7 +38,7 @@ describe('MetricsPanelCtrl', () => {
describe('and has datasource set that supports explore and user has powers', () => {
beforeEach(() => {
ctrl.contextSrv = { isEditor: true };
- ctrl.datasource = { supportsExplore: true };
+ ctrl.datasource = { meta: { explore: true } };
additionalItems = ctrl.getAdditionalMenuItems();
});
diff --git a/public/app/features/plugins/PluginList.test.tsx b/public/app/features/plugins/PluginList.test.tsx
new file mode 100644
index 00000000000..201dd69b9db
--- /dev/null
+++ b/public/app/features/plugins/PluginList.test.tsx
@@ -0,0 +1,25 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import PluginList from './PluginList';
+import { getMockPlugins } from './__mocks__/pluginMocks';
+import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+
+const setup = (propOverrides?: object) => {
+ const props = Object.assign(
+ {
+ plugins: getMockPlugins(5),
+ layoutMode: LayoutModes.Grid,
+ },
+ propOverrides
+ );
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/plugins/PluginList.tsx b/public/app/features/plugins/PluginList.tsx
new file mode 100644
index 00000000000..0074839e754
--- /dev/null
+++ b/public/app/features/plugins/PluginList.tsx
@@ -0,0 +1,32 @@
+import React, { SFC } from 'react';
+import classNames from 'classnames/bind';
+import PluginListItem from './PluginListItem';
+import { Plugin } from 'app/types';
+import { LayoutMode, LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+
+interface Props {
+ plugins: Plugin[];
+ layoutMode: LayoutMode;
+}
+
+const PluginList: SFC = props => {
+ const { plugins, layoutMode } = props;
+
+ const listStyle = classNames({
+ 'card-section': true,
+ 'card-list-layout-grid': layoutMode === LayoutModes.Grid,
+ 'card-list-layout-list': layoutMode === LayoutModes.List,
+ });
+
+ return (
+
+
+ {plugins.map((plugin, index) => {
+ return ;
+ })}
+
+
+ );
+};
+
+export default PluginList;
diff --git a/public/app/features/plugins/PluginListItem.test.tsx b/public/app/features/plugins/PluginListItem.test.tsx
new file mode 100644
index 00000000000..175911c5e05
--- /dev/null
+++ b/public/app/features/plugins/PluginListItem.test.tsx
@@ -0,0 +1,33 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import PluginListItem from './PluginListItem';
+import { getMockPlugin } from './__mocks__/pluginMocks';
+
+const setup = (propOverrides?: object) => {
+ const props = Object.assign(
+ {
+ plugin: getMockPlugin(),
+ },
+ propOverrides
+ );
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render has plugin section', () => {
+ const mockPlugin = getMockPlugin();
+ mockPlugin.hasUpdate = true;
+ const wrapper = setup({
+ plugin: mockPlugin,
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/plugins/PluginListItem.tsx b/public/app/features/plugins/PluginListItem.tsx
new file mode 100644
index 00000000000..05eac614fd5
--- /dev/null
+++ b/public/app/features/plugins/PluginListItem.tsx
@@ -0,0 +1,39 @@
+import React, { SFC } from 'react';
+import { Plugin } from 'app/types';
+
+interface Props {
+ plugin: Plugin;
+}
+
+const PluginListItem: SFC = props => {
+ const { plugin } = props;
+
+ return (
+
+
+
+
+
+ {plugin.type}
+
+ {plugin.hasUpdate && (
+
+ Update available!
+
+ )}
+
+
+
+
+
+
+
{plugin.name}
+
{`By ${plugin.info.author.name}`}
+
+
+
+
+ );
+};
+
+export default PluginListItem;
diff --git a/public/app/features/plugins/PluginListPage.test.tsx b/public/app/features/plugins/PluginListPage.test.tsx
new file mode 100644
index 00000000000..31b2f128436
--- /dev/null
+++ b/public/app/features/plugins/PluginListPage.test.tsx
@@ -0,0 +1,38 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { PluginListPage, Props } from './PluginListPage';
+import { NavModel, Plugin } from '../../types';
+import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ navModel: {} as NavModel,
+ plugins: [] as Plugin[],
+ searchQuery: '',
+ setPluginsSearchQuery: jest.fn(),
+ setPluginsLayoutMode: jest.fn(),
+ layoutMode: LayoutModes.Grid,
+ loadPlugins: jest.fn(),
+ hasFetched: false,
+ };
+
+ Object.assign(props, propOverrides);
+
+ return shallow( );
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const wrapper = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render list', () => {
+ const wrapper = setup({
+ hasFetched: true,
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/plugins/PluginListPage.tsx b/public/app/features/plugins/PluginListPage.tsx
new file mode 100644
index 00000000000..a2fcb90ce54
--- /dev/null
+++ b/public/app/features/plugins/PluginListPage.tsx
@@ -0,0 +1,88 @@
+import React, { PureComponent } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import OrgActionBar from 'app/core/components/OrgActionBar/OrgActionBar';
+import PageLoader from 'app/core/components/PageLoader/PageLoader';
+import PluginList from './PluginList';
+import { NavModel, Plugin } from 'app/types';
+import { loadPlugins, setPluginsLayoutMode, setPluginsSearchQuery } from './state/actions';
+import { getNavModel } from '../../core/selectors/navModel';
+import { getLayoutMode, getPlugins, getPluginsSearchQuery } from './state/selectors';
+import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector';
+
+export interface Props {
+ navModel: NavModel;
+ plugins: Plugin[];
+ layoutMode: LayoutMode;
+ searchQuery: string;
+ hasFetched: boolean;
+ loadPlugins: typeof loadPlugins;
+ setPluginsLayoutMode: typeof setPluginsLayoutMode;
+ setPluginsSearchQuery: typeof setPluginsSearchQuery;
+}
+
+export class PluginListPage extends PureComponent {
+ componentDidMount() {
+ this.fetchPlugins();
+ }
+
+ async fetchPlugins() {
+ await this.props.loadPlugins();
+ }
+
+ render() {
+ const {
+ hasFetched,
+ navModel,
+ plugins,
+ layoutMode,
+ setPluginsLayoutMode,
+ setPluginsSearchQuery,
+ searchQuery,
+ } = this.props;
+
+ const linkButton = {
+ href: 'https://grafana.com/plugins?utm_source=grafana_plugin_list',
+ title: 'Find more plugins on Grafana.com',
+ };
+
+ return (
+
+
+
+
setPluginsLayoutMode(mode)}
+ setSearchQuery={query => setPluginsSearchQuery(query)}
+ linkButton={linkButton}
+ />
+ {hasFetched ? (
+ plugins &&
+ ) : (
+
+ )}
+
+
+ );
+ }
+}
+
+function mapStateToProps(state) {
+ return {
+ navModel: getNavModel(state.navIndex, 'plugins'),
+ plugins: getPlugins(state.plugins),
+ layoutMode: getLayoutMode(state.plugins),
+ searchQuery: getPluginsSearchQuery(state.plugins),
+ hasFetched: state.plugins.hasFetched,
+ };
+}
+
+const mapDispatchToProps = {
+ loadPlugins,
+ setPluginsLayoutMode,
+ setPluginsSearchQuery,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(PluginListPage));
diff --git a/public/app/features/plugins/__mocks__/pluginMocks.ts b/public/app/features/plugins/__mocks__/pluginMocks.ts
new file mode 100644
index 00000000000..d8dd67d5b61
--- /dev/null
+++ b/public/app/features/plugins/__mocks__/pluginMocks.ts
@@ -0,0 +1,59 @@
+import { Plugin } from 'app/types';
+
+export const getMockPlugins = (amount: number): Plugin[] => {
+ const plugins = [];
+
+ for (let i = 0; i <= amount; i++) {
+ plugins.push({
+ defaultNavUrl: 'some/url',
+ enabled: false,
+ hasUpdate: false,
+ id: `${i}`,
+ info: {
+ author: {
+ name: 'Grafana Labs',
+ url: 'url/to/GrafanaLabs',
+ },
+ description: 'pretty decent plugin',
+ links: ['one link'],
+ logos: { small: 'small/logo', large: 'large/logo' },
+ screenshots: `screenshot/${i}`,
+ updated: '2018-09-26',
+ version: '1',
+ },
+ latestVersion: `1.${i}`,
+ name: `pretty cool plugin-${i}`,
+ pinned: false,
+ state: '',
+ type: '',
+ });
+ }
+
+ return plugins;
+};
+
+export const getMockPlugin = () => {
+ return {
+ defaultNavUrl: 'some/url',
+ enabled: false,
+ hasUpdate: false,
+ id: '1',
+ info: {
+ author: {
+ name: 'Grafana Labs',
+ url: 'url/to/GrafanaLabs',
+ },
+ description: 'pretty decent plugin',
+ links: ['one link'],
+ logos: { small: 'small/logo', large: 'large/logo' },
+ screenshots: 'screenshot/1',
+ updated: '2018-09-26',
+ version: '1',
+ },
+ latestVersion: '1',
+ name: 'pretty cool plugin 1',
+ pinned: false,
+ state: '',
+ type: '',
+ };
+};
diff --git a/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap
new file mode 100644
index 00000000000..176304b7b11
--- /dev/null
+++ b/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap
@@ -0,0 +1,210 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
diff --git a/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap
new file mode 100644
index 00000000000..fc0cc68c522
--- /dev/null
+++ b/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap
@@ -0,0 +1,106 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+
+
+
+
+
+
+
+
+ pretty cool plugin 1
+
+
+ By Grafana Labs
+
+
+
+
+
+`;
+
+exports[`Render should render has plugin section 1`] = `
+
+
+
+
+
+
+
+
+ Update available!
+
+
+
+
+
+
+
+
+
+ pretty cool plugin 1
+
+
+ By Grafana Labs
+
+
+
+
+
+`;
diff --git a/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap
new file mode 100644
index 00000000000..ad27dd5037c
--- /dev/null
+++ b/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap
@@ -0,0 +1,56 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
+
+exports[`Render should render list 1`] = `
+
+`;
diff --git a/public/app/features/plugins/all.ts b/public/app/features/plugins/all.ts
index fd19ea963b6..d164a6d4255 100644
--- a/public/app/features/plugins/all.ts
+++ b/public/app/features/plugins/all.ts
@@ -1,9 +1,7 @@
import './plugin_edit_ctrl';
import './plugin_page_ctrl';
-import './plugin_list_ctrl';
import './import_list/import_list';
import './ds_edit_ctrl';
import './ds_dashboards_ctrl';
-import './ds_list_ctrl';
import './datasource_srv';
import './plugin_component';
diff --git a/public/app/features/plugins/built_in_plugins.ts b/public/app/features/plugins/built_in_plugins.ts
index 2c5bf459eda..e29e1709ccf 100644
--- a/public/app/features/plugins/built_in_plugins.ts
+++ b/public/app/features/plugins/built_in_plugins.ts
@@ -11,6 +11,7 @@ import * as postgresPlugin from 'app/plugins/datasource/postgres/module';
import * as prometheusPlugin from 'app/plugins/datasource/prometheus/module';
import * as mssqlPlugin from 'app/plugins/datasource/mssql/module';
import * as testDataDSPlugin from 'app/plugins/datasource/testdata/module';
+import * as stackdriverPlugin from 'app/plugins/datasource/stackdriver/module';
import * as textPanel from 'app/plugins/panel/text/module';
import * as graphPanel from 'app/plugins/panel/graph/module';
@@ -36,6 +37,7 @@ const builtInPlugins = {
'app/plugins/datasource/mssql/module': mssqlPlugin,
'app/plugins/datasource/prometheus/module': prometheusPlugin,
'app/plugins/datasource/testdata/module': testDataDSPlugin,
+ 'app/plugins/datasource/stackdriver/module': stackdriverPlugin,
'app/plugins/panel/text/module': textPanel,
'app/plugins/panel/graph/module': graphPanel,
diff --git a/public/app/features/plugins/ds_dashboards_ctrl.ts b/public/app/features/plugins/ds_dashboards_ctrl.ts
index ed7800698b7..a0324215453 100644
--- a/public/app/features/plugins/ds_dashboards_ctrl.ts
+++ b/public/app/features/plugins/ds_dashboards_ctrl.ts
@@ -1,6 +1,7 @@
-import { toJS } from 'mobx';
import { coreModule } from 'app/core/core';
-import { store } from 'app/stores/store';
+import { store } from 'app/store/configureStore';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { buildNavModel } from './state/navModel';
export class DataSourceDashboardsCtrl {
datasourceMeta: any;
@@ -9,11 +10,8 @@ export class DataSourceDashboardsCtrl {
/** @ngInject */
constructor(private backendSrv, private $routeParams) {
- if (store.nav.main === null) {
- store.nav.load('cfg', 'datasources');
- }
-
- this.navModel = toJS(store.nav);
+ const state = store.getState();
+ this.navModel = getNavModel(state.navIndex, 'datasources');
if (this.$routeParams.id) {
this.getDatasourceById(this.$routeParams.id);
@@ -30,8 +28,7 @@ export class DataSourceDashboardsCtrl {
}
updateNav() {
- store.nav.initDatasourceEditNav(this.current, this.datasourceMeta, 'datasource-dashboards');
- this.navModel = toJS(store.nav);
+ this.navModel = buildNavModel(this.current, this.datasourceMeta, 'datasource-dashboards');
}
getPluginInfo() {
diff --git a/public/app/features/plugins/ds_edit_ctrl.ts b/public/app/features/plugins/ds_edit_ctrl.ts
index 19889d3e26e..c223f444ef3 100644
--- a/public/app/features/plugins/ds_edit_ctrl.ts
+++ b/public/app/features/plugins/ds_edit_ctrl.ts
@@ -1,8 +1,9 @@
import _ from 'lodash';
-import { toJS } from 'mobx';
import config from 'app/core/config';
import { coreModule, appEvents } from 'app/core/core';
-import { store } from 'app/stores/store';
+import { store } from 'app/store/configureStore';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { buildNavModel } from './state/navModel';
let datasourceTypes = [];
@@ -31,11 +32,8 @@ export class DataSourceEditCtrl {
/** @ngInject */
constructor(private $q, private backendSrv, private $routeParams, private $location, private datasourceSrv) {
- if (store.nav.main === null) {
- store.nav.load('cfg', 'datasources');
- }
-
- this.navModel = toJS(store.nav);
+ const state = store.getState();
+ this.navModel = getNavModel(state.navIndex, 'datasources');
this.datasources = [];
this.loadDatasourceTypes().then(() => {
@@ -101,8 +99,7 @@ export class DataSourceEditCtrl {
}
updateNav() {
- store.nav.initDatasourceEditNav(this.current, this.datasourceMeta, 'datasource-settings');
- this.navModel = toJS(store.nav);
+ this.navModel = buildNavModel(this.current, this.datasourceMeta, 'datasource-settings');
}
typeChanged() {
diff --git a/public/app/features/plugins/ds_list_ctrl.ts b/public/app/features/plugins/ds_list_ctrl.ts
deleted file mode 100644
index 71c1a516842..00000000000
--- a/public/app/features/plugins/ds_list_ctrl.ts
+++ /dev/null
@@ -1,61 +0,0 @@
-import coreModule from '../../core/core_module';
-import _ from 'lodash';
-
-export class DataSourcesCtrl {
- datasources: any;
- unfiltered: any;
- navModel: any;
- searchQuery: string;
-
- /** @ngInject */
- constructor(private $scope, private backendSrv, private datasourceSrv, private navModelSrv) {
- this.navModel = this.navModelSrv.getNav('cfg', 'datasources', 0);
- backendSrv.get('/api/datasources').then(result => {
- this.datasources = result;
- this.unfiltered = result;
- });
- }
-
- onQueryUpdated() {
- const regex = new RegExp(this.searchQuery, 'ig');
- this.datasources = _.filter(this.unfiltered, item => {
- regex.lastIndex = 0;
- return regex.test(item.name) || regex.test(item.type);
- });
- }
-
- removeDataSourceConfirmed(ds) {
- this.backendSrv
- .delete('/api/datasources/' + ds.id)
- .then(
- () => {
- this.$scope.appEvent('alert-success', ['Datasource deleted', '']);
- },
- () => {
- this.$scope.appEvent('alert-error', ['Unable to delete datasource', '']);
- }
- )
- .then(() => {
- this.backendSrv.get('/api/datasources').then(result => {
- this.datasources = result;
- });
- this.backendSrv.get('/api/frontend/settings').then(settings => {
- this.datasourceSrv.init(settings.datasources);
- });
- });
- }
-
- removeDataSource(ds) {
- this.$scope.appEvent('confirm-modal', {
- title: 'Delete',
- text: 'Are you sure you want to delete datasource ' + ds.name + '?',
- yesText: 'Delete',
- icon: 'fa-trash',
- onConfirm: () => {
- this.removeDataSourceConfirmed(ds);
- },
- });
- }
-}
-
-coreModule.controller('DataSourcesCtrl', DataSourcesCtrl);
diff --git a/public/app/features/plugins/partials/ds_edit.html b/public/app/features/plugins/partials/ds_edit.html
index f0bb8867f83..0b83e69c7d2 100644
--- a/public/app/features/plugins/partials/ds_edit.html
+++ b/public/app/features/plugins/partials/ds_edit.html
@@ -1,18 +1,13 @@
-
-
-
- Disclaimer. This datasource was added by config and cannot be modified using the UI. Please contact your server admin to update this datasource.
-
-
+
Settings
-
+
+ This datasource was added by config and cannot be modified using the UI. Please contact your server admin to update this datasource.
+
-
-
-
+
-
+
+
+
+
+
diff --git a/public/app/features/plugins/partials/ds_http_settings.html b/public/app/features/plugins/partials/ds_http_settings.html
index 6d014af567c..17aedd48afd 100644
--- a/public/app/features/plugins/partials/ds_http_settings.html
+++ b/public/app/features/plugins/partials/ds_http_settings.html
@@ -1,9 +1,9 @@
+
+
+ Access mode controls how requests to the data source will be handled.
+ Server should be the preferred way if nothing else stated.
+
+
Server access mode (Default):
+
+ All requests will be made from the browser to Grafana backend/server which in turn will forward the requests to the data source
+ and by that circumvent possible Cross-Origin Resource Sharing (CORS) requirements.
+ The URL needs to be accessible from the grafana backend/server if you select this access mode.
+
+
Browser access mode:
+
+ All requests will be made from the browser directly to the data source and may be subject to
+ Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the browser if you select this
+ access mode.
+
- Auth
-
+
+
-
-
-
+
Auth
+
+
+
+
+
-
-
-
-
-
Advanced HTTP Settings
-
diff --git a/public/app/features/plugins/partials/ds_list.html b/public/app/features/plugins/partials/ds_list.html
deleted file mode 100644
index fd537fc47d4..00000000000
--- a/public/app/features/plugins/partials/ds_list.html
+++ /dev/null
@@ -1,63 +0,0 @@
-
-
-
diff --git a/public/app/features/plugins/partials/plugin_list.html b/public/app/features/plugins/partials/plugin_list.html
deleted file mode 100644
index 04b5bf9c791..00000000000
--- a/public/app/features/plugins/partials/plugin_list.html
+++ /dev/null
@@ -1,45 +0,0 @@
-
-
-
diff --git a/public/app/features/plugins/plugin_list_ctrl.ts b/public/app/features/plugins/plugin_list_ctrl.ts
deleted file mode 100644
index 315252364cc..00000000000
--- a/public/app/features/plugins/plugin_list_ctrl.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import angular from 'angular';
-import _ from 'lodash';
-
-export class PluginListCtrl {
- plugins: any[];
- tabIndex: number;
- navModel: any;
- searchQuery: string;
- allPlugins: any[];
-
- /** @ngInject */
- constructor(private backendSrv: any, $location, navModelSrv) {
- this.tabIndex = 0;
- this.navModel = navModelSrv.getNav('cfg', 'plugins', 0);
-
- this.backendSrv.get('api/plugins', { embedded: 0 }).then(plugins => {
- this.plugins = plugins;
- this.allPlugins = plugins;
- });
- }
-
- onQueryUpdated() {
- const regex = new RegExp(this.searchQuery, 'ig');
- this.plugins = _.filter(this.allPlugins, item => {
- return regex.test(item.name) || regex.test(item.type);
- });
- }
-}
-
-angular.module('grafana.controllers').controller('PluginListCtrl', PluginListCtrl);
diff --git a/public/app/features/plugins/state/actions.ts b/public/app/features/plugins/state/actions.ts
new file mode 100644
index 00000000000..dcfd510ffa0
--- /dev/null
+++ b/public/app/features/plugins/state/actions.ts
@@ -0,0 +1,51 @@
+import { Plugin, StoreState } from 'app/types';
+import { ThunkAction } from 'redux-thunk';
+import { getBackendSrv } from '../../../core/services/backend_srv';
+import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector';
+
+export enum ActionTypes {
+ LoadPlugins = 'LOAD_PLUGINS',
+ SetPluginsSearchQuery = 'SET_PLUGIN_SEARCH_QUERY',
+ SetLayoutMode = 'SET_LAYOUT_MODE',
+}
+
+export interface LoadPluginsAction {
+ type: ActionTypes.LoadPlugins;
+ payload: Plugin[];
+}
+
+export interface SetPluginsSearchQueryAction {
+ type: ActionTypes.SetPluginsSearchQuery;
+ payload: string;
+}
+
+export interface SetLayoutModeAction {
+ type: ActionTypes.SetLayoutMode;
+ payload: LayoutMode;
+}
+
+export const setPluginsLayoutMode = (mode: LayoutMode): SetLayoutModeAction => ({
+ type: ActionTypes.SetLayoutMode,
+ payload: mode,
+});
+
+export const setPluginsSearchQuery = (query: string): SetPluginsSearchQueryAction => ({
+ type: ActionTypes.SetPluginsSearchQuery,
+ payload: query,
+});
+
+const pluginsLoaded = (plugins: Plugin[]): LoadPluginsAction => ({
+ type: ActionTypes.LoadPlugins,
+ payload: plugins,
+});
+
+export type Action = LoadPluginsAction | SetPluginsSearchQueryAction | SetLayoutModeAction;
+
+type ThunkResult
= ThunkAction;
+
+export function loadPlugins(): ThunkResult {
+ return async dispatch => {
+ const result = await getBackendSrv().get('api/plugins', { embedded: 0 });
+ dispatch(pluginsLoaded(result));
+ };
+}
diff --git a/public/app/features/plugins/state/navModel.ts b/public/app/features/plugins/state/navModel.ts
new file mode 100644
index 00000000000..852eb2806f9
--- /dev/null
+++ b/public/app/features/plugins/state/navModel.ts
@@ -0,0 +1,45 @@
+import _ from 'lodash';
+import { DataSource, PluginMeta, NavModel } from 'app/types';
+
+export function buildNavModel(ds: DataSource, plugin: PluginMeta, currentPage: string): NavModel {
+ let title = 'New';
+ const subTitle = `Type: ${plugin.name}`;
+
+ if (ds.id) {
+ title = ds.name;
+ }
+
+ const main = {
+ img: plugin.info.logos.large,
+ id: 'ds-edit-' + plugin.id,
+ subTitle: subTitle,
+ url: '',
+ text: title,
+ breadcrumbs: [{ title: 'Data Sources', url: 'datasources' }],
+ children: [
+ {
+ active: currentPage === 'datasource-settings',
+ icon: 'fa fa-fw fa-sliders',
+ id: 'datasource-settings',
+ text: 'Settings',
+ url: `datasources/edit/${ds.id}`,
+ },
+ ],
+ };
+
+ const hasDashboards = _.find(plugin.includes, { type: 'dashboard' }) !== undefined;
+ if (hasDashboards && ds.id) {
+ main.children.push({
+ active: currentPage === 'datasource-dashboards',
+ icon: 'fa fa-fw fa-th-large',
+ id: 'datasource-dashboards',
+ text: 'Dashboards',
+ url: `datasources/edit/${ds.id}/dashboards`,
+ });
+ }
+
+ return {
+ main: main,
+ node: _.find(main.children, { active: true }),
+ };
+}
diff --git a/public/app/features/plugins/state/reducers.ts b/public/app/features/plugins/state/reducers.ts
new file mode 100644
index 00000000000..bd99d2029f5
--- /dev/null
+++ b/public/app/features/plugins/state/reducers.ts
@@ -0,0 +1,28 @@
+import { Action, ActionTypes } from './actions';
+import { Plugin, PluginsState } from 'app/types';
+import { LayoutModes } from '../../../core/components/LayoutSelector/LayoutSelector';
+
+export const initialState: PluginsState = {
+ plugins: [] as Plugin[],
+ searchQuery: '',
+ layoutMode: LayoutModes.Grid,
+ hasFetched: false,
+};
+
+export const pluginsReducer = (state = initialState, action: Action): PluginsState => {
+ switch (action.type) {
+ case ActionTypes.LoadPlugins:
+ return { ...state, hasFetched: true, plugins: action.payload };
+
+ case ActionTypes.SetPluginsSearchQuery:
+ return { ...state, searchQuery: action.payload };
+
+ case ActionTypes.SetLayoutMode:
+ return { ...state, layoutMode: action.payload };
+ }
+ return state;
+};
+
+export default {
+ plugins: pluginsReducer,
+};
diff --git a/public/app/features/plugins/state/selectors.test.ts b/public/app/features/plugins/state/selectors.test.ts
new file mode 100644
index 00000000000..09b1ce4c259
--- /dev/null
+++ b/public/app/features/plugins/state/selectors.test.ts
@@ -0,0 +1,31 @@
+import { getPlugins, getPluginsSearchQuery } from './selectors';
+import { initialState } from './reducers';
+import { getMockPlugins } from '../__mocks__/pluginMocks';
+
+describe('Selectors', () => {
+ const mockState = initialState;
+
+ it('should return search query', () => {
+ mockState.searchQuery = 'test';
+ const query = getPluginsSearchQuery(mockState);
+
+ expect(query).toEqual(mockState.searchQuery);
+ });
+
+ it('should return plugins', () => {
+ mockState.plugins = getMockPlugins(5);
+ mockState.searchQuery = '';
+
+ const plugins = getPlugins(mockState);
+
+ expect(plugins).toEqual(mockState.plugins);
+ });
+
+ it('should filter plugins', () => {
+ mockState.searchQuery = 'plugin-1';
+
+ const plugins = getPlugins(mockState);
+
+ expect(plugins.length).toEqual(1);
+ });
+});
diff --git a/public/app/features/plugins/state/selectors.ts b/public/app/features/plugins/state/selectors.ts
new file mode 100644
index 00000000000..e1d16462527
--- /dev/null
+++ b/public/app/features/plugins/state/selectors.ts
@@ -0,0 +1,10 @@
+export const getPlugins = state => {
+ const regex = new RegExp(state.searchQuery, 'i');
+
+ return state.plugins.filter(item => {
+ return regex.test(item.name) || regex.test(item.info.author.name) || regex.test(item.info.description);
+ });
+};
+
+export const getPluginsSearchQuery = state => state.searchQuery;
+export const getLayoutMode = state => state.layoutMode;
diff --git a/public/app/features/snapshot/all.ts b/public/app/features/snapshot/all.ts
deleted file mode 100644
index 521c7a4c111..00000000000
--- a/public/app/features/snapshot/all.ts
+++ /dev/null
@@ -1 +0,0 @@
-import './snapshot_ctrl';
diff --git a/public/app/features/teams/TeamGroupSync.test.tsx b/public/app/features/teams/TeamGroupSync.test.tsx
new file mode 100644
index 00000000000..f3deb62c77b
--- /dev/null
+++ b/public/app/features/teams/TeamGroupSync.test.tsx
@@ -0,0 +1,63 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { Props, TeamGroupSync } from './TeamGroupSync';
+import { TeamGroup } from '../../types';
+import { getMockTeamGroups } from './__mocks__/teamMocks';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ groups: [] as TeamGroup[],
+ loadTeamGroups: jest.fn(),
+ addTeamGroup: jest.fn(),
+ removeTeamGroup: jest.fn(),
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as TeamGroupSync;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render groups table', () => {
+ const { wrapper } = setup({
+ groups: getMockTeamGroups(3),
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
+describe('Functions', () => {
+ it('should call add group', () => {
+ const { instance } = setup();
+
+ instance.setState({ newGroupId: 'some/group' });
+ const mockEvent = { preventDefault: jest.fn() };
+
+ instance.onAddGroup(mockEvent);
+
+ expect(instance.props.addTeamGroup).toHaveBeenCalledWith('some/group');
+ });
+
+ it('should call remove group', () => {
+ const { instance } = setup();
+
+ const mockGroup: TeamGroup = { teamId: 1, groupId: 'some/group' };
+
+ instance.onRemoveGroup(mockGroup);
+
+ expect(instance.props.removeTeamGroup).toHaveBeenCalledWith('some/group');
+ });
+});
diff --git a/public/app/containers/Teams/TeamGroupSync.tsx b/public/app/features/teams/TeamGroupSync.tsx
similarity index 73%
rename from public/app/containers/Teams/TeamGroupSync.tsx
rename to public/app/features/teams/TeamGroupSync.tsx
index a3b2e4aed14..939dfcc8e31 100644
--- a/public/app/containers/Teams/TeamGroupSync.tsx
+++ b/public/app/features/teams/TeamGroupSync.tsx
@@ -1,12 +1,16 @@
-import React from 'react';
-import { hot } from 'react-hot-loader';
-import { observer } from 'mobx-react';
-import { Team, TeamGroup } from 'app/stores/TeamsStore/TeamsStore';
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
import SlideDown from 'app/core/components/Animations/SlideDown';
import Tooltip from 'app/core/components/Tooltip/Tooltip';
+import { TeamGroup } from '../../types';
+import { addTeamGroup, loadTeamGroups, removeTeamGroup } from './state/actions';
+import { getTeamGroups } from './state/selectors';
-interface Props {
- team: Team;
+export interface Props {
+ groups: TeamGroup[];
+ loadTeamGroups: typeof loadTeamGroups;
+ addTeamGroup: typeof addTeamGroup;
+ removeTeamGroup: typeof removeTeamGroup;
}
interface State {
@@ -16,15 +20,40 @@ interface State {
const headerTooltip = `Sync LDAP or OAuth groups with your Grafana teams.`;
-@observer
-export class TeamGroupSync extends React.Component {
+export class TeamGroupSync extends PureComponent {
constructor(props) {
super(props);
this.state = { isAdding: false, newGroupId: '' };
}
componentDidMount() {
- this.props.team.loadGroups();
+ this.fetchTeamGroups();
+ }
+
+ async fetchTeamGroups() {
+ await this.props.loadTeamGroups();
+ }
+
+ onToggleAdding = () => {
+ this.setState({ isAdding: !this.state.isAdding });
+ };
+
+ onNewGroupIdChanged = event => {
+ this.setState({ newGroupId: event.target.value });
+ };
+
+ onAddGroup = event => {
+ event.preventDefault();
+ this.props.addTeamGroup(this.state.newGroupId);
+ this.setState({ isAdding: false, newGroupId: '' });
+ };
+
+ onRemoveGroup = (group: TeamGroup) => {
+ this.props.removeTeamGroup(group.groupId);
+ };
+
+ isNewGroupValid() {
+ return this.state.newGroupId.length > 1;
}
renderGroup(group: TeamGroup) {
@@ -40,30 +69,9 @@ export class TeamGroupSync extends React.Component {
);
}
- onToggleAdding = () => {
- this.setState({ isAdding: !this.state.isAdding });
- };
-
- onNewGroupIdChanged = evt => {
- this.setState({ newGroupId: evt.target.value });
- };
-
- onAddGroup = () => {
- this.props.team.addGroup(this.state.newGroupId);
- this.setState({ isAdding: false, newGroupId: '' });
- };
-
- onRemoveGroup = (group: TeamGroup) => {
- this.props.team.removeGroup(group.groupId);
- };
-
- isNewGroupValid() {
- return this.state.newGroupId.length > 1;
- }
-
render() {
const { isAdding, newGroupId } = this.state;
- const groups = this.props.team.groups.values();
+ const groups = this.props.groups;
return (
@@ -86,7 +94,7 @@ export class TeamGroupSync extends React.Component
{
Add External Group
-
+
@@ -146,4 +149,16 @@ export class TeamGroupSync extends React.Component {
}
}
-export default hot(module)(TeamGroupSync);
+function mapStateToProps(state) {
+ return {
+ groups: getTeamGroups(state.team),
+ };
+}
+
+const mapDispatchToProps = {
+ loadTeamGroups,
+ addTeamGroup,
+ removeTeamGroup,
+};
+
+export default connect(mapStateToProps, mapDispatchToProps)(TeamGroupSync);
diff --git a/public/app/features/teams/TeamList.test.tsx b/public/app/features/teams/TeamList.test.tsx
new file mode 100644
index 00000000000..f6e1c11c9f9
--- /dev/null
+++ b/public/app/features/teams/TeamList.test.tsx
@@ -0,0 +1,77 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { Props, TeamList } from './TeamList';
+import { NavModel, Team } from '../../types';
+import { getMockTeam, getMultipleMockTeams } from './__mocks__/teamMocks';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ navModel: {} as NavModel,
+ teams: [] as Team[],
+ loadTeams: jest.fn(),
+ deleteTeam: jest.fn(),
+ setSearchQuery: jest.fn(),
+ searchQuery: '',
+ teamsCount: 0,
+ hasFetched: false,
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as TeamList;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render teams table', () => {
+ const { wrapper } = setup({
+ teams: getMultipleMockTeams(5),
+ teamsCount: 5,
+ hasFetched: true,
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
+describe('Life cycle', () => {
+ it('should call loadTeams', () => {
+ const { instance } = setup();
+
+ instance.componentDidMount();
+
+ expect(instance.props.loadTeams).toHaveBeenCalled();
+ });
+});
+
+describe('Functions', () => {
+ describe('Delete team', () => {
+ it('should call delete team', () => {
+ const { instance } = setup();
+ instance.deleteTeam(getMockTeam());
+
+ expect(instance.props.deleteTeam).toHaveBeenCalledWith(1);
+ });
+ });
+
+ describe('on search query change', () => {
+ it('should call setSearchQuery', () => {
+ const { instance } = setup();
+ const mockEvent = { target: { value: 'test' } };
+
+ instance.onSearchQueryChange(mockEvent);
+
+ expect(instance.props.setSearchQuery).toHaveBeenCalledWith('test');
+ });
+ });
+});
diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/features/teams/TeamList.tsx
similarity index 58%
rename from public/app/containers/Teams/TeamList.tsx
rename to public/app/features/teams/TeamList.tsx
index 2a5743bea96..d8e12e338e9 100644
--- a/public/app/containers/Teams/TeamList.tsx
+++ b/public/app/features/teams/TeamList.tsx
@@ -1,42 +1,44 @@
-import React from 'react';
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
import { hot } from 'react-hot-loader';
-import { inject, observer } from 'mobx-react';
import PageHeader from 'app/core/components/PageHeader/PageHeader';
-import { NavStore } from 'app/stores/NavStore/NavStore';
-import { TeamsStore, Team } from 'app/stores/TeamsStore/TeamsStore';
-import { BackendSrv } from 'app/core/services/backend_srv';
import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
import EmptyListCTA from 'app/core/components/EmptyListCTA/EmptyListCTA';
+import PageLoader from 'app/core/components/PageLoader/PageLoader';
+import { NavModel, Team } from '../../types';
+import { loadTeams, deleteTeam, setSearchQuery } from './state/actions';
+import { getSearchQuery, getTeams, getTeamsCount } from './state/selectors';
+import { getNavModel } from 'app/core/selectors/navModel';
-interface Props {
- nav: typeof NavStore.Type;
- teams: typeof TeamsStore.Type;
- backendSrv: BackendSrv;
+export interface Props {
+ navModel: NavModel;
+ teams: Team[];
+ searchQuery: string;
+ teamsCount: number;
+ hasFetched: boolean;
+ loadTeams: typeof loadTeams;
+ deleteTeam: typeof deleteTeam;
+ setSearchQuery: typeof setSearchQuery;
}
-@inject('nav', 'teams')
-@observer
-export class TeamList extends React.Component {
- constructor(props) {
- super(props);
-
- this.props.nav.load('cfg', 'teams');
+export class TeamList extends PureComponent {
+ componentDidMount() {
this.fetchTeams();
}
- fetchTeams() {
- this.props.teams.loadTeams();
+ async fetchTeams() {
+ await this.props.loadTeams();
}
- deleteTeam(team: Team) {
- this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this));
- }
-
- onSearchQueryChange = evt => {
- this.props.teams.setSearchQuery(evt.target.value);
+ deleteTeam = (team: Team) => {
+ this.props.deleteTeam(team.id);
};
- renderTeamMember(team: Team): JSX.Element {
+ onSearchQueryChange = event => {
+ this.props.setSearchQuery(event.target.value);
+ };
+
+ renderTeam(team: Team) {
const teamUrl = `org/teams/edit/${team.id}`;
return (
@@ -62,48 +64,6 @@ export class TeamList extends React.Component {
);
}
- renderTeamList(teams) {
- return (
-
-
-
-
-
-
-
-
- Name
- Email
- Members
-
-
-
- {teams.filteredTeams.map(team => this.renderTeamMember(team))}
-
-
-
- );
- }
-
renderEmptyList() {
return (
@@ -123,23 +83,86 @@ export class TeamList extends React.Component
{
);
}
- render() {
- const { nav, teams } = this.props;
- let view;
+ renderTeamList() {
+ const { teams, searchQuery } = this.props;
- if (teams.filteredTeams.length > 0) {
- view = this.renderTeamList(teams);
+ return (
+
+
+
+
+
+
+
+
+ Name
+ Email
+ Members
+
+
+
+ {teams.map(team => this.renderTeam(team))}
+
+
+
+ );
+ }
+
+ renderList() {
+ const { teamsCount } = this.props;
+
+ if (teamsCount > 0) {
+ return this.renderTeamList();
} else {
- view = this.renderEmptyList();
+ return this.renderEmptyList();
}
+ }
+
+ render() {
+ const { hasFetched, navModel } = this.props;
return (
-
- {view}
+
+ {hasFetched ? this.renderList() :
}
);
}
}
-export default hot(module)(TeamList);
+function mapStateToProps(state) {
+ return {
+ navModel: getNavModel(state.navIndex, 'teams'),
+ teams: getTeams(state.teams),
+ searchQuery: getSearchQuery(state.teams),
+ teamsCount: getTeamsCount(state.teams),
+ hasFetched: state.teams.hasFetched,
+ };
+}
+
+const mapDispatchToProps = {
+ loadTeams,
+ deleteTeam,
+ setSearchQuery,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(TeamList));
diff --git a/public/app/features/teams/TeamMembers.test.tsx b/public/app/features/teams/TeamMembers.test.tsx
new file mode 100644
index 00000000000..696880bebd9
--- /dev/null
+++ b/public/app/features/teams/TeamMembers.test.tsx
@@ -0,0 +1,90 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { TeamMembers, Props, State } from './TeamMembers';
+import { TeamMember } from '../../types';
+import { getMockTeamMember, getMockTeamMembers } from './__mocks__/teamMocks';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ members: [] as TeamMember[],
+ searchMemberQuery: '',
+ setSearchMemberQuery: jest.fn(),
+ loadTeamMembers: jest.fn(),
+ addTeamMember: jest.fn(),
+ removeTeamMember: jest.fn(),
+ syncEnabled: false,
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as TeamMembers;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render team members', () => {
+ const { wrapper } = setup({
+ members: getMockTeamMembers(5),
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render team members when sync enabled', () => {
+ const { wrapper } = setup({
+ members: getMockTeamMembers(5),
+ syncEnabled: true,
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
+describe('Functions', () => {
+ describe('on search member query change', () => {
+ it('it should call setSearchMemberQuery', () => {
+ const { instance } = setup();
+ const mockEvent = { target: { value: 'member' } };
+
+ instance.onSearchQueryChange(mockEvent);
+
+ expect(instance.props.setSearchMemberQuery).toHaveBeenCalledWith('member');
+ });
+ });
+
+ describe('on remove member', () => {
+ const { instance } = setup();
+ const mockTeamMember = getMockTeamMember();
+
+ instance.onRemoveMember(mockTeamMember);
+
+ expect(instance.props.removeTeamMember).toHaveBeenCalledWith(1);
+ });
+
+ describe('on add user to team', () => {
+ const { wrapper, instance } = setup();
+ const state = wrapper.state() as State;
+
+ state.newTeamMember = {
+ id: 1,
+ label: '',
+ avatarUrl: '',
+ login: '',
+ };
+
+ instance.onAddUserToTeam();
+
+ expect(instance.props.addTeamMember).toHaveBeenCalledWith(1);
+ });
+});
diff --git a/public/app/containers/Teams/TeamMembers.tsx b/public/app/features/teams/TeamMembers.tsx
similarity index 55%
rename from public/app/containers/Teams/TeamMembers.tsx
rename to public/app/features/teams/TeamMembers.tsx
index b06a547063a..2534a08ed15 100644
--- a/public/app/containers/Teams/TeamMembers.tsx
+++ b/public/app/features/teams/TeamMembers.tsx
@@ -1,56 +1,44 @@
-import React from 'react';
-import { hot } from 'react-hot-loader';
-import { observer } from 'mobx-react';
-import { Team, TeamMember } from 'app/stores/TeamsStore/TeamsStore';
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
import SlideDown from 'app/core/components/Animations/SlideDown';
-import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
+import { UserPicker } from 'app/core/components/Picker/UserPicker';
import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
+import { TagBadge } from 'app/core/components/TagFilter/TagBadge';
+import { TeamMember, User } from 'app/types';
+import { loadTeamMembers, addTeamMember, removeTeamMember, setSearchMemberQuery } from './state/actions';
+import { getSearchMemberQuery, getTeamMembers } from './state/selectors';
-interface Props {
- team: Team;
+export interface Props {
+ members: TeamMember[];
+ searchMemberQuery: string;
+ loadTeamMembers: typeof loadTeamMembers;
+ addTeamMember: typeof addTeamMember;
+ removeTeamMember: typeof removeTeamMember;
+ setSearchMemberQuery: typeof setSearchMemberQuery;
+ syncEnabled: boolean;
}
-interface State {
+export interface State {
isAdding: boolean;
newTeamMember?: User;
}
-@observer
-export class TeamMembers extends React.Component {
+export class TeamMembers extends PureComponent {
constructor(props) {
super(props);
this.state = { isAdding: false, newTeamMember: null };
}
componentDidMount() {
- this.props.team.loadMembers();
+ this.props.loadTeamMembers();
}
- onSearchQueryChange = evt => {
- this.props.team.setSearchQuery(evt.target.value);
+ onSearchQueryChange = event => {
+ this.props.setSearchMemberQuery(event.target.value);
};
- removeMember(member: TeamMember) {
- this.props.team.removeMember(member);
- }
-
- removeMemberConfirmed(member: TeamMember) {
- this.props.team.removeMember(member);
- }
-
- renderMember(member: TeamMember) {
- return (
-
-
-
-
- {member.login}
- {member.email}
-
- this.removeMember(member)} />
-
-
- );
+ onRemoveMember(member: TeamMember) {
+ this.props.removeTeamMember(member.userId);
}
onToggleAdding = () => {
@@ -62,17 +50,41 @@ export class TeamMembers extends React.Component {
};
onAddUserToTeam = async () => {
- await this.props.team.addMember(this.state.newTeamMember.id);
- await this.props.team.loadMembers();
+ this.props.addTeamMember(this.state.newTeamMember.id);
this.setState({ newTeamMember: null });
};
- render() {
- const { newTeamMember, isAdding } = this.state;
- const members = this.props.team.filteredMembers;
- const newTeamMemberValue = newTeamMember && newTeamMember.id.toString();
- const { team } = this.props;
+ renderLabels(labels: string[]) {
+ if (!labels) {
+ return ;
+ }
+ return (
+
+ {labels.map(label => {}} />)}
+
+ );
+ }
+
+ renderMember(member: TeamMember, syncEnabled: boolean) {
+ return (
+
+
+
+
+ {member.login}
+ {member.email}
+ {syncEnabled ? this.renderLabels(member.labels) : ''}
+
+ this.onRemoveMember(member)} />
+
+
+ );
+ }
+
+ render() {
+ const { isAdding } = this.state;
+ const { searchMemberQuery, members, syncEnabled } = this.props;
return (
@@ -82,7 +94,7 @@ export class TeamMembers extends React.Component
{
type="text"
className="gf-form-input"
placeholder="Search members"
- value={team.search}
+ value={searchMemberQuery}
onChange={this.onSearchQueryChange}
/>
@@ -103,8 +115,7 @@ export class TeamMembers extends React.Component {
Add Team Member
-
-
+
{this.state.newTeamMember && (
Add to team
@@ -121,10 +132,11 @@ export class TeamMembers extends React.Component {
Name
Email
+ {syncEnabled ? : ''}
- {members.map(member => this.renderMember(member))}
+ {members && members.map(member => this.renderMember(member, syncEnabled))}
@@ -132,4 +144,18 @@ export class TeamMembers extends React.Component {
}
}
-export default hot(module)(TeamMembers);
+function mapStateToProps(state) {
+ return {
+ members: getTeamMembers(state.team),
+ searchMemberQuery: getSearchMemberQuery(state.team),
+ };
+}
+
+const mapDispatchToProps = {
+ loadTeamMembers,
+ addTeamMember,
+ removeTeamMember,
+ setSearchMemberQuery,
+};
+
+export default connect(mapStateToProps, mapDispatchToProps)(TeamMembers);
diff --git a/public/app/features/teams/TeamPages.test.tsx b/public/app/features/teams/TeamPages.test.tsx
new file mode 100644
index 00000000000..65084d0dc47
--- /dev/null
+++ b/public/app/features/teams/TeamPages.test.tsx
@@ -0,0 +1,63 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { TeamPages, Props } from './TeamPages';
+import { NavModel, Team } from '../../types';
+import { getMockTeam } from './__mocks__/teamMocks';
+
+jest.mock('app/core/config', () => ({
+ buildInfo: { isEnterprise: true },
+}));
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ navModel: {} as NavModel,
+ teamId: 1,
+ loadTeam: jest.fn(),
+ pageName: 'members',
+ team: {} as Team,
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance();
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render member page if team not empty', () => {
+ const { wrapper } = setup({
+ team: getMockTeam(),
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render settings page', () => {
+ const { wrapper } = setup({
+ team: getMockTeam(),
+ pageName: 'settings',
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+
+ it('should render group sync page', () => {
+ const { wrapper } = setup({
+ team: getMockTeam(),
+ pageName: 'groupsync',
+ });
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
diff --git a/public/app/features/teams/TeamPages.tsx b/public/app/features/teams/TeamPages.tsx
new file mode 100644
index 00000000000..3dc5a9f6f15
--- /dev/null
+++ b/public/app/features/teams/TeamPages.tsx
@@ -0,0 +1,107 @@
+import React, { PureComponent } from 'react';
+import { connect } from 'react-redux';
+import _ from 'lodash';
+import { hot } from 'react-hot-loader';
+import config from 'app/core/config';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import TeamMembers from './TeamMembers';
+import TeamSettings from './TeamSettings';
+import TeamGroupSync from './TeamGroupSync';
+import { NavModel, Team } from 'app/types';
+import { loadTeam } from './state/actions';
+import { getTeam } from './state/selectors';
+import { getTeamLoadingNav } from './state/navModel';
+import { getNavModel } from 'app/core/selectors/navModel';
+import { getRouteParamsId, getRouteParamsPage } from '../../core/selectors/location';
+
+export interface Props {
+ team: Team;
+ loadTeam: typeof loadTeam;
+ teamId: number;
+ pageName: string;
+ navModel: NavModel;
+}
+
+interface State {
+ isSyncEnabled: boolean;
+}
+
+enum PageTypes {
+ Members = 'members',
+ Settings = 'settings',
+ GroupSync = 'groupsync',
+}
+
+export class TeamPages extends PureComponent {
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ isSyncEnabled: config.buildInfo.isEnterprise,
+ };
+ }
+
+ componentDidMount() {
+ this.fetchTeam();
+ }
+
+ async fetchTeam() {
+ const { loadTeam, teamId } = this.props;
+
+ await loadTeam(teamId);
+ }
+
+ getCurrentPage() {
+ const pages = ['members', 'settings', 'groupsync'];
+ const currentPage = this.props.pageName;
+ return _.includes(pages, currentPage) ? currentPage : pages[0];
+ }
+
+ renderPage() {
+ const { isSyncEnabled } = this.state;
+ const currentPage = this.getCurrentPage();
+
+ switch (currentPage) {
+ case PageTypes.Members:
+ return ;
+
+ case PageTypes.Settings:
+ return ;
+
+ case PageTypes.GroupSync:
+ return isSyncEnabled && ;
+ }
+
+ return null;
+ }
+
+ render() {
+ const { team, navModel } = this.props;
+
+ return (
+
+
+ {team && Object.keys(team).length !== 0 &&
{this.renderPage()}
}
+
+ );
+ }
+}
+
+function mapStateToProps(state) {
+ const teamId = getRouteParamsId(state.location);
+ const pageName = getRouteParamsPage(state.location) || 'members';
+ const teamLoadingNav = getTeamLoadingNav(pageName);
+
+ return {
+ navModel: getNavModel(state.navIndex, `team-${pageName}-${teamId}`, teamLoadingNav),
+ teamId: teamId,
+ pageName: pageName,
+ team: getTeam(state.team, teamId),
+ };
+}
+
+const mapDispatchToProps = {
+ loadTeam,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(TeamPages));
diff --git a/public/app/features/teams/TeamSettings.test.tsx b/public/app/features/teams/TeamSettings.test.tsx
new file mode 100644
index 00000000000..2e40a0e3c44
--- /dev/null
+++ b/public/app/features/teams/TeamSettings.test.tsx
@@ -0,0 +1,44 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { Props, TeamSettings } from './TeamSettings';
+import { getMockTeam } from './__mocks__/teamMocks';
+
+const setup = (propOverrides?: object) => {
+ const props: Props = {
+ team: getMockTeam(),
+ updateTeam: jest.fn(),
+ };
+
+ Object.assign(props, propOverrides);
+
+ const wrapper = shallow( );
+ const instance = wrapper.instance() as TeamSettings;
+
+ return {
+ wrapper,
+ instance,
+ };
+};
+
+describe('Render', () => {
+ it('should render component', () => {
+ const { wrapper } = setup();
+
+ expect(wrapper).toMatchSnapshot();
+ });
+});
+
+describe('Functions', () => {
+ it('should update team', () => {
+ const { instance } = setup();
+ const mockEvent = { preventDefault: jest.fn() };
+
+ instance.setState({
+ name: 'test11',
+ });
+
+ instance.onUpdate(mockEvent);
+
+ expect(instance.props.updateTeam).toHaveBeenCalledWith('test11', 'test@test.com');
+ });
+});
diff --git a/public/app/features/teams/TeamSettings.tsx b/public/app/features/teams/TeamSettings.tsx
new file mode 100644
index 00000000000..ef9a5ae0b70
--- /dev/null
+++ b/public/app/features/teams/TeamSettings.tsx
@@ -0,0 +1,96 @@
+import React from 'react';
+import { connect } from 'react-redux';
+import { Label } from 'app/core/components/Forms/Forms';
+import { Team } from '../../types';
+import { updateTeam } from './state/actions';
+import { getRouteParamsId } from '../../core/selectors/location';
+import { getTeam } from './state/selectors';
+
+export interface Props {
+ team: Team;
+ updateTeam: typeof updateTeam;
+}
+
+interface State {
+ name: string;
+ email: string;
+}
+
+export class TeamSettings extends React.Component {
+ constructor(props) {
+ super(props);
+
+ this.state = {
+ name: props.team.name,
+ email: props.team.email,
+ };
+ }
+
+ onChangeName = event => {
+ this.setState({ name: event.target.value });
+ };
+
+ onChangeEmail = event => {
+ this.setState({ email: event.target.value });
+ };
+
+ onUpdate = event => {
+ const { name, email } = this.state;
+ event.preventDefault();
+ this.props.updateTeam(name, email);
+ };
+
+ render() {
+ const { name, email } = this.state;
+
+ return (
+
+ );
+ }
+}
+
+function mapStateToProps(state) {
+ const teamId = getRouteParamsId(state.location);
+
+ return {
+ team: getTeam(state.team, teamId),
+ };
+}
+
+const mapDispatchToProps = {
+ updateTeam,
+};
+
+export default connect(mapStateToProps, mapDispatchToProps)(TeamSettings);
diff --git a/public/app/features/teams/__mocks__/navModelMock.ts b/public/app/features/teams/__mocks__/navModelMock.ts
new file mode 100644
index 00000000000..7aa8515ee13
--- /dev/null
+++ b/public/app/features/teams/__mocks__/navModelMock.ts
@@ -0,0 +1,59 @@
+export const getMockNavModel = (pageName: string) => {
+ return {
+ node: {
+ active: false,
+ icon: 'gicon gicon-team',
+ id: `team-${pageName}-2`,
+ text: `${pageName}`,
+ url: 'org/teams/edit/2/members',
+ parentItem: {
+ img: '/avatar/b5695b61c91d13e7fa2fe71cfb95de9b',
+ id: 'team-2',
+ subTitle: 'Manage members & settings',
+ url: '',
+ text: 'test1',
+ breadcrumbs: [{ title: 'Teams', url: 'org/teams' }],
+ children: [
+ {
+ active: false,
+ icon: 'gicon gicon-team',
+ id: 'team-members-2',
+ text: 'Members',
+ url: 'org/teams/edit/2/members',
+ },
+ {
+ active: false,
+ icon: 'fa fa-fw fa-sliders',
+ id: 'team-settings-2',
+ text: 'Settings',
+ url: 'org/teams/edit/2/settings',
+ },
+ ],
+ },
+ },
+ main: {
+ img: '/avatar/b5695b61c91d13e7fa2fe71cfb95de9b',
+ id: 'team-2',
+ subTitle: 'Manage members & settings',
+ url: '',
+ text: 'test1',
+ breadcrumbs: [{ title: 'Teams', url: 'org/teams' }],
+ children: [
+ {
+ active: true,
+ icon: 'gicon gicon-team',
+ id: 'team-members-2',
+ text: 'Members',
+ url: 'org/teams/edit/2/members',
+ },
+ {
+ active: false,
+ icon: 'fa fa-fw fa-sliders',
+ id: 'team-settings-2',
+ text: 'Settings',
+ url: 'org/teams/edit/2/settings',
+ },
+ ],
+ },
+ };
+};
diff --git a/public/app/features/teams/__mocks__/teamMocks.ts b/public/app/features/teams/__mocks__/teamMocks.ts
new file mode 100644
index 00000000000..339f227c081
--- /dev/null
+++ b/public/app/features/teams/__mocks__/teamMocks.ts
@@ -0,0 +1,67 @@
+import { Team, TeamGroup, TeamMember } from 'app/types';
+
+export const getMultipleMockTeams = (numberOfTeams: number): Team[] => {
+ const teams: Team[] = [];
+ for (let i = 1; i <= numberOfTeams; i++) {
+ teams.push({
+ id: i,
+ name: `test-${i}`,
+ avatarUrl: 'some/url/',
+ email: `test-${i}@test.com`,
+ memberCount: i,
+ });
+ }
+
+ return teams;
+};
+
+export const getMockTeam = (): Team => {
+ return {
+ id: 1,
+ name: 'test',
+ avatarUrl: 'some/url/',
+ email: 'test@test.com',
+ memberCount: 1,
+ };
+};
+
+export const getMockTeamMembers = (amount: number): TeamMember[] => {
+ const teamMembers: TeamMember[] = [];
+
+ for (let i = 1; i <= amount; i++) {
+ teamMembers.push({
+ userId: i,
+ teamId: 1,
+ avatarUrl: 'some/url/',
+ email: 'test@test.com',
+ login: `testUser-${i}`,
+ labels: ['label 1', 'label 2'],
+ });
+ }
+
+ return teamMembers;
+};
+
+export const getMockTeamMember = (): TeamMember => {
+ return {
+ userId: 1,
+ teamId: 1,
+ avatarUrl: 'some/url/',
+ email: 'test@test.com',
+ login: 'testUser',
+ labels: [],
+ };
+};
+
+export const getMockTeamGroups = (amount: number): TeamGroup[] => {
+ const groups: TeamGroup[] = [];
+
+ for (let i = 1; i <= amount; i++) {
+ groups.push({
+ groupId: `group-${i}`,
+ teamId: 1,
+ });
+ }
+
+ return groups;
+};
diff --git a/public/app/features/teams/__snapshots__/TeamGroupSync.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamGroupSync.test.tsx.snap
new file mode 100644
index 00000000000..5a143f19038
--- /dev/null
+++ b/public/app/features/teams/__snapshots__/TeamGroupSync.test.tsx.snap
@@ -0,0 +1,281 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+
+
+ External group sync
+
+
+
+
+
+
+
+
+
+
+
+
+ Add External Group
+
+
+
+
+
+
+ There are no external groups to sync with
+
+
+
+ Add Group
+
+
+
+
+ Sync LDAP or OAuth groups with your Grafana teams.
+
+ Learn more
+
+
+
+
+`;
+
+exports[`Render should render groups table 1`] = `
+
+
+
+ External group sync
+
+
+
+
+
+
+
+ Add group
+
+
+
+
+
+
+
+
+ Add External Group
+
+
+
+
+
+
+
+
+
+ External Group ID
+
+
+
+
+
+
+
+ group-1
+
+
+
+
+
+
+
+
+
+ group-2
+
+
+
+
+
+
+
+
+
+ group-3
+
+
+
+
+
+
+
+
+
+
+
+`;
diff --git a/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap
new file mode 100644
index 00000000000..73f081d496a
--- /dev/null
+++ b/public/app/features/teams/__snapshots__/TeamList.test.tsx.snap
@@ -0,0 +1,336 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
+
+exports[`Render should render teams table 1`] = `
+
+`;
diff --git a/public/app/features/teams/__snapshots__/TeamMembers.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamMembers.test.tsx.snap
new file mode 100644
index 00000000000..0e9b4332cca
--- /dev/null
+++ b/public/app/features/teams/__snapshots__/TeamMembers.test.tsx.snap
@@ -0,0 +1,616 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+ Add a member
+
+
+
+
+
+
+
+
+ Add Team Member
+
+
+
+
+
+
+
+
+
+
+
+
+ Name
+
+
+ Email
+
+
+
+
+
+
+
+
+`;
+
+exports[`Render should render team members 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+ Add a member
+
+
+
+
+
+
+
+
+ Add Team Member
+
+
+
+
+
+
+
+
+`;
+
+exports[`Render should render team members when sync enabled 1`] = `
+
+
+
+
+
+
+
+
+
+
+
+ Add a member
+
+
+
+
+
+
+
+
+ Add Team Member
+
+
+
+
+
+
+
+
+`;
diff --git a/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap
new file mode 100644
index 00000000000..f32b8211d2c
--- /dev/null
+++ b/public/app/features/teams/__snapshots__/TeamPages.test.tsx.snap
@@ -0,0 +1,50 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
+
+exports[`Render should render group sync page 1`] = `
+
+`;
+
+exports[`Render should render member page if team not empty 1`] = `
+
+`;
+
+exports[`Render should render settings page 1`] = `
+
+`;
diff --git a/public/app/features/teams/__snapshots__/TeamSettings.test.tsx.snap b/public/app/features/teams/__snapshots__/TeamSettings.test.tsx.snap
new file mode 100644
index 00000000000..0f6573ccf90
--- /dev/null
+++ b/public/app/features/teams/__snapshots__/TeamSettings.test.tsx.snap
@@ -0,0 +1,57 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+
+`;
diff --git a/public/app/features/teams/state/actions.ts b/public/app/features/teams/state/actions.ts
new file mode 100644
index 00000000000..d948dc1c5a3
--- /dev/null
+++ b/public/app/features/teams/state/actions.ts
@@ -0,0 +1,162 @@
+import { ThunkAction } from 'redux-thunk';
+import { getBackendSrv } from 'app/core/services/backend_srv';
+import { StoreState, Team, TeamGroup, TeamMember } from 'app/types';
+import { updateNavIndex, UpdateNavIndexAction } from 'app/core/actions';
+import { buildNavModel } from './navModel';
+
+export enum ActionTypes {
+ LoadTeams = 'LOAD_TEAMS',
+ LoadTeam = 'LOAD_TEAM',
+ SetSearchQuery = 'SET_TEAM_SEARCH_QUERY',
+ SetSearchMemberQuery = 'SET_TEAM_MEMBER_SEARCH_QUERY',
+ LoadTeamMembers = 'TEAM_MEMBERS_LOADED',
+ LoadTeamGroups = 'TEAM_GROUPS_LOADED',
+}
+
+export interface LoadTeamsAction {
+ type: ActionTypes.LoadTeams;
+ payload: Team[];
+}
+
+export interface LoadTeamAction {
+ type: ActionTypes.LoadTeam;
+ payload: Team;
+}
+
+export interface LoadTeamMembersAction {
+ type: ActionTypes.LoadTeamMembers;
+ payload: TeamMember[];
+}
+
+export interface LoadTeamGroupsAction {
+ type: ActionTypes.LoadTeamGroups;
+ payload: TeamGroup[];
+}
+
+export interface SetSearchQueryAction {
+ type: ActionTypes.SetSearchQuery;
+ payload: string;
+}
+
+export interface SetSearchMemberQueryAction {
+ type: ActionTypes.SetSearchMemberQuery;
+ payload: string;
+}
+
+export type Action =
+ | LoadTeamsAction
+ | SetSearchQueryAction
+ | LoadTeamAction
+ | LoadTeamMembersAction
+ | SetSearchMemberQueryAction
+ | LoadTeamGroupsAction;
+
+type ThunkResult = ThunkAction;
+
+const teamsLoaded = (teams: Team[]): LoadTeamsAction => ({
+ type: ActionTypes.LoadTeams,
+ payload: teams,
+});
+
+const teamLoaded = (team: Team): LoadTeamAction => ({
+ type: ActionTypes.LoadTeam,
+ payload: team,
+});
+
+const teamMembersLoaded = (teamMembers: TeamMember[]): LoadTeamMembersAction => ({
+ type: ActionTypes.LoadTeamMembers,
+ payload: teamMembers,
+});
+
+const teamGroupsLoaded = (teamGroups: TeamGroup[]): LoadTeamGroupsAction => ({
+ type: ActionTypes.LoadTeamGroups,
+ payload: teamGroups,
+});
+
+export const setSearchMemberQuery = (searchQuery: string): SetSearchMemberQueryAction => ({
+ type: ActionTypes.SetSearchMemberQuery,
+ payload: searchQuery,
+});
+
+export const setSearchQuery = (searchQuery: string): SetSearchQueryAction => ({
+ type: ActionTypes.SetSearchQuery,
+ payload: searchQuery,
+});
+
+export function loadTeams(): ThunkResult {
+ return async dispatch => {
+ const response = await getBackendSrv().get('/api/teams/search', { perpage: 1000, page: 1 });
+ dispatch(teamsLoaded(response.teams));
+ };
+}
+
+export function loadTeam(id: number): ThunkResult {
+ return async dispatch => {
+ const response = await getBackendSrv().get(`/api/teams/${id}`);
+ dispatch(teamLoaded(response));
+ dispatch(updateNavIndex(buildNavModel(response)));
+ };
+}
+
+export function loadTeamMembers(): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ const response = await getBackendSrv().get(`/api/teams/${team.id}/members`);
+ dispatch(teamMembersLoaded(response));
+ };
+}
+
+export function addTeamMember(id: number): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ await getBackendSrv().post(`/api/teams/${team.id}/members`, { userId: id });
+ dispatch(loadTeamMembers());
+ };
+}
+
+export function removeTeamMember(id: number): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ await getBackendSrv().delete(`/api/teams/${team.id}/members/${id}`);
+ dispatch(loadTeamMembers());
+ };
+}
+
+export function updateTeam(name: string, email: string): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ await getBackendSrv().put(`/api/teams/${team.id}`, { name, email });
+ dispatch(loadTeam(team.id));
+ };
+}
+
+export function loadTeamGroups(): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ const response = await getBackendSrv().get(`/api/teams/${team.id}/groups`);
+ dispatch(teamGroupsLoaded(response));
+ };
+}
+
+export function addTeamGroup(groupId: string): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ await getBackendSrv().post(`/api/teams/${team.id}/groups`, { groupId: groupId });
+ dispatch(loadTeamGroups());
+ };
+}
+
+export function removeTeamGroup(groupId: string): ThunkResult {
+ return async (dispatch, getStore) => {
+ const team = getStore().team.team;
+ await getBackendSrv().delete(`/api/teams/${team.id}/groups/${groupId}`);
+ dispatch(loadTeamGroups());
+ };
+}
+
+export function deleteTeam(id: number): ThunkResult {
+ return async dispatch => {
+ await getBackendSrv().delete(`/api/teams/${id}`);
+ dispatch(loadTeams());
+ };
+}
diff --git a/public/app/features/teams/state/navModel.ts b/public/app/features/teams/state/navModel.ts
new file mode 100644
index 00000000000..2fd5a68e680
--- /dev/null
+++ b/public/app/features/teams/state/navModel.ts
@@ -0,0 +1,67 @@
+import { Team, NavModelItem, NavModel } from 'app/types';
+import config from 'app/core/config';
+
+export function buildNavModel(team: Team): NavModelItem {
+ const navModel = {
+ img: team.avatarUrl,
+ id: 'team-' + team.id,
+ subTitle: 'Manage members & settings',
+ url: '',
+ text: team.name,
+ breadcrumbs: [{ title: 'Teams', url: 'org/teams' }],
+ children: [
+ {
+ active: false,
+ icon: 'gicon gicon-team',
+ id: `team-members-${team.id}`,
+ text: 'Members',
+ url: `org/teams/edit/${team.id}/members`,
+ },
+ {
+ active: false,
+ icon: 'fa fa-fw fa-sliders',
+ id: `team-settings-${team.id}`,
+ text: 'Settings',
+ url: `org/teams/edit/${team.id}/settings`,
+ },
+ ],
+ };
+
+ if (config.buildInfo.isEnterprise) {
+ navModel.children.push({
+ active: false,
+ icon: 'fa fa-fw fa-refresh',
+ id: `team-groupsync-${team.id}`,
+ text: 'External group sync',
+ url: `org/teams/edit/${team.id}/groupsync`,
+ });
+ }
+
+ return navModel;
+}
+
+export function getTeamLoadingNav(pageName: string): NavModel {
+ const main = buildNavModel({
+ avatarUrl: 'public/img/user_profile.png',
+ id: 1,
+ name: 'Loading',
+ email: 'loading',
+ memberCount: 0,
+ });
+
+ let node: NavModelItem;
+
+ // find active page
+ for (const child of main.children) {
+ if (child.id.indexOf(pageName) > 0) {
+ child.active = true;
+ node = child;
+ break;
+ }
+ }
+
+ return {
+ main: main,
+ node: node,
+ };
+}
diff --git a/public/app/features/teams/state/reducers.test.ts b/public/app/features/teams/state/reducers.test.ts
new file mode 100644
index 00000000000..7f7a33d60ac
--- /dev/null
+++ b/public/app/features/teams/state/reducers.test.ts
@@ -0,0 +1,72 @@
+import { Action, ActionTypes } from './actions';
+import { initialTeamsState, initialTeamState, teamReducer, teamsReducer } from './reducers';
+import { getMockTeam, getMockTeamMember } from '../__mocks__/teamMocks';
+
+describe('teams reducer', () => {
+ it('should set teams', () => {
+ const payload = [getMockTeam()];
+
+ const action: Action = {
+ type: ActionTypes.LoadTeams,
+ payload,
+ };
+
+ const result = teamsReducer(initialTeamsState, action);
+
+ expect(result.teams).toEqual(payload);
+ });
+
+ it('should set search query', () => {
+ const payload = 'test';
+
+ const action: Action = {
+ type: ActionTypes.SetSearchQuery,
+ payload,
+ };
+
+ const result = teamsReducer(initialTeamsState, action);
+
+ expect(result.searchQuery).toEqual('test');
+ });
+});
+
+describe('team reducer', () => {
+ it('should set team', () => {
+ const payload = getMockTeam();
+
+ const action: Action = {
+ type: ActionTypes.LoadTeam,
+ payload,
+ };
+
+ const result = teamReducer(initialTeamState, action);
+
+ expect(result.team).toEqual(payload);
+ });
+
+ it('should set team members', () => {
+ const mockTeamMember = getMockTeamMember();
+
+ const action: Action = {
+ type: ActionTypes.LoadTeamMembers,
+ payload: [mockTeamMember],
+ };
+
+ const result = teamReducer(initialTeamState, action);
+
+ expect(result.members).toEqual([mockTeamMember]);
+ });
+
+ it('should set member search query', () => {
+ const payload = 'member';
+
+ const action: Action = {
+ type: ActionTypes.SetSearchMemberQuery,
+ payload,
+ };
+
+ const result = teamReducer(initialTeamState, action);
+
+ expect(result.searchMemberQuery).toEqual('member');
+ });
+});
diff --git a/public/app/features/teams/state/reducers.ts b/public/app/features/teams/state/reducers.ts
new file mode 100644
index 00000000000..2e72dce0afb
--- /dev/null
+++ b/public/app/features/teams/state/reducers.ts
@@ -0,0 +1,44 @@
+import { Team, TeamGroup, TeamMember, TeamsState, TeamState } from 'app/types';
+import { Action, ActionTypes } from './actions';
+
+export const initialTeamsState: TeamsState = { teams: [], searchQuery: '', hasFetched: false };
+export const initialTeamState: TeamState = {
+ team: {} as Team,
+ members: [] as TeamMember[],
+ groups: [] as TeamGroup[],
+ searchMemberQuery: '',
+};
+
+export const teamsReducer = (state = initialTeamsState, action: Action): TeamsState => {
+ switch (action.type) {
+ case ActionTypes.LoadTeams:
+ return { ...state, hasFetched: true, teams: action.payload };
+
+ case ActionTypes.SetSearchQuery:
+ return { ...state, searchQuery: action.payload };
+ }
+ return state;
+};
+
+export const teamReducer = (state = initialTeamState, action: Action): TeamState => {
+ switch (action.type) {
+ case ActionTypes.LoadTeam:
+ return { ...state, team: action.payload };
+
+ case ActionTypes.LoadTeamMembers:
+ return { ...state, members: action.payload };
+
+ case ActionTypes.SetSearchMemberQuery:
+ return { ...state, searchMemberQuery: action.payload };
+
+ case ActionTypes.LoadTeamGroups:
+ return { ...state, groups: action.payload };
+ }
+
+ return state;
+};
+
+export default {
+ teams: teamsReducer,
+ team: teamReducer,
+};
diff --git a/public/app/features/teams/state/selectors.test.ts b/public/app/features/teams/state/selectors.test.ts
new file mode 100644
index 00000000000..3764a9355c6
--- /dev/null
+++ b/public/app/features/teams/state/selectors.test.ts
@@ -0,0 +1,56 @@
+import { getTeam, getTeamMembers, getTeams } from './selectors';
+import { getMockTeam, getMockTeamMembers, getMultipleMockTeams } from '../__mocks__/teamMocks';
+import { Team, TeamGroup, TeamsState, TeamState } from '../../../types';
+
+describe('Teams selectors', () => {
+ describe('Get teams', () => {
+ const mockTeams = getMultipleMockTeams(5);
+
+ it('should return teams if no search query', () => {
+ const mockState: TeamsState = { teams: mockTeams, searchQuery: '', hasFetched: false };
+
+ const teams = getTeams(mockState);
+
+ expect(teams).toEqual(mockTeams);
+ });
+
+ it('Should filter teams if search query', () => {
+ const mockState: TeamsState = { teams: mockTeams, searchQuery: '5', hasFetched: false };
+
+ const teams = getTeams(mockState);
+
+ expect(teams.length).toEqual(1);
+ });
+ });
+});
+
+describe('Team selectors', () => {
+ describe('Get team', () => {
+ const mockTeam = getMockTeam();
+
+ it('should return team if matching with location team', () => {
+ const mockState: TeamState = { team: mockTeam, searchMemberQuery: '', members: [], groups: [] };
+
+ const team = getTeam(mockState, '1');
+
+ expect(team).toEqual(mockTeam);
+ });
+ });
+
+ describe('Get members', () => {
+ const mockTeamMembers = getMockTeamMembers(5);
+
+ it('should return team members', () => {
+ const mockState: TeamState = {
+ team: {} as Team,
+ searchMemberQuery: '',
+ members: mockTeamMembers,
+ groups: [] as TeamGroup[],
+ };
+
+ const members = getTeamMembers(mockState);
+
+ expect(members).toEqual(mockTeamMembers);
+ });
+ });
+});
diff --git a/public/app/features/teams/state/selectors.ts b/public/app/features/teams/state/selectors.ts
new file mode 100644
index 00000000000..9201993bf0d
--- /dev/null
+++ b/public/app/features/teams/state/selectors.ts
@@ -0,0 +1,30 @@
+import { Team, TeamsState, TeamState } from 'app/types';
+
+export const getSearchQuery = (state: TeamsState) => state.searchQuery;
+export const getSearchMemberQuery = (state: TeamState) => state.searchMemberQuery;
+export const getTeamGroups = (state: TeamState) => state.groups;
+export const getTeamsCount = (state: TeamsState) => state.teams.length;
+
+export const getTeam = (state: TeamState, currentTeamId): Team | null => {
+ if (state.team.id === parseInt(currentTeamId, 10)) {
+ return state.team;
+ }
+
+ return null;
+};
+
+export const getTeams = (state: TeamsState) => {
+ const regex = RegExp(state.searchQuery, 'i');
+
+ return state.teams.filter(team => {
+ return regex.test(team.name);
+ });
+};
+
+export const getTeamMembers = (state: TeamState) => {
+ const regex = RegExp(state.searchMemberQuery, 'i');
+
+ return state.members.filter(member => {
+ return regex.test(member.login) || regex.test(member.email);
+ });
+};
diff --git a/public/app/features/templating/TextBoxVariable.ts b/public/app/features/templating/TextBoxVariable.ts
new file mode 100644
index 00000000000..331ff4f95b8
--- /dev/null
+++ b/public/app/features/templating/TextBoxVariable.ts
@@ -0,0 +1,58 @@
+import { Variable, assignModelProperties, variableTypes } from './variable';
+
+export class TextBoxVariable implements Variable {
+ query: string;
+ current: any;
+ options: any[];
+ skipUrlSync: boolean;
+
+ defaults = {
+ type: 'textbox',
+ name: '',
+ hide: 2,
+ label: '',
+ query: '',
+ current: {},
+ options: [],
+ skipUrlSync: false,
+ };
+
+ /** @ngInject */
+ constructor(private model, private variableSrv) {
+ assignModelProperties(this, model, this.defaults);
+ }
+
+ getSaveModel() {
+ assignModelProperties(this.model, this, this.defaults);
+ return this.model;
+ }
+
+ setValue(option) {
+ this.variableSrv.setOptionAsCurrent(this, option);
+ }
+
+ updateOptions() {
+ this.options = [{ text: this.query.trim(), value: this.query.trim() }];
+ this.current = this.options[0];
+ return Promise.resolve();
+ }
+
+ dependsOn(variable) {
+ return false;
+ }
+
+ setValueFromUrl(urlValue) {
+ this.query = urlValue;
+ return this.variableSrv.setOptionFromUrl(this, urlValue);
+ }
+
+ getValueForUrl() {
+ return this.current.value;
+ }
+}
+
+variableTypes['textbox'] = {
+ name: 'Text box',
+ ctor: TextBoxVariable,
+ description: 'Define a textbox variable, where users can enter any arbitrary string',
+};
diff --git a/public/app/features/templating/all.ts b/public/app/features/templating/all.ts
index 16465740642..b872fa6cd4a 100644
--- a/public/app/features/templating/all.ts
+++ b/public/app/features/templating/all.ts
@@ -9,6 +9,7 @@ import { DatasourceVariable } from './datasource_variable';
import { CustomVariable } from './custom_variable';
import { ConstantVariable } from './constant_variable';
import { AdhocVariable } from './adhoc_variable';
+import { TextBoxVariable } from './TextBoxVariable';
coreModule.factory('templateSrv', () => {
return templateSrv;
@@ -22,4 +23,5 @@ export {
CustomVariable,
ConstantVariable,
AdhocVariable,
+ TextBoxVariable,
};
diff --git a/public/app/features/templating/partials/editor.html b/public/app/features/templating/partials/editor.html
index 0d8b0ace327..ac4450c20a2 100644
--- a/public/app/features/templating/partials/editor.html
+++ b/public/app/features/templating/partials/editor.html
@@ -155,6 +155,14 @@
+
+
@@ -236,26 +273,35 @@
// insert it at the end of the head in a legacy-friendly manner
document.head.insertBefore(myCSS, document.head.childNodes[document.head.childNodes.length - 1].nextSibling);
- [[if .GoogleTagManagerId]]
-
-
-
-
-
-
-
+
+ [[if .GoogleTagManagerId]]
+
+
+
+
+
+
+
[[end]]