diff --git a/.circleci/config.yml b/.circleci/config.yml index e631e0a8d33..a4bb2d67855 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -83,13 +83,14 @@ jobs: - checkout - run: 'go get -u github.com/alecthomas/gometalinter' - run: 'go get -u github.com/tsenart/deadcode' + - run: 'go get -u github.com/jgautheron/goconst/cmd/goconst' - run: 'go get -u github.com/gordonklaus/ineffassign' - run: 'go get -u github.com/opennota/check/cmd/structcheck' - run: 'go get -u github.com/mdempsky/unconvert' - run: 'go get -u github.com/opennota/check/cmd/varcheck' - run: name: run linters - command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' + command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=goconst --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' - run: name: run go vet command: 'go vet ./pkg/...' @@ -157,14 +158,18 @@ jobs: name: sha-sum packages command: 'go run build.go sha-dist' - run: - name: Build Grafana.com publisher + name: Build Grafana.com master publisher command: 'go build -o scripts/publish scripts/build/publish.go' + - run: + name: Build Grafana.com release publisher + command: 'cd scripts/build/release_publisher && go build -o release_publisher .' - persist_to_workspace: root: . paths: - dist/grafana* - scripts/*.sh - scripts/publish + - scripts/build/release_publisher/release_publisher build: docker: @@ -298,8 +303,8 @@ jobs: name: deploy to s3 command: 'aws s3 sync ./dist s3://$BUCKET_NAME/release' - run: - name: Trigger Windows build - command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release' + name: Deploy to Grafana.com + command: './scripts/build/publish.sh' workflows: version: 2 diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md deleted file mode 100644 index 769ba2a519b..00000000000 --- a/.github/CONTRIBUTING.md +++ /dev/null @@ -1,22 +0,0 @@ -Follow the setup guide in README.md - -### Rebuild frontend assets on source change -``` -yarn watch -``` - -### Rerun tests on source change -``` -yarn jest -``` - -### Run tests for backend assets before commit -``` -test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)" -``` - -### Run tests for frontend assets before commit -``` -yarn test -go test -v ./pkg/... 
-``` diff --git a/.gitignore b/.gitignore index 08525d92519..20e8fffb3b1 100644 --- a/.gitignore +++ b/.gitignore @@ -73,3 +73,5 @@ debug.test /devenv/bulk-dashboards/*.json /devenv/bulk_alerting_dashboards/*.json + +/scripts/build/release_publisher/release_publisher diff --git a/CHANGELOG.md b/CHANGELOG.md index 39479054af3..5ab9ad41db4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,23 +1,38 @@ # 5.4.0 (unreleased) +### Minor + +* **Datasource Proxy**: Keep trailing slash for datasource proxy requests [#13326](https://github.com/grafana/grafana/pull/13326), thx [@ryantxu](https://github.com/ryantxu) + +# 5.3.0 (unreleased) + +# 5.3.0-beta3 (2018-10-03) + +* **Stackdriver**: Fix for missing ngInject [#13511](https://github.com/grafana/grafana/pull/13511) +* **Permissions**: Fix for broken permissions selector [#13507](https://github.com/grafana/grafana/issues/13507) +* **Alerting**: Alert reminders deduping not working as expected when running multiple Grafana instances [#13492](https://github.com/grafana/grafana/issues/13492) + +# 5.3.0-beta2 (2018-10-01) + ### New Features * **Annotations**: Enable template variables in tagged annotations queries [#9735](https://github.com/grafana/grafana/issues/9735) +* **Stackdriver**: Support for Google Stackdriver Datasource [#13289](https://github.com/grafana/grafana/pull/13289) ### Minor +* **Provisioning**: Dashboard Provisioning now supports symlinks that change target [#12534](https://github.com/grafana/grafana/issues/12534), thx [@auhlig](https://github.com/auhlig) * **OAuth**: Allow oauth email attribute name to be configurable [#12986](https://github.com/grafana/grafana/issues/12986), thx [@bobmshannon](https://github.com/bobmshannon) * **Tags**: Default sort order for GetDashboardTags [#11681](https://github.com/grafana/grafana/pull/11681), thx [@Jonnymcc](https://github.com/Jonnymcc) * **Prometheus**: Label completion queries respect dashboard time range [#12251](https://github.com/grafana/grafana/pull/12251), thx [@mtanda](https://github.com/mtanda) * **Prometheus**: Allow to display annotations based on Prometheus series value [#10159](https://github.com/grafana/grafana/issues/10159), thx [@mtanda](https://github.com/mtanda) * **Prometheus**: Adhoc-filtering for Prometheus dashboards [#13212](https://github.com/grafana/grafana/issues/13212) * **Singlestat**: Fix gauge display accuracy for percents [#13270](https://github.com/grafana/grafana/issues/13270), thx [@tianon](https://github.com/tianon) - -# 5.3.0 (unreleased) - -### Minor - +* **Dashboard**: Prevent auto refresh from starting when loading dashboard with absolute time range [#12030](https://github.com/grafana/grafana/issues/12030) +* **Templating**: New templating variable type `Text box` that allows free text input [#3173](https://github.com/grafana/grafana/issues/3173) * **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira) +* **Alerting**: Fixes a bug where all alerts would send reminders after upgrade & restart [#13402](https://github.com/grafana/grafana/pull/13402) +* **Alerting**: Concurrent render limit for graphs used in notifications [#13401](https://github.com/grafana/grafana/pull/13401) * **Postgres/MySQL/MSSQL**: Add support for replacing $__interval and $__interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx [@svenklemm](https://github.com/svenklemm) # 5.3.0-beta1 (2018-09-06) diff --git
a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..8b2ba090fe1 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,56 @@ + +# Contributing + +Grafana uses GitHub to manage contributions. +Contributions take the form of pull requests that will be reviewed by the core team. + +* If you are a new contributor, see [Steps to Contribute](#steps-to-contribute). + +* If you have a trivial fix or improvement, go ahead and create a pull request. + +* If you plan to do something more involved, discuss your idea on the respective [issue](https://github.com/grafana/grafana/issues) or create a [new issue](https://github.com/grafana/grafana/issues/new) if it does not exist. This will avoid unnecessary work and surely give you and us a good deal of inspiration. + + +## Steps to Contribute + +Should you wish to work on a GitHub issue, first check that it is not already assigned to someone. If it is free, claim it by commenting on the issue that you want to work on it. This prevents duplicated effort by contributors on the same issue. + +Please check the [`beginner friendly`](https://github.com/grafana/grafana/issues?q=is%3Aopen+is%3Aissue+label%3A%22beginner+friendly%22) label to find issues that are good for getting started. If you have questions about one of the issues, with or without the tag, please comment on the issue and one of the core team members or the original poster will clarify it. + + + +## Setup + +Follow the setup guide in README.md + +### Rebuild frontend assets on source change +``` +yarn watch +``` + +### Rerun tests on source change +``` +yarn jest +``` + +### Run tests for backend assets before commit +``` +test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)" +go test -v ./pkg/... +``` + +### Run tests for frontend assets before commit +``` +yarn test +``` + + +## Pull Request Checklist + +* Branch from the master branch and, if needed, rebase to the current master branch before submitting your pull request. If it doesn't merge cleanly with master you may be asked to rebase your changes. + +* Commits should be as small as possible, while ensuring that each commit is correct independently (i.e., each commit should compile and pass tests). + +* If your patch is not getting reviewed or you need a specific person to review it, you can @-reply a reviewer asking for a review in the pull request or a comment. + +* Add tests relevant to the fixed bug or new feature.
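The pre-commit checks above can also be wired into a Git hook so they run automatically on every commit. A minimal sketch, assuming the standard `.git/hooks/pre-commit` location; bundling the checks this way is a suggestion, not something the guide mandates:

```bash
#!/bin/bash
# Sketch of a pre-commit hook bundling the checks from the contributing guide.
# Save as .git/hooks/pre-commit and mark executable: chmod +x .git/hooks/pre-commit
set -e

# Backend: fail the commit if any non-vendored Go file is not gofmt-formatted
test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"

# Backend tests
go test -v ./pkg/...

# Frontend tests
yarn test
```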
diff --git a/Gopkg.lock b/Gopkg.lock index bd247d691dd..041f784f770 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -19,6 +19,12 @@ packages = ["."] revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520" +[[projects]] + branch = "master" + name = "github.com/VividCortex/mysqlerr" + packages = ["."] + revision = "6c6b55f8796f578c870b7e19bafb16103bc40095" + [[projects]] name = "github.com/aws/aws-sdk-go" packages = [ @@ -673,6 +679,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "81a37e747b875cf870c1b9486fa3147e704dea7db8ba86f7cb942d3ddc01d3e3" + inputs-digest = "6e9458f912a5f0eb3430b968f1b4dbc4e3b7671b282cf4fe1573419a6d9ba0d4" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 6c91ec37221..c5b4b31cb32 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -203,3 +203,7 @@ ignored = [ [[constraint]] name = "github.com/denisenkom/go-mssqldb" revision = "270bc3860bb94dd3a3ffd047377d746c5e276726" + +[[constraint]] + name = "github.com/VividCortex/mysqlerr" + branch = "master" diff --git a/PLUGIN_DEV.md b/PLUGIN_DEV.md index 4e2e080ebe6..168b21dbd88 100644 --- a/PLUGIN_DEV.md +++ b/PLUGIN_DEV.md @@ -6,8 +6,8 @@ upgrading Grafana please check here before creating an issue. ## Links -- [Datasource plugin written in typescript](https://github.com/grafana/typescript-template-datasource) -- [Simple json dataource plugin](https://github.com/grafana/simple-json-datasource) +- [Datasource plugin written in TypeScript](https://github.com/grafana/typescript-template-datasource) +- [Simple JSON datasource plugin](https://github.com/grafana/simple-json-datasource) - [Plugin development guide](http://docs.grafana.org/plugins/developing/development/) - [Webpack Grafana plugin template project](https://github.com/CorpGlory/grafana-plugin-template-webpack) diff --git a/build.go b/build.go index 9502f52be11..69fbf3bada8 100644 --- a/build.go +++ b/build.go @@ -22,6 +22,11 @@ import ( "time" ) +const ( + windows = "windows" + linux = "linux" +) + var ( //versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`) goarch string @@ -110,13 +115,13 @@ func main() { case "package": grunt(gruntBuildArg("build")...) grunt(gruntBuildArg("package")...) - if goos == "linux" { + if goos == linux { createLinuxPackages() } case "package-only": grunt(gruntBuildArg("package")...) - if goos == "linux" { + if goos == linux { createLinuxPackages() } @@ -378,7 +383,7 @@ func ensureGoPath() { } func grunt(params ...string) { - if runtime.GOOS == "windows" { + if runtime.GOOS == windows { runPrint(`.\node_modules\.bin\grunt`, params...) } else { runPrint("./node_modules/.bin/grunt", params...) @@ -420,7 +425,7 @@ func build(binaryName, pkg string, tags []string) { binary = fmt.Sprintf("./bin/%s", binaryName) } - if goos == "windows" { + if goos == windows { binary += ".exe" } @@ -484,11 +489,11 @@ func clean() { func setBuildEnv() { os.Setenv("GOOS", goos) - if goos == "windows" { + if goos == windows { // require windows >=7 os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601") } - if goarch != "amd64" || goos != "linux" { + if goarch != "amd64" || goos != linux { // needed for all other archs cgo = true } diff --git a/conf/defaults.ini b/conf/defaults.ini index 15b8927e65a..eb8debc0094 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -474,6 +474,10 @@ error_or_timeout = alerting # Default setting for how Grafana handles nodata or null values in alerting. 
(alerting, no_data, keep_state, ok) nodata_or_nullvalues = no_data +# Alert notifications can include images, but rendering many images at the same time can overload the server +# This limit will protect the server from render overloading and make sure notifications are sent out quickly +concurrent_render_limit = 5 + #################################### Explore ############################# [explore] # Enable the Explore section diff --git a/conf/sample.ini b/conf/sample.ini index 2ef254f79b9..e6a03718d19 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -393,6 +393,10 @@ log_queries = # Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok) ;nodata_or_nullvalues = no_data +# Alert notifications can include images, but rendering many images at the same time can overload the server +# This limit will protect the server from render overloading and make sure notifications are sent out quickly +;concurrent_render_limit = 5 + #################################### Explore ############################# [explore] # Enable the Explore section @@ -431,7 +435,7 @@ log_queries = ;sampler_param = 1 #################################### Grafana.com integration ########################## -# Url used to to import dashboards directly from Grafana.com +# Url used to import dashboards directly from Grafana.com [grafana_com] ;url = https://grafana.com diff --git a/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json new file mode 100644 index 00000000000..08bf6dce9d0 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_slow_queries_and_annotations.json @@ -0,0 +1,1166 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + }, + { + "datasource": "-- Grafana --", + "enable": true, + "hide": false, + "iconColor": "rgba(255, 96, 96, 1)", + "limit": 100, + "matchAny": false, + "name": "annotations", + "showIn": 0, + "tags": [ + "asd" + ], + "type": "tags" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 13, + "x": 0, + "y": 0 + }, + "id": 6, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": 
false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 11, + "x": 13, + "y": 0 + }, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 7 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 7 + }, + "id": 18, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": 
"individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 17, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "30s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 0, + "y": 14 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 8, + "y": 14 + }, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + 
"seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 16, + "y": 14 + }, + "id": 11, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 0, + "y": 19 + }, + "id": 14, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": 
"gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 8, + "y": 19 + }, + "id": 15, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 5, + "w": 8, + "x": 16, + "y": 19 + }, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 6, + "w": 16, + "x": 0, + "y": 24 + }, + "id": 13, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + 
"yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "fill": 1, + "gridPos": { + "h": 6, + "w": 8, + "x": 16, + "y": 24 + }, + "id": 16, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "slow_query", + "stringInput": "5s" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Panel tests - Slow Queries & Annotations", + "uid": "xtY_uCAiz", + "version": 11 +} diff --git a/devenv/docker/ha_test/.gitignore b/devenv/docker/ha_test/.gitignore new file mode 100644 index 00000000000..0f4e139e204 --- /dev/null +++ b/devenv/docker/ha_test/.gitignore @@ -0,0 +1 @@ +grafana/provisioning/dashboards/alerts/alert-* \ No newline at end of file diff --git a/devenv/docker/ha_test/README.md b/devenv/docker/ha_test/README.md new file mode 100644 index 00000000000..bc93727ceae --- /dev/null +++ b/devenv/docker/ha_test/README.md @@ -0,0 +1,137 @@ +# Grafana High Availability (HA) test setup + +A set of docker compose services which together creates a Grafana HA test setup with capability of easily +scaling up/down number of Grafana instances. + +Included services + +* Grafana +* Mysql - Grafana configuration database and session storage +* Prometheus - Monitoring of Grafana and used as datasource of provisioned alert rules +* Nginx - Reverse proxy for Grafana and Prometheus. Enables browsing Grafana/Prometheus UI using a hostname + +## Prerequisites + +### Build grafana docker container + +Build a Grafana docker container from current branch and commit and tag it as grafana/grafana:dev. 
+ +```bash +$ cd <grafana repo> +$ make build-docker-full +``` + +### Virtual host names + +#### Alternative 1 - Use dnsmasq + +```bash +$ sudo apt-get install dnsmasq +$ echo 'address=/loc/127.0.0.1' | sudo tee /etc/dnsmasq.d/dnsmasq-loc.conf > /dev/null +$ sudo /etc/init.d/dnsmasq restart +$ ping whatever.loc +PING whatever.loc (127.0.0.1) 56(84) bytes of data. +64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.076 ms +--- whatever.loc ping statistics --- +1 packet transmitted, 1 received, 0% packet loss, time 1998ms +``` + +#### Alternative 2 - Manually update /etc/hosts + +Update your `/etc/hosts` to be able to access Grafana and/or Prometheus UI using a hostname. + +```bash +$ cat /etc/hosts +127.0.0.1 grafana.loc +127.0.0.1 prometheus.loc +``` + +## Start services + +```bash +$ docker-compose up -d +``` + +Browse +* http://grafana.loc/ +* http://prometheus.loc/ + +Check for any errors + +```bash +$ docker-compose logs | grep error +``` + +### Scale Grafana instances up/down + +Scale the number of Grafana instances to `<instances>` + +```bash +$ docker-compose up --scale grafana=<instances> -d +# for example 3 instances +$ docker-compose up --scale grafana=3 -d +``` + +## Test alerting + +### Create notification channels + +Creates the default notification channels, if they do not already exist + +```bash +$ ./alerts.sh setup +``` + +### Slack notifications + +Disable + +```bash +$ ./alerts.sh slack -d +``` + +Enable and configure the URL + +```bash +$ ./alerts.sh slack -u https://hooks.slack.com/services/... +``` + +Enable, configure the URL and enable reminders + +```bash +$ ./alerts.sh slack -u https://hooks.slack.com/services/... -r -e 10m +``` + +### Provision alert dashboards with alert rules + +Provision 1 dashboard/alert rule (default) + +```bash +$ ./alerts.sh provision +``` + +Provision 10 dashboards/alert rules + +```bash +$ ./alerts.sh provision -a 10 +``` + +Provision 10 dashboards/alert rules and change condition to `gt > 100` + +```bash +$ ./alerts.sh provision -a 10 -c 100 +``` + +### Pause/unpause all alert rules + +Pause + +```bash +$ ./alerts.sh pause +``` + +Unpause + +```bash +$ ./alerts.sh unpause +``` diff --git a/devenv/docker/ha_test/alerts.sh b/devenv/docker/ha_test/alerts.sh new file mode 100755 index 00000000000..a05a4581739 --- /dev/null +++ b/devenv/docker/ha_test/alerts.sh @@ -0,0 +1,156 @@ +#!/bin/bash + +requiresJsonnet() { + if ! type "jsonnet" > /dev/null; then + echo "you need to install jsonnet to run this script" + echo "follow the instructions on https://github.com/google/jsonnet" + exit 1 + fi +} + +setup() { + STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/1) + if [ $STATUS -eq 200 ]; then + echo "Email already exists, skipping..." + else + curl -H "Content-Type: application/json" \ + -d '{ + "name": "Email", + "type": "email", + "isDefault": false, + "sendReminder": false, + "uploadImage": true, + "settings": { + "addresses": "user@test.com" + } + }' \ + http://admin:admin@grafana.loc/api/alert-notifications + fi + + STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/2) + if [ $STATUS -eq 200 ]; then + echo "Slack already exists, skipping..."
+ else + curl -H "Content-Type: application/json" \ + -d '{ + "name": "Slack", + "type": "slack", + "isDefault": false, + "sendReminder": false, + "uploadImage": true + }' \ + http://admin:admin@grafana.loc/api/alert-notifications + fi +} + +slack() { + enabled=true + url='' + remind=false + remindEvery='10m' + + while getopts ":e:u:dr" o; do + case "${o}" in + e) + remindEvery=${OPTARG} + ;; + u) + url=${OPTARG} + ;; + d) + enabled=false + ;; + r) + remind=true + ;; + esac + done + shift $((OPTIND-1)) + + curl -X PUT \ + -H "Content-Type: application/json" \ + -d '{ + "id": 2, + "name": "Slack", + "type": "slack", + "isDefault": '$enabled', + "sendReminder": '$remind', + "frequency": "'$remindEvery'", + "uploadImage": true, + "settings": { + "url": "'$url'" + } + }' \ + http://admin:admin@grafana.loc/api/alert-notifications/2 +} + +provision() { + alerts=1 + condition=65 + while getopts ":a:c:" o; do + case "${o}" in + a) + alerts=${OPTARG} + ;; + c) + condition=${OPTARG} + ;; + esac + done + shift $((OPTIND-1)) + + requiresJsonnet + + rm -rf grafana/provisioning/dashboards/alerts/alert-*.json + jsonnet -m grafana/provisioning/dashboards/alerts grafana/provisioning/alerts.jsonnet --ext-code alerts=$alerts --ext-code condition=$condition +} + +pause() { + curl -H "Content-Type: application/json" \ + -d '{"paused":true}' \ + http://admin:admin@grafana.loc/api/admin/pause-all-alerts +} + +unpause() { + curl -H "Content-Type: application/json" \ + -d '{"paused":false}' \ + http://admin:admin@grafana.loc/api/admin/pause-all-alerts +} + +usage() { + echo -e "Usage: ./alerts.sh COMMAND [OPTIONS]\n" + echo -e "Commands" + echo -e " setup\t\t creates default alert notification channels" + echo -e " slack\t\t configure slack notification channel" + echo -e " [-d]\t\t\t disable notifier, default enabled" + echo -e " [-u <url>]\t\t\t url" + echo -e " [-r]\t\t\t send reminders" + echo -e " [-e <interval>]\t\t default 10m\n" + echo -e " provision\t provision alerts" + echo -e " [-a <alerts>]\t default 1" + echo -e " [-c <condition>]\t default 65\n" + echo -e " pause\t\t pause all alerts" + echo -e " unpause\t unpause all alerts" +} + +main() { + local cmd=$1 + + if [[ $cmd == "setup" ]]; then + setup + elif [[ $cmd == "slack" ]]; then + slack "${@:2}" + elif [[ $cmd == "provision" ]]; then + provision "${@:2}" + elif [[ $cmd == "pause" ]]; then + pause + elif [[ $cmd == "unpause" ]]; then + unpause + fi + + if [[ -z "$cmd" ]]; then + usage + fi +} + +main "$@" diff --git a/devenv/docker/ha_test/docker-compose.yaml b/devenv/docker/ha_test/docker-compose.yaml new file mode 100644 index 00000000000..ce8630d88a4 --- /dev/null +++ b/devenv/docker/ha_test/docker-compose.yaml @@ -0,0 +1,78 @@ +version: "2.1" + +services: + nginx-proxy: + image: jwilder/nginx-proxy + ports: + - "80:80" + volumes: + - /var/run/docker.sock:/tmp/docker.sock:ro + + db: + image: mysql + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana + MYSQL_USER: grafana + MYSQL_PASSWORD: password + ports: + - 3306 + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + timeout: 10s + retries: 10 + + # db: + # image: postgres:9.3 + # environment: + # POSTGRES_DATABASE: grafana + # POSTGRES_USER: grafana + # POSTGRES_PASSWORD: password + # ports: + # - 5432 + # healthcheck: + # test: ["CMD-SHELL", "pg_isready -d grafana -U grafana"] + # timeout: 10s + # retries: 10 + + grafana: + image: grafana/grafana:dev + volumes: + - ./grafana/provisioning/:/etc/grafana/provisioning/ + environment: + - VIRTUAL_HOST=grafana.loc + -
GF_SERVER_ROOT_URL=http://grafana.loc + - GF_DATABASE_NAME=grafana + - GF_DATABASE_USER=grafana + - GF_DATABASE_PASSWORD=password + - GF_DATABASE_TYPE=mysql + - GF_DATABASE_HOST=db:3306 + - GF_SESSION_PROVIDER=mysql + - GF_SESSION_PROVIDER_CONFIG=grafana:password@tcp(db:3306)/grafana?allowNativePasswords=true + # - GF_DATABASE_TYPE=postgres + # - GF_DATABASE_HOST=db:5432 + # - GF_DATABASE_SSL_MODE=disable + # - GF_SESSION_PROVIDER=postgres + # - GF_SESSION_PROVIDER_CONFIG=user=grafana password=password host=db port=5432 dbname=grafana sslmode=disable + - GF_LOG_FILTERS=alerting.notifier:debug,alerting.notifier.slack:debug + ports: + - 3000 + depends_on: + db: + condition: service_healthy + + prometheus: + image: prom/prometheus:v2.4.2 + volumes: + - ./prometheus/:/etc/prometheus/ + environment: + - VIRTUAL_HOST=prometheus.loc + ports: + - 9090 + + # mysqld-exporter: + # image: prom/mysqld-exporter + # environment: + # - DATA_SOURCE_NAME=grafana:password@(mysql:3306)/ + # ports: + # - 9104 diff --git a/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet new file mode 100644 index 00000000000..86ded7e79d6 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet @@ -0,0 +1,202 @@ +local numAlerts = std.extVar('alerts'); +local condition = std.extVar('condition'); +local arr = std.range(1, numAlerts); + +local alertDashboardTemplate = { + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 65 + ], + "type": "gt" + }, + "operator": { + "type": "and" + }, + "query": { + "params": [ + "A", + "5m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "executionErrorState": "alerting", + "frequency": "10s", + "handler": 1, + "name": "bulk alerting", + "noDataState": "no_data", + "notifications": [ + { + "id": 2 + } + ] + }, + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fill": 1, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "$$hashKey": "object:117", + "expr": "go_goroutines", + "format": "time_series", + "intervalFactor": 1, + "refId": "A" + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 50 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-6h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + 
"1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "New dashboard", + "uid": null, + "version": 0 +}; + + +{ + ['alert-' + std.toString(x) + '.json']: + alertDashboardTemplate + { + panels: [ + alertDashboardTemplate.panels[0] + + { + alert+: { + name: 'Alert rule ' + x, + conditions: [ + alertDashboardTemplate.panels[0].alert.conditions[0] + + { + evaluator+: { + params: [condition] + } + }, + ], + }, + }, + ], + uid: 'alert-' + x, + title: 'Alert ' + x + }, + for x in arr +} \ No newline at end of file diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml new file mode 100644 index 00000000000..60b6cd4bb04 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts.yaml @@ -0,0 +1,8 @@ +apiVersion: 1 + +providers: + - name: 'Alerts' + folder: 'Alerts' + type: file + options: + path: /etc/grafana/provisioning/dashboards/alerts diff --git a/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json new file mode 100644 index 00000000000..53e33c37b1f --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/dashboards/alerts/overview.json @@ -0,0 +1,172 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": { + "Active alerts": "#bf1b00" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "Prometheus", + "fill": 1, + "gridPos": { + "h": 12, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 2, + "interval": "", + "legend": { + "alignAsTable": true, + "avg": false, + "current": true, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "total": false, + "values": true + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "Active grafana instances", + "dashes": true, + "fill": 0 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "sum(increase(grafana_alerting_notification_sent_total[1m])) by(job)", + "format": "time_series", + "instant": false, + "interval": "1m", + "intervalFactor": 1, + "legendFormat": "Notifications sent", + "refId": "A" + }, + { + "expr": "min(grafana_alerting_active_alerts) without(instance)", + "format": "time_series", + "interval": "1m", + "intervalFactor": 1, + "legendFormat": "Active alerts", + "refId": "B" + }, + { + "expr": "count(up{job=\"grafana\"})", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "Active grafana instances", + "refId": "C" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Notifications sent vs active alerts", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": "0", + "show": true + }, + { + 
"format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": 3 + } + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Overview", + "uid": "xHy7-hAik", + "version": 6 +} \ No newline at end of file diff --git a/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml new file mode 100644 index 00000000000..8d59793be16 --- /dev/null +++ b/devenv/docker/ha_test/grafana/provisioning/datasources/datasources.yaml @@ -0,0 +1,11 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + jsonData: + timeInterval: 10s + queryTimeout: 30s + httpMethod: POST \ No newline at end of file diff --git a/devenv/docker/ha_test/prometheus/prometheus.yml b/devenv/docker/ha_test/prometheus/prometheus.yml new file mode 100644 index 00000000000..ea97ba8ba05 --- /dev/null +++ b/devenv/docker/ha_test/prometheus/prometheus.yml @@ -0,0 +1,39 @@ +# my global config +global: + scrape_interval: 10s # By default, scrape targets every 15 seconds. + evaluation_interval: 10s # By default, scrape targets every 15 seconds. + # scrape_timeout is set to the global default (10s). + +# Load and evaluate rules in this file every 'evaluation_interval' seconds. +#rule_files: +# - "alert.rules" +# - "first.rules" +# - "second.rules" + +# alerting: +# alertmanagers: +# - scheme: http +# static_configs: +# - targets: +# - "127.0.0.1:9093" + +scrape_configs: + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + - job_name: 'grafana' + dns_sd_configs: + - names: + - 'grafana' + type: 'A' + port: 3000 + refresh_interval: 10s + + # - job_name: 'mysql' + # dns_sd_configs: + # - names: + # - 'mysqld-exporter' + # type: 'A' + # port: 9104 + # refresh_interval: 10s \ No newline at end of file diff --git a/devenv/setup.sh b/devenv/setup.sh index 7b5499a9f52..c9cc0d47a6f 100755 --- a/devenv/setup.sh +++ b/devenv/setup.sh @@ -11,7 +11,7 @@ bulkDashboard() { let COUNTER=COUNTER+1 done - ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml + ln -s -f ../../../devenv/bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml } bulkAlertingDashboard() { @@ -25,7 +25,7 @@ bulkAlertingDashboard() { let COUNTER=COUNTER+1 done - ln -s -f -r ./bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml + ln -s -f ../../../devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml } requiresJsonnet() { diff --git a/docs/sources/administration/permissions.md b/docs/sources/administration/permissions.md index 1d1a70607c8..0d374f03647 100644 --- a/docs/sources/administration/permissions.md +++ b/docs/sources/administration/permissions.md @@ -55,7 +55,7 @@ This admin flag makes a user a `Super Admin`. 
This means they can access the `Se {{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}} For dashboards and dashboard folders there is a **Permissions** page that make it possible to -remove the default role based permssions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**. +remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**. You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**. @@ -102,7 +102,7 @@ Permissions for a dashboard: Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins. -- **View**: Can only view existing dashboars/folders. +- **View**: Can only view existing dashboards/folders. - You cannot override permissions for users with **Org Admin Role** - A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule. diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index a026d1ec0cd..336ef9bfc3e 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -200,7 +200,7 @@ providers: folder: '' type: file disableDeletion: false - updateIntervalSeconds: 3 #how often Grafana will scan for changed dashboards + updateIntervalSeconds: 10 #how often Grafana will scan for changed dashboards options: path: /var/lib/grafana/dashboards ``` @@ -217,7 +217,7 @@ Note: The JSON shown in input field and when using `Copy JSON to Clipboard` and/ {{< docs-imagebox img="/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="500px" class="docs-image--no-shadow" >}} -### Reuseable Dashboard Urls +### Reusable Dashboard Urls If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifier. When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated. diff --git a/docs/sources/auth/ldap.md b/docs/sources/auth/ldap.md index 82db8214fb7..4a884a60d15 100644 --- a/docs/sources/auth/ldap.md +++ b/docs/sources/auth/ldap.md @@ -181,6 +181,7 @@ group_search_filter = "(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU group_search_filter = "(|(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])(member:1.2.840.113556.1.4.1941:=CN=%s,[another user container/OU]))" group_search_filter_user_attribute = "cn" ``` +For more information on AD searches see [Microsoft's Search Filter Syntax](https://docs.microsoft.com/en-us/windows/desktop/adsi/search-filter-syntax) documentation. For troubleshooting, by changing `member_of` in `[servers.attributes]` to "dn" it will show you more accurate group memberships when [debug is enabled](#troubleshooting). 
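A nested-group filter like the ones above can be sanity-checked outside Grafana before it goes into `ldap.toml`. A sketch using the stock `ldapsearch` client, where the server address, bind DN, search base, and user DN are all placeholder values to substitute with your own AD details:

```bash
# Placeholder host, credentials and DNs - replace with your own AD values.
# If nested-group resolution works, this returns the cn of every group the
# user belongs to, directly or transitively.
ldapsearch -x -H ldap://ad.example.com \
  -D "CN=bind-user,OU=Users,DC=example,DC=com" -W \
  -b "DC=example,DC=com" \
  "(member:1.2.840.113556.1.4.1941:=CN=jdoe,OU=Users,DC=example,DC=com)" cn
```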
diff --git a/docs/sources/auth/overview.md b/docs/sources/auth/overview.md index 20010a9ac09..a372600ac46 100644 --- a/docs/sources/auth/overview.md +++ b/docs/sources/auth/overview.md @@ -58,7 +58,7 @@ If you change your organization name in the Grafana UI this setting needs to be ### Basic authentication Basic auth is enabled by default and works with the built in Grafana user password authentication system and LDAP -authenticaten integration. +authentication integration. To disable basic auth: diff --git a/docs/sources/contribute/cla.md b/docs/sources/contribute/cla.md index ffb2aaef1b9..a073a9a4eae 100644 --- a/docs/sources/contribute/cla.md +++ b/docs/sources/contribute/cla.md @@ -101,4 +101,4 @@ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU [OR US]


-This CLA agreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) +This CLA agreement is based on the [Harmony Contributor Agreement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md index debf771ffb0..a8399804344 100644 --- a/docs/sources/features/datasources/mssql.md +++ b/docs/sources/features/datasources/mssql.md @@ -225,7 +225,7 @@ When above query are used in a graph panel the result will be two series named ` {{< docs-imagebox img="/img/docs/v51/mssql_time_series_two.png" class="docs-image--no-shadow docs-image--right" >}} -**Example with multiple `value` culumns:** +**Example with multiple `value` columns:** ```sql SELECT diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md index d713a4b42b7..590f4dec65e 100644 --- a/docs/sources/features/datasources/mysql.md +++ b/docs/sources/features/datasources/mysql.md @@ -59,7 +59,7 @@ Identifier | Description The database user you specify when you add the data source should only be granted SELECT permissions on the specified database & tables you want to query. Grafana does not validate that the query is safe. The query could include any SQL statement. For example, statements like `USE otherdb;` and `DROP TABLE user;` would be -executed. To protect against this we **Highly** recommmend you create a specific mysql user with restricted permissions. +executed. To protect against this we **Highly** recommend you create a specific mysql user with restricted permissions. Example: diff --git a/docs/sources/features/datasources/opentsdb.md b/docs/sources/features/datasources/opentsdb.md index 1f6f022a18c..d2cd0b1dc0e 100644 --- a/docs/sources/features/datasources/opentsdb.md +++ b/docs/sources/features/datasources/opentsdb.md @@ -84,7 +84,7 @@ Some examples are mentioned below to make nested template queries work successfu Query | Description ------------ | ------------- *tag_values(cpu, hostname, env=$env)* | Return tag values for cpu metric, selected env tag value and tag key hostname -*tag_values(cpu, hostanme, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname +*tag_values(cpu, hostname, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname For details on OpenTSDB metric queries checkout the official [OpenTSDB documentation](http://opentsdb.net/docs/build/html/index.html) diff --git a/docs/sources/features/datasources/stackdriver.md b/docs/sources/features/datasources/stackdriver.md new file mode 100644 index 00000000000..96f3ba3382e --- /dev/null +++ b/docs/sources/features/datasources/stackdriver.md @@ -0,0 +1,171 @@ ++++ +title = "Using Stackdriver in Grafana" +description = "Guide for using Stackdriver in Grafana" +keywords = ["grafana", "stackdriver", "google", "guide"] +type = "docs" +aliases = ["/datasources/stackdriver"] +[menu.docs] +name = "Stackdriver" +parent = "datasources" +weight = 11 ++++ + +# Using Google Stackdriver in Grafana + +> Only available in Grafana v5.3+. +> The datasource is currently a beta feature and is subject to change. 
+ +Grafana ships with built-in support for Google Stackdriver. Just add it as a datasource and you are ready to build dashboards for your Stackdriver metrics. + +## Adding the data source to Grafana + +1. Open the side menu by clicking the Grafana icon in the top header. +2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`. +3. Click the `+ Add data source` button in the top header. +4. Select `Stackdriver` from the *Type* dropdown. +5. Upload or paste in the Service Account Key file. See below for steps on how to create a Service Account Key file. + +> NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization. + +| Name | Description | +| --------------------- | ----------------------------------------------------------------------------------- | +| _Name_ | The datasource name. This is how you refer to the datasource in panels & queries. | +| _Default_ | Default datasource means that it will be pre-selected for new panels. | +| _Service Account Key_ | Service Account Key File for a GCP Project. Instructions below on how to create it. | + +## Authentication + +### Service Account Credentials - Private Key File + +To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to visualize data from multiple GCP Projects then you need to create one datasource per GCP Project. + +#### Enable APIs + +The following APIs need to be enabled first: + +- [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com) +- [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com) + +Click on the links above and click the `Enable` button: + +![Enable GCP APIs](/img/docs/v54/stackdriver_enable_api.png) + +#### Create a GCP Service Account for a Project + +1. Navigate to the [APIs & Services Credentials page](https://console.cloud.google.com/apis/credentials). +2. Click on the `Create credentials` dropdown/button and choose the `Service account key` option. + + ![Create service account button](/img/docs/v54/stackdriver_create_service_account_button.png) +3. On the `Create service account key` page, choose key type `JSON`. Then in the `Service Account` dropdown, choose the `New service account` option: + + ![Create service account key](/img/docs/v54/stackdriver_create_service_account_key.png) +4. Some new fields will appear. Fill in a name for the service account in the `Service account name` field and then choose the `Monitoring Viewer` role from the `Role` dropdown: + + ![Choose role](/img/docs/v54/stackdriver_service_account_choose_role.png) +5. Click the Create button. A JSON key file will be created and downloaded to your computer. Store this file in a secure place as it allows access to your Stackdriver data. +6. Upload it to Grafana on the datasource Configuration page. You can either upload the file or paste in the contents of the file. + + ![Choose role](/img/docs/v54/stackdriver_grafana_upload_key.png) +7. The file contents will be encrypted and saved in the Grafana database. Don't forget to save after uploading the file! + + ![Choose role](/img/docs/v54/stackdriver_grafana_key_uploaded.png) + +## Metric Query Editor + +Choose a metric from the `Metric` dropdown. 
+
+To add a filter, click the plus icon, choose a field to filter by, and enter a filter value, e.g. `instance_name = grafana-1`.
+
+### Aggregation
+
+The aggregation field lets you combine time series based on common statistics. Read more about this option [here](https://cloud.google.com/monitoring/charts/metrics-selector#aggregation-options).
+
+The `Aligner` field allows you to align multiple time series after the same group by time interval. Read more about how it works [here](https://cloud.google.com/monitoring/charts/metrics-selector#alignment).
+
+#### Alignment Period/Group by Time
+
+The `Alignment Period` groups a metric by time if an aggregation is chosen. The default is to use the GCP Stackdriver default groupings (which allows you to compare graphs in Grafana with graphs in the Stackdriver UI).
+The option is called `Stackdriver auto` and the defaults are:
+
+- 1m for time ranges < 23 hours
+- 5m for time ranges >= 23 hours and < 6 days
+- 1h for time ranges >= 6 days
+
+A short Go sketch of this bucketing logic is included further below, after the Query Variable section.
+
+The other automatic option is `Grafana auto`. This will automatically set the group by time depending on the time range chosen and the width of the graph panel. Read more about the details [here](http://docs.grafana.org/reference/templating/#the-interval-variable).
+
+It is also possible to choose fixed time intervals to group by, like `1h` or `1d`.
+
+### Group By
+
+Group by resource or metric labels to reduce the number of time series and to aggregate the results. E.g. group by `instance_name` to see an aggregated metric for a Compute instance.
+
+### Alias Patterns
+
+The Alias By field allows you to control the format of the legend keys. The default is to show the metric name and labels. This can be long and hard to read. Using the following patterns in the alias field, you can format the legend key the way you want it.
+
+#### Metric Type Patterns
+
+Alias Pattern | Description | Example Result
+----------------- | ---------------------------- | -------------
+`{{metric.type}}` | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization`
+`{{metric.name}}` | returns the metric name part | `instance/cpu/utilization`
+`{{metric.service}}` | returns the service part | `compute`
+
+#### Label Patterns
+
+In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns.
+
+Alias Pattern Format | Description | Alias Pattern Example | Example Result
+---------------------- | ---------------------------------- | ---------------------------- | -------------
+`{{metric.label.xxx}}` | returns the metric label value | `{{metric.label.instance_name}}` | `grafana-1-prod`
+`{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}` | `us-east1-b`
+
+Example Alias By: `{{metric.type}} - {{metric.label.instance_name}}`
+
+Example Result: `compute.googleapis.com/instance/cpu/usage_time - server1-prod`
+
+## Templating
+
+Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place.
+Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data
+being displayed in your dashboard.
+
+Check out the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different
+types of template variables.
+
+### Query Variable
+
+Writing variable queries is not supported yet.
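+
+As a rough illustration of the `Stackdriver auto` defaults described in the Alignment Period section above, here is a minimal Go sketch (the function is purely illustrative and not part of Grafana's code base):
+
+```go
+package main
+
+import (
+	"fmt"
+	"time"
+)
+
+// stackdriverAutoAlignmentPeriod mirrors the default alignment period
+// buckets listed in the Alignment Period section above.
+func stackdriverAutoAlignmentPeriod(timeRange time.Duration) time.Duration {
+	switch {
+	case timeRange < 23*time.Hour:
+		return time.Minute
+	case timeRange < 6*24*time.Hour:
+		return 5 * time.Minute
+	default:
+		return time.Hour
+	}
+}
+
+func main() {
+	// Print the period chosen for a few representative dashboard time ranges.
+	for _, tr := range []time.Duration{6 * time.Hour, 48 * time.Hour, 10 * 24 * time.Hour} {
+		fmt.Printf("time range %v -> alignment period %v\n", tr, stackdriverAutoAlignmentPeriod(tr))
+	}
+}
+```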
+
+### Using variables in queries
+
+There are two syntaxes:
+
+- `$varname` Example: rate(http_requests_total{job=~"$job"}[5m])
+- `[[varname]]` Example: rate(http_requests_total{job=~"[[job]]"}[5m])
+
+Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the *Multi-value* or *Include all value* options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`.
+
+## Annotations
+
+[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation
+queries via the Dashboard menu / Annotations view.
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here is a provisioning example for this datasource:
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: Stackdriver
+    type: stackdriver
+    jsonData:
+      tokenUri: https://oauth2.googleapis.com/token
+      clientEmail: stackdriver@myproject.iam.gserviceaccount.com
+    secureJsonData:
+      privateKey: ""
+```
diff --git a/docs/sources/features/panels/alertlist.md b/docs/sources/features/panels/alertlist.md
index 58aa2c0966a..a1ea8f0f600 100644
--- a/docs/sources/features/panels/alertlist.md
+++ b/docs/sources/features/panels/alertlist.md
@@ -22,6 +22,6 @@ The alert list panel allows you to display your dashboards alerts. The list can
 1. **Show**: Lets you choose between current state or recent state changes.
 2. **Max Items**: Sets the maximum number of items in the list.
-3. **Sort Order**: Lets you sort your list alphabeticaly(asc/desc) or by importance.
+3. **Sort Order**: Lets you sort your list alphabetically (asc/desc) or by importance.
 4. **Alerts From This Dashboard**: Shows alerts only from the dashboard the alert list is in.
 5. **State Filter**: Here you can filter your list by one or more parameters.
diff --git a/docs/sources/features/panels/heatmap.md b/docs/sources/features/panels/heatmap.md
index 56ffe29f20f..aa87fbef1df 100644
--- a/docs/sources/features/panels/heatmap.md
+++ b/docs/sources/features/panels/heatmap.md
@@ -80,7 +80,7 @@ the upper or lower bound of the interval.
 There are a number of datasources supporting histogram over time like Elasticsearch (by using a Histogram
 bucket aggregation) or Prometheus (with [histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) metric type
 and *Format as* option set to Heatmap). But generally, any datasource could be used if it meets the requirements:
-returns series with names representing bucket bound or returns sereis sorted by the bound in ascending order.
+returns series with names representing bucket bound or returns series sorted by the bound in ascending order.
 
 With Elasticsearch you control the size of the buckets using the Histogram interval (Y-Axis) and the Date Histogram interval (X-axis).
diff --git a/docs/sources/guides/whats-new-in-v2-5.md b/docs/sources/guides/whats-new-in-v2-5.md
index 90270ea1121..08d51ba5bd7 100644
--- a/docs/sources/guides/whats-new-in-v2-5.md
+++ b/docs/sources/guides/whats-new-in-v2-5.md
@@ -25,7 +25,7 @@
 correctly in UTC mode.
 This release brings a fully featured query editor for Elasticsearch. You will now be able to visualize
-logs or any kind of data stored in Elasticserarch. The query editor allows you to build both simple
+logs or any kind of data stored in Elasticsearch. The query editor allows you to build both simple
 and complex queries for logs or metrics.
 
 - Compute metrics from your documents, supported Elasticsearch aggregations:
diff --git a/docs/sources/guides/whats-new-in-v2.md b/docs/sources/guides/whats-new-in-v2.md
index 499849c8d83..28d068b1cd6 100644
--- a/docs/sources/guides/whats-new-in-v2.md
+++ b/docs/sources/guides/whats-new-in-v2.md
@@ -34,7 +34,7 @@ Organizations via a role. That role can be:
 
 There are currently no permissions on individual dashboards.
 
-Read more about Grafanas new user model on the [Admin section](../reference/admin/)
+Read more about Grafana's new user model on the [Admin section](../reference/admin/)
 
 ## Dashboard Snapshot sharing
diff --git a/docs/sources/guides/whats-new-in-v3-1.md b/docs/sources/guides/whats-new-in-v3-1.md
index 1e8ef87297b..ab6c5281275 100644
--- a/docs/sources/guides/whats-new-in-v3-1.md
+++ b/docs/sources/guides/whats-new-in-v3-1.md
@@ -21,7 +21,7 @@ The export feature is now accessed from the share menu.
 Dashboards exported from Grafana 3.1 are now more portable and easier for others to import than before.
 The export process extracts information data source types used by panels and adds these to a new `inputs`
 section in the dashboard json. So when you or another person tries to import the dashboard they will be asked to
-select data source and optional metrix prefix options.
+select data source and optional metric prefix options.
@@ -53,7 +53,7 @@ Grafana url to share with a colleague without having to use the Share modal.
 
 ## Internal metrics
 
-Do you want metrics about viewing metrics? Ofc you do! In this release we added support for sending metrics about Grafana to graphite.
+Do you want metrics about viewing metrics? Of course you do! In this release we added support for sending metrics about Grafana to graphite.
 You can configure interval and server in the config file.
 
 ## Logging
diff --git a/docs/sources/guides/whats-new-in-v3.md b/docs/sources/guides/whats-new-in-v3.md
index d82a833ec90..dbd9b685a2b 100644
--- a/docs/sources/guides/whats-new-in-v3.md
+++ b/docs/sources/guides/whats-new-in-v3.md
@@ -197,7 +197,7 @@ you can install it manually from [Grafana.com](https://grafana.com)
 
 ## Plugin showcase
 
 Discovering and installing plugins is very quick and easy with Grafana 3.0 and [Grafana.com](https://grafana.com). Here
-are a couple that I incurage you try!
+are a couple that I encourage you to try!
 
 #### [Clock Panel](https://grafana.com/plugins/grafana-clock-panel)
 Supports both current time and count down mode.
diff --git a/docs/sources/guides/whats-new-in-v4-2.md b/docs/sources/guides/whats-new-in-v4-2.md
index e36e762bb76..7a00023172a 100644
--- a/docs/sources/guides/whats-new-in-v4-2.md
+++ b/docs/sources/guides/whats-new-in-v4-2.md
@@ -45,7 +45,7 @@ We might add more global built in variables in the future and if we do we will p
 
 ### Dedupe alert notifications when running multiple servers
 
-In this release we will dedupe alert notificiations when you are running multiple servers.
+In this release we will dedupe alert notifications when you are running multiple servers.
 This makes it possible to run alerting on multiple servers and only get one notification.
We currently solve this with sql transactions which puts some limitations for how many servers you can use to execute the same rules. diff --git a/docs/sources/guides/whats-new-in-v4-5.md b/docs/sources/guides/whats-new-in-v4-5.md index a5cd3ca982d..c6cfcf64720 100644 --- a/docs/sources/guides/whats-new-in-v4-5.md +++ b/docs/sources/guides/whats-new-in-v4-5.md @@ -45,7 +45,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins ### Enhancements * **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd) -* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboad time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055) +* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055) * **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261) - **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095). @@ -53,7 +53,7 @@ More information [here](https://community.grafana.com/t/using-grafanas-query-ins * **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit. -This option is now rennamed (and moved to Options sub section above your queries): +This option is now renamed (and moved to Options sub section above your queries): ![image|519x120](upload://ySjHOVpavV6yk9LHQxL9nq2HIsT.png) Datas source selection & options & help are now above your metric queries. diff --git a/docs/sources/guides/whats-new-in-v4-6.md b/docs/sources/guides/whats-new-in-v4-6.md index ee0c4ea7a04..91fa74084a8 100644 --- a/docs/sources/guides/whats-new-in-v4-6.md +++ b/docs/sources/guides/whats-new-in-v4-6.md @@ -61,7 +61,7 @@ This makes exploring and filtering Prometheus data much easier. ### Minor Changes * **SMTP**: Make it possible to set specific EHLO for smtp client. 
[#9319](https://github.com/grafana/grafana/issues/9319) -* **Dataproxy**: Allow grafan to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250) +* **Dataproxy**: Allow Grafana to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250) * **HTTP**: set net.Dialer.DualStack to true for all http clients [#9367](https://github.com/grafana/grafana/pull/9367) * **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739) * **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8) diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md index 032fd508dd0..103de190793 100644 --- a/docs/sources/http_api/alerting.md +++ b/docs/sources/http_api/alerting.md @@ -227,7 +227,7 @@ Content-Type: application/json ## Create alert notification -You can find the full list of [supported notifers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page. +You can find the full list of [supported notifiers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page. `POST /api/alert-notifications` diff --git a/docs/sources/http_api/dashboard_versions.md b/docs/sources/http_api/dashboard_versions.md index 3d0ec27a3a3..0be22674997 100644 --- a/docs/sources/http_api/dashboard_versions.md +++ b/docs/sources/http_api/dashboard_versions.md @@ -291,7 +291,7 @@ Content-Type: text/html; charset=UTF-8

```
-The response is a textual respresentation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
+The response is a textual representation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
 
 Status Codes:
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 2bf4789257d..8d156e739bf 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -127,10 +127,13 @@ Another way is put a webserver like Nginx or Apache in front of Grafana and have
 
 ### protocol
 
-`http` or `https`
+`http`, `https` or `socket`
 
 > **Note** Grafana versions earlier than 3.0 are vulnerable to [POODLE](https://en.wikipedia.org/wiki/POODLE). So we strongly recommend to upgrade to 3.x or use a reverse proxy for ssl termination.
 
+### socket
+Path where the socket should be created when `protocol=socket`. Please make sure that Grafana has appropriate permissions.
+
 ### domain
 
 This setting is only used as a part of the `root_url` setting (see below). Important if you
@@ -566,3 +569,11 @@ Default setting for new alert rules. Defaults to categorize error and timeouts a
 > Available in 5.3 and above
 
 Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
+
+### concurrent_render_limit
+
+> Available in 5.3 and above
+
+Alert notifications can include images, but rendering many images at the same time can overload the server.
+This limit protects the server from render overload and makes sure notifications are sent out quickly. Default
+value is `5`.
diff --git a/docs/sources/tutorials/ha_setup.md b/docs/sources/tutorials/ha_setup.md
index 5fdb091a348..f141392e223 100644
--- a/docs/sources/tutorials/ha_setup.md
+++ b/docs/sources/tutorials/ha_setup.md
@@ -26,9 +26,9 @@ Grafana will now persist all long term data in the database. How to configure th
 
 ## User sessions
 
-The second thing to consider is how to deal with user sessions and how to configure your load balancer infront of Grafana.
+The second thing to consider is how to deal with user sessions and how to configure your load balancer in front of Grafana.
 Grafana supports two ways of storing session data: locally on disk or in a database/cache-server.
-If you want to store sessions on disk you can use `sticky sessions` in your load balanacer. If you prefer to store session data in a database/cache-server
+If you want to store sessions on disk you can use `sticky sessions` in your load balancer. If you prefer to store session data in a database/cache-server
 you can use any stateless routing strategy in your load balancer (e.g. round robin or least connections).
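+
+As a minimal sketch of the database-backed option (assuming the standard `[session]` settings in `grafana.ini`; the MySQL connection string below is a placeholder you must adapt to your environment), the session store could be configured like this:
+
+```ini
+[session]
+# Store session data in MySQL instead of on local disk.
+provider = mysql
+# Format: `user:password@tcp(host:port)/database_name`
+provider_config = `grafana:password@tcp(127.0.0.1:3306)/grafana`
+```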
### Sticky sessions diff --git a/docs/versions.json b/docs/versions.json index caefbe198d6..34e9c2150e1 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,4 +1,5 @@ [ + { "version": "v5.3", "path": "/v5.3", "archived": false, "current": false }, { "version": "v5.2", "path": "/", "archived": false, "current": true }, { "version": "v5.1", "path": "/v5.1", "archived": true }, { "version": "v5.0", "path": "/v5.0", "archived": true }, diff --git a/package.json b/package.json index 1e7ed02c87b..5577579e0e5 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "5.3.0-pre1", + "version": "5.4.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" @@ -12,7 +12,7 @@ "devDependencies": { "@types/d3": "^4.10.1", "@types/enzyme": "^3.1.13", - "@types/jest": "^21.1.4", + "@types/jest": "^23.3.2", "@types/node": "^8.0.31", "@types/react": "^16.4.14", "@types/react-custom-scrollbars": "^4.0.5", diff --git a/pkg/api/avatar/avatar.go b/pkg/api/avatar/avatar.go index 5becf90ca35..6cf164285bf 100644 --- a/pkg/api/avatar/avatar.go +++ b/pkg/api/avatar/avatar.go @@ -97,15 +97,6 @@ type CacheServer struct { cache *gocache.Cache } -func (this *CacheServer) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) { - for _, k := range keys { - if _, err := fmt.Sscanf(r.FormValue(k), "%d", &v); err == nil { - defaultValue = v - } - } - return defaultValue -} - func (this *CacheServer) Handler(ctx *macaron.Context) { urlPath := ctx.Req.URL.Path hash := urlPath[strings.LastIndex(urlPath, "/")+1:] diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go index c2ab6dd9a1a..d65598f6e5e 100644 --- a/pkg/api/dashboard.go +++ b/pkg/api/dashboard.go @@ -22,6 +22,10 @@ import ( "github.com/grafana/grafana/pkg/util" ) +const ( + anonString = "Anonymous" +) + func isDashboardStarredByUser(c *m.ReqContext, dashID int64) (bool, error) { if !c.IsSignedIn { return false, nil @@ -64,7 +68,7 @@ func GetDashboard(c *m.ReqContext) Response { } // Finding creator and last updater of the dashboard - updater, creator := "Anonymous", "Anonymous" + updater, creator := anonString, anonString if dash.UpdatedBy > 0 { updater = getUserLogin(dash.UpdatedBy) } @@ -128,7 +132,7 @@ func getUserLogin(userID int64) string { query := m.GetUserByIdQuery{Id: userID} err := bus.Dispatch(&query) if err != nil { - return "Anonymous" + return anonString } return query.Result.Login } @@ -403,7 +407,7 @@ func GetDashboardVersion(c *m.ReqContext) Response { return Error(500, fmt.Sprintf("Dashboard version %d not found for dashboardId %d", query.Version, dashID), err) } - creator := "Anonymous" + creator := anonString if query.Result.CreatedBy > 0 { creator = getUserLogin(query.Result.CreatedBy) } diff --git a/pkg/api/dataproxy.go b/pkg/api/dataproxy.go index f455d3dbd29..eddfb884f8f 100644 --- a/pkg/api/dataproxy.go +++ b/pkg/api/dataproxy.go @@ -51,7 +51,21 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) { return } - proxyPath := c.Params("*") + // macaron does not include trailing slashes when resolving a wildcard path + proxyPath := ensureProxyPathTrailingSlash(c.Req.URL.Path, c.Params("*")) + proxy := pluginproxy.NewDataSourceProxy(ds, plugin, c, proxyPath) proxy.HandleRequest() } + +// ensureProxyPathTrailingSlash Check for a trailing slash in original path and makes +// sure that a trailing slash is added to proxy path, if not already exists. 
+func ensureProxyPathTrailingSlash(originalPath, proxyPath string) string { + if len(proxyPath) > 1 { + if originalPath[len(originalPath)-1] == '/' && proxyPath[len(proxyPath)-1] != '/' { + return proxyPath + "/" + } + } + + return proxyPath +} diff --git a/pkg/api/dataproxy_test.go b/pkg/api/dataproxy_test.go new file mode 100644 index 00000000000..a1d7cf68a37 --- /dev/null +++ b/pkg/api/dataproxy_test.go @@ -0,0 +1,19 @@ +package api + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestDataProxy(t *testing.T) { + Convey("Data proxy test", t, func() { + Convey("Should append trailing slash to proxy path if original path has a trailing slash", func() { + So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range/", "api/v1/query_range/"), ShouldEqual, "api/v1/query_range/") + }) + + Convey("Should not append trailing slash to proxy path if original path doesn't have a trailing slash", func() { + So(ensureProxyPathTrailingSlash("/api/datasources/proxy/6/api/v1/query_range", "api/v1/query_range"), ShouldEqual, "api/v1/query_range") + }) + }) +} diff --git a/pkg/api/folder.go b/pkg/api/folder.go index f0cdff24d20..0e08343b556 100644 --- a/pkg/api/folder.go +++ b/pkg/api/folder.go @@ -95,7 +95,7 @@ func toFolderDto(g guardian.DashboardGuardian, folder *m.Folder) dtos.Folder { canAdmin, _ := g.CanAdmin() // Finding creator and last updater of the folder - updater, creator := "Anonymous", "Anonymous" + updater, creator := anonString, anonString if folder.CreatedBy > 0 { creator = getUserLogin(folder.CreatedBy) } diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go index 6e24e432535..880de338c8f 100644 --- a/pkg/api/folder_test.go +++ b/pkg/api/folder_test.go @@ -133,16 +133,6 @@ func TestFoldersApiEndpoint(t *testing.T) { }) } -func callGetFolderByUID(sc *scenarioContext) { - sc.handlerFunc = GetFolderByUID - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() -} - -func callDeleteFolder(sc *scenarioContext) { - sc.handlerFunc = DeleteFolder - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() -} - func callCreateFolder(sc *scenarioContext) { sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() } diff --git a/pkg/api/index.go b/pkg/api/index.go index b8101a01fc8..1b73acd8829 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -11,6 +11,12 @@ import ( "github.com/grafana/grafana/pkg/setting" ) +const ( + // Themes + lightName = "light" + darkName = "dark" +) + func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { settings, err := getFrontendSettingsMap(c) if err != nil { @@ -60,7 +66,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { OrgRole: c.OrgRole, GravatarUrl: dtos.GetGravatarUrl(c.Email), IsGrafanaAdmin: c.IsGrafanaAdmin, - LightTheme: prefs.Theme == "light", + LightTheme: prefs.Theme == lightName, Timezone: prefs.Timezone, Locale: locale, HelpFlags1: c.HelpFlags1, @@ -88,12 +94,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { } themeURLParam := c.Query("theme") - if themeURLParam == "light" { + if themeURLParam == lightName { data.User.LightTheme = true - data.Theme = "light" - } else if themeURLParam == "dark" { + data.Theme = lightName + } else if themeURLParam == darkName { data.User.LightTheme = false - data.Theme = "dark" + data.Theme = darkName } if hasEditPermissionInFoldersQuery.Result { diff --git a/pkg/api/live/hub.go b/pkg/api/live/hub.go index 37ab5667e55..9708bc515d1 100644 --- a/pkg/api/live/hub.go +++ 
b/pkg/api/live/hub.go @@ -37,9 +37,6 @@ func newHub() *hub { } } -func (h *hub) removeConnection() { -} - func (h *hub) run(ctx context.Context) { for { select { diff --git a/pkg/api/pluginproxy/access_token_provider.go b/pkg/api/pluginproxy/access_token_provider.go new file mode 100644 index 00000000000..22407823ff9 --- /dev/null +++ b/pkg/api/pluginproxy/access_token_provider.go @@ -0,0 +1,171 @@ +package pluginproxy + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strconv" + "sync" + "time" + + "golang.org/x/oauth2" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "golang.org/x/oauth2/jwt" +) + +var ( + tokenCache = tokenCacheType{ + cache: map[string]*jwtToken{}, + } + oauthJwtTokenCache = oauthJwtTokenCacheType{ + cache: map[string]*oauth2.Token{}, + } +) + +type tokenCacheType struct { + cache map[string]*jwtToken + sync.Mutex +} + +type oauthJwtTokenCacheType struct { + cache map[string]*oauth2.Token + sync.Mutex +} + +type accessTokenProvider struct { + route *plugins.AppPluginRoute + datasourceId int64 + datasourceVersion int +} + +type jwtToken struct { + ExpiresOn time.Time `json:"-"` + ExpiresOnString string `json:"expires_on"` + AccessToken string `json:"access_token"` +} + +func newAccessTokenProvider(ds *models.DataSource, pluginRoute *plugins.AppPluginRoute) *accessTokenProvider { + return &accessTokenProvider{ + datasourceId: ds.Id, + datasourceVersion: ds.Version, + route: pluginRoute, + } +} + +func (provider *accessTokenProvider) getAccessToken(data templateData) (string, error) { + tokenCache.Lock() + defer tokenCache.Unlock() + if cachedToken, found := tokenCache.cache[provider.getAccessTokenCacheKey()]; found { + if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { + logger.Info("Using token from cache") + return cachedToken.AccessToken, nil + } + } + + urlInterpolated, err := interpolateString(provider.route.TokenAuth.Url, data) + if err != nil { + return "", err + } + + params := make(url.Values) + for key, value := range provider.route.TokenAuth.Params { + interpolatedParam, err := interpolateString(value, data) + if err != nil { + return "", err + } + params.Add(key, interpolatedParam) + } + + getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode())) + getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") + getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode()))) + + resp, err := client.Do(getTokenReq) + if err != nil { + return "", err + } + + defer resp.Body.Close() + + var token jwtToken + if err := json.NewDecoder(resp.Body).Decode(&token); err != nil { + return "", err + } + + expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) + token.ExpiresOn = time.Unix(expiresOnEpoch, 0) + tokenCache.cache[provider.getAccessTokenCacheKey()] = &token + + logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) + + return token.AccessToken, nil +} + +func (provider *accessTokenProvider) getJwtAccessToken(ctx context.Context, data templateData) (string, error) { + oauthJwtTokenCache.Lock() + defer oauthJwtTokenCache.Unlock() + if cachedToken, found := oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()]; found { + if cachedToken.Expiry.After(time.Now().Add(time.Second * 10)) { + logger.Debug("Using token from cache") + return cachedToken.AccessToken, nil + } + } + + conf := &jwt.Config{} + + if val, ok := provider.route.JwtTokenAuth.Params["client_email"]; ok { + 
interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.Email = interpolatedVal + } + + if val, ok := provider.route.JwtTokenAuth.Params["private_key"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.PrivateKey = []byte(interpolatedVal) + } + + if val, ok := provider.route.JwtTokenAuth.Params["token_uri"]; ok { + interpolatedVal, err := interpolateString(val, data) + if err != nil { + return "", err + } + conf.TokenURL = interpolatedVal + } + + conf.Scopes = provider.route.JwtTokenAuth.Scopes + + token, err := getTokenSource(conf, ctx) + if err != nil { + return "", err + } + + oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()] = token + + logger.Info("Got new access token", "ExpiresOn", token.Expiry) + + return token.AccessToken, nil +} + +var getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + tokenSrc := conf.TokenSource(ctx) + token, err := tokenSrc.Token() + if err != nil { + return nil, err + } + + return token, nil +} + +func (provider *accessTokenProvider) getAccessTokenCacheKey() string { + return fmt.Sprintf("%v_%v_%v_%v", provider.datasourceId, provider.datasourceVersion, provider.route.Path, provider.route.Method) +} diff --git a/pkg/api/pluginproxy/access_token_provider_test.go b/pkg/api/pluginproxy/access_token_provider_test.go new file mode 100644 index 00000000000..e75748e4660 --- /dev/null +++ b/pkg/api/pluginproxy/access_token_provider_test.go @@ -0,0 +1,94 @@ +package pluginproxy + +import ( + "context" + "testing" + "time" + + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + . "github.com/smartystreets/goconvey/convey" + "golang.org/x/oauth2" + "golang.org/x/oauth2/jwt" +) + +func TestAccessToken(t *testing.T) { + Convey("Plugin with JWT token auth route", t, func() { + pluginRoute := &plugins.AppPluginRoute{ + Path: "pathwithjwttoken1", + Url: "https://api.jwt.io/some/path", + Method: "GET", + JwtTokenAuth: &plugins.JwtTokenAuth{ + Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token", + Scopes: []string{ + "https://www.testapi.com/auth/monitoring.read", + "https://www.testapi.com/auth/cloudplatformprojects.readonly", + }, + Params: map[string]string{ + "token_uri": "{{.JsonData.tokenUri}}", + "client_email": "{{.JsonData.clientEmail}}", + "private_key": "{{.SecureJsonData.privateKey}}", + }, + }, + } + + templateData := templateData{ + JsonData: map[string]interface{}{ + "clientEmail": "test@test.com", + "tokenUri": "login.url.com/token", + }, + SecureJsonData: map[string]string{ + "privateKey": "testkey", + }, + } + + ds := &models.DataSource{Id: 1, Version: 2} + + Convey("should fetch token using jwt private key", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{AccessToken: "abc"}, nil + } + provider := newAccessTokenProvider(ds, pluginRoute) + token, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + + So(token, ShouldEqual, "abc") + }) + + Convey("should set jwt config values", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + So(conf.Email, ShouldEqual, "test@test.com") + So(conf.PrivateKey, ShouldResemble, []byte("testkey")) + So(len(conf.Scopes), ShouldEqual, 2) + So(conf.Scopes[0], ShouldEqual, "https://www.testapi.com/auth/monitoring.read") + So(conf.Scopes[1], ShouldEqual, 
"https://www.testapi.com/auth/cloudplatformprojects.readonly") + So(conf.TokenURL, ShouldEqual, "login.url.com/token") + + return &oauth2.Token{AccessToken: "abc"}, nil + } + + provider := newAccessTokenProvider(ds, pluginRoute) + _, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + }) + + Convey("should use cached token on second call", func() { + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{ + AccessToken: "abc", + Expiry: time.Now().Add(1 * time.Minute)}, nil + } + provider := newAccessTokenProvider(ds, pluginRoute) + token1, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + So(token1, ShouldEqual, "abc") + + getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) { + return &oauth2.Token{AccessToken: "error: cache not used"}, nil + } + token2, err := provider.getJwtAccessToken(context.Background(), templateData) + So(err, ShouldBeNil) + So(token2, ShouldEqual, "abc") + }) + }) +} diff --git a/pkg/api/pluginproxy/ds_auth_provider.go b/pkg/api/pluginproxy/ds_auth_provider.go new file mode 100644 index 00000000000..c68da839d13 --- /dev/null +++ b/pkg/api/pluginproxy/ds_auth_provider.go @@ -0,0 +1,93 @@ +package pluginproxy + +import ( + "bytes" + "context" + "fmt" + "net/http" + "net/url" + "strings" + "text/template" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/util" +) + +//ApplyRoute should use the plugin route data to set auth headers and custom headers +func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *m.DataSource) { + proxyPath = strings.TrimPrefix(proxyPath, route.Path) + + data := templateData{ + JsonData: ds.JsonData.Interface().(map[string]interface{}), + SecureJsonData: ds.SecureJsonData.Decrypt(), + } + + interpolatedURL, err := interpolateString(route.Url, data) + if err != nil { + logger.Error("Error interpolating proxy url", "error", err) + return + } + + routeURL, err := url.Parse(interpolatedURL) + if err != nil { + logger.Error("Error parsing plugin route url", "error", err) + return + } + + req.URL.Scheme = routeURL.Scheme + req.URL.Host = routeURL.Host + req.Host = routeURL.Host + req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxyPath) + + if err := addHeaders(&req.Header, route, data); err != nil { + logger.Error("Failed to render plugin headers", "error", err) + } + + tokenProvider := newAccessTokenProvider(ds, route) + + if route.TokenAuth != nil { + if token, err := tokenProvider.getAccessToken(data); err != nil { + logger.Error("Failed to get access token", "error", err) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) + } + } + + if route.JwtTokenAuth != nil { + if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil { + logger.Error("Failed to get access token", "error", err) + } else { + req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) + } + } + logger.Info("Requesting", "url", req.URL.String()) + +} + +func interpolateString(text string, data templateData) (string, error) { + t, err := template.New("content").Parse(text) + if err != nil { + return "", fmt.Errorf("could not parse template %s", text) + } + + var contentBuf bytes.Buffer + err = t.Execute(&contentBuf, data) + if err != nil { + return "", fmt.Errorf("failed to execute template %s", text) + } + + return 
contentBuf.String(), nil +} + +func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { + for _, header := range route.Headers { + interpolated, err := interpolateString(header.Content, data) + if err != nil { + return err + } + reqHeaders.Add(header.Name, interpolated) + } + + return nil +} diff --git a/pkg/api/pluginproxy/ds_auth_provider_test.go b/pkg/api/pluginproxy/ds_auth_provider_test.go new file mode 100644 index 00000000000..9bd98a339e5 --- /dev/null +++ b/pkg/api/pluginproxy/ds_auth_provider_test.go @@ -0,0 +1,21 @@ +package pluginproxy + +import ( + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestDsAuthProvider(t *testing.T) { + Convey("When interpolating string", t, func() { + data := templateData{ + SecureJsonData: map[string]string{ + "Test": "0asd+asd", + }, + } + + interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data) + So(err, ShouldBeNil) + So(interpolated, ShouldEqual, "0asd+asd") + }) +} diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index fb2cab9b9b1..0c000058e4b 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -2,7 +2,6 @@ package pluginproxy import ( "bytes" - "encoding/json" "errors" "fmt" "io/ioutil" @@ -12,7 +11,6 @@ import ( "net/url" "strconv" "strings" - "text/template" "time" "github.com/opentracing/opentracing-go" @@ -25,17 +23,10 @@ import ( ) var ( - logger = log.New("data-proxy-log") - tokenCache = map[string]*jwtToken{} - client = newHTTPClient() + logger = log.New("data-proxy-log") + client = newHTTPClient() ) -type jwtToken struct { - ExpiresOn time.Time `json:"-"` - ExpiresOnString string `json:"expires_on"` - AccessToken string `json:"access_token"` -} - type DataSourceProxy struct { ds *m.DataSource ctx *m.ReqContext @@ -162,7 +153,6 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } else { req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath) } - if proxy.ds.BasicAuth { req.Header.Del("Authorization") req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword)) @@ -219,7 +209,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } if proxy.route != nil { - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) } } } @@ -311,120 +301,3 @@ func checkWhiteList(c *m.ReqContext, host string) bool { return true } - -func (proxy *DataSourceProxy) applyRoute(req *http.Request) { - proxy.proxyPath = strings.TrimPrefix(proxy.proxyPath, proxy.route.Path) - - data := templateData{ - JsonData: proxy.ds.JsonData.Interface().(map[string]interface{}), - SecureJsonData: proxy.ds.SecureJsonData.Decrypt(), - } - - interpolatedURL, err := interpolateString(proxy.route.Url, data) - if err != nil { - logger.Error("Error interpolating proxy url", "error", err) - return - } - - routeURL, err := url.Parse(interpolatedURL) - if err != nil { - logger.Error("Error parsing plugin route url", "error", err) - return - } - - req.URL.Scheme = routeURL.Scheme - req.URL.Host = routeURL.Host - req.Host = routeURL.Host - req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxy.proxyPath) - - if err := addHeaders(&req.Header, proxy.route, data); err != nil { - logger.Error("Failed to render plugin headers", "error", err) - } - - if proxy.route.TokenAuth != nil { - if token, err := proxy.getAccessToken(data); err != nil { - logger.Error("Failed to get access token", 
"error", err) - } else { - req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token)) - } - } - - logger.Info("Requesting", "url", req.URL.String()) -} - -func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) { - if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found { - if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { - logger.Info("Using token from cache") - return cachedToken.AccessToken, nil - } - } - - urlInterpolated, err := interpolateString(proxy.route.TokenAuth.Url, data) - if err != nil { - return "", err - } - - params := make(url.Values) - for key, value := range proxy.route.TokenAuth.Params { - interpolatedParam, err := interpolateString(value, data) - if err != nil { - return "", err - } - params.Add(key, interpolatedParam) - } - - getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode())) - getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded") - getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode()))) - - resp, err := client.Do(getTokenReq) - if err != nil { - return "", err - } - - defer resp.Body.Close() - - var token jwtToken - if err := json.NewDecoder(resp.Body).Decode(&token); err != nil { - return "", err - } - - expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) - token.ExpiresOn = time.Unix(expiresOnEpoch, 0) - tokenCache[proxy.getAccessTokenCacheKey()] = &token - - logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) - return token.AccessToken, nil -} - -func (proxy *DataSourceProxy) getAccessTokenCacheKey() string { - return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method) -} - -func interpolateString(text string, data templateData) (string, error) { - t, err := template.New("content").Parse(text) - if err != nil { - return "", fmt.Errorf("could not parse template %s", text) - } - - var contentBuf bytes.Buffer - err = t.Execute(&contentBuf, data) - if err != nil { - return "", fmt.Errorf("failed to execute template %s", text) - } - - return contentBuf.String(), nil -} - -func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error { - for _, header := range route.Headers { - interpolated, err := interpolateString(header.Content, data) - if err != nil { - return err - } - reqHeaders.Add(header.Name, interpolated) - } - - return nil -} diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go index e6d05872787..ab0effb298f 100644 --- a/pkg/api/pluginproxy/ds_proxy_test.go +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -83,7 +83,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When matching route path", func() { proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method") proxy.route = plugin.Routes[0] - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) Convey("should add headers and update url", func() { So(req.URL.String(), ShouldEqual, "https://www.google.com/some/method") @@ -94,7 +94,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When matching route path and has dynamic url", func() { proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method") proxy.route = plugin.Routes[3] - proxy.applyRoute(req) + ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds) Convey("should add headers and interpolate the url", func() { So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method") @@ 
-188,7 +188,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient(json) proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") proxy1.route = plugin.Routes[0] - proxy1.applyRoute(req) + ApplyRoute(proxy1.ctx.Req.Context(), req, proxy1.proxyPath, proxy1.route, proxy1.ds) authorizationHeaderCall1 = req.Header.Get("Authorization") So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") @@ -202,7 +202,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient(json2) proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2") proxy2.route = plugin.Routes[1] - proxy2.applyRoute(req) + ApplyRoute(proxy2.ctx.Req.Context(), req, proxy2.proxyPath, proxy2.route, proxy2.ds) authorizationHeaderCall2 = req.Header.Get("Authorization") @@ -217,7 +217,7 @@ func TestDSRouteRule(t *testing.T) { client = newFakeHTTPClient([]byte{}) proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") proxy3.route = plugin.Routes[0] - proxy3.applyRoute(req) + ApplyRoute(proxy3.ctx.Req.Context(), req, proxy3.proxyPath, proxy3.route, proxy3.ds) authorizationHeaderCall3 := req.Header.Get("Authorization") So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") @@ -331,18 +331,6 @@ func TestDSRouteRule(t *testing.T) { }) }) - Convey("When interpolating string", func() { - data := templateData{ - SecureJsonData: map[string]string{ - "Test": "0asd+asd", - }, - } - - interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data) - So(err, ShouldBeNil) - So(interpolated, ShouldEqual, "0asd+asd") - }) - Convey("When proxying a data source with custom headers specified", func() { plugin := &plugins.DataSourcePlugin{} @@ -374,6 +362,23 @@ func TestDSRouteRule(t *testing.T) { }) }) + Convey("When proxying a custom datasource", func() { + plugin := &plugins.DataSourcePlugin{} + ds := &m.DataSource{ + Type: "custom-datasource", + Url: "http://host/root/", + } + ctx := &m.ReqContext{} + proxy := NewDataSourceProxy(ds, plugin, ctx, "/path/to/folder/") + req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil) + So(err, ShouldBeNil) + + proxy.getDirector()(req) + + Convey("Shoudl keep user request (including trailing slash)", func() { + So(req.URL.String(), ShouldEqual, "http://host/root/path/to/folder/") + }) + }) }) } diff --git a/pkg/api/render.go b/pkg/api/render.go index b8ef6cc5cb6..cf672af9bea 100644 --- a/pkg/api/render.go +++ b/pkg/api/render.go @@ -41,15 +41,16 @@ func (hs *HTTPServer) RenderToPng(c *m.ReqContext) { } result, err := hs.RenderService.Render(c.Req.Context(), rendering.Opts{ - Width: width, - Height: height, - Timeout: time.Duration(timeout) * time.Second, - OrgId: c.OrgId, - UserId: c.UserId, - OrgRole: c.OrgRole, - Path: c.Params("*") + queryParams, - Timezone: queryReader.Get("tz", ""), - Encoding: queryReader.Get("encoding", ""), + Width: width, + Height: height, + Timeout: time.Duration(timeout) * time.Second, + OrgId: c.OrgId, + UserId: c.UserId, + OrgRole: c.OrgRole, + Path: c.Params("*") + queryParams, + Timezone: queryReader.Get("tz", ""), + Encoding: queryReader.Get("encoding", ""), + ConcurrentLimit: 30, }) if err != nil && err == rendering.ErrTimeout { diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index 5e69559b9fa..902fd415977 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -6,6 +6,7 @@ import ( "github.com/codegangsta/cli" "github.com/fatih/color" + "github.com/grafana/grafana/pkg/bus" 
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" @@ -24,6 +25,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli engine := &sqlstore.SqlStore{} engine.Cfg = cfg + engine.Bus = bus.GetBus() engine.Init() if err := command(cmd); err != nil { diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index f1e298671d7..84325bae808 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -29,6 +29,7 @@ import ( _ "github.com/grafana/grafana/pkg/tsdb/opentsdb" _ "github.com/grafana/grafana/pkg/tsdb/postgres" _ "github.com/grafana/grafana/pkg/tsdb/prometheus" + _ "github.com/grafana/grafana/pkg/tsdb/stackdriver" _ "github.com/grafana/grafana/pkg/tsdb/testdata" ) @@ -103,7 +104,7 @@ func listenToSystemSignals(server *GrafanaServerImpl) { for { select { - case _ = <-sighupChan: + case <-sighupChan: log.Reload() case sig := <-signalChan: server.Shutdown(fmt.Sprintf("System signal: %s", sig)) diff --git a/pkg/components/imguploader/azureblobuploader.go b/pkg/components/imguploader/azureblobuploader.go index a902807925b..b37763931c8 100644 --- a/pkg/components/imguploader/azureblobuploader.go +++ b/pkg/components/imguploader/azureblobuploader.go @@ -127,8 +127,6 @@ type xmlError struct { const ms_date_layout = "Mon, 02 Jan 2006 15:04:05 GMT" const version = "2017-04-17" -var client = &http.Client{} - type StorageClient struct { Auth *Auth Transport http.RoundTripper diff --git a/pkg/components/imguploader/s3uploader.go b/pkg/components/imguploader/s3uploader.go index a1e4aed0f47..9c8af21e39e 100644 --- a/pkg/components/imguploader/s3uploader.go +++ b/pkg/components/imguploader/s3uploader.go @@ -2,12 +2,15 @@ package imguploader import ( "context" + "fmt" "os" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds" + "github.com/aws/aws-sdk-go/aws/credentials/endpointcreds" + "github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/ec2metadata" "github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/session" @@ -50,7 +53,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, SecretAccessKey: u.secretKey, }}, &credentials.EnvProvider{}, - &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}, + remoteCredProvider(sess), }) cfg := &aws.Config{ Region: aws.String(u.region), @@ -85,3 +88,27 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, } return image_url, nil } + +func remoteCredProvider(sess *session.Session) credentials.Provider { + ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") + + if len(ecsCredURI) > 0 { + return ecsCredProvider(sess, ecsCredURI) + } + return ec2RoleProvider(sess) +} + +func ecsCredProvider(sess *session.Session, uri string) credentials.Provider { + const host = `169.254.170.2` + + d := defaults.Get() + return endpointcreds.NewProviderClient( + *d.Config, + d.Handlers, + fmt.Sprintf("http://%s%s", host, uri), + func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute }) +} + +func ec2RoleProvider(sess *session.Session) credentials.Provider { + return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute} +} diff --git a/pkg/components/null/float.go b/pkg/components/null/float.go index 4f783f2c584..9082c831084 100644 --- 
a/pkg/components/null/float.go +++ b/pkg/components/null/float.go @@ -8,6 +8,10 @@ import ( "strconv" ) +const ( + nullString = "null" +) + // Float is a nullable float64. // It does not consider zero values to be null. // It will decode to null, not zero, if null. @@ -68,7 +72,7 @@ func (f *Float) UnmarshalJSON(data []byte) error { // It will return an error if the input is not an integer, blank, or "null". func (f *Float) UnmarshalText(text []byte) error { str := string(text) - if str == "" || str == "null" { + if str == "" || str == nullString { f.Valid = false return nil } @@ -82,7 +86,7 @@ func (f *Float) UnmarshalText(text []byte) error { // It will encode null if this Float is null. func (f Float) MarshalJSON() ([]byte, error) { if !f.Valid { - return []byte("null"), nil + return []byte(nullString), nil } return []byte(strconv.FormatFloat(f.Float64, 'f', -1, 64)), nil } @@ -100,7 +104,7 @@ func (f Float) MarshalText() ([]byte, error) { // It will encode a blank string if this Float is null. func (f Float) String() string { if !f.Valid { - return "null" + return nullString } return fmt.Sprintf("%1.3f", f.Float64) @@ -109,7 +113,7 @@ func (f Float) String() string { // FullString returns float as string in full precision func (f Float) FullString() string { if !f.Valid { - return "null" + return nullString } return fmt.Sprintf("%f", f.Float64) diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 87c23a7b49a..1830b3eb161 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -435,11 +435,6 @@ func (sc *scenarioContext) withValidApiKey() *scenarioContext { return sc } -func (sc *scenarioContext) withInvalidApiKey() *scenarioContext { - sc.apiKey = "nvalidhhhhds" - return sc -} - func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext { sc.authHeader = authHeader return sc diff --git a/pkg/models/alert.go b/pkg/models/alert.go index fba2aa63df9..ba1fc0779ba 100644 --- a/pkg/models/alert.go +++ b/pkg/models/alert.go @@ -75,7 +75,7 @@ type Alert struct { EvalData *simplejson.Json NewStateDate time.Time - StateChanges int + StateChanges int64 Created time.Time Updated time.Time @@ -156,7 +156,7 @@ type SetAlertStateCommand struct { Error string EvalData *simplejson.Json - Timestamp time.Time + Result Alert } //Queries diff --git a/pkg/models/alert_notifications.go b/pkg/models/alert_notifications.go index 42d33d5ed22..2128b469fa4 100644 --- a/pkg/models/alert_notifications.go +++ b/pkg/models/alert_notifications.go @@ -8,8 +8,18 @@ import ( ) var ( - ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified") - ErrJournalingNotFound = errors.New("alert notification journaling not found") + ErrNotificationFrequencyNotFound = errors.New("Notification frequency not specified") + ErrAlertNotificationStateNotFound = errors.New("alert notification state not found") + ErrAlertNotificationStateVersionConflict = errors.New("alert notification state update version conflict") + ErrAlertNotificationStateAlreadyExist = errors.New("alert notification state already exists.") +) + +type AlertNotificationStateType string + +var ( + AlertNotificationStatePending = AlertNotificationStateType("pending") + AlertNotificationStateCompleted = AlertNotificationStateType("completed") + AlertNotificationStateUnknown = AlertNotificationStateType("unknown") ) type AlertNotification struct { @@ -76,33 +86,34 @@ type GetAllAlertNotificationsQuery struct { Result []*AlertNotification } -type 
AlertNotificationJournal struct { - Id int64 - OrgId int64 - AlertId int64 - NotifierId int64 - SentAt int64 - Success bool +type AlertNotificationState struct { + Id int64 + OrgId int64 + AlertId int64 + NotifierId int64 + State AlertNotificationStateType + Version int64 + UpdatedAt int64 + AlertRuleStateUpdatedVersion int64 } -type RecordNotificationJournalCommand struct { - OrgId int64 - AlertId int64 - NotifierId int64 - SentAt int64 - Success bool +type SetAlertNotificationStateToPendingCommand struct { + Id int64 + AlertRuleStateUpdatedVersion int64 + Version int64 + + ResultVersion int64 } -type GetLatestNotificationQuery struct { +type SetAlertNotificationStateToCompleteCommand struct { + Id int64 + Version int64 +} + +type GetOrCreateNotificationStateQuery struct { OrgId int64 AlertId int64 NotifierId int64 - Result *AlertNotificationJournal -} - -type CleanNotificationJournalCommand struct { - OrgId int64 - AlertId int64 - NotifierId int64 + Result *AlertNotificationState } diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index 9a32b326a4b..de7158c0afb 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -22,6 +22,7 @@ const ( DS_MSSQL = "mssql" DS_ACCESS_DIRECT = "direct" DS_ACCESS_PROXY = "proxy" + DS_STACKDRIVER = "stackdriver" ) var ( @@ -71,12 +72,12 @@ var knownDatasourcePlugins = map[string]bool{ DS_POSTGRES: true, DS_MYSQL: true, DS_MSSQL: true, + DS_STACKDRIVER: true, "opennms": true, "abhisant-druid-datasource": true, "dalmatinerdb-datasource": true, "gnocci": true, "zabbix": true, - "alexanderzobnin-zabbix-datasource": true, "newrelic-app": true, "grafana-datadog-datasource": true, "grafana-simple-json": true, @@ -89,6 +90,7 @@ var knownDatasourcePlugins = map[string]bool{ "ayoungprogrammer-finance-datasource": true, "monasca-datasource": true, "vertamedia-clickhouse-datasource": true, + "alexanderzobnin-zabbix-datasource": true, } func IsKnownDataSourcePlugin(dsType string) bool { diff --git a/pkg/plugins/app_plugin.go b/pkg/plugins/app_plugin.go index b070ba592f0..922b2444b7b 100644 --- a/pkg/plugins/app_plugin.go +++ b/pkg/plugins/app_plugin.go @@ -23,12 +23,13 @@ type AppPlugin struct { } type AppPluginRoute struct { - Path string `json:"path"` - Method string `json:"method"` - ReqRole models.RoleType `json:"reqRole"` - Url string `json:"url"` - Headers []AppPluginRouteHeader `json:"headers"` - TokenAuth *JwtTokenAuth `json:"tokenAuth"` + Path string `json:"path"` + Method string `json:"method"` + ReqRole models.RoleType `json:"reqRole"` + Url string `json:"url"` + Headers []AppPluginRouteHeader `json:"headers"` + TokenAuth *JwtTokenAuth `json:"tokenAuth"` + JwtTokenAuth *JwtTokenAuth `json:"jwtTokenAuth"` } type AppPluginRouteHeader struct { @@ -36,8 +37,11 @@ type AppPluginRouteHeader struct { Content string `json:"content"` } +// JwtTokenAuth struct is both for normal Token Auth and JWT Token Auth with +// an uploaded JWT file. 
type JwtTokenAuth struct { Url string `json:"url"` + Scopes []string `json:"scopes"` Params map[string]string `json:"params"` } diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go index 46f8b3c769c..96294f0624f 100644 --- a/pkg/services/alerting/interfaces.go +++ b/pkg/services/alerting/interfaces.go @@ -3,6 +3,8 @@ package alerting import ( "context" "time" + + "github.com/grafana/grafana/pkg/models" ) type EvalHandler interface { @@ -20,7 +22,7 @@ type Notifier interface { NeedsImage() bool // ShouldNotify checks this evaluation should send an alert notification - ShouldNotify(ctx context.Context, evalContext *EvalContext) bool + ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool GetNotifierId() int64 GetIsDefault() bool @@ -28,11 +30,16 @@ type Notifier interface { GetFrequency() time.Duration } -type NotifierSlice []Notifier +type notifierState struct { + notifier Notifier + state *models.AlertNotificationState +} -func (notifiers NotifierSlice) ShouldUploadImage() bool { - for _, notifier := range notifiers { - if notifier.NeedsImage() { +type notifierStateSlice []*notifierState + +func (notifiers notifierStateSlice) ShouldUploadImage() bool { + for _, ns := range notifiers { + if ns.notifier.NeedsImage() { return true } } diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index 7fbd956f4f9..9ce50eadd6b 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -1,16 +1,15 @@ package alerting import ( - "context" "errors" "fmt" - "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/imguploader" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/services/rendering" + "github.com/grafana/grafana/pkg/setting" m "github.com/grafana/grafana/pkg/models" ) @@ -40,61 +39,78 @@ type notificationService struct { } func (n *notificationService) SendIfNeeded(context *EvalContext) error { - notifiers, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) + notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) if err != nil { return err } - if len(notifiers) == 0 { + if len(notifierStates) == 0 { return nil } - if notifiers.ShouldUploadImage() { + if notifierStates.ShouldUploadImage() { if err = n.uploadImage(context); err != nil { n.log.Error("Failed to upload alert panel image.", "error", err) } } - return n.sendNotifications(context, notifiers) + return n.sendNotifications(context, notifierStates) } -func (n *notificationService) sendNotifications(evalContext *EvalContext, notifiers []Notifier) error { - for _, notifier := range notifiers { - not := notifier +func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error { + notifier := notifierState.notifier - err := bus.InTransaction(evalContext.Ctx, func(ctx context.Context) error { - n.log.Debug("trying to send notification", "id", not.GetNotifierId()) + n.log.Debug("Sending notification", "type", notifier.GetType(), "id", notifier.GetNotifierId(), "isDefault", notifier.GetIsDefault()) + metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc() - // Verify that we can send the notification again - // but this time within the same transaction. 
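The rewritten notifier.go below replaces per-send journal writes with a claim on the shared alert notification state row, and the deduplication rests on a version guard: setting the row to pending succeeds for exactly one writer. A minimal, self-contained sketch of that guard (illustrative types and names, not Grafana's code):

```go
// Sketch of the optimistic-locking guard behind
// SetAlertNotificationStateToPendingCommand: a write applies only when the
// caller still holds the current version, or when the alert rule has changed
// state since the row was last written.
package main

import (
	"errors"
	"fmt"
	"time"
)

var errVersionConflict = errors.New("alert notification state update version conflict")

type notificationState struct {
	State                        string
	Version                      int64
	UpdatedAt                    int64
	AlertRuleStateUpdatedVersion int64
}

// setToPending mirrors the UPDATE's WHERE clause:
// version = ? OR alert_rule_state_updated_version < ?
func setToPending(s *notificationState, callerVersion, ruleStateVersion int64) (int64, error) {
	if s.Version != callerVersion && s.AlertRuleStateUpdatedVersion >= ruleStateVersion {
		return 0, errVersionConflict // zero rows affected in the real query
	}
	s.State = "pending"
	s.Version = callerVersion + 1
	s.UpdatedAt = time.Now().Unix()
	s.AlertRuleStateUpdatedVersion = ruleStateVersion
	return s.Version, nil
}

func main() {
	s := &notificationState{State: "unknown"}

	// The first writer wins and bumps the version from 0 to 1.
	if v, err := setToPending(s, 0, 1); err == nil {
		fmt.Println("first writer claimed version", v)
	}

	// A second writer still holding version 0, with no newer rule state, loses.
	if _, err := setToPending(s, 0, 1); err != nil {
		fmt.Println("second writer:", err)
	}
}
```

In the real handler the guard is the SQL `WHERE id = ? AND (version = ? OR alert_rule_state_updated_version < ?)`; zero affected rows surfaces as ErrAlertNotificationStateVersionConflict, which sendNotification treats as "another instance already took this send" and skips without error.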
- if !evalContext.IsTestRun && !not.ShouldNotify(context.Background(), evalContext) { - return nil - } + err := notifier.Notify(evalContext) - n.log.Debug("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault()) - metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc() + if err != nil { + n.log.Error("failed to send notification", "id", notifier.GetNotifierId(), "error", err) + } - //send notification - success := not.Notify(evalContext) == nil + if evalContext.IsTestRun { + return nil + } - if evalContext.IsTestRun { - return nil - } + cmd := &m.SetAlertNotificationStateToCompleteCommand{ + Id: notifierState.state.Id, + Version: notifierState.state.Version, + } - //write result to db. - cmd := &m.RecordNotificationJournalCommand{ - OrgId: evalContext.Rule.OrgId, - AlertId: evalContext.Rule.Id, - NotifierId: not.GetNotifierId(), - SentAt: time.Now().Unix(), - Success: success, - } + return bus.DispatchCtx(evalContext.Ctx, cmd) +} - return bus.DispatchCtx(ctx, cmd) - }) +func (n *notificationService) sendNotification(evalContext *EvalContext, notifierState *notifierState) error { + if !evalContext.IsTestRun { + setPendingCmd := &m.SetAlertNotificationStateToPendingCommand{ + Id: notifierState.state.Id, + Version: notifierState.state.Version, + AlertRuleStateUpdatedVersion: evalContext.Rule.StateChanges, + } + + err := bus.DispatchCtx(evalContext.Ctx, setPendingCmd) + if err == m.ErrAlertNotificationStateVersionConflict { + return nil + } if err != nil { - n.log.Error("failed to send notification", "id", not.GetNotifierId()) + return err + } + + // We need to update state version to be able to log + // unexpected version conflicts when marking notifications as ok + notifierState.state.Version = setPendingCmd.ResultVersion + } + + return n.sendAndMarkAsComplete(evalContext, notifierState) +} + +func (n *notificationService) sendNotifications(evalContext *EvalContext, notifierStates notifierStateSlice) error { + for _, notifierState := range notifierStates { + err := n.sendNotification(evalContext, notifierState) + if err != nil { + n.log.Error("failed to send notification", "id", notifierState.notifier.GetNotifierId(), "error", err) } } @@ -108,11 +124,12 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { } renderOpts := rendering.Opts{ - Width: 1000, - Height: 500, - Timeout: alertTimeout / 2, - OrgId: context.Rule.OrgId, - OrgRole: m.ROLE_ADMIN, + Width: 1000, + Height: 500, + Timeout: alertTimeout / 2, + OrgId: context.Rule.OrgId, + OrgRole: m.ROLE_ADMIN, + ConcurrentLimit: setting.AlertingRenderLimit, } ref, err := context.GetDashboardUID() @@ -140,22 +157,38 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return nil } -func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (NotifierSlice, error) { +func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds []int64, evalContext *EvalContext) (notifierStateSlice, error) { query := &m.GetAlertNotificationsToSendQuery{OrgId: orgId, Ids: notificationIds} if err := bus.Dispatch(query); err != nil { return nil, err } - var result []Notifier + var result notifierStateSlice for _, notification := range query.Result { not, err := n.createNotifierFor(notification) if err != nil { - return nil, err + n.log.Error("Could not create notifier", "notifier", notification.Id, "error", err) + continue } - if not.ShouldNotify(evalContext.Ctx, 
evalContext) { - result = append(result, not) + query := &m.GetOrCreateNotificationStateQuery{ + NotifierId: notification.Id, + AlertId: evalContext.Rule.Id, + OrgId: evalContext.Rule.OrgId, + } + + err = bus.DispatchCtx(evalContext.Ctx, query) + if err != nil { + n.log.Error("Could not get notification state.", "notifier", notification.Id, "error", err) + continue + } + + if not.ShouldNotify(evalContext.Ctx, evalContext, query.Result) { + result = append(result, &notifierState{ + notifier: not, + state: query.Result, + }) } } diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go index 9826dd1dffb..2caa4d5ab58 100644 --- a/pkg/services/alerting/notifiers/alertmanager.go +++ b/pkg/services/alerting/notifiers/alertmanager.go @@ -46,7 +46,7 @@ type AlertmanagerNotifier struct { log log.Logger } -func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext) bool { +func (this *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *m.AlertNotificationState) bool { this.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) // Do not notify when we become OK for the first time. diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index ca011356247..fbade2eccac 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -4,13 +4,16 @@ import ( "context" "time" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) +const ( + triggMetrString = "Triggered metrics:\n\n" +) + type NotifierBase struct { Name string Type string @@ -42,55 +45,47 @@ func NewNotifierBase(model *models.AlertNotification) NotifierBase { } } -func defaultShouldNotify(context *alerting.EvalContext, sendReminder bool, frequency time.Duration, lastNotify time.Time) bool { +// ShouldNotify checks whether this evaluation should send an alert notification +func (n *NotifierBase) ShouldNotify(ctx context.Context, context *alerting.EvalContext, notifierState *models.AlertNotificationState) bool { // Only notify on state change. - if context.PrevAlertState == context.Rule.State && !sendReminder { + if context.PrevAlertState == context.Rule.State && !n.SendReminder { return false } - // Do not notify if interval has not elapsed - if sendReminder && !lastNotify.IsZero() && lastNotify.Add(frequency).After(time.Now()) { - return false - } + if context.PrevAlertState == context.Rule.State && n.SendReminder { + // Do not notify if interval has not elapsed + lastNotify := time.Unix(notifierState.UpdatedAt, 0) + if notifierState.UpdatedAt != 0 && lastNotify.Add(n.Frequency).After(time.Now()) { + return false + } - // Do not notify if alert state if OK or pending even on repeated notify - if sendReminder && (context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending) { - return false + // Do not notify if alert state is OK or pending even on repeated notify + if context.Rule.State == models.AlertStateOK || context.Rule.State == models.AlertStatePending { + return false + } }
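The reminder branch above only fires once the configured frequency has elapsed since the state row was last written, treating `UpdatedAt == 0` as "never sent". A runnable sketch of just that check (the helper name is invented for illustration):

```go
// Sketch of the reminder check, under the assumption that UpdatedAt is a Unix
// timestamp written each time the notification state row changes.
package main

import (
	"fmt"
	"time"
)

// reminderDue reports whether a repeat notification should fire: always when
// there is no previous send (updatedAt == 0), and otherwise only once the
// configured frequency has elapsed since the last one.
func reminderDue(updatedAt int64, frequency time.Duration, now time.Time) bool {
	if updatedAt == 0 {
		return true
	}
	lastNotify := time.Unix(updatedAt, 0)
	return !lastNotify.Add(frequency).After(now)
}

func main() {
	now := time.Now()
	freq := 10 * time.Minute

	fmt.Println(reminderDue(0, freq, now))                                // true: no previous send
	fmt.Println(reminderDue(now.Add(-time.Minute).Unix(), freq, now))    // false: sent 1m ago
	fmt.Println(reminderDue(now.Add(-11*time.Minute).Unix(), freq, now)) // true: sent 11m ago
}
```

The same three situations are pinned down by the `alerting -> alerting` cases in the test table below.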
// Do not notify when we become OK for the first time. - if (context.PrevAlertState == models.AlertStatePending) && (context.Rule.State == models.AlertStateOK) { + if context.PrevAlertState == models.AlertStatePending && context.Rule.State == models.AlertStateOK { return false } + // Do not notify when we go OK -> Pending + if context.PrevAlertState == models.AlertStateOK && context.Rule.State == models.AlertStatePending { + return false + } + + // Do not notify if the state is pending and it has been updated within the last minute + if notifierState.State == models.AlertNotificationStatePending { + lastUpdated := time.Unix(notifierState.UpdatedAt, 0) + if lastUpdated.Add(1 * time.Minute).After(time.Now()) { + return false + } + } + return true } -// ShouldNotify checks this evaluation should send an alert notification -func (n *NotifierBase) ShouldNotify(ctx context.Context, c *alerting.EvalContext) bool { - cmd := &models.GetLatestNotificationQuery{ - OrgId: c.Rule.OrgId, - AlertId: c.Rule.Id, - NotifierId: n.Id, - } - - err := bus.DispatchCtx(ctx, cmd) - if err == models.ErrJournalingNotFound { - return true - } - - if err != nil { - n.log.Error("Could not determine last time alert notifier fired", "Alert name", c.Rule.Name, "Error", err) - return false - } - - if !cmd.Result.Success { - return true - } - - return defaultShouldNotify(c, n.SendReminder, n.Frequency, time.Unix(cmd.Result.SentAt, 0)) -} - func (n *NotifierBase) GetType() string { return n.Type } diff --git a/pkg/services/alerting/notifiers/base_test.go b/pkg/services/alerting/notifiers/base_test.go index 57b82f32466..5e46d3ad72e 100644 --- a/pkg/services/alerting/notifiers/base_test.go +++ b/pkg/services/alerting/notifiers/base_test.go @@ -2,12 +2,9 @@ package notifiers import ( "context" - "errors" "testing" "time" - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" @@ -15,100 +12,144 @@ import ( ) func TestShouldSendAlertNotification(t *testing.T) { + tnow := time.Now() + tcs := []struct { name string prevState m.AlertStateType newState m.AlertStateType - expected bool sendReminder bool + frequency time.Duration + state *m.AlertNotificationState + + expect bool }{ { - name: "pending -> ok should not trigger an notification", - newState: m.AlertStatePending, - prevState: m.AlertStateOK, - expected: false, + name: "pending -> ok should not trigger a notification", + newState: m.AlertStateOK, + prevState: m.AlertStatePending, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, }, { - name: "ok -> alerting should trigger an notification", - newState: m.AlertStateOK, - prevState: m.AlertStateAlerting, - expected: true, + name: "ok -> alerting should trigger a notification", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: true, }, { - name: "ok -> pending should not trigger an notification", - newState: m.AlertStateOK, - prevState: m.AlertStatePending, - expected: false, + name: "ok -> pending should not trigger a notification", + newState: m.AlertStatePending, + prevState: m.AlertStateOK, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: false, }, { name: "ok -> ok should not trigger an notification", newState: m.AlertStateOK, prevState: m.AlertStateOK, - expected: false, sendReminder: false, - }, - { - name: "ok -> alerting should not trigger an notification", - newState: m.AlertStateOK, - prevState: m.AlertStateAlerting, -
expected: true, - sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: false, }, { name: "ok -> ok with reminder should not trigger an notification", newState: m.AlertStateOK, prevState: m.AlertStateOK, - expected: false, sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: false, + }, + { + name: "alerting -> ok should trigger a notification", + newState: m.AlertStateOK, + prevState: m.AlertStateAlerting, + sendReminder: false, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "alerting -> ok should trigger a notification when reminders enabled", + newState: m.AlertStateOK, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, + + expect: true, + }, + { + name: "alerting -> alerting with reminder and no state should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{}, + + expect: true, + }, + { + name: "alerting -> alerting with reminder and last notification sent 1 minute ago should not trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-time.Minute).Unix()}, + + expect: false, + }, + { + name: "alerting -> alerting with reminder and last notification sent 11 minutes ago should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateAlerting, + frequency: time.Minute * 10, + sendReminder: true, + state: &m.AlertNotificationState{UpdatedAt: tnow.Add(-11 * time.Minute).Unix()}, + + expect: true, + }, + { + name: "OK -> alerting with notification state pending and updated 30 seconds ago should not trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-30 * time.Second).Unix()}, + + expect: false, + }, + { + name: "OK -> alerting with notification state pending and updated 2 minutes ago should trigger", + newState: m.AlertStateAlerting, + prevState: m.AlertStateOK, + state: &m.AlertNotificationState{State: m.AlertNotificationStatePending, UpdatedAt: tnow.Add(-2 * time.Minute).Unix()}, + + expect: true, }, } for _, tc := range tcs { evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{ - State: tc.newState, + State: tc.prevState, }) - evalContext.Rule.State = tc.prevState - if defaultShouldNotify(evalContext, true, 0, time.Now()) != tc.expected { - t.Errorf("failed %s.
expected %+v to return %v", tc.name, tc, tc.expected) + evalContext.Rule.State = tc.newState + nb := &NotifierBase{SendReminder: tc.sendReminder, Frequency: tc.frequency} + + if nb.ShouldNotify(evalContext.Ctx, evalContext, tc.state) != tc.expect { + t.Errorf("failed test %s.\n expected \n%+v \nto return: %v", tc.name, tc, tc.expect) } } } -func TestShouldNotifyWhenNoJournalingIsFound(t *testing.T) { - Convey("base notifier", t, func() { - bus.ClearBusHandlers() - - notifier := NewNotifierBase(&m.AlertNotification{ - Id: 1, - Name: "name", - Type: "email", - Settings: simplejson.New(), - }) - evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{}) - - Convey("should notify if no journaling is found", func() { - bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error { - return m.ErrJournalingNotFound - }) - - if !notifier.ShouldNotify(context.Background(), evalContext) { - t.Errorf("should send notifications when ErrJournalingNotFound is returned") - } - }) - - Convey("should not notify query returns error", func() { - bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error { - return errors.New("some kind of error unknown error") - }) - - if notifier.ShouldNotify(context.Background(), evalContext) { - t.Errorf("should not send notifications when query returns error") - } - }) - }) -} - func TestBaseNotifier(t *testing.T) { Convey("default constructor for notifiers", t, func() { bJson := simplejson.New() diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go index d8d19fc5dae..a8a424c87a7 100644 --- a/pkg/services/alerting/notifiers/kafka.go +++ b/pkg/services/alerting/notifiers/kafka.go @@ -61,7 +61,7 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { state := evalContext.Rule.State - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 84148a0d99c..629968b5102 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -95,7 +95,7 @@ func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) err return err } - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index bf85466388f..9f6ce3c2dc8 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -76,7 +76,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { if evalContext.Rule.State == m.AlertStateOK { eventType = "resolve" } - customData := "Triggered metrics:\n\n" + customData := triggMetrString for _, evt := range evalContext.EvalMatches { customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value) } diff --git a/pkg/services/alerting/notifiers/telegram_test.go b/pkg/services/alerting/notifiers/telegram_test.go index 98c8d884ad0..911323ae9d1 100644 --- a/pkg/services/alerting/notifiers/telegram_test.go +++ b/pkg/services/alerting/notifiers/telegram_test.go @@ -1,6 +1,7 @@ package notifiers import ( + "context" "testing" 
"github.com/grafana/grafana/pkg/components/simplejson" @@ -52,11 +53,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("generateCaption should generate a message with all pertinent details", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/abcdef", "") So(len(caption), ShouldBeLessThanOrEqualTo, 200) @@ -68,11 +70,12 @@ func TestTelegramNotifier(t *testing.T) { Convey("When generating a message", func() { Convey("URL should be skipped if it's too long", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/abcdefaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", @@ -85,11 +88,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("Message should be trimmed if it's too long", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. 
Yes siree that's it.", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/foo", @@ -101,11 +105,12 @@ func TestTelegramNotifier(t *testing.T) { }) Convey("Metrics should be skipped if they don't fit", func() { - evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ - Name: "This is an alarm", - Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", - State: m.AlertStateOK, - }) + evalContext := alerting.NewEvalContext(context.Background(), + &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", + State: m.AlertStateOK, + }) caption := generateImageCaption(evalContext, "http://grafa.url/foo", diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 363d06d1132..420ffeb9a55 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -67,6 +67,12 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } handler.log.Error("Failed to save state", "error", err) + } else { + + // StateChanges is used for deduping alert notifications + // when two servers are racing. This makes sure that the server + // with the last state change always sends a notification. + evalContext.Rule.StateChanges = cmd.Result.StateChanges } // save annotation @@ -88,19 +94,6 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { } } - if evalContext.Rule.State == m.AlertStateOK && evalContext.PrevAlertState != m.AlertStateOK { - for _, notifierId := range evalContext.Rule.Notifications { - cmd := &m.CleanNotificationJournalCommand{ - AlertId: evalContext.Rule.Id, - NotifierId: notifierId, - OrgId: evalContext.Rule.OrgId, - } - if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - handler.log.Error("Failed to clean up old notification records", "notifier", notifierId, "alert", evalContext.Rule.Id, "Error", err) - } - } - } handler.notifier.SendIfNeeded(evalContext) - return nil } diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index 018d138dbe4..d13924c2a17 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -23,6 +23,8 @@ type Rule struct { State m.AlertStateType Conditions []Condition Notifications []int64 + + StateChanges int64 } type ValidationError struct { @@ -100,6 +102,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) { model.State = ruleDef.State model.NoDataState = m.NoDataOption(ruleDef.Settings.Get("noDataState").MustString("no_data")) model.ExecutionErrorState = m.ExecutionErrorOption(ruleDef.Settings.Get("executionErrorState").MustString("alerting")) + model.StateChanges = ruleDef.StateChanges for _, v := range ruleDef.Settings.Get("notifications").MustArray() { jsonModel := simplejson.NewFromAny(v) diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 8421360b5ed..8aa1b80aa22 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -39,7 +39,7 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error { return err } - return notifier.sendNotifications(createTestEvalContext(cmd), []Notifier{notifiers}) + return
notifier.sendNotifications(createTestEvalContext(cmd), notifierStateSlice{{notifier: notifiers}}) } func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { diff --git a/pkg/services/alerting/ticker.go b/pkg/services/alerting/ticker.go index 5ce19b1b232..8cee2653ee9 100644 --- a/pkg/services/alerting/ticker.go +++ b/pkg/services/alerting/ticker.go @@ -37,10 +37,6 @@ func NewTicker(last time.Time, initialOffset time.Duration, c clock.Clock) *Tick return t } -func (t *Ticker) updateOffset(offset time.Duration) { - t.newOffset <- offset -} - func (t *Ticker) run() { for { next := t.last.Add(time.Duration(1) * time.Second) diff --git a/pkg/services/notifications/notifications_test.go b/pkg/services/notifications/notifications_test.go index 504c10c22ec..d54b70e704f 100644 --- a/pkg/services/notifications/notifications_test.go +++ b/pkg/services/notifications/notifications_test.go @@ -9,12 +9,6 @@ import ( . "github.com/smartystreets/goconvey/convey" ) -type testTriggeredAlert struct { - ActualValue float64 - Name string - State string -} - func TestNotifications(t *testing.T) { Convey("Given the notifications service", t, func() { diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index 7508550838f..bfef06b558e 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -83,7 +83,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { } if dashboards[i].UpdateIntervalSeconds == 0 { - dashboards[i].UpdateIntervalSeconds = 3 + dashboards[i].UpdateIntervalSeconds = 10 } } diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go index df0d2ae038e..d386e42349d 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -70,7 +70,7 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { So(len(ds.Options), ShouldEqual, 1) So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds.DisableDeletion, ShouldBeTrue) - So(ds.UpdateIntervalSeconds, ShouldEqual, 10) + So(ds.UpdateIntervalSeconds, ShouldEqual, 15) ds2 := cfg[1] So(ds2.Name, ShouldEqual, "default") @@ -81,5 +81,5 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { So(len(ds2.Options), ShouldEqual, 1) So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds2.DisableDeletion, ShouldBeFalse) - So(ds2.UpdateIntervalSeconds, ShouldEqual, 3) + So(ds2.UpdateIntervalSeconds, ShouldEqual, 10) } diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index ef27ba97235..ea093860f3e 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -43,26 +43,6 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade log.Warn("[Deprecated] The folder property is deprecated. 
Please use path instead.") } - if _, err := os.Stat(path); os.IsNotExist(err) { - log.Error("Cannot read directory", "error", err) - } - - copy := path - path, err := filepath.Abs(path) - if err != nil { - log.Error("Could not create absolute path ", "path", path) - } - - path, err = filepath.EvalSymlinks(path) - if err != nil { - log.Error("Failed to read content of symlinked path: %s", path) - } - - if path == "" { - path = copy - log.Info("falling back to original path due to EvalSymlink/Abs failure") - } - return &fileReader{ Cfg: cfg, Path: path, @@ -99,7 +79,8 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error { } func (fr *fileReader) startWalkingDisk() error { - if _, err := os.Stat(fr.Path); err != nil { + resolvedPath := fr.resolvePath(fr.Path) + if _, err := os.Stat(resolvedPath); err != nil { if os.IsNotExist(err) { return err } @@ -116,7 +97,7 @@ func (fr *fileReader) startWalkingDisk() error { } filesFoundOnDisk := map[string]os.FileInfo{} - err = filepath.Walk(fr.Path, createWalkFn(filesFoundOnDisk)) + err = filepath.Walk(resolvedPath, createWalkFn(filesFoundOnDisk)) if err != nil { return err } @@ -156,7 +137,7 @@ func (fr *fileReader) deleteDashboardIfFileIsMissing(provisionedDashboardRefs ma cmd := &models.DeleteDashboardCommand{OrgId: fr.Cfg.OrgId, Id: dashboardId} err := bus.Dispatch(cmd) if err != nil { - fr.log.Error("failed to delete dashboard", "id", cmd.Id) + fr.log.Error("failed to delete dashboard", "id", cmd.Id, "error", err) } } } @@ -344,6 +325,29 @@ func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, }, nil } +func (fr *fileReader) resolvePath(path string) string { + if _, err := os.Stat(path); os.IsNotExist(err) { + fr.log.Error("Cannot read directory", "error", err) + } + + copy := path + path, err := filepath.Abs(path) + if err != nil { + fr.log.Error("Could not create absolute path ", "path", path) + } + + path, err = filepath.EvalSymlinks(path) + if err != nil { + fr.log.Error("Failed to read content of symlinked path: %s", path) + } + + if path == "" { + path = copy + fr.log.Info("falling back to original path due to EvalSymlink/Abs failure") + } + return path +} + type provisioningMetadata struct { uid string title string diff --git a/pkg/services/provisioning/dashboards/file_reader_linux_test.go b/pkg/services/provisioning/dashboards/file_reader_linux_test.go index 9d4cdae8609..77f488ebcfb 100644 --- a/pkg/services/provisioning/dashboards/file_reader_linux_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_linux_test.go @@ -30,10 +30,11 @@ func TestProvsionedSymlinkedFolder(t *testing.T) { want, err := filepath.Abs(containingId) if err != nil { - t.Errorf("expected err to be nill") + t.Errorf("expected err to be nil") } - if reader.Path != want { - t.Errorf("got %s want %s", reader.Path, want) + resolvedPath := reader.resolvePath(reader.Path) + if resolvedPath != want { + t.Errorf("got %s want %s", resolvedPath, want) } } diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go index bdc1e95aafe..fe849816553 100644 --- a/pkg/services/provisioning/dashboards/file_reader_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_test.go @@ -67,7 +67,8 @@ func TestCreatingNewDashboardFileReader(t *testing.T) { reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) So(err, ShouldBeNil) - So(filepath.IsAbs(reader.Path), ShouldBeTrue) + resolvedPath := reader.resolvePath(reader.Path) + 
So(filepath.IsAbs(resolvedPath), ShouldBeTrue) }) }) } diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml index e26c329f87c..c43c4a14c53 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml @@ -6,7 +6,7 @@ providers: folder: 'developers' editable: true disableDeletion: true - updateIntervalSeconds: 10 + updateIntervalSeconds: 15 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml index 69a317fb396..8b7b8991759 100644 --- a/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml @@ -3,7 +3,7 @@ folder: 'developers' editable: true disableDeletion: true - updateIntervalSeconds: 10 + updateIntervalSeconds: 15 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/rendering/interface.go b/pkg/services/rendering/interface.go index 85c139cfc04..39cb1ada0f5 100644 --- a/pkg/services/rendering/interface.go +++ b/pkg/services/rendering/interface.go @@ -13,15 +13,16 @@ var ErrNoRenderer = errors.New("No renderer plugin found nor is an external rend var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found") type Opts struct { - Width int - Height int - Timeout time.Duration - OrgId int64 - UserId int64 - OrgRole models.RoleType - Path string - Encoding string - Timezone string + Width int + Height int + Timeout time.Duration + OrgId int64 + UserId int64 + OrgRole models.RoleType + Path string + Encoding string + Timezone string + ConcurrentLimit int } type RenderResult struct { diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go index ecef83d74d9..0b4f23e93b4 100644 --- a/pkg/services/rendering/rendering.go +++ b/pkg/services/rendering/rendering.go @@ -24,12 +24,13 @@ func init() { } type RenderingService struct { - log log.Logger - pluginClient *plugin.Client - grpcPlugin pluginModel.RendererPlugin - pluginInfo *plugins.RendererPlugin - renderAction renderFunc - domain string + log log.Logger + pluginClient *plugin.Client + grpcPlugin pluginModel.RendererPlugin + pluginInfo *plugins.RendererPlugin + renderAction renderFunc + domain string + inProgressCount int Cfg *setting.Cfg `inject:""` } @@ -90,6 +91,18 @@ func (rs *RenderingService) Run(ctx context.Context) error { } func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) { + if rs.inProgressCount > opts.ConcurrentLimit { + return &RenderResult{ + FilePath: filepath.Join(setting.HomePath, "public/img/rendering_limit.png"), + }, nil + } + + defer func() { + rs.inProgressCount -= 1 + }() + + rs.inProgressCount += 1 + if rs.renderAction != nil { return rs.renderAction(ctx, opts) } else { diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index ba898769578..2f17402b80c 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -60,6 +60,10 @@ func deleteAlertByIdInternal(alertId int64, reason string, sess *DBSession) erro return err } + if _, err := sess.Exec("DELETE 
FROM alert_notification_state WHERE alert_id = ?", alertId); err != nil { + return err + } + return nil } @@ -275,6 +279,8 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error { } sess.ID(alert.Id).Update(&alert) + + cmd.Result = alert return nil }) } diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go index 31867910ddb..daaef945b96 100644 --- a/pkg/services/sqlstore/alert_notification.go +++ b/pkg/services/sqlstore/alert_notification.go @@ -3,6 +3,7 @@ package sqlstore import ( "bytes" "context" + "errors" "fmt" "strings" "time" @@ -18,16 +19,23 @@ func init() { bus.AddHandler("sql", DeleteAlertNotification) bus.AddHandler("sql", GetAlertNotificationsToSend) bus.AddHandler("sql", GetAllAlertNotifications) - bus.AddHandlerCtx("sql", RecordNotificationJournal) - bus.AddHandlerCtx("sql", GetLatestNotification) - bus.AddHandlerCtx("sql", CleanNotificationJournal) + bus.AddHandlerCtx("sql", GetOrCreateAlertNotificationState) + bus.AddHandlerCtx("sql", SetAlertNotificationStateToCompleteCommand) + bus.AddHandlerCtx("sql", SetAlertNotificationStateToPendingCommand) } func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error { return inTransaction(func(sess *DBSession) error { sql := "DELETE FROM alert_notification WHERE alert_notification.org_id = ? AND alert_notification.id = ?" - _, err := sess.Exec(sql, cmd.OrgId, cmd.Id) - return err + if _, err := sess.Exec(sql, cmd.OrgId, cmd.Id); err != nil { + return err + } + + if _, err := sess.Exec("DELETE FROM alert_notification_state WHERE alert_notification_state.org_id = ? AND alert_notification_state.notifier_id = ?", cmd.OrgId, cmd.Id); err != nil { + return err + } + + return nil }) } @@ -229,46 +237,123 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error { }) } -func RecordNotificationJournal(ctx context.Context, cmd *m.RecordNotificationJournalCommand) error { +func SetAlertNotificationStateToCompleteCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToCompleteCommand) error { return inTransactionCtx(ctx, func(sess *DBSession) error { - journalEntry := &m.AlertNotificationJournal{ - OrgId: cmd.OrgId, - AlertId: cmd.AlertId, - NotifierId: cmd.NotifierId, - SentAt: cmd.SentAt, - Success: cmd.Success, - } + version := cmd.Version + var current m.AlertNotificationState + sess.ID(cmd.Id).Get(¤t) - _, err := sess.Insert(journalEntry) - return err - }) -} + newVersion := cmd.Version + 1 -func GetLatestNotification(ctx context.Context, cmd *m.GetLatestNotificationQuery) error { - return inTransactionCtx(ctx, func(sess *DBSession) error { - nj := &m.AlertNotificationJournal{} + sql := `UPDATE alert_notification_state SET + state = ?, + version = ?, + updated_at = ? + WHERE + id = ?` - _, err := sess.Desc("alert_notification_journal.sent_at"). - Limit(1). - Where("alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?", cmd.OrgId, cmd.AlertId, cmd.NotifierId).Get(nj) + _, err := sess.Exec(sql, m.AlertNotificationStateCompleted, newVersion, timeNow().Unix(), cmd.Id) if err != nil { return err } - if nj.AlertId == 0 && nj.Id == 0 && nj.NotifierId == 0 && nj.OrgId == 0 { - return m.ErrJournalingNotFound + if current.Version != version { + sqlog.Error("notification state out of sync. 
the notification is marked as complete but has been modified between set as pending and completion.", "notifierId", current.NotifierId) } - cmd.Result = nj return nil }) } -func CleanNotificationJournal(ctx context.Context, cmd *m.CleanNotificationJournalCommand) error { - return inTransactionCtx(ctx, func(sess *DBSession) error { - sql := "DELETE FROM alert_notification_journal WHERE alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?" - _, err := sess.Exec(sql, cmd.OrgId, cmd.AlertId, cmd.NotifierId) - return err +func SetAlertNotificationStateToPendingCommand(ctx context.Context, cmd *m.SetAlertNotificationStateToPendingCommand) error { + return withDbSession(ctx, func(sess *DBSession) error { + newVersion := cmd.Version + 1 + sql := `UPDATE alert_notification_state SET + state = ?, + version = ?, + updated_at = ?, + alert_rule_state_updated_version = ? + WHERE + id = ? AND + (version = ? OR alert_rule_state_updated_version < ?)` + + res, err := sess.Exec(sql, + m.AlertNotificationStatePending, + newVersion, + timeNow().Unix(), + cmd.AlertRuleStateUpdatedVersion, + cmd.Id, + cmd.Version, + cmd.AlertRuleStateUpdatedVersion) + + if err != nil { + return err + } + + affected, _ := res.RowsAffected() + if affected == 0 { + return m.ErrAlertNotificationStateVersionConflict + } + + cmd.ResultVersion = newVersion + + return nil }) } + +func GetOrCreateAlertNotificationState(ctx context.Context, cmd *m.GetOrCreateNotificationStateQuery) error { + return inTransactionCtx(ctx, func(sess *DBSession) error { + nj := &m.AlertNotificationState{} + + exist, err := getAlertNotificationState(sess, cmd, nj) + + // if exists, return it, otherwise create it with default values + if err != nil { + return err + } + + if exist { + cmd.Result = nj + return nil + } + + notificationState := &m.AlertNotificationState{ + OrgId: cmd.OrgId, + AlertId: cmd.AlertId, + NotifierId: cmd.NotifierId, + State: m.AlertNotificationStateUnknown, + UpdatedAt: timeNow().Unix(), + } + + if _, err := sess.Insert(notificationState); err != nil { + if dialect.IsUniqueConstraintViolation(err) { + exist, err = getAlertNotificationState(sess, cmd, nj) + + if err != nil { + return err + } + + if !exist { + return errors.New("Should not happen") + } + + cmd.Result = nj + return nil + } + + return err + } + + cmd.Result = notificationState + return nil + }) +} + +func getAlertNotificationState(sess *DBSession, cmd *m.GetOrCreateNotificationStateQuery, nj *m.AlertNotificationState) (bool, error) { + return sess. + Where("alert_notification_state.org_id = ?", cmd.OrgId). + Where("alert_notification_state.alert_id = ?", cmd.AlertId). + Where("alert_notification_state.notifier_id = ?", cmd.NotifierId). + Get(nj) +} diff --git a/pkg/services/sqlstore/alert_notification_test.go b/pkg/services/sqlstore/alert_notification_test.go index 83fb42db9bb..ed682bae5c6 100644 --- a/pkg/services/sqlstore/alert_notification_test.go +++ b/pkg/services/sqlstore/alert_notification_test.go @@ -6,7 +6,7 @@ import ( "time" "github.com/grafana/grafana/pkg/components/simplejson" - m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/models" . 
"github.com/smartystreets/goconvey/convey" ) @@ -14,50 +14,133 @@ func TestAlertNotificationSQLAccess(t *testing.T) { Convey("Testing Alert notification sql access", t, func() { InitTestDB(t) - Convey("Alert notification journal", func() { - var alertId int64 = 5 - var orgId int64 = 5 - var notifierId int64 = 5 + Convey("Alert notification state", func() { + var alertID int64 = 7 + var orgID int64 = 5 + var notifierID int64 = 10 + oldTimeNow := timeNow + now := time.Date(2018, 9, 30, 0, 0, 0, 0, time.UTC) + timeNow = func() time.Time { return now } - Convey("Getting last journal should raise error if no one exists", func() { - query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId} - err := GetLatestNotification(context.Background(), query) - So(err, ShouldEqual, m.ErrJournalingNotFound) + Convey("Get no existing state should create a new state", func() { + query := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err := GetOrCreateAlertNotificationState(context.Background(), query) + So(err, ShouldBeNil) + So(query.Result, ShouldNotBeNil) + So(query.Result.State, ShouldEqual, "unknown") + So(query.Result.Version, ShouldEqual, 0) + So(query.Result.UpdatedAt, ShouldEqual, now.Unix()) - Convey("shoulbe be able to record two journaling events", func() { - createCmd := &m.RecordNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId, Success: true, SentAt: 1} - - err := RecordNotificationJournal(context.Background(), createCmd) + Convey("Get existing state should not create a new state", func() { + query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err := GetOrCreateAlertNotificationState(context.Background(), query2) So(err, ShouldBeNil) + So(query2.Result, ShouldNotBeNil) + So(query2.Result.Id, ShouldEqual, query.Result.Id) + So(query2.Result.UpdatedAt, ShouldEqual, now.Unix()) + }) - createCmd.SentAt += 1000 //increase epoch + Convey("Update existing state to pending with correct version should update database", func() { + s := *query.Result - err = RecordNotificationJournal(context.Background(), createCmd) + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) So(err, ShouldBeNil) + So(cmd.ResultVersion, ShouldEqual, 1) - Convey("get last journaling event", func() { - err := GetLatestNotification(context.Background(), query) + query2 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query2) + So(err, ShouldBeNil) + So(query2.Result.Version, ShouldEqual, 1) + So(query2.Result.State, ShouldEqual, models.AlertNotificationStatePending) + So(query2.Result.UpdatedAt, ShouldEqual, now.Unix()) + + Convey("Update existing state to completed should update database", func() { + s := *query.Result + setStateCmd := models.SetAlertNotificationStateToCompleteCommand{ + Id: s.Id, + Version: cmd.ResultVersion, + } + err := SetAlertNotificationStateToCompleteCommand(context.Background(), &setStateCmd) So(err, ShouldBeNil) - So(query.Result.SentAt, ShouldEqual, 1001) - Convey("be able to clear all journaling for an notifier", func() { - cmd := &m.CleanNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId} - err 
:= CleanNotificationJournal(context.Background(), cmd) - So(err, ShouldBeNil) + query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query3) + So(err, ShouldBeNil) + So(query3.Result.Version, ShouldEqual, 2) + So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted) + So(query3.Result.UpdatedAt, ShouldEqual, now.Unix()) + }) - Convey("querying for last junaling should raise error", func() { - query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId} - err := GetLatestNotification(context.Background(), query) - So(err, ShouldEqual, m.ErrJournalingNotFound) - }) - }) + Convey("Update existing state to completed should update database regardless of version", func() { + s := *query.Result + unknownVersion := int64(1000) + cmd := models.SetAlertNotificationStateToCompleteCommand{ + Id: s.Id, + Version: unknownVersion, + } + err := SetAlertNotificationStateToCompleteCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + + query3 := &models.GetOrCreateNotificationStateQuery{AlertId: alertID, OrgId: orgID, NotifierId: notifierID} + err = GetOrCreateAlertNotificationState(context.Background(), query3) + So(err, ShouldBeNil) + So(query3.Result.Version, ShouldEqual, unknownVersion+1) + So(query3.Result.State, ShouldEqual, models.AlertNotificationStateCompleted) + So(query3.Result.UpdatedAt, ShouldEqual, now.Unix()) }) }) + + Convey("Update existing state to pending with incorrect version should return version mismatch error", func() { + s := *query.Result + s.Version = 1000 + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.NotifierId, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldEqual, models.ErrAlertNotificationStateVersionConflict) + }) + + Convey("Updating existing state to pending with incorrect version should succeed since alert rule state update version is higher", func() { + s := *query.Result + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: 1000, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldBeNil) + + So(cmd.ResultVersion, ShouldEqual, 1) + }) + + Convey("different version and same alert state change version should return error", func() { + s := *query.Result + s.Version = 1000 + cmd := models.SetAlertNotificationStateToPendingCommand{ + Id: s.Id, + Version: s.Version, + AlertRuleStateUpdatedVersion: s.AlertRuleStateUpdatedVersion, + } + err := SetAlertNotificationStateToPendingCommand(context.Background(), &cmd) + So(err, ShouldNotBeNil) + }) + }) + + Reset(func() { + timeNow = oldTimeNow }) }) Convey("Alert notifications should be empty", func() { - cmd := &m.GetAlertNotificationsQuery{ + cmd := &models.GetAlertNotificationsQuery{ OrgId: 2, Name: "email", } @@ -68,7 +151,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Cannot save alert notifier with send reminder = true", func() { - cmd := &m.CreateAlertNotificationCommand{ + cmd := &models.CreateAlertNotificationCommand{ Name: "ops", Type: "email", OrgId: 1, @@ -78,7 +161,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { Convey("and missing frequency", func() { err := CreateAlertNotificationCommand(cmd) - So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound) +
So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound) }) Convey("invalid frequency", func() { @@ -90,7 +173,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Cannot update alert notifier with send reminder = false", func() { - cmd := &m.CreateAlertNotificationCommand{ + cmd := &models.CreateAlertNotificationCommand{ Name: "ops update", Type: "email", OrgId: 1, @@ -101,14 +184,14 @@ func TestAlertNotificationSQLAccess(t *testing.T) { err := CreateAlertNotificationCommand(cmd) So(err, ShouldBeNil) - updateCmd := &m.UpdateAlertNotificationCommand{ + updateCmd := &models.UpdateAlertNotificationCommand{ Id: cmd.Result.Id, SendReminder: true, } Convey("and missing frequency", func() { err := UpdateAlertNotification(updateCmd) - So(err, ShouldEqual, m.ErrNotificationFrequencyNotFound) + So(err, ShouldEqual, models.ErrNotificationFrequencyNotFound) }) Convey("invalid frequency", func() { @@ -121,7 +204,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can save Alert Notification", func() { - cmd := &m.CreateAlertNotificationCommand{ + cmd := &models.CreateAlertNotificationCommand{ Name: "ops", Type: "email", OrgId: 1, @@ -143,7 +226,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can update alert notification", func() { - newCmd := &m.UpdateAlertNotificationCommand{ + newCmd := &models.UpdateAlertNotificationCommand{ Name: "NewName", Type: "webhook", OrgId: cmd.Result.OrgId, @@ -159,7 +242,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can update alert notification to disable sending of reminders", func() { - newCmd := &m.UpdateAlertNotificationCommand{ + newCmd := &models.UpdateAlertNotificationCommand{ Name: "NewName", Type: "webhook", OrgId: cmd.Result.OrgId, @@ -174,12 +257,12 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("Can search using an array of ids", func() { - cmd1 := m.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - cmd2 := m.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - cmd3 := m.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - cmd4 := m.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd1 := models.CreateAlertNotificationCommand{Name: "nagios", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd2 := models.CreateAlertNotificationCommand{Name: "slack", Type: "webhook", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd3 := models.CreateAlertNotificationCommand{Name: "ops2", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + cmd4 := models.CreateAlertNotificationCommand{IsDefault: true, Name: "default", Type: "email", OrgId: 1, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} - otherOrg := m.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} + otherOrg := models.CreateAlertNotificationCommand{Name: "default", Type: "email", OrgId: 2, SendReminder: true, Frequency: "10s", Settings: simplejson.New()} So(CreateAlertNotificationCommand(&cmd1), ShouldBeNil) 
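The `send reminder = true` cases above pin down a validation rule: enabling reminders without a parseable frequency must fail with ErrNotificationFrequencyNotFound, on both create and update. A small sketch of that rule, assuming Go duration syntax for the frequency string (the helper is hypothetical; the real checks live in the sqlstore command handlers):

```go
// Sketch of the reminder/frequency validation the tests above exercise.
package main

import (
	"errors"
	"fmt"
	"time"
)

var errNotificationFrequencyNotFound = errors.New("Notification frequency not specified")

// validateReminder enforces that a notifier with reminders enabled carries a
// non-empty, parseable frequency; notifiers without reminders need none.
func validateReminder(sendReminder bool, frequency string) (time.Duration, error) {
	if !sendReminder {
		return 0, nil
	}
	if frequency == "" {
		return 0, errNotificationFrequencyNotFound
	}
	return time.ParseDuration(frequency)
}

func main() {
	if _, err := validateReminder(true, ""); err != nil {
		fmt.Println("missing frequency:", err)
	}
	if d, err := validateReminder(true, "10s"); err == nil {
		fmt.Println("valid frequency:", d)
	}
}
```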
So(CreateAlertNotificationCommand(&cmd2), ShouldBeNil) @@ -188,7 +271,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { So(CreateAlertNotificationCommand(&otherOrg), ShouldBeNil) Convey("search", func() { - query := &m.GetAlertNotificationsToSendQuery{ + query := &models.GetAlertNotificationsToSendQuery{ Ids: []int64{cmd1.Result.Id, cmd2.Result.Id, 112341231}, OrgId: 1, } @@ -199,7 +282,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { }) Convey("all", func() { - query := &m.GetAllAlertNotificationsQuery{ + query := &models.GetAllAlertNotificationsQuery{ OrgId: 1, } diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go b/pkg/services/sqlstore/dashboard_service_integration_test.go index a9658f7ab76..a4e76aca340 100644 --- a/pkg/services/sqlstore/dashboard_service_integration_test.go +++ b/pkg/services/sqlstore/dashboard_service_integration_test.go @@ -932,29 +932,6 @@ func TestIntegratedDashboardService(t *testing.T) { }) } -type scenarioContext struct { - dashboardGuardianMock *guardian.FakeDashboardGuardian -} - -type scenarioFunc func(c *scenarioContext) - -func dashboardGuardianScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) { - Convey(desc, func() { - origNewDashboardGuardian := guardian.New - guardian.MockDashboardGuardian(mock) - - sc := &scenarioContext{ - dashboardGuardianMock: mock, - } - - defer func() { - guardian.New = origNewDashboardGuardian - }() - - fn(sc) - }) -} - type dashboardPermissionScenarioContext struct { dashboardGuardianMock *guardian.FakeDashboardGuardian } diff --git a/pkg/services/sqlstore/migrations/alert_mig.go b/pkg/services/sqlstore/migrations/alert_mig.go index e27e64c6124..cadcccf6c95 100644 --- a/pkg/services/sqlstore/migrations/alert_mig.go +++ b/pkg/services/sqlstore/migrations/alert_mig.go @@ -107,4 +107,27 @@ func addAlertMigrations(mg *Migrator) { mg.AddMigration("create notification_journal table v1", NewAddTableMigration(notification_journal)) mg.AddMigration("add index notification_journal org_id & alert_id & notifier_id", NewAddIndexMigration(notification_journal, notification_journal.Indices[0])) + + mg.AddMigration("drop alert_notification_journal", NewDropTableMigration("alert_notification_journal")) + + alert_notification_state := Table{ + Name: "alert_notification_state", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "org_id", Type: DB_BigInt, Nullable: false}, + {Name: "alert_id", Type: DB_BigInt, Nullable: false}, + {Name: "notifier_id", Type: DB_BigInt, Nullable: false}, + {Name: "state", Type: DB_NVarchar, Length: 50, Nullable: false}, + {Name: "version", Type: DB_BigInt, Nullable: false}, + {Name: "updated_at", Type: DB_BigInt, Nullable: false}, + {Name: "alert_rule_state_updated_version", Type: DB_BigInt, Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"org_id", "alert_id", "notifier_id"}, Type: UniqueIndex}, + }, + } + + mg.AddMigration("create alert_notification_state table v1", NewAddTableMigration(alert_notification_state)) + mg.AddMigration("add index alert_notification_state org_id & alert_id & notifier_id", + NewAddIndexMigration(alert_notification_state, alert_notification_state.Indices[0])) } diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go index 427d102b280..506a01c3ed8 100644 --- a/pkg/services/sqlstore/migrator/dialect.go +++ b/pkg/services/sqlstore/migrator/dialect.go @@ -44,6 +44,8 @@ type Dialect interface { CleanDB() error NoOpSql() 
string + + IsUniqueConstraintViolation(err error) bool } func NewDialect(engine *xorm.Engine) Dialect { diff --git a/pkg/services/sqlstore/migrator/mysql_dialect.go b/pkg/services/sqlstore/migrator/mysql_dialect.go index 1ed16871c15..7daa4597430 100644 --- a/pkg/services/sqlstore/migrator/mysql_dialect.go +++ b/pkg/services/sqlstore/migrator/mysql_dialect.go @@ -5,6 +5,8 @@ import ( "strconv" "strings" + "github.com/VividCortex/mysqlerr" + "github.com/go-sql-driver/mysql" "github.com/go-xorm/xorm" ) @@ -125,3 +127,13 @@ func (db *Mysql) CleanDB() error { return nil } + +func (db *Mysql) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(*mysql.MySQLError); ok { + if driverErr.Number == mysqlerr.ER_DUP_ENTRY { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/migrator/postgres_dialect.go b/pkg/services/sqlstore/migrator/postgres_dialect.go index eae9ad3ca3f..ab8812a1e26 100644 --- a/pkg/services/sqlstore/migrator/postgres_dialect.go +++ b/pkg/services/sqlstore/migrator/postgres_dialect.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/go-xorm/xorm" + "github.com/lib/pq" ) type Postgres struct { @@ -136,3 +137,13 @@ func (db *Postgres) CleanDB() error { return nil } + +func (db *Postgres) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(*pq.Error); ok { + if driverErr.Code == "23505" { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/migrator/sqlite_dialect.go b/pkg/services/sqlstore/migrator/sqlite_dialect.go index 01082b95c88..446e3fcef12 100644 --- a/pkg/services/sqlstore/migrator/sqlite_dialect.go +++ b/pkg/services/sqlstore/migrator/sqlite_dialect.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/go-xorm/xorm" + sqlite3 "github.com/mattn/go-sqlite3" ) type Sqlite3 struct { @@ -82,3 +83,13 @@ func (db *Sqlite3) DropIndexSql(tableName string, index *Index) string { func (db *Sqlite3) CleanDB() error { return nil } + +func (db *Sqlite3) IsUniqueConstraintViolation(err error) bool { + if driverErr, ok := err.(sqlite3.Error); ok { + if driverErr.ExtendedCode == sqlite3.ErrConstraintUnique { + return true + } + } + + return false +} diff --git a/pkg/services/sqlstore/transactions_test.go b/pkg/services/sqlstore/transactions_test.go index 41dedde5db4..041359cf1d3 100644 --- a/pkg/services/sqlstore/transactions_test.go +++ b/pkg/services/sqlstore/transactions_test.go @@ -10,10 +10,6 @@ import ( . 
"github.com/smartystreets/goconvey/convey" ) -type testQuery struct { - result bool -} - var ProvokedError = errors.New("testing error.") func TestTransaction(t *testing.T) { diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 1a253b9b238..27df73a9eed 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -166,6 +166,7 @@ var ( // Alerting AlertingEnabled bool ExecuteAlerts bool + AlertingRenderLimit int AlertingErrorOrTimeout string AlertingNoDataOrNullValues string @@ -196,10 +197,13 @@ type Cfg struct { Smtp SmtpSettings // Rendering - ImagesDir string - PhantomDir string - RendererUrl string - RendererCallbackUrl string + ImagesDir string + PhantomDir string + RendererUrl string + RendererCallbackUrl string + RendererLimit int + RendererLimitAlerting int + DisableBruteForceLoginProtection bool TempDataLifetime time.Duration @@ -677,6 +681,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { alerting := iniFile.Section("alerting") AlertingEnabled = alerting.Key("enabled").MustBool(true) ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true) + AlertingRenderLimit = alerting.Key("concurrent_render_limit").MustInt(5) AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting") AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data") diff --git a/pkg/social/social.go b/pkg/social/social.go index 721070ab789..8918507f3b9 100644 --- a/pkg/social/social.go +++ b/pkg/social/social.go @@ -46,10 +46,14 @@ func (e *Error) Error() string { return e.s } +const ( + grafanaCom = "grafana_com" +) + var ( SocialBaseUrl = "/login/" SocialMap = make(map[string]SocialConnector) - allOauthes = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"} + allOauthes = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", grafanaCom} ) func NewOAuthService() { @@ -82,7 +86,7 @@ func NewOAuthService() { } if name == "grafananet" { - name = "grafana_com" + name = grafanaCom } setting.OAuthService.OAuthInfos[name] = info @@ -159,7 +163,7 @@ func NewOAuthService() { } } - if name == "grafana_com" { + if name == grafanaCom { config = oauth2.Config{ ClientID: info.ClientId, ClientSecret: info.ClientSecret, @@ -171,7 +175,7 @@ func NewOAuthService() { Scopes: info.Scopes, } - SocialMap["grafana_com"] = &SocialGrafanaCom{ + SocialMap[grafanaCom] = &SocialGrafanaCom{ SocialBase: &SocialBase{ Config: &config, log: logger, @@ -194,7 +198,7 @@ var GetOAuthProviders = func(cfg *setting.Cfg) map[string]bool { for _, name := range allOauthes { if name == "grafananet" { - name = "grafana_com" + name = grafanaCom } sec := cfg.Raw.Section("auth." 
+ name) diff --git a/pkg/tsdb/cloudwatch/credentials.go b/pkg/tsdb/cloudwatch/credentials.go index 8b32c76daa3..165f8fdbe97 100644 --- a/pkg/tsdb/cloudwatch/credentials.go +++ b/pkg/tsdb/cloudwatch/credentials.go @@ -42,8 +42,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) { accessKeyId := "" secretAccessKey := "" sessionToken := "" - var expiration *time.Time - expiration = nil + var expiration *time.Time = nil if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 { params := &sts.AssumeRoleInput{ RoleArn: aws.String(dsInfo.AssumeRoleArn), diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index e1e131d9f3a..ee9d9583c4e 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -235,7 +235,7 @@ func parseMultiSelectValue(input string) []string { func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { regions := []string{ "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1", - "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", + "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", "us-isob-east-1", "us-iso-east-1", } result := make([]suggestData, 0) diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go index af9ac0d8fce..540a999688a 100644 --- a/pkg/tsdb/elasticsearch/client/client_test.go +++ b/pkg/tsdb/elasticsearch/client/client_test.go @@ -25,7 +25,7 @@ func TestClient(t *testing.T) { JsonData: simplejson.NewFromAny(make(map[string]interface{})), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -36,7 +36,7 @@ func TestClient(t *testing.T) { }), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -48,7 +48,7 @@ func TestClient(t *testing.T) { }), } - _, err := NewClient(nil, ds, nil) + _, err := NewClient(context.Background(), ds, nil) So(err, ShouldNotBeNil) }) @@ -60,7 +60,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 2) }) @@ -73,7 +73,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 5) }) @@ -86,7 +86,7 @@ func TestClient(t *testing.T) { }), } - c, err := NewClient(nil, ds, nil) + c, err := NewClient(context.Background(), ds, nil) So(err, ShouldBeNil) So(c.GetVersion(), ShouldEqual, 56) }) diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index 0090754840a..0837c3dd9d5 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -13,6 +13,19 @@ import ( "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" ) +const ( + // Metric types + countType = "count" + percentilesType = "percentiles" + extendedStatsType = "extended_stats" + // Bucket types + dateHistType = "date_histogram" + histogramType = "histogram" + filtersType = 
"filters" + termsType = "terms" + geohashGridType = "geohash_grid" +) + type responseParser struct { Responses []*es.SearchResponse Targets []*Query @@ -81,7 +94,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu } if depth == maxDepth { - if aggDef.Type == "date_histogram" { + if aggDef.Type == dateHistType { err = rp.processMetrics(esAgg, target, series, props) } else { err = rp.processAggregationDocs(esAgg, aggDef, target, table, props) @@ -149,7 +162,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } switch metric.Type { - case "count": + case countType: newSeries := tsdb.TimeSeries{ Tags: make(map[string]string), } @@ -164,10 +177,10 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, for k, v := range props { newSeries.Tags[k] = v } - newSeries.Tags["metric"] = "count" + newSeries.Tags["metric"] = countType *series = append(*series, &newSeries) - case "percentiles": + case percentilesType: buckets := esAgg.Get("buckets").MustArray() if len(buckets) == 0 { break @@ -198,7 +211,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } *series = append(*series, &newSeries) } - case "extended_stats": + case extendedStatsType: buckets := esAgg.Get("buckets").MustArray() metaKeys := make([]string, 0) @@ -312,9 +325,9 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef for _, metric := range target.Metrics { switch metric.Type { - case "count": + case countType: addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count"))) - case "extended_stats": + case extendedStatsType: metaKeys := make([]string, 0) meta := metric.Meta.MustMap() for k := range meta { @@ -366,7 +379,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) { var histogram *BucketAgg for _, bucketAgg := range target.BucketAggs { - if bucketAgg.Type == "date_histogram" { + if bucketAgg.Type == dateHistType { histogram = bucketAgg break } diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go index c9bb05dd09a..fddcf3cb8b3 100644 --- a/pkg/tsdb/elasticsearch/time_series_query.go +++ b/pkg/tsdb/elasticsearch/time_series_query.go @@ -75,15 +75,15 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) { // iterate backwards to create aggregations bottom-down for _, bucketAgg := range q.BucketAggs { switch bucketAgg.Type { - case "date_histogram": + case dateHistType: aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to) - case "histogram": + case histogramType: aggBuilder = addHistogramAgg(aggBuilder, bucketAgg) - case "filters": + case filtersType: aggBuilder = addFiltersAgg(aggBuilder, bucketAgg) - case "terms": + case termsType: aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics) - case "geohash_grid": + case geohashGridType: aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg) } } diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index 9303712a480..0a260f7ad70 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -66,10 +66,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er } return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil - case "__timeFrom": - return 
fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil - case "__timeTo": - return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -96,10 +92,6 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil - case "__unixEpochFrom": - return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil - case "__unixEpochTo": - return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 8e0973b750c..7456238efa4 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -111,20 +111,6 @@ func TestMacroEngine(t *testing.T) { So(fillInterval, ShouldEqual, 5*time.Minute.Seconds()) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") So(err, ShouldBeNil) @@ -132,20 +118,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) }) - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) - Convey("interpolate __unixEpochGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')") @@ -171,40 +143,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, 
fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { @@ -219,40 +163,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) }) } diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index f9525fc37ac..8e48994c7ea 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -1,6 +1,7 @@ package mssql import ( + "context" "fmt" "math/rand" "strings" @@ -128,7 +129,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) @@ -218,7 +219,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -265,7 +266,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -327,7 +328,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -352,7 +353,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] 
So(queryResult.Error, ShouldBeNil) @@ -441,7 +442,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -463,7 +464,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -485,7 +486,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -507,7 +508,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -529,7 +530,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -551,7 +552,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -573,7 +574,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -595,7 +596,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -617,7 +618,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -640,7 +641,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -663,7 +664,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -675,6 +676,30 @@ func TestMSSQL(t *testing.T) { So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo") }) + Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { + tsdb.Interpolate = origInterpolate + query := &tsdb.TsdbQuery{ + TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + 
} + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1") + + }) + Convey("Given a stored procedure that takes @from and @to in epoch time", func() { sql := ` IF object_id('sp_test_epoch') IS NOT NULL @@ -719,9 +744,11 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using stored procedure should return correct result", func() { + tsdb.Interpolate = origInterpolate query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ { + DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `DECLARE @from int = $__unixEpochFrom(), @@ -739,7 +766,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -796,9 +823,11 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using stored procedure should return correct result", func() { + tsdb.Interpolate = origInterpolate query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ { + DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `DECLARE @from int = $__unixEpochFrom(), @@ -816,7 +845,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -892,7 +921,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -915,7 +944,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -941,7 +970,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -971,7 +1000,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1001,7 +1030,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1031,7 +1060,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1059,7 +1088,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + 
resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1087,7 +1116,7 @@ func TestMSSQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 0f1c4fcaf2c..a037aa9277a 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -61,10 +61,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er } return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil - case "__timeFrom": - return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil - case "__timeTo": - return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -91,10 +87,6 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil - case "__unixEpochFrom": - return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil - case "__unixEpochTo": - return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index fe153ca3e2d..3c9a5a26c94 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -63,20 +63,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) @@ -84,20 +70,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) - Convey("interpolate __unixEpochGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT 
$__unixEpochGroup(time_column,'5m')") @@ -123,40 +95,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { @@ -171,40 +115,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) }) } diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 13d9040a738..7f12b8636bb 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -1,6 +1,7 @@ package mysql import ( + "context" "fmt" "math/rand" "strings" @@ -129,7 +130,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -217,7 +218,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + 
resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -264,7 +265,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -313,7 +314,7 @@ func TestMySQL(t *testing.T) { query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ { - DataSource: &models.DataSource{}, + DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", @@ -327,7 +328,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -352,7 +353,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -378,7 +379,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -473,7 +474,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -495,7 +496,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -517,7 +518,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -539,7 +540,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -561,7 +562,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -583,7 +584,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -605,7 +606,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -627,7 +628,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) 
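Throughout these test files the first argument to `endpoint.Query` changes from `nil` to `context.Background()`. A nil `context.Context` compiles, but any code path that actually dereferences the context (a `select` on `ctx.Done()`, `ctxhttp` calls, and so on) panics on nil, so `context.Background()` is the conventional root context for tests. (The newly added macro tests in this diff still pass `nil`, which presumably only works because their code path never touches the context.) A tiny illustration of the failure mode, not Grafana code:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// query stands in for any context-aware endpoint: it dereferences ctx,
// so a nil context would panic on the ctx.Done() call.
func query(ctx context.Context) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	case <-time.After(10 * time.Millisecond):
		return nil
	}
}

func main() {
	fmt.Println(query(context.Background())) // <nil>
	// fmt.Println(query(nil))               // panics: method call on nil interface
}
```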
queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -649,7 +650,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -671,7 +672,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -693,7 +694,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -716,7 +717,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -741,7 +742,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -752,6 +753,30 @@ func TestMySQL(t *testing.T) { }) }) + Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { + tsdb.Interpolate = origInterpolate + query := &tsdb.TsdbQuery{ + TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1") + + }) + Convey("Given a table with event data", func() { type event struct { TimeSec int64 @@ -802,7 +827,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -825,7 +850,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -851,7 +876,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -881,7 +906,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -911,7 +936,7 @@ func TestMySQL(t 
*testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -941,7 +966,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -969,7 +994,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -997,7 +1022,7 @@ func TestMySQL(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 16f4adb68a6..0fa5d8077e1 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -87,10 +87,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, } return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil - case "__timeFrom": - return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil - case "__timeTo": - return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) @@ -122,10 +118,6 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, return "", fmt.Errorf("missing time column argument for macro %v", name) } return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil - case "__unixEpochFrom": - return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil - case "__unixEpochTo": - return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index b0b7a28ddd4..8a3699f82b2 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -44,13 +44,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - Convey("interpolate __timeGroup function pre 5.3 compatibility", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__timeGroup(time_column,'5m'), value") @@ -102,13 +95,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)") }) - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select 
'%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) @@ -116,20 +102,6 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) - Convey("interpolate __unixEpochGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')") @@ -155,40 +127,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { @@ -203,40 +147,12 @@ func TestMacroEngine(t *testing.T) { So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) - }) - Convey("interpolate __unixEpochFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") So(err, ShouldBeNil) So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, 
ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) - }) }) }) } diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index fc1a5f34253..c0c04522dba 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -1,6 +1,7 @@ package postgres import ( + "context" "fmt" "math/rand" "strings" @@ -117,7 +118,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -197,7 +198,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -254,7 +255,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -279,7 +280,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -333,7 +334,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -360,7 +361,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -450,7 +451,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -472,7 +473,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -494,7 +495,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -516,7 +517,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -538,7 +539,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -560,7 +561,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, 
query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -582,7 +583,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -604,7 +605,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -626,7 +627,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -649,7 +650,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -674,7 +675,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -683,6 +684,30 @@ func TestPostgres(t *testing.T) { So(queryResult.Series[0].Name, ShouldEqual, "valueOne") So(queryResult.Series[1].Name, ShouldEqual, "valueTwo") }) + + Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { + tsdb.Interpolate = origInterpolate + query := &tsdb.TsdbQuery{ + TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), + Queries: []*tsdb.Query{ + { + DataSource: &models.DataSource{JsonData: simplejson.New()}, + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + So(queryResult.Meta.Get("sql").MustString(), ShouldEqual, "SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1") + + }) }) Convey("Given a table with event data", func() { @@ -735,7 +760,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -758,7 +783,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -784,7 +809,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -814,7 +839,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) 
+ resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -844,7 +869,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -874,7 +899,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -902,7 +927,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -930,7 +955,7 @@ func TestPostgres(t *testing.T) { }, } - resp, err := endpoint.Query(nil, nil, query) + resp, err := endpoint.Query(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 18e02e328d1..963a627994e 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -184,6 +184,10 @@ var Interpolate = func(query *Query, timeRange *TimeRange, sql string) (string, sql = strings.Replace(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10), -1) sql = strings.Replace(sql, "$__interval", interval.Text, -1) + sql = strings.Replace(sql, "$__timeFrom()", fmt.Sprintf("'%s'", timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), -1) + sql = strings.Replace(sql, "$__timeTo()", fmt.Sprintf("'%s'", timeRange.GetToAsTimeUTC().Format(time.RFC3339)), -1) + sql = strings.Replace(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.GetFromAsSecondsEpoch()), -1) + sql = strings.Replace(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.GetToAsSecondsEpoch()), -1) return sql, nil } diff --git a/pkg/tsdb/sql_engine_test.go b/pkg/tsdb/sql_engine_test.go index 05b8a51ae6f..bfcc82aac47 100644 --- a/pkg/tsdb/sql_engine_test.go +++ b/pkg/tsdb/sql_engine_test.go @@ -1,6 +1,7 @@ package tsdb import ( + "fmt" "testing" "time" @@ -43,6 +44,34 @@ func TestSqlEngine(t *testing.T) { So(sql, ShouldEqual, "select 60000 ") }) + Convey("interpolate __timeFrom function", func() { + sql, err := Interpolate(query, timeRange, "select $__timeFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := Interpolate(query, timeRange, "select $__timeTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) + }) Convey("Given row values with time.Time as time columns", func() { diff --git a/pkg/tsdb/stackdriver/annotation_query.go b/pkg/tsdb/stackdriver/annotation_query.go new file mode 100644 index 00000000000..db35171ad70 --- /dev/null +++ 
b/pkg/tsdb/stackdriver/annotation_query.go @@ -0,0 +1,120 @@ +package stackdriver + +import ( + "context" + "strconv" + "strings" + "time" + + "github.com/grafana/grafana/pkg/tsdb" +) + +func (e *StackdriverExecutor) executeAnnotationQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + firstQuery := tsdbQuery.Queries[0] + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + queryRes, resp, err := e.executeQuery(ctx, queries[0], tsdbQuery) + if err != nil { + return nil, err + } + title := firstQuery.Model.Get("title").MustString() + text := firstQuery.Model.Get("text").MustString() + tags := firstQuery.Model.Get("tags").MustString() + err = e.parseToAnnotations(queryRes, resp, queries[0], title, text, tags) + result.Results[firstQuery.RefId] = queryRes + + return result, err +} + +func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery, title string, text string, tags string) error { + annotations := make([]map[string]string, 0) + + for _, series := range data.TimeSeries { + // reverse the order to be ascending + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + value := strconv.FormatFloat(point.Value.DoubleValue, 'f', 6, 64) + if series.ValueType == "STRING" { + value = point.Value.StringValue + } + annotation := make(map[string]string) + annotation["time"] = point.Interval.EndTime.UTC().Format(time.RFC3339) + annotation["title"] = formatAnnotationText(title, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels) + annotation["tags"] = tags + annotation["text"] = formatAnnotationText(text, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels) + annotations = append(annotations, annotation) + } + } + + transformAnnotationToTable(annotations, queryRes) + return nil +} + +func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) { + table := &tsdb.Table{ + Columns: make([]tsdb.TableColumn, 4), + Rows: make([]tsdb.RowValues, 0), + } + table.Columns[0].Text = "time" + table.Columns[1].Text = "title" + table.Columns[2].Text = "tags" + table.Columns[3].Text = "text" + + for _, r := range data { + values := make([]interface{}, 4) + values[0] = r["time"] + values[1] = r["title"] + values[2] = r["tags"] + values[3] = r["text"] + table.Rows = append(table.Rows, values) + } + result.Tables = append(result.Tables, table) + result.Meta.Set("rowCount", len(data)) + slog.Info("anno", "len", len(data)) +} + +func formatAnnotationText(annotationText string, pointValue string, metricType string, metricLabels map[string]string, resourceLabels map[string]string) string { + result := legendKeyFormat.ReplaceAllFunc([]byte(annotationText), func(in []byte) []byte { + metaPartName := strings.Replace(string(in), "{{", "", 1) + metaPartName = strings.Replace(metaPartName, "}}", "", 1) + metaPartName = strings.TrimSpace(metaPartName) + + if metaPartName == "metric.type" { + return []byte(metricType) + } + + metricPart := replaceWithMetricPart(metaPartName, metricType) + + if metricPart != nil { + return metricPart + } + + if metaPartName == "metric.value" { + return []byte(pointValue) + } + + metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1) + + if val, exists := metricLabels[metaPartName]; exists { + return []byte(val) + } + + metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1) 
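`formatAnnotationText` above resolves each `{{ ... }}` token (matched by `legendKeyFormat`) against the point value, the metric type, the metric labels, and finally the resource labels, leaving unrecognized tokens intact. Below is a trimmed, self-contained restatement covering only the `metric.value` and `metric.label.*` cases; `format` is a hypothetical name, and the real function also handles `metric.type` and `resource.label.*`. It reproduces the expectation asserted in the annotation test above.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same pattern the stackdriver package compiles in init().
var legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// format resolves {{ ... }} tokens against the point value and metric labels,
// passing unknown tokens through unchanged, as the real function does.
func format(text, value string, metricLabels map[string]string) string {
	return string(legendKeyFormat.ReplaceAllFunc([]byte(text), func(in []byte) []byte {
		name := strings.TrimSpace(strings.Trim(string(in), "{}"))
		if name == "metric.value" {
			return []byte(value)
		}
		if v, ok := metricLabels[strings.TrimPrefix(name, "metric.label.")]; ok {
			return []byte(v)
		}
		return in
	}))
}

func main() {
	out := format("atitle {{metric.label.instance_name}} {{metric.value}}",
		"9.856650", map[string]string{"instance_name": "collector-asia-east-1"})
	fmt.Println(out) // atitle collector-asia-east-1 9.856650
}
```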
+ + if val, exists := resourceLabels[metaPartName]; exists { + return []byte(val) + } + + return in + }) + + return string(result) +} diff --git a/pkg/tsdb/stackdriver/annotation_query_test.go b/pkg/tsdb/stackdriver/annotation_query_test.go new file mode 100644 index 00000000000..8229470d665 --- /dev/null +++ b/pkg/tsdb/stackdriver/annotation_query_test.go @@ -0,0 +1,33 @@ +package stackdriver + +import ( + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestStackdriverAnnotationQuery(t *testing.T) { + Convey("Stackdriver Annotation Query Executor", t, func() { + executor := &StackdriverExecutor{} + Convey("When parsing the stackdriver api response", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"} + query := &StackdriverQuery{} + err = executor.parseToAnnotations(res, data, query, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag") + So(err, ShouldBeNil) + + Convey("Should return annotations table", func() { + So(len(res.Tables), ShouldEqual, 1) + So(len(res.Tables[0].Rows), ShouldEqual, 9) + So(res.Tables[0].Rows[0][1], ShouldEqual, "atitle collector-asia-east-1 9.856650") + So(res.Tables[0].Rows[0][3], ShouldEqual, "atext asia-east1-a") + }) + }) + }) +} diff --git a/pkg/tsdb/stackdriver/stackdriver.go b/pkg/tsdb/stackdriver/stackdriver.go new file mode 100644 index 00000000000..586e154cd5d --- /dev/null +++ b/pkg/tsdb/stackdriver/stackdriver.go @@ -0,0 +1,460 @@ +package stackdriver + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "math" + "net/http" + "net/url" + "path" + "regexp" + "strconv" + "strings" + "time" + + "golang.org/x/net/context/ctxhttp" + + "github.com/grafana/grafana/pkg/api/pluginproxy" + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/opentracing/opentracing-go" +) + +var ( + slog log.Logger + legendKeyFormat *regexp.Regexp + metricNameFormat *regexp.Regexp +) + +// StackdriverExecutor executes queries for the Stackdriver datasource +type StackdriverExecutor struct { + httpClient *http.Client + dsInfo *models.DataSource +} + +// NewStackdriverExecutor initializes a http client +func NewStackdriverExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + httpClient, err := dsInfo.GetHttpClient() + if err != nil { + return nil, err + } + + return &StackdriverExecutor{ + httpClient: httpClient, + dsInfo: dsInfo, + }, nil +} + +func init() { + slog = log.New("tsdb.stackdriver") + tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewStackdriverExecutor) + legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) + metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.googleapis\.com/(.+)`) +} + +// Query takes in the frontend queries, parses them into the Stackdriver query format +// executes the queries against the Stackdriver API and parses the response into +// the time series or table format +func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, 
error) { + var result *tsdb.Response + var err error + queryType := tsdbQuery.Queries[0].Model.Get("type").MustString("") + + switch queryType { + case "annotationQuery": + result, err = e.executeAnnotationQuery(ctx, tsdbQuery) + case "timeSeriesQuery": + fallthrough + default: + result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery) + } + + return result, err +} + +func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), + } + + queries, err := e.buildQueries(tsdbQuery) + if err != nil { + return nil, err + } + + for _, query := range queries { + queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery) + if err != nil { + return nil, err + } + err = e.parseResponse(queryRes, resp, query) + if err != nil { + queryRes.Error = err + } + result.Results[query.RefID] = queryRes + } + + return result, nil +} + +func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) { + stackdriverQueries := []*StackdriverQuery{} + + startTime, err := tsdbQuery.TimeRange.ParseFrom() + if err != nil { + return nil, err + } + + endTime, err := tsdbQuery.TimeRange.ParseTo() + if err != nil { + return nil, err + } + + durationSeconds := int(endTime.Sub(startTime).Seconds()) + + for _, query := range tsdbQuery.Queries { + var target string + + metricType := query.Model.Get("metricType").MustString() + filterParts := query.Model.Get("filters").MustArray() + + params := url.Values{} + params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339)) + params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339)) + params.Add("filter", buildFilterString(metricType, filterParts)) + params.Add("view", query.Model.Get("view").MustString("FULL")) + setAggParams(¶ms, query, durationSeconds) + + target = params.Encode() + + if setting.Env == setting.DEV { + slog.Debug("Stackdriver request", "params", params) + } + + groupBys := query.Model.Get("groupBys").MustArray() + groupBysAsStrings := make([]string, 0) + for _, groupBy := range groupBys { + groupBysAsStrings = append(groupBysAsStrings, groupBy.(string)) + } + + aliasBy := query.Model.Get("aliasBy").MustString() + + stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{ + Target: target, + Params: params, + RefID: query.RefId, + GroupBys: groupBysAsStrings, + AliasBy: aliasBy, + }) + } + + return stackdriverQueries, nil +} + +func buildFilterString(metricType string, filterParts []interface{}) string { + filterString := "" + for i, part := range filterParts { + mod := i % 4 + if part == "AND" { + filterString += " " + } else if mod == 2 { + filterString += fmt.Sprintf(`"%s"`, part) + } else { + filterString += part.(string) + } + } + return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ") +} + +func setAggParams(params *url.Values, query *tsdb.Query, durationSeconds int) { + primaryAggregation := query.Model.Get("primaryAggregation").MustString() + perSeriesAligner := query.Model.Get("perSeriesAligner").MustString() + alignmentPeriod := query.Model.Get("alignmentPeriod").MustString() + + if primaryAggregation == "" { + primaryAggregation = "REDUCE_NONE" + } + + if perSeriesAligner == "" { + perSeriesAligner = "ALIGN_MEAN" + } + + if alignmentPeriod == "grafana-auto" || alignmentPeriod == "" { + alignmentPeriodValue := int(math.Max(float64(query.IntervalMs)/1000, 60.0)) + alignmentPeriod = "+" + strconv.Itoa(alignmentPeriodValue) + 
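// (For illustration: the period is serialized in the "+<n>s" form used
// throughout this patch, so query.IntervalMs = 1000000 gives
// max(1000000/1000, 60) = 1000 -> "+1000s", while IntervalMs = 30000 floors
// to "+60s" -- exactly what the grafana-auto tests further down assert.)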
"s" + } + + if alignmentPeriod == "stackdriver-auto" { + alignmentPeriodValue := int(math.Max(float64(durationSeconds), 60.0)) + if alignmentPeriodValue < 60*60*23 { + alignmentPeriod = "+60s" + } else if alignmentPeriodValue < 60*60*24*6 { + alignmentPeriod = "+300s" + } else { + alignmentPeriod = "+3600s" + } + } + + re := regexp.MustCompile("[0-9]+") + seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod), 10, 64) + if err != nil || seconds > 3600 { + alignmentPeriod = "+3600s" + } + + params.Add("aggregation.crossSeriesReducer", primaryAggregation) + params.Add("aggregation.perSeriesAligner", perSeriesAligner) + params.Add("aggregation.alignmentPeriod", alignmentPeriod) + + groupBys := query.Model.Get("groupBys").MustArray() + if len(groupBys) > 0 { + for i := 0; i < len(groupBys); i++ { + params.Add("aggregation.groupByFields", groupBys[i].(string)) + } + } +} + +func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, StackdriverResponse, error) { + queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} + + req, err := e.createRequest(ctx, e.dsInfo) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + req.URL.RawQuery = query.Params.Encode() + queryResult.Meta.Set("rawQuery", req.URL.RawQuery) + alignmentPeriod, ok := req.URL.Query()["aggregation.alignmentPeriod"] + + if ok { + re := regexp.MustCompile("[0-9]+") + seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod[0]), 10, 64) + if err == nil { + queryResult.Meta.Set("alignmentPeriod", seconds) + } + } + + span, ctx := opentracing.StartSpanFromContext(ctx, "stackdriver query") + span.SetTag("target", query.Target) + span.SetTag("from", tsdbQuery.TimeRange.From) + span.SetTag("until", tsdbQuery.TimeRange.To) + span.SetTag("datasource_id", e.dsInfo.Id) + span.SetTag("org_id", e.dsInfo.OrgId) + + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + res, err := ctxhttp.Do(ctx, e.httpClient, req) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + data, err := e.unmarshalResponse(res) + if err != nil { + queryResult.Error = err + return queryResult, StackdriverResponse{}, nil + } + + return queryResult, data, nil +} + +func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (StackdriverResponse, error) { + body, err := ioutil.ReadAll(res.Body) + defer res.Body.Close() + if err != nil { + return StackdriverResponse{}, err + } + + if res.StatusCode/100 != 2 { + slog.Error("Request failed", "status", res.Status, "body", string(body)) + return StackdriverResponse{}, fmt.Errorf(string(body)) + } + + var data StackdriverResponse + err = json.Unmarshal(body, &data) + if err != nil { + slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body)) + return StackdriverResponse{}, err + } + + return data, nil +} + +func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error { + metricLabels := make(map[string][]string) + resourceLabels := make(map[string][]string) + + for _, series := range data.TimeSeries { + points := make([]tsdb.TimePoint, 0) + + // reverse the order to be ascending + for i := len(series.Points) - 1; i >= 0; i-- { + point := series.Points[i] + value := point.Value.DoubleValue + + if 
series.ValueType == "INT64" { + parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64) + if err == nil { + value = parsedValue + } + } + + if series.ValueType == "BOOL" { + if point.Value.BoolValue { + value = 1 + } else { + value = 0 + } + } + + points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000)) + } + + defaultMetricName := series.Metric.Type + + for key, value := range series.Metric.Labels { + if !containsLabel(metricLabels[key], value) { + metricLabels[key] = append(metricLabels[key], value) + } + if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) { + defaultMetricName += " " + value + } + } + + for key, value := range series.Resource.Labels { + if !containsLabel(resourceLabels[key], value) { + resourceLabels[key] = append(resourceLabels[key], value) + } + + if containsLabel(query.GroupBys, "resource.label."+key) { + defaultMetricName += " " + value + } + } + + metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query) + + queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ + Name: metricName, + Points: points, + }) + } + + queryRes.Meta.Set("resourceLabels", resourceLabels) + queryRes.Meta.Set("metricLabels", metricLabels) + queryRes.Meta.Set("groupBys", query.GroupBys) + + return nil +} + +func containsLabel(labels []string, newLabel string) bool { + for _, val := range labels { + if val == newLabel { + return true + } + } + return false +} + +func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string { + if query.AliasBy == "" { + return defaultMetricName + } + + result := legendKeyFormat.ReplaceAllFunc([]byte(query.AliasBy), func(in []byte) []byte { + metaPartName := strings.Replace(string(in), "{{", "", 1) + metaPartName = strings.Replace(metaPartName, "}}", "", 1) + metaPartName = strings.TrimSpace(metaPartName) + + if metaPartName == "metric.type" { + return []byte(metricType) + } + + metricPart := replaceWithMetricPart(metaPartName, metricType) + + if metricPart != nil { + return metricPart + } + + metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1) + + if val, exists := metricLabels[metaPartName]; exists { + return []byte(val) + } + + metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1) + + if val, exists := resourceLabels[metaPartName]; exists { + return []byte(val) + } + + return in + }) + + return string(result) +} + +func replaceWithMetricPart(metaPartName string, metricType string) []byte { + // https://cloud.google.com/monitoring/api/v3/metrics-details#label_names + shortMatches := metricNameFormat.FindStringSubmatch(metricType) + + if metaPartName == "metric.name" { + if len(shortMatches) > 0 { + return []byte(shortMatches[2]) + } + } + + if metaPartName == "metric.service" { + if len(shortMatches) > 0 { + return []byte(shortMatches[1]) + } + } + + return nil +} + +func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { + u, _ := url.Parse(dsInfo.Url) + u.Path = path.Join(u.Path, "render") + + req, err := http.NewRequest(http.MethodGet, "https://monitoring.googleapis.com/", nil) + if err != nil { + slog.Error("Failed to create request", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } + + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) + + // find plugin + plugin, ok := plugins.DataSources[dsInfo.Type] + if !ok { + return nil, errors.New("Unable to find datasource plugin Stackdriver") + } + projectName := dsInfo.JsonData.Get("defaultProject").MustString() + proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries") + + var stackdriverRoute *plugins.AppPluginRoute + for _, route := range plugin.Routes { + if route.Path == "stackdriver" { + stackdriverRoute = route + break + } + } + + pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo) + + return req, nil +} diff --git a/pkg/tsdb/stackdriver/stackdriver_test.go b/pkg/tsdb/stackdriver/stackdriver_test.go new file mode 100644 index 00000000000..da4d6890207 --- /dev/null +++ b/pkg/tsdb/stackdriver/stackdriver_test.go @@ -0,0 +1,357 @@ +package stackdriver + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestStackdriver(t *testing.T) { + Convey("Stackdriver", t, func() { + executor := &StackdriverExecutor{} + + Convey("Parse queries from frontend and build Stackdriver API queries", func() { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + tsdbQuery := &tsdb.TsdbQuery{ + TimeRange: &tsdb.TimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "view": "FULL", + "aliasBy": "testalias", + "type": "timeSeriesQuery", + }), + RefId: "A", + }, + }, + } + + Convey("and query has no aggregation set", func() { + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 7) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + So(queries[0].AliasBy, ShouldEqual, "testalias") + }) + + Convey("and query has filters", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(len(queries), ShouldEqual, 1) + So(queries[0].Params["filter"][0], ShouldEqual, `metric.type="a/metric/type" key="value" key2="value2"`) + }) + + Convey("and alignmentPeriod is set to grafana-auto", func() { + Convey("and IntervalMs is larger than 60000", func() { + tsdbQuery.Queries[0].IntervalMs = 1000000 + tsdbQuery.Queries[0].Model = 
simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+1000s`) + }) + Convey("and IntervalMs is less than 60000", func() { + tsdbQuery.Queries[0].IntervalMs = 30000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + }) + + Convey("and alignmentPeriod is set to stackdriver-auto", func() { + Convey("and range is two hours", func() { + tsdbQuery.TimeRange.From = "1538033322461" + tsdbQuery.TimeRange.To = "1538040522461" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + + Convey("and range is 22 hours", func() { + tsdbQuery.TimeRange.From = "1538034524922" + tsdbQuery.TimeRange.To = "1538113724922" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + }) + + Convey("and range is 23 hours", func() { + tsdbQuery.TimeRange.From = "1538034567985" + tsdbQuery.TimeRange.To = "1538117367985" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`) + }) + + Convey("and range is 7 days", func() { + tsdbQuery.TimeRange.From = "1538036324073" + tsdbQuery.TimeRange.To = "1538641124073" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) + }) + }) + + Convey("and alignmentPeriod is set in frontend", func() { + Convey("and alignment period is too big", func() { + tsdbQuery.Queries[0].IntervalMs = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+360000s", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) + }) + + Convey("and alignment period is within accepted range", func() { + tsdbQuery.Queries[0].IntervalMs = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+600s", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+600s`) + }) + }) + + Convey("and query has aggregation mean set", func() { + tsdbQuery.Queries[0].Model = 
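// The stackdriver-auto cases above pin down the buckets in setAggParams:
// ranges under 23 hours align at "+60s", under 6 days at "+300s", and
// anything longer at "+3600s"; explicitly set periods over 3600 seconds are
// clamped to "+3600s". (In the Target assertions, url.Values.Encode() sorts
// parameters by key, which is why the aggregation.* parameters come first.)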
simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "primaryAggregation": "REDUCE_MEAN", + "view": "FULL", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_MEAN&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 7) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.crossSeriesReducer"][0], ShouldEqual, "REDUCE_MEAN") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, "+60s") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + }) + + Convey("and query has group bys", func() { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "primaryAggregation": "REDUCE_NONE", + "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, + "view": "FULL", + }) + + queries, err := executor.buildQueries(tsdbQuery) + So(err, ShouldBeNil) + + So(len(queries), ShouldEqual, 1) + So(queries[0].RefID, ShouldEqual, "A") + So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") + So(len(queries[0].Params), ShouldEqual, 8) + So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") + So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") + So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") + So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1") + So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2") + So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") + So(queries[0].Params["view"][0], ShouldEqual, "FULL") + }) + + }) + + Convey("Parse stackdriver response in the time series format", func() { + Convey("when data from query aggregated to one time series", func() { + data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 1) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + So(len(res.Series), ShouldEqual, 1) + So(res.Series[0].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count") + So(len(res.Series[0].Points), ShouldEqual, 3) + + Convey("timestamps should be in ascending order", func() { + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 0.05) + So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536670020000) + + 
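// (The fixture lists points newest-first, as the Monitoring API returns
// them; parseResponse walks the slice in reverse, which is what the
// ascending-order assertions here verify.)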
So(res.Series[0].Points[1][0].Float64, ShouldEqual, 1.05) + So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536670080000) + + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 1.0666666666667) + So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536670260000) + }) + }) + + Convey("when data from query with no aggregation", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should add labels to metric name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1") + }) + + Convey("Should parse to time series", func() { + So(len(res.Series[0].Points), ShouldEqual, 3) + So(res.Series[0].Points[0][0].Float64, ShouldEqual, 9.8566497180145) + So(res.Series[0].Points[1][0].Float64, ShouldEqual, 9.7323568146676) + So(res.Series[0].Points[2][0].Float64, ShouldEqual, 9.7730520330369) + }) + + Convey("Should add meta for labels to the response", func() { + metricLabels := res.Meta.Get("metricLabels").Interface().(map[string][]string) + So(metricLabels, ShouldNotBeNil) + So(len(metricLabels["instance_name"]), ShouldEqual, 3) + So(metricLabels["instance_name"][0], ShouldEqual, "collector-asia-east-1") + So(metricLabels["instance_name"][1], ShouldEqual, "collector-europe-west-1") + So(metricLabels["instance_name"][2], ShouldEqual, "collector-us-east-1") + + resourceLabels := res.Meta.Get("resourceLabels").Interface().(map[string][]string) + So(resourceLabels, ShouldNotBeNil) + So(len(resourceLabels["zone"]), ShouldEqual, 3) + So(resourceLabels["zone"][0], ShouldEqual, "asia-east1-a") + So(resourceLabels["zone"][1], ShouldEqual, "europe-west1-b") + So(resourceLabels["zone"][2], ShouldEqual, "us-east1-b") + + So(len(resourceLabels["project_id"]), ShouldEqual, 1) + So(resourceLabels["project_id"][0], ShouldEqual, "grafana-prod") + }) + }) + + Convey("when data from query with no aggregation and group bys", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + query := &StackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should add instance name and zone labels to metric name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b") + }) + }) + + Convey("when data from query with no aggregation and alias by", func() { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + So(err, ShouldBeNil) + So(len(data.TimeSeries), ShouldEqual, 3) + + res := 
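// For reference in the alias cases below: metricNameFormat
// (`([\w\d_]+)\.googleapis\.com/(.+)`) splits a type such as
// "compute.googleapis.com/instance/cpu/usage_time" into
// {{metric.service}} = "compute" and {{metric.name}} = "instance/cpu/usage_time".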
&tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} + + Convey("and the alias pattern is for metric type, a metric label and a resource label", func() { + + query := &StackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should use alias by formatting and only show instance name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a") + So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b") + So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b") + }) + }) + + Convey("and the alias pattern is for metric name", func() { + + query := &StackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = executor.parseResponse(res, data, query) + So(err, ShouldBeNil) + + Convey("Should use alias by formatting and only show instance name", func() { + So(len(res.Series), ShouldEqual, 3) + So(res.Series[0].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") + So(res.Series[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") + So(res.Series[2].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") + }) + }) + }) + }) + }) +} + +func loadTestFile(path string) (StackdriverResponse, error) { + var data StackdriverResponse + + jsonBody, err := ioutil.ReadFile(path) + if err != nil { + return data, err + } + err = json.Unmarshal(jsonBody, &data) + return data, err +} diff --git a/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json new file mode 100644 index 00000000000..e1a84583cc4 --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json @@ -0,0 +1,46 @@ +{ + "timeSeries": [ + { + "metric": { + "type": "serviceruntime.googleapis.com\/api\/request_count" + }, + "resource": { + "type": "consumed_api", + "labels": { + "project_id": "grafana-prod" + } + }, + "metricKind": "GAUGE", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:51:00Z", + "endTime": "2018-09-11T12:51:00Z" + }, + "value": { + "doubleValue": 1.0666666666667 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:48:00Z", + "endTime": "2018-09-11T12:48:00Z" + }, + "value": { + "doubleValue": 1.05 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:47:00Z", + "endTime": "2018-09-11T12:47:00Z" + }, + "value": { + "doubleValue": 0.05 + } + } + ] + } + ] +} diff --git a/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json new file mode 100644 index 00000000000..da615a168bf --- /dev/null +++ b/pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json @@ -0,0 +1,145 @@ +{ + "timeSeries": [ + { + "metric": { + "labels": { + "instance_name": "collector-asia-east-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "1119268429530133111", + "zone": "asia-east1-a", + "project_id": "grafana-prod" + } + }, + "metricKind": 
"DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 9.7730520330369 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 9.7323568146676 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 9.8566497180145 + } + } + ] + }, + { + "metric": { + "labels": { + "instance_name": "collector-europe-west-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "22241654114540837222", + "zone": "europe-west1-b", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 8.8210971239023 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 8.9689492364414 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 9.0238475054502 + } + } + ] + }, + { + "metric": { + "labels": { + "instance_name": "collector-us-east-1" + }, + "type": "compute.googleapis.com\/instance\/cpu\/usage_time" + }, + "resource": { + "type": "gce_instance", + "labels": { + "instance_id": "3332264424035095333", + "zone": "us-east1-b", + "project_id": "grafana-prod" + } + }, + "metricKind": "DELTA", + "valueType": "DOUBLE", + "points": [ + { + "interval": { + "startTime": "2018-09-11T12:30:00Z", + "endTime": "2018-09-11T12:31:00Z" + }, + "value": { + "doubleValue": 30.807846801355 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:29:00Z", + "endTime": "2018-09-11T12:30:00Z" + }, + "value": { + "doubleValue": 30.903974115849 + } + }, + { + "interval": { + "startTime": "2018-09-11T12:28:00Z", + "endTime": "2018-09-11T12:29:00Z" + }, + "value": { + "doubleValue": 30.829426143318 + } + } + ] + } + ] +} diff --git a/pkg/tsdb/stackdriver/types.go b/pkg/tsdb/stackdriver/types.go new file mode 100644 index 00000000000..c58ac2968f2 --- /dev/null +++ b/pkg/tsdb/stackdriver/types.go @@ -0,0 +1,43 @@ +package stackdriver + +import ( + "net/url" + "time" +) + +// StackdriverQuery is the query that Grafana sends from the frontend +type StackdriverQuery struct { + Target string + Params url.Values + RefID string + GroupBys []string + AliasBy string +} + +// StackdriverResponse is the data returned from the external Google Stackdriver API +type StackdriverResponse struct { + TimeSeries []struct { + Metric struct { + Labels map[string]string `json:"labels"` + Type string `json:"type"` + } `json:"metric"` + Resource struct { + Type string `json:"type"` + Labels map[string]string `json:"labels"` + } `json:"resource"` + MetricKind string `json:"metricKind"` + ValueType string `json:"valueType"` + Points []struct { + Interval struct { + StartTime time.Time `json:"startTime"` + EndTime time.Time `json:"endTime"` + } `json:"interval"` + Value struct { + DoubleValue float64 `json:"doubleValue"` + StringValue string `json:"stringValue"` + BoolValue bool `json:"boolValue"` + IntValue string `json:"int64Value"` + } `json:"value"` + } `json:"points"` + } `json:"timeSeries"` +} diff --git a/pkg/tsdb/testdata/scenarios.go 
b/pkg/tsdb/testdata/scenarios.go index e907fa8aae0..421a907b5e9 100644 --- a/pkg/tsdb/testdata/scenarios.go +++ b/pkg/tsdb/testdata/scenarios.go @@ -95,27 +95,20 @@ func init() { Id: "random_walk", Name: "Random Walk", - Handler: func(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { - timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() - to := tsdbQuery.TimeRange.GetToAsMsEpoch() + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + return getRandomWalk(query, context) + }, + }) - series := newSeriesForQuery(query) - - points := make(tsdb.TimeSeriesPoints, 0) - walker := rand.Float64() * 100 - - for i := int64(0); i < 10000 && timeWalkerMs < to; i++ { - points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs))) - - walker += rand.Float64() - 0.5 - timeWalkerMs += query.IntervalMs - } - - series.Points = points - - queryRes := tsdb.NewQueryResult() - queryRes.Series = append(queryRes.Series, series) - return queryRes + registerScenario(&Scenario{ + Id: "slow_query", + Name: "Slow Query", + StringInput: "5s", + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + stringInput := query.Model.Get("stringInput").MustString() + parsedInterval, _ := time.ParseDuration(stringInput) + time.Sleep(parsedInterval) + return getRandomWalk(query, context) }, }) @@ -221,6 +214,57 @@ func init() { return queryRes }, }) + + registerScenario(&Scenario{ + Id: "table_static", + Name: "Table Static", + + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() + to := context.TimeRange.GetToAsMsEpoch() + + table := tsdb.Table{ + Columns: []tsdb.TableColumn{ + {Text: "Time"}, + {Text: "Message"}, + {Text: "Description"}, + {Text: "Value"}, + }, + Rows: []tsdb.RowValues{}, + } + for i := int64(0); i < 10 && timeWalkerMs < to; i++ { + table.Rows = append(table.Rows, tsdb.RowValues{float64(timeWalkerMs), "This is a message", "Description", 23.1}) + timeWalkerMs += query.IntervalMs + } + + queryRes := tsdb.NewQueryResult() + queryRes.Tables = append(queryRes.Tables, &table) + return queryRes + }, + }) +} + +func getRandomWalk(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() + to := tsdbQuery.TimeRange.GetToAsMsEpoch() + + series := newSeriesForQuery(query) + + points := make(tsdb.TimeSeriesPoints, 0) + walker := rand.Float64() * 100 + + for i := int64(0); i < 10000 && timeWalkerMs < to; i++ { + points = append(points, tsdb.NewTimePoint(null.FloatFrom(walker), float64(timeWalkerMs))) + + walker += rand.Float64() - 0.5 + timeWalkerMs += query.IntervalMs + } + + series.Points = points + + queryRes := tsdb.NewQueryResult() + queryRes.Series = append(queryRes.Series, series) + return queryRes } func registerScenario(scenario *Scenario) { diff --git a/public/app/core/components/LayoutSelector/LayoutSelector.tsx b/public/app/core/components/LayoutSelector/LayoutSelector.tsx new file mode 100644 index 00000000000..d9e00102438 --- /dev/null +++ b/public/app/core/components/LayoutSelector/LayoutSelector.tsx @@ -0,0 +1,39 @@ +import React, { SFC } from 'react'; + +export type LayoutMode = LayoutModes.Grid | LayoutModes.List; + +export enum LayoutModes { + Grid = 'grid', + List = 'list', +} + +interface Props { + mode: LayoutMode; + onLayoutModeChanged: (mode: LayoutMode) => {}; +} + +const LayoutSelector: SFC = props => { + const { mode, onLayoutModeChanged } = props; + return 
( +
+ + +
+ ); +}; + +export default LayoutSelector; diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx new file mode 100644 index 00000000000..9faf07f18d1 --- /dev/null +++ b/public/app/core/components/OrgActionBar/OrgActionBar.test.tsx @@ -0,0 +1,24 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import OrgActionBar, { Props } from './OrgActionBar'; + +const setup = (propOverrides?: object) => { + const props: Props = { + searchQuery: '', + setSearchQuery: jest.fn(), + target: '_blank', + linkButton: { href: 'some/url', title: 'test' }, + }; + + Object.assign(props, propOverrides); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/core/components/OrgActionBar/OrgActionBar.tsx b/public/app/core/components/OrgActionBar/OrgActionBar.tsx new file mode 100644 index 00000000000..8fc34a018e1 --- /dev/null +++ b/public/app/core/components/OrgActionBar/OrgActionBar.tsx @@ -0,0 +1,44 @@ +import React, { PureComponent } from 'react'; +import LayoutSelector, { LayoutMode } from '../LayoutSelector/LayoutSelector'; + +export interface Props { + searchQuery: string; + layoutMode?: LayoutMode; + onSetLayoutMode?: (mode: LayoutMode) => {}; + setSearchQuery: (value: string) => {}; + linkButton: { href: string; title: string }; + target?: string; +} + +export default class OrgActionBar extends PureComponent { + render() { + const { searchQuery, layoutMode, onSetLayoutMode, linkButton, setSearchQuery, target } = this.props; + const linkProps = { href: linkButton.href, target: undefined }; + + if (target) { + linkProps.target = target; + } + + return ( +
+
+ + onSetLayoutMode(mode)} /> +
+ + ); + } +} diff --git a/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap new file mode 100644 index 00000000000..dc53e7863ea --- /dev/null +++ b/public/app/core/components/OrgActionBar/__snapshots__/OrgActionBar.test.tsx.snap @@ -0,0 +1,39 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
+
+ + +
+ +`; diff --git a/public/app/core/components/PermissionList/AddPermission.tsx b/public/app/core/components/PermissionList/AddPermission.tsx index 77ac6953b74..fc062ce63e4 100644 --- a/public/app/core/components/PermissionList/AddPermission.tsx +++ b/public/app/core/components/PermissionList/AddPermission.tsx @@ -1,7 +1,8 @@ import React, { Component } from 'react'; -import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { UserPicker } from 'app/core/components/Picker/UserPicker'; import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; +import { User } from 'app/types'; import { dashboardPermissionLevels, dashboardAclTargets, diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx index e50513c44e1..8f48ba8f66a 100644 --- a/public/app/core/components/Picker/UserPicker.tsx +++ b/public/app/core/components/Picker/UserPicker.tsx @@ -3,6 +3,7 @@ import Select from 'react-select'; import PickerOption from './PickerOption'; import { debounce } from 'lodash'; import { getBackendSrv } from 'app/core/services/backend_srv'; +import { User } from 'app/types'; export interface Props { onSelected: (user: User) => void; @@ -14,13 +15,6 @@ export interface State { isLoading: boolean; } -export interface User { - id: number; - label: string; - avatarUrl: string; - login: string; -} - export class UserPicker extends Component { debouncedSearch: any; diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index a0ea0279d30..2774ab99426 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -10,6 +10,7 @@ import colors from 'app/core/utils/colors'; import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; import { configureStore } from 'app/store/configureStore'; +import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader'; export class GrafanaCtrl { /** @ngInject */ @@ -22,11 +23,13 @@ export class GrafanaCtrl { contextSrv, bridgeSrv, backendSrv: BackendSrv, - datasourceSrv: DatasourceSrv + datasourceSrv: DatasourceSrv, + angularLoader: AngularLoader ) { // sets singleston instances for angular services so react components can access them - configureStore(); + setAngularLoader(angularLoader); setBackendSrv(backendSrv); + configureStore(); $scope.init = () => { $scope.contextSrv = contextSrv; @@ -245,6 +248,9 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop return; } + // ensure dropdown menu doesn't impact on z-index + body.find('.dropdown-menu-open').removeClass('dropdown-menu-open'); + // for stuff that animates, slides out etc, clicking it needs to // hide it right away const clickAutoHide = target.closest('[data-click-hide]'); diff --git a/public/app/core/reducers/location.ts b/public/app/core/reducers/location.ts index 6a356c4ea5a..2089cfe9f59 100644 --- a/public/app/core/reducers/location.ts +++ b/public/app/core/reducers/location.ts @@ -1,6 +1,6 @@ import { Action } from 'app/core/actions/location'; -import { LocationState, UrlQueryMap } from 'app/types'; -import { toUrlParams } from 'app/core/utils/url'; +import { LocationState } from 'app/types'; +import { renderUrl } from 'app/core/utils/url'; export const initialState: LocationState = { url: '', @@ -9,13 +9,6 
@@ export const initialState: LocationState = { routeParams: {}, }; -function renderUrl(path: string, query: UrlQueryMap | undefined): string { - if (query && Object.keys(query).length > 0) { - path += '?' + toUrlParams(query); - } - return path; -} - export const locationReducer = (state = initialState, action: Action): LocationState => { switch (action.type) { case 'UPDATE_LOCATION': { diff --git a/public/app/core/services/AngularLoader.ts b/public/app/core/services/AngularLoader.ts new file mode 100644 index 00000000000..e3a7dec4351 --- /dev/null +++ b/public/app/core/services/AngularLoader.ts @@ -0,0 +1,42 @@ +import angular from 'angular'; +import coreModule from 'app/core/core_module'; +import _ from 'lodash'; + +export interface AngularComponent { + destroy(); +} + +export class AngularLoader { + /** @ngInject */ + constructor(private $compile, private $rootScope) {} + + load(elem, scopeProps, template): AngularComponent { + const scope = this.$rootScope.$new(); + + _.assign(scope, scopeProps); + + const compiledElem = this.$compile(template)(scope); + const rootNode = angular.element(elem); + rootNode.append(compiledElem); + + return { + destroy: () => { + scope.$destroy(); + compiledElem.remove(); + }, + }; + } +} + +coreModule.service('angularLoader', AngularLoader); + +let angularLoaderInstance: AngularLoader; + +export function setAngularLoader(pl: AngularLoader) { + angularLoaderInstance = pl; +} + +// away to access it from react +export function getAngularLoader(): AngularLoader { + return angularLoaderInstance; +} diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index d05e9b0c21c..d8dfc958dd4 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -4,7 +4,7 @@ import _ from 'lodash'; import config from 'app/core/config'; import coreModule from 'app/core/core_module'; import appEvents from 'app/core/app_events'; -import { encodePathComponent } from 'app/core/utils/location_util'; +import { getExploreUrl } from 'app/core/utils/explore'; import Mousetrap from 'mousetrap'; import 'mousetrap-global-bind'; @@ -15,7 +15,14 @@ export class KeybindingSrv { timepickerOpen = false; /** @ngInject */ - constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) { + constructor( + private $rootScope, + private $location, + private $timeout, + private datasourceSrv, + private timeSrv, + private contextSrv + ) { // clear out all shortcuts on route change $rootScope.$on('$routeChangeSuccess', () => { Mousetrap.reset(); @@ -194,14 +201,9 @@ export class KeybindingSrv { if (dashboard.meta.focusPanelId) { const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); const datasource = await this.datasourceSrv.get(panel.datasource); - if (datasource && datasource.supportsExplore) { - const range = this.timeSrv.timeRangeForUrl(); - const state = { - ...datasource.getExploreState(panel), - range, - }; - const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore?state=${exploreState}`); + const url = await getExploreUrl(panel, panel.targets, datasource, this.datasourceSrv, this.timeSrv); + if (url) { + this.$timeout(() => this.$location.url(url)); } } }); diff --git a/public/app/core/utils/explore.test.ts b/public/app/core/utils/explore.test.ts new file mode 100644 index 00000000000..c47321225fe --- /dev/null +++ b/public/app/core/utils/explore.test.ts @@ -0,0 +1,96 @@ +import { DEFAULT_RANGE, 
serializeStateToUrlParam, parseUrlState } from './explore'; +import { ExploreState } from 'app/types/explore'; + +const DEFAULT_EXPLORE_STATE: ExploreState = { + datasource: null, + datasourceError: null, + datasourceLoading: null, + datasourceMissing: false, + datasourceName: '', + exploreDatasources: [], + graphResult: null, + history: [], + latency: 0, + loading: false, + logsResult: null, + queries: [], + queryErrors: [], + queryHints: [], + range: DEFAULT_RANGE, + requestOptions: null, + showingGraph: true, + showingLogs: true, + showingTable: true, + supportsGraph: null, + supportsLogs: null, + supportsTable: null, + tableResult: null, +}; + +describe('state functions', () => { + describe('parseUrlState', () => { + it('returns default state on empty string', () => { + expect(parseUrlState('')).toMatchObject({ + datasource: null, + queries: [], + range: DEFAULT_RANGE, + }); + }); + }); + describe('serializeStateToUrlParam', () => { + it('returns url parameter value for a state object', () => { + const state = { + ...DEFAULT_EXPLORE_STATE, + datasourceName: 'foo', + range: { + from: 'now - 5h', + to: 'now', + }, + queries: [ + { + query: 'metric{test="a/b"}', + }, + { + query: 'super{foo="x/z"}', + }, + ], + }; + expect(serializeStateToUrlParam(state)).toBe( + '{"datasource":"foo","queries":[{"query":"metric{test=\\"a/b\\"}"},' + + '{"query":"super{foo=\\"x/z\\"}"}],"range":{"from":"now - 5h","to":"now"}}' + ); + }); + }); + describe('interplay', () => { + it('can parse the serialized state into the original state', () => { + const state = { + ...DEFAULT_EXPLORE_STATE, + datasourceName: 'foo', + range: { + from: 'now - 5h', + to: 'now', + }, + queries: [ + { + query: 'metric{test="a/b"}', + }, + { + query: 'super{foo="x/z"}', + }, + ], + }; + const serialized = serializeStateToUrlParam(state); + const parsed = parseUrlState(serialized); + + // Account for datasource vs datasourceName + const { datasource, ...rest } = parsed; + const sameState = { + ...rest, + datasource: DEFAULT_EXPLORE_STATE.datasource, + datasourceName: datasource, + }; + + expect(state).toMatchObject(sameState); + }); + }); +}); diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts new file mode 100644 index 00000000000..cca841a1725 --- /dev/null +++ b/public/app/core/utils/explore.ts @@ -0,0 +1,78 @@ +import { renderUrl } from 'app/core/utils/url'; +import { ExploreState, ExploreUrlState } from 'app/types/explore'; + +export const DEFAULT_RANGE = { + from: 'now-6h', + to: 'now', +}; + +/** + * Returns an Explore-URL that contains a panel's queries and the dashboard time range. 
+ * + * @param panel Origin panel of the jump to Explore + * @param panelTargets The origin panel's query targets + * @param panelDatasource The origin panel's datasource + * @param datasourceSrv Datasource service to query other datasources in case the panel datasource is mixed + * @param timeSrv Time service to get the current dashboard range from + */ +export async function getExploreUrl( + panel: any, + panelTargets: any[], + panelDatasource: any, + datasourceSrv: any, + timeSrv: any +) { + let exploreDatasource = panelDatasource; + let exploreTargets = panelTargets; + let url; + + // Mixed datasources need to choose only one datasource + if (panelDatasource.meta.id === 'mixed' && panelTargets) { + // Find first explore datasource among targets + let mixedExploreDatasource; + for (const t of panel.targets) { + const datasource = await datasourceSrv.get(t.datasource); + if (datasource && datasource.meta.explore) { + mixedExploreDatasource = datasource; + break; + } + } + + // Add all its targets + if (mixedExploreDatasource) { + exploreDatasource = mixedExploreDatasource; + exploreTargets = panelTargets.filter(t => t.datasource === mixedExploreDatasource.name); + } + } + + if (exploreDatasource && exploreDatasource.meta.explore) { + const range = timeSrv.timeRangeForUrl(); + const state = { + ...exploreDatasource.getExploreState(exploreTargets), + range, + }; + const exploreState = JSON.stringify(state); + url = renderUrl('/explore', { state: exploreState }); + } + return url; +} + +export function parseUrlState(initial: string | undefined): ExploreUrlState { + if (initial) { + try { + return JSON.parse(decodeURI(initial)); + } catch (e) { + console.error(e); + } + } + return { datasource: null, queries: [], range: DEFAULT_RANGE }; +} + +export function serializeStateToUrlParam(state: ExploreState): string { + const urlState: ExploreUrlState = { + datasource: state.datasourceName, + queries: state.queries.map(q => ({ query: q.query })), + range: state.range, + }; + return JSON.stringify(urlState); +} diff --git a/public/app/core/utils/location_util.ts b/public/app/core/utils/location_util.ts index 735272285ff..76f2fc5881f 100644 --- a/public/app/core/utils/location_util.ts +++ b/public/app/core/utils/location_util.ts @@ -1,10 +1,5 @@ import config from 'app/core/config'; -// Slash encoding for angular location provider, see https://github.com/angular/angular.js/issues/10479 -const SLASH = ''; -export const decodePathComponent = (pc: string) => decodeURIComponent(pc).replace(new RegExp(SLASH, 'g'), '/'); -export const encodePathComponent = (pc: string) => encodeURIComponent(pc.replace(/\//g, SLASH)); - export const stripBaseFromUrl = url => { const appSubUrl = config.appSubUrl; const stripExtraChars = appSubUrl.endsWith('/') ? 1 : 0; diff --git a/public/app/core/utils/url.ts b/public/app/core/utils/url.ts index 198029b0e9f..ab8be8ad222 100644 --- a/public/app/core/utils/url.ts +++ b/public/app/core/utils/url.ts @@ -2,6 +2,15 @@ * @preserve jquery-param (c) 2015 KNOWLEDGECODE | MIT */ +import { UrlQueryMap } from 'app/types'; + +export function renderUrl(path: string, query: UrlQueryMap | undefined): string { + if (query && Object.keys(query).length > 0) { + path += '?' 
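// For illustration (hypothetical values): renderUrl('/explore', { state: '{"x":1}' })
// yields '/explore?' followed by the URL-encoded state parameter, while an
// empty query map returns the path unchanged.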
+ toUrlParams(query); + } + return path; +} + export function toUrlParams(a) { const s = []; const rbracket = /\[\]$/; diff --git a/public/app/features/api-keys/ApiKeysAddedModal.test.tsx b/public/app/features/api-keys/ApiKeysAddedModal.test.tsx new file mode 100644 index 00000000000..160418a7ab8 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysAddedModal.test.tsx @@ -0,0 +1,25 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { ApiKeysAddedModal, Props } from './ApiKeysAddedModal'; + +const setup = (propOverrides?: object) => { + const props: Props = { + apiKey: 'api key test', + rootPath: 'test/path', + }; + + Object.assign(props, propOverrides); + + const wrapper = shallow(); + + return { + wrapper, + }; +}; + +describe('Render', () => { + it('should render component', () => { + const { wrapper } = setup(); + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/api-keys/ApiKeysAddedModal.tsx b/public/app/features/api-keys/ApiKeysAddedModal.tsx new file mode 100644 index 00000000000..995aa46c773 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysAddedModal.tsx @@ -0,0 +1,46 @@ +import React from 'react'; + +export interface Props { + apiKey: string; + rootPath: string; +} + +export const ApiKeysAddedModal = (props: Props) => { + return ( +
+
+

+ + API Key Created +

+ + + + +
+ +
+
+
+ Key + {props.apiKey} +
+
+ +
+ You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now. +
+
+ You can authenticate requests using the Authorization HTTP header, example: +
+
+
+            curl -H "Authorization: Bearer {props.apiKey}" {props.rootPath}/api/dashboards/home
+          
+
+
+
+ ); +}; + +export default ApiKeysAddedModal; diff --git a/public/app/features/api-keys/ApiKeysPage.test.tsx b/public/app/features/api-keys/ApiKeysPage.test.tsx new file mode 100644 index 00000000000..518180fc424 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysPage.test.tsx @@ -0,0 +1,73 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { Props, ApiKeysPage } from './ApiKeysPage'; +import { NavModel, ApiKey } from 'app/types'; +import { getMultipleMockKeys, getMockKey } from './__mocks__/apiKeysMock'; + +const setup = (propOverrides?: object) => { + const props: Props = { + navModel: {} as NavModel, + apiKeys: [] as ApiKey[], + searchQuery: '', + loadApiKeys: jest.fn(), + deleteApiKey: jest.fn(), + setSearchQuery: jest.fn(), + addApiKey: jest.fn(), + }; + + Object.assign(props, propOverrides); + + const wrapper = shallow(); + const instance = wrapper.instance() as ApiKeysPage; + + return { + wrapper, + instance, + }; +}; + +describe('Render', () => { + it('should render component', () => { + const { wrapper } = setup(); + expect(wrapper).toMatchSnapshot(); + }); + + it('should render API keys table', () => { + const { wrapper } = setup({ + apiKeys: getMultipleMockKeys(5), + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); + +describe('Life cycle', () => { + it('should call loadApiKeys', () => { + const { instance } = setup(); + + instance.componentDidMount(); + + expect(instance.props.loadApiKeys).toHaveBeenCalled(); + }); +}); + +describe('Functions', () => { + describe('Delete team', () => { + it('should call delete team', () => { + const { instance } = setup(); + instance.onDeleteApiKey(getMockKey()); + expect(instance.props.deleteApiKey).toHaveBeenCalledWith(1); + }); + }); + + describe('on search query change', () => { + it('should call setSearchQuery', () => { + const { instance } = setup(); + const mockEvent = { target: { value: 'test' } }; + + instance.onSearchQueryChange(mockEvent); + + expect(instance.props.setSearchQuery).toHaveBeenCalledWith('test'); + }); + }); +}); diff --git a/public/app/features/api-keys/ApiKeysPage.tsx b/public/app/features/api-keys/ApiKeysPage.tsx new file mode 100644 index 00000000000..2f19250e835 --- /dev/null +++ b/public/app/features/api-keys/ApiKeysPage.tsx @@ -0,0 +1,222 @@ +import React, { PureComponent } from 'react'; +import ReactDOMServer from 'react-dom/server'; +import { connect } from 'react-redux'; +import { hot } from 'react-hot-loader'; +import { NavModel, ApiKey, NewApiKey, OrgRole } from 'app/types'; +import { getNavModel } from 'app/core/selectors/navModel'; +import { getApiKeys } from './state/selectors'; +import { loadApiKeys, deleteApiKey, setSearchQuery, addApiKey } from './state/actions'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import ApiKeysAddedModal from './ApiKeysAddedModal'; +import config from 'app/core/config'; +import appEvents from 'app/core/app_events'; + +export interface Props { + navModel: NavModel; + apiKeys: ApiKey[]; + searchQuery: string; + loadApiKeys: typeof loadApiKeys; + deleteApiKey: typeof deleteApiKey; + setSearchQuery: typeof setSearchQuery; + addApiKey: typeof addApiKey; +} + +export interface State { + isAdding: boolean; + newApiKey: NewApiKey; +} + +enum ApiKeyStateProps { + Name = 'name', + Role = 'role', +} + +const initialApiKeyState = { + name: '', + role: OrgRole.Viewer, +}; + +export class ApiKeysPage extends PureComponent { + constructor(props) { + super(props); + 
this.state = { isAdding: false, newApiKey: initialApiKeyState }; + } + + componentDidMount() { + this.fetchApiKeys(); + } + + async fetchApiKeys() { + await this.props.loadApiKeys(); + } + + onDeleteApiKey(key: ApiKey) { + this.props.deleteApiKey(key.id); + } + + onSearchQueryChange = evt => { + this.props.setSearchQuery(evt.target.value); + }; + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onAddApiKey = async evt => { + evt.preventDefault(); + + const openModal = (apiKey: string) => { + const rootPath = window.location.origin + config.appSubUrl; + const modalTemplate = ReactDOMServer.renderToString(); + + appEvents.emit('show-modal', { + templateHtml: modalTemplate, + }); + }; + + this.props.addApiKey(this.state.newApiKey, openModal); + this.setState((prevState: State) => { + return { + ...prevState, + newApiKey: initialApiKeyState, + }; + }); + }; + + onApiKeyStateUpdate = (evt, prop: string) => { + const value = evt.currentTarget.value; + this.setState((prevState: State) => { + const newApiKey = { + ...prevState.newApiKey, + }; + newApiKey[prop] = value; + + return { + ...prevState, + newApiKey: newApiKey, + }; + }); + }; + + render() { + const { newApiKey, isAdding } = this.state; + const { navModel, apiKeys, searchQuery } = this.props; + + return ( +
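+      // page header, slide-down add-key form, then the table of existing keys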
+ +
+
+
+ +
+ +
+ +
+ + +
+ +
Add API Key
+
+
+
+ Key name + this.onApiKeyStateUpdate(evt, ApiKeyStateProps.Name)} + /> +
+
+ Role + + + +
+
+ +
+
+
+
+
+ +

Existing Keys

+ + + + + + + + {apiKeys.length > 0 ? ( + + {apiKeys.map(key => { + return ( + + + + + + ); + })} + + ) : null} +
NameRole +
{key.name}{key.role} + this.onDeleteApiKey(key)} className="btn btn-danger btn-mini"> + + +
+
+
+ ); + } +} + +function mapStateToProps(state) { + return { + navModel: getNavModel(state.navIndex, 'apikeys'), + apiKeys: getApiKeys(state.apiKeys), + searchQuery: state.apiKeys.searchQuery, + }; +} + +const mapDispatchToProps = { + loadApiKeys, + deleteApiKey, + setSearchQuery, + addApiKey, +}; + +export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(ApiKeysPage)); diff --git a/public/app/features/api-keys/__mocks__/apiKeysMock.ts b/public/app/features/api-keys/__mocks__/apiKeysMock.ts new file mode 100644 index 00000000000..117f0d6d0c6 --- /dev/null +++ b/public/app/features/api-keys/__mocks__/apiKeysMock.ts @@ -0,0 +1,22 @@ +import { ApiKey, OrgRole } from 'app/types'; + +export const getMultipleMockKeys = (numberOfKeys: number): ApiKey[] => { + const keys: ApiKey[] = []; + for (let i = 1; i <= numberOfKeys; i++) { + keys.push({ + id: i, + name: `test-${i}`, + role: OrgRole.Viewer, + }); + } + + return keys; +}; + +export const getMockKey = (): ApiKey => { + return { + id: 1, + name: 'test', + role: OrgRole.Admin, + }; +}; diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap new file mode 100644 index 00000000000..0fcb13308eb --- /dev/null +++ b/public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap @@ -0,0 +1,78 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
+
+

+ + + API Key Created + +

+ + + +
+
+
+
+ + Key + + + api key test + +
+
+
+ You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now. +
+
+ You can authenticate request using the Authorization HTTP header, example: +
+
+
+        curl -H "Authorization: Bearer 
+        api key test
+        " 
+        test/path
+        /api/dashboards/home
+      
+
+
+
+`; diff --git a/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap new file mode 100644 index 00000000000..77c7f620173 --- /dev/null +++ b/public/app/features/api-keys/__snapshots__/ApiKeysPage.test.tsx.snap @@ -0,0 +1,435 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render API keys table 1`] = ` +
+ +
+
+
+ +
+
+ +
+ +
+ +
+ Add API Key +
+
+
+
+ + Key name + + +
+
+ + Role + + + + +
+
+ +
+
+
+
+
+

+ Existing Keys +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Name + + Role + +
+ test-1 + + Viewer + + + + +
+ test-2 + + Viewer + + + + +
+ test-3 + + Viewer + + + + +
+ test-4 + + Viewer + + + + +
+ test-5 + + Viewer + + + + +
+
+
+`; + +exports[`Render should render component 1`] = ` +
+ +
+
+
+ +
+
+ +
+ +
+ +
+ Add API Key +
+
+
+
+ + Key name + + +
+
+ + Role + + + + +
+
+ +
+
+
+
+
+

+ Existing Keys +

+ + + + + + + +
+ Name + + Role + +
+
+
+`;
diff --git a/public/app/features/api-keys/state/actions.ts b/public/app/features/api-keys/state/actions.ts
new file mode 100644
index 00000000000..63e91088476
--- /dev/null
+++ b/public/app/features/api-keys/state/actions.ts
@@ -0,0 +1,56 @@
+import { ThunkAction } from 'redux-thunk';
+import { getBackendSrv } from 'app/core/services/backend_srv';
+import { StoreState, ApiKey } from 'app/types';
+
+export enum ActionTypes {
+  LoadApiKeys = 'LOAD_API_KEYS',
+  SetApiKeysSearchQuery = 'SET_API_KEYS_SEARCH_QUERY',
+}
+
+export interface LoadApiKeysAction {
+  type: ActionTypes.LoadApiKeys;
+  payload: ApiKey[];
+}
+
+export interface SetSearchQueryAction {
+  type: ActionTypes.SetApiKeysSearchQuery;
+  payload: string;
+}
+
+export type Action = LoadApiKeysAction | SetSearchQueryAction;
+
+type ThunkResult = ThunkAction;
+
+const apiKeysLoaded = (apiKeys: ApiKey[]): LoadApiKeysAction => ({
+  type: ActionTypes.LoadApiKeys,
+  payload: apiKeys,
+});
+
+export function addApiKey(apiKey: ApiKey, openModal: (key: string) => void): ThunkResult {
+  return async dispatch => {
+    const result = await getBackendSrv().post('/api/auth/keys', apiKey);
+    dispatch(setSearchQuery(''));
+    dispatch(loadApiKeys());
+    openModal(result.key);
+  };
+}
+
+export function loadApiKeys(): ThunkResult {
+  return async dispatch => {
+    const response = await getBackendSrv().get('/api/auth/keys');
+    dispatch(apiKeysLoaded(response));
+  };
+}
+
+export function deleteApiKey(id: number): ThunkResult {
+  return async dispatch => {
+    getBackendSrv()
+      .delete('/api/auth/keys/' + id)
+      .then(() => dispatch(loadApiKeys()));
+  };
+}
+
+export const setSearchQuery = (searchQuery: string): SetSearchQueryAction => ({
+  type: ActionTypes.SetApiKeysSearchQuery,
+  payload: searchQuery,
+});
diff --git a/public/app/features/api-keys/state/reducers.test.ts b/public/app/features/api-keys/state/reducers.test.ts
new file mode 100644
index 00000000000..3b2c831a5a3
--- /dev/null
+++ b/public/app/features/api-keys/state/reducers.test.ts
@@ -0,0 +1,31 @@
+import { Action, ActionTypes } from './actions';
+import { initialApiKeysState, apiKeysReducer } from './reducers';
+import { getMultipleMockKeys } from '../__mocks__/apiKeysMock';
+
+describe('API Keys reducer', () => {
+  it('should set keys', () => {
+    const payload = getMultipleMockKeys(4);
+
+    const action: Action = {
+      type: ActionTypes.LoadApiKeys,
+      payload,
+    };
+
+    const result = apiKeysReducer(initialApiKeysState, action);
+
+    expect(result.keys).toEqual(payload);
+  });
+
+  it('should set search query', () => {
+    const payload = 'test query';
+
+    const action: Action = {
+      type: ActionTypes.SetApiKeysSearchQuery,
+      payload,
+    };
+
+    const result = apiKeysReducer(initialApiKeysState, action);
+
+    expect(result.searchQuery).toEqual('test query');
+  });
+});
diff --git a/public/app/features/api-keys/state/reducers.ts b/public/app/features/api-keys/state/reducers.ts
new file mode 100644
index 00000000000..a21aa55dbf7
--- /dev/null
+++ b/public/app/features/api-keys/state/reducers.ts
@@ -0,0 +1,21 @@
+import { ApiKeysState } from 'app/types';
+import { Action, ActionTypes } from './actions';
+
+export const initialApiKeysState: ApiKeysState = {
+  keys: [],
+  searchQuery: '',
+};
+
+export const apiKeysReducer = (state = initialApiKeysState, action: Action): ApiKeysState => {
+  switch (action.type) {
+    case ActionTypes.LoadApiKeys:
+      return { ...state, keys: action.payload };
+    case ActionTypes.SetApiKeysSearchQuery:
+      return { ...state, searchQuery: action.payload };
+  }
+  return state;
+};
+
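+// exposed to connect() below as the `apiKeys` slice, which mapStateToProps reads as state.apiKeys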
+export default { + apiKeys: apiKeysReducer, +}; diff --git a/public/app/features/api-keys/state/selectors.test.ts b/public/app/features/api-keys/state/selectors.test.ts new file mode 100644 index 00000000000..7d8f3122ce6 --- /dev/null +++ b/public/app/features/api-keys/state/selectors.test.ts @@ -0,0 +1,25 @@ +import { getApiKeys } from './selectors'; +import { getMultipleMockKeys } from '../__mocks__/apiKeysMock'; +import { ApiKeysState } from 'app/types'; + +describe('API Keys selectors', () => { + describe('Get API Keys', () => { + const mockKeys = getMultipleMockKeys(5); + + it('should return all keys if no search query', () => { + const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '' }; + + const keys = getApiKeys(mockState); + + expect(keys).toEqual(mockKeys); + }); + + it('should filter keys if search query exists', () => { + const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '5' }; + + const keys = getApiKeys(mockState); + + expect(keys.length).toEqual(1); + }); + }); +}); diff --git a/public/app/features/api-keys/state/selectors.ts b/public/app/features/api-keys/state/selectors.ts new file mode 100644 index 00000000000..8065c252e85 --- /dev/null +++ b/public/app/features/api-keys/state/selectors.ts @@ -0,0 +1,9 @@ +import { ApiKeysState } from 'app/types'; + +export const getApiKeys = (state: ApiKeysState) => { + const regex = RegExp(state.searchQuery, 'i'); + + return state.keys.filter(key => { + return regex.test(key.name) || regex.test(key.role); + }); +}; diff --git a/public/app/features/dashboard/specs/time_srv.test.ts b/public/app/features/dashboard/specs/time_srv.test.ts index 514e0b90792..db0d11f2ebe 100644 --- a/public/app/features/dashboard/specs/time_srv.test.ts +++ b/public/app/features/dashboard/specs/time_srv.test.ts @@ -29,6 +29,7 @@ describe('timeSrv', () => { beforeEach(() => { timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); timeSrv.init(_dashboard); + _dashboard.refresh = false; }); describe('timeRange', () => { @@ -79,6 +80,23 @@ describe('timeSrv', () => { expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime()); }); + it('should ignore refresh if time absolute', () => { + location = { + search: jest.fn(() => ({ + from: '20140410T052010', + to: '20140520T031022', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + + // dashboard saved with refresh on + _dashboard.refresh = true; + timeSrv.init(_dashboard); + + expect(timeSrv.refresh).toBe(false); + }); + it('should handle formatted dates without time', () => { location = { search: jest.fn(() => ({ diff --git a/public/app/features/dashboard/state/actions.ts b/public/app/features/dashboard/state/actions.ts index 82333817b2b..9e923f6bcb7 100644 --- a/public/app/features/dashboard/state/actions.ts +++ b/public/app/features/dashboard/state/actions.ts @@ -58,7 +58,7 @@ export function updateDashboardPermission( continue; } - const updated = toUpdateItem(itemToUpdate); + const updated = toUpdateItem(item); // if this is the item we want to update, update it's permisssion if (itemToUpdate === item) { diff --git a/public/app/features/dashboard/submenu/submenu.html b/public/app/features/dashboard/submenu/submenu.html index f240a86efba..d7cee33e6c3 100644 --- a/public/app/features/dashboard/submenu/submenu.html +++ b/public/app/features/dashboard/submenu/submenu.html @@ -4,7 +4,8 @@ - + +
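The new api-keys state files (actions, reducer, selectors) compose in the usual Redux way. A minimal sketch of how the pieces fit together — not part of this diff; the key shapes follow `__mocks__/apiKeysMock.ts`:

```ts
import { OrgRole } from 'app/types';
import { ActionTypes } from 'app/features/api-keys/state/actions';
import { apiKeysReducer, initialApiKeysState } from 'app/features/api-keys/state/reducers';
import { getApiKeys } from 'app/features/api-keys/state/selectors';

// Load two keys into the slice, then narrow the visible set with a search query.
let state = apiKeysReducer(initialApiKeysState, {
  type: ActionTypes.LoadApiKeys,
  payload: [
    { id: 1, name: 'viewer-key', role: OrgRole.Viewer },
    { id: 2, name: 'admin-key', role: OrgRole.Admin },
  ],
});
state = apiKeysReducer(state, { type: ActionTypes.SetApiKeysSearchQuery, payload: 'admin' });

// getApiKeys matches name or role case-insensitively, so only the second key remains.
console.log(getApiKeys(state).map(k => k.name)); // ['admin-key']
```

Because `setSearchQuery` is a plain synchronous action and the page reads the filtered list through `mapStateToProps`, typing in the search field re-renders the table without another backend request.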
diff --git a/public/app/features/dashboard/time_srv.ts b/public/app/features/dashboard/time_srv.ts index dd5a0ba758f..a96bc89daa7 100644 --- a/public/app/features/dashboard/time_srv.ts +++ b/public/app/features/dashboard/time_srv.ts @@ -85,6 +85,12 @@ export class TimeSrv { if (params.to) { this.time.to = this.parseUrlParam(params.to) || this.time.to; } + // if absolute ignore refresh option saved to dashboard + if (params.to && params.to.indexOf('now') === -1) { + this.refresh = false; + this.dashboard.refresh = false; + } + // but if refresh explicitly set then use that if (params.refresh) { this.refresh = params.refresh || this.refresh; } @@ -107,7 +113,7 @@ export class TimeSrv { } private timeHasChangedSinceLoad() { - return this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to; + return this.timeAtLoad && (this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to); } setAutoRefresh(interval) { diff --git a/public/app/features/dashboard/upload.ts b/public/app/features/dashboard/upload.ts index 974a0c35cd2..42871327eb6 100644 --- a/public/app/features/dashboard/upload.ts +++ b/public/app/features/dashboard/upload.ts @@ -1,10 +1,12 @@ import coreModule from 'app/core/core_module'; +import appEvents from 'app/core/app_events'; +import angular from 'angular'; const template = ` - + `; @@ -15,8 +17,11 @@ function uploadDashboardDirective(timer, alertSrv, $location) { template: template, scope: { onUpload: '&', + btnText: '@?', }, - link: scope => { + link: (scope, elem) => { + scope.btnText = angular.isDefined(scope.btnText) ? scope.btnText : 'Upload .json File'; + function file_selected(evt) { const files = evt.target.files; // FileList object const readerOnload = () => { @@ -26,7 +31,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) { dash = JSON.parse(e.target.result); } catch (err) { console.log(err); - scope.appEvent('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]); + appEvents.emit('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]); return; } @@ -52,7 +57,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) { // Check for the various File API support. 
if (wnd.File && wnd.FileReader && wnd.FileList && wnd.Blob) { // Something - document.getElementById('dashupload').addEventListener('change', file_selected, false); + elem[0].addEventListener('change', file_selected, false); } else { alertSrv.set('Oops', 'Sorry, the HTML5 File APIs are not fully supported in this browser.', 'error'); } diff --git a/public/app/features/datasources/DataSourceList.test.tsx b/public/app/features/datasources/DataSourceList.test.tsx new file mode 100644 index 00000000000..6e097da2c53 --- /dev/null +++ b/public/app/features/datasources/DataSourceList.test.tsx @@ -0,0 +1,22 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import DataSourcesList from './DataSourcesList'; +import { getMockDataSources } from './__mocks__/dataSourcesMocks'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +const setup = () => { + const props = { + dataSources: getMockDataSources(3), + layoutMode: LayoutModes.Grid, + }; + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/datasources/DataSourcesList.tsx b/public/app/features/datasources/DataSourcesList.tsx new file mode 100644 index 00000000000..904ed0cf679 --- /dev/null +++ b/public/app/features/datasources/DataSourcesList.tsx @@ -0,0 +1,34 @@ +import React, { PureComponent } from 'react'; +import classNames from 'classnames/bind'; +import DataSourcesListItem from './DataSourcesListItem'; +import { DataSource } from 'app/types'; +import { LayoutMode, LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +export interface Props { + dataSources: DataSource[]; + layoutMode: LayoutMode; +} + +export class DataSourcesList extends PureComponent { + render() { + const { dataSources, layoutMode } = this.props; + + const listStyle = classNames({ + 'card-section': true, + 'card-list-layout-grid': layoutMode === LayoutModes.Grid, + 'card-list-layout-list': layoutMode === LayoutModes.List, + }); + + return ( +
+
    + {dataSources.map((dataSource, index) => { + return ; + })} +
+
+ ); + } +} + +export default DataSourcesList; diff --git a/public/app/features/datasources/DataSourcesListItem.test.tsx b/public/app/features/datasources/DataSourcesListItem.test.tsx new file mode 100644 index 00000000000..138c71cb46a --- /dev/null +++ b/public/app/features/datasources/DataSourcesListItem.test.tsx @@ -0,0 +1,20 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import DataSourcesListItem from './DataSourcesListItem'; +import { getMockDataSource } from './__mocks__/dataSourcesMocks'; + +const setup = () => { + const props = { + dataSource: getMockDataSource(), + }; + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/datasources/DataSourcesListItem.tsx b/public/app/features/datasources/DataSourcesListItem.tsx new file mode 100644 index 00000000000..a4fedb893fb --- /dev/null +++ b/public/app/features/datasources/DataSourcesListItem.tsx @@ -0,0 +1,35 @@ +import React, { PureComponent } from 'react'; +import { DataSource } from 'app/types'; + +export interface Props { + dataSource: DataSource; +} + +export class DataSourcesListItem extends PureComponent { + render() { + const { dataSource } = this.props; + return ( +
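+      // card linking to the data source's edit page: logo, name, `default` badge and url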
  • + +
    +
    {dataSource.type}
    +
    +
    +
    + +
    +
    +
    + {dataSource.name} + {dataSource.isDefault && default} +
    +
    {dataSource.url}
    +
    +
    +
    +
  • + ); + } +} + +export default DataSourcesListItem; diff --git a/public/app/features/datasources/DataSourcesListPage.test.tsx b/public/app/features/datasources/DataSourcesListPage.test.tsx new file mode 100644 index 00000000000..96f6c304b16 --- /dev/null +++ b/public/app/features/datasources/DataSourcesListPage.test.tsx @@ -0,0 +1,40 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { DataSourcesListPage, Props } from './DataSourcesListPage'; +import { DataSource, NavModel } from 'app/types'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; +import { getMockDataSources } from './__mocks__/dataSourcesMocks'; + +const setup = (propOverrides?: object) => { + const props: Props = { + dataSources: [] as DataSource[], + layoutMode: LayoutModes.Grid, + loadDataSources: jest.fn(), + navModel: {} as NavModel, + dataSourcesCount: 0, + searchQuery: '', + setDataSourcesSearchQuery: jest.fn(), + setDataSourcesLayoutMode: jest.fn(), + }; + + Object.assign(props, propOverrides); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); + + it('should render action bar and datasources', () => { + const wrapper = setup({ + dataSources: getMockDataSources(5), + dataSourcesCount: 5, + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/datasources/DataSourcesListPage.tsx b/public/app/features/datasources/DataSourcesListPage.tsx new file mode 100644 index 00000000000..a5887973a6b --- /dev/null +++ b/public/app/features/datasources/DataSourcesListPage.tsx @@ -0,0 +1,107 @@ +import React, { PureComponent } from 'react'; +import { connect } from 'react-redux'; +import { hot } from 'react-hot-loader'; +import PageHeader from '../../core/components/PageHeader/PageHeader'; +import OrgActionBar from '../../core/components/OrgActionBar/OrgActionBar'; +import EmptyListCTA from '../../core/components/EmptyListCTA/EmptyListCTA'; +import DataSourcesList from './DataSourcesList'; +import { DataSource, NavModel } from 'app/types'; +import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector'; +import { loadDataSources, setDataSourcesLayoutMode, setDataSourcesSearchQuery } from './state/actions'; +import { getNavModel } from '../../core/selectors/navModel'; +import { + getDataSources, + getDataSourcesCount, + getDataSourcesLayoutMode, + getDataSourcesSearchQuery, +} from './state/selectors'; + +export interface Props { + navModel: NavModel; + dataSources: DataSource[]; + dataSourcesCount: number; + layoutMode: LayoutMode; + searchQuery: string; + loadDataSources: typeof loadDataSources; + setDataSourcesLayoutMode: typeof setDataSourcesLayoutMode; + setDataSourcesSearchQuery: typeof setDataSourcesSearchQuery; +} + +const emptyListModel = { + title: 'There are no data sources defined yet', + buttonIcon: 'gicon gicon-add-datasources', + buttonLink: 'datasources/new', + buttonTitle: 'Add data source', + proTip: 'You can also define data sources through configuration files.', + proTipLink: 'http://docs.grafana.org/administration/provisioning/#datasources?utm_source=grafana_ds_list', + proTipLinkTitle: 'Learn more', + proTipTarget: '_blank', +}; + +export class DataSourcesListPage extends PureComponent { + componentDidMount() { + this.fetchDataSources(); + } + + async fetchDataSources() { + return await this.props.loadDataSources(); + } + + render() { + const { + dataSources, + dataSourcesCount, + navModel, 
+ layoutMode, + searchQuery, + setDataSourcesSearchQuery, + setDataSourcesLayoutMode, + } = this.props; + + const linkButton = { + href: 'datasources/new', + title: 'Add data source', + }; + + return ( +
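+      // show the empty-state CTA until a data source exists, then the action bar and list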
    + +
    + {dataSourcesCount === 0 ? ( + + ) : ( + [ + setDataSourcesLayoutMode(mode)} + setSearchQuery={query => setDataSourcesSearchQuery(query)} + linkButton={linkButton} + key="action-bar" + />, + , + ] + )} +
    +
    + ); + } +} + +function mapStateToProps(state) { + return { + navModel: getNavModel(state.navIndex, 'datasources'), + dataSources: getDataSources(state.dataSources), + layoutMode: getDataSourcesLayoutMode(state.dataSources), + dataSourcesCount: getDataSourcesCount(state.dataSources), + searchQuery: getDataSourcesSearchQuery(state.dataSources), + }; +} + +const mapDispatchToProps = { + loadDataSources, + setDataSourcesSearchQuery, + setDataSourcesLayoutMode, +}; + +export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(DataSourcesListPage)); diff --git a/public/app/features/datasources/NewDataSourcePage.tsx b/public/app/features/datasources/NewDataSourcePage.tsx new file mode 100644 index 00000000000..527ecf6db83 --- /dev/null +++ b/public/app/features/datasources/NewDataSourcePage.tsx @@ -0,0 +1,88 @@ +import React, { PureComponent } from 'react'; +import { connect } from 'react-redux'; +import { hot } from 'react-hot-loader'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavModel, Plugin } from 'app/types'; +import { addDataSource, loadDataSourceTypes, setDataSourceTypeSearchQuery } from './state/actions'; +import { updateLocation } from '../../core/actions'; +import { getNavModel } from 'app/core/selectors/navModel'; +import { getDataSourceTypes } from './state/selectors'; + +export interface Props { + navModel: NavModel; + dataSourceTypes: Plugin[]; + addDataSource: typeof addDataSource; + loadDataSourceTypes: typeof loadDataSourceTypes; + updateLocation: typeof updateLocation; + dataSourceTypeSearchQuery: string; + setDataSourceTypeSearchQuery: typeof setDataSourceTypeSearchQuery; +} + +class NewDataSourcePage extends PureComponent { + componentDidMount() { + this.props.loadDataSourceTypes(); + } + + onDataSourceTypeClicked = type => { + this.props.addDataSource(type); + }; + + onSearchQueryChange = event => { + this.props.setDataSourceTypeSearchQuery(event.target.value); + }; + + render() { + const { navModel, dataSourceTypes, dataSourceTypeSearchQuery } = this.props; + + return ( +
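+      // header, type search box, and a clickable grid of available data source types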
    + +
    +

    Choose data source type

    +
    + +
    +
    + {dataSourceTypes.map((type, index) => { + return ( +
    this.onDataSourceTypeClicked(type)} + className="add-data-source-grid-item" + key={`${type.id}-${index}`} + > + + {type.name} +
    + ); + })} +
    +
    +
    + ); + } +} + +function mapStateToProps(state) { + return { + navModel: getNavModel(state.navIndex, 'datasources'), + dataSourceTypes: getDataSourceTypes(state.dataSources), + }; +} + +const mapDispatchToProps = { + addDataSource, + loadDataSourceTypes, + updateLocation, + setDataSourceTypeSearchQuery, +}; + +export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(NewDataSourcePage)); diff --git a/public/app/features/datasources/__mocks__/dataSourcesMocks.ts b/public/app/features/datasources/__mocks__/dataSourcesMocks.ts new file mode 100644 index 00000000000..97819a18c82 --- /dev/null +++ b/public/app/features/datasources/__mocks__/dataSourcesMocks.ts @@ -0,0 +1,45 @@ +import { DataSource } from 'app/types'; + +export const getMockDataSources = (amount: number): DataSource[] => { + const dataSources = []; + + for (let i = 0; i <= amount; i++) { + dataSources.push({ + access: '', + basicAuth: false, + database: `database-${i}`, + id: i, + isDefault: false, + jsonData: { authType: 'credentials', defaultRegion: 'eu-west-2' }, + name: `dataSource-${i}`, + orgId: 1, + password: '', + readOnly: false, + type: 'cloudwatch', + typeLogoUrl: 'public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png', + url: '', + user: '', + }); + } + + return dataSources; +}; + +export const getMockDataSource = (): DataSource => { + return { + access: '', + basicAuth: false, + database: '', + id: 13, + isDefault: false, + jsonData: { authType: 'credentials', defaultRegion: 'eu-west-2' }, + name: 'gdev-cloudwatch', + orgId: 1, + password: '', + readOnly: false, + type: 'cloudwatch', + typeLogoUrl: 'public/app/plugins/datasource/cloudwatch/img/amazon-web-services.png', + url: '', + user: '', + }; +}; diff --git a/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap new file mode 100644 index 00000000000..7167f59b048 --- /dev/null +++ b/public/app/features/datasources/__snapshots__/DataSourceList.test.tsx.snap @@ -0,0 +1,108 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
    +
      + + + + +
    +
    +`; diff --git a/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap new file mode 100644 index 00000000000..a424276cf32 --- /dev/null +++ b/public/app/features/datasources/__snapshots__/DataSourcesListItem.test.tsx.snap @@ -0,0 +1,45 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
  • + +
    +
    + cloudwatch +
    +
    +
  • +`; diff --git a/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap b/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap new file mode 100644 index 00000000000..3f9dbab72ab --- /dev/null +++ b/public/app/features/datasources/__snapshots__/DataSourcesListPage.test.tsx.snap @@ -0,0 +1,174 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render action bar and datasources 1`] = ` +
    + +
    + + +
    +
    +`; + +exports[`Render should render component 1`] = ` +
    + +
    + +
    +
    +`; diff --git a/public/app/features/datasources/state/actions.test.ts b/public/app/features/datasources/state/actions.test.ts new file mode 100644 index 00000000000..d0a8caad9a7 --- /dev/null +++ b/public/app/features/datasources/state/actions.test.ts @@ -0,0 +1,44 @@ +import { findNewName, nameExits } from './actions'; +import { getMockPlugin, getMockPlugins } from '../../plugins/__mocks__/pluginMocks'; + +describe('Name exists', () => { + const plugins = getMockPlugins(5); + + it('should be true', () => { + const name = 'pretty cool plugin-1'; + + expect(nameExits(plugins, name)).toEqual(true); + }); + + it('should be false', () => { + const name = 'pretty cool plugin-6'; + + expect(nameExits(plugins, name)); + }); +}); + +describe('Find new name', () => { + it('should create a new name', () => { + const plugins = getMockPlugins(5); + const name = 'pretty cool plugin-1'; + + expect(findNewName(plugins, name)).toEqual('pretty cool plugin-6'); + }); + + it('should create new name without suffix', () => { + const plugin = getMockPlugin(); + plugin.name = 'prometheus'; + const plugins = [plugin]; + const name = 'prometheus'; + + expect(findNewName(plugins, name)).toEqual('prometheus-1'); + }); + + it('should handle names that end with -', () => { + const plugin = getMockPlugin(); + const plugins = [plugin]; + const name = 'pretty cool plugin-'; + + expect(findNewName(plugins, name)).toEqual('pretty cool plugin-'); + }); +}); diff --git a/public/app/features/datasources/state/actions.ts b/public/app/features/datasources/state/actions.ts new file mode 100644 index 00000000000..33d6b79c5df --- /dev/null +++ b/public/app/features/datasources/state/actions.ts @@ -0,0 +1,154 @@ +import { ThunkAction } from 'redux-thunk'; +import { DataSource, Plugin, StoreState } from 'app/types'; +import { getBackendSrv } from '../../../core/services/backend_srv'; +import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector'; +import { updateLocation } from '../../../core/actions'; +import { UpdateLocationAction } from '../../../core/actions/location'; + +export enum ActionTypes { + LoadDataSources = 'LOAD_DATA_SOURCES', + LoadDataSourceTypes = 'LOAD_DATA_SOURCE_TYPES', + SetDataSourcesSearchQuery = 'SET_DATA_SOURCES_SEARCH_QUERY', + SetDataSourcesLayoutMode = 'SET_DATA_SOURCES_LAYOUT_MODE', + SetDataSourceTypeSearchQuery = 'SET_DATA_SOURCE_TYPE_SEARCH_QUERY', +} + +export interface LoadDataSourcesAction { + type: ActionTypes.LoadDataSources; + payload: DataSource[]; +} + +export interface SetDataSourcesSearchQueryAction { + type: ActionTypes.SetDataSourcesSearchQuery; + payload: string; +} + +export interface SetDataSourcesLayoutModeAction { + type: ActionTypes.SetDataSourcesLayoutMode; + payload: LayoutMode; +} + +export interface LoadDataSourceTypesAction { + type: ActionTypes.LoadDataSourceTypes; + payload: Plugin[]; +} + +export interface SetDataSourceTypeSearchQueryAction { + type: ActionTypes.SetDataSourceTypeSearchQuery; + payload: string; +} + +const dataSourcesLoaded = (dataSources: DataSource[]): LoadDataSourcesAction => ({ + type: ActionTypes.LoadDataSources, + payload: dataSources, +}); + +const dataSourceTypesLoaded = (dataSourceTypes: Plugin[]): LoadDataSourceTypesAction => ({ + type: ActionTypes.LoadDataSourceTypes, + payload: dataSourceTypes, +}); + +export const setDataSourcesSearchQuery = (searchQuery: string): SetDataSourcesSearchQueryAction => ({ + type: ActionTypes.SetDataSourcesSearchQuery, + payload: searchQuery, +}); + +export const setDataSourcesLayoutMode = 
(layoutMode: LayoutMode): SetDataSourcesLayoutModeAction => ({ + type: ActionTypes.SetDataSourcesLayoutMode, + payload: layoutMode, +}); + +export const setDataSourceTypeSearchQuery = (query: string): SetDataSourceTypeSearchQueryAction => ({ + type: ActionTypes.SetDataSourceTypeSearchQuery, + payload: query, +}); + +export type Action = + | LoadDataSourcesAction + | SetDataSourcesSearchQueryAction + | SetDataSourcesLayoutModeAction + | UpdateLocationAction + | LoadDataSourceTypesAction + | SetDataSourceTypeSearchQueryAction; + +type ThunkResult = ThunkAction; + +export function loadDataSources(): ThunkResult { + return async dispatch => { + const response = await getBackendSrv().get('/api/datasources'); + dispatch(dataSourcesLoaded(response)); + }; +} + +export function addDataSource(plugin: Plugin): ThunkResult { + return async (dispatch, getStore) => { + await dispatch(loadDataSources()); + + const dataSources = getStore().dataSources.dataSources; + + const newInstance = { + name: plugin.name, + type: plugin.id, + access: 'proxy', + isDefault: dataSources.length === 0, + }; + + if (nameExits(dataSources, newInstance.name)) { + newInstance.name = findNewName(dataSources, newInstance.name); + } + + const result = await getBackendSrv().post('/api/datasources', newInstance); + dispatch(updateLocation({ path: `/datasources/edit/${result.id}` })); + }; +} + +export function loadDataSourceTypes(): ThunkResult { + return async dispatch => { + const result = await getBackendSrv().get('/api/plugins', { enabled: 1, type: 'datasource' }); + dispatch(dataSourceTypesLoaded(result)); + }; +} + +export function nameExits(dataSources, name) { + return ( + dataSources.filter(dataSource => { + return dataSource.name === name; + }).length > 0 + ); +} + +export function findNewName(dataSources, name) { + // Need to loop through current data sources to make sure + // the name doesn't exist + while (nameExits(dataSources, name)) { + // If there's a duplicate name that doesn't end with '-x' + // we can add -1 to the name and be done. + if (!nameHasSuffix(name)) { + name = `${name}-1`; + } else { + // if there's a duplicate name that ends with '-x' + // we can try to increment the last digit until the name is unique + + // remove the 'x' part and replace it with the new number + name = `${getNewName(name)}${incrementLastDigit(getLastDigit(name))}`; + } + } + + return name; +} + +function nameHasSuffix(name) { + return name.endsWith('-', name.length - 1); +} + +function getLastDigit(name) { + return parseInt(name.slice(-1), 10); +} + +function incrementLastDigit(digit) { + return isNaN(digit) ? 
1 : digit + 1; +} + +function getNewName(name) { + return name.slice(0, name.length - 1); +} diff --git a/public/app/features/datasources/state/reducers.ts b/public/app/features/datasources/state/reducers.ts new file mode 100644 index 00000000000..acb228d3ed6 --- /dev/null +++ b/public/app/features/datasources/state/reducers.ts @@ -0,0 +1,37 @@ +import { DataSource, DataSourcesState, Plugin } from 'app/types'; +import { Action, ActionTypes } from './actions'; +import { LayoutModes } from '../../../core/components/LayoutSelector/LayoutSelector'; + +const initialState: DataSourcesState = { + dataSources: [] as DataSource[], + layoutMode: LayoutModes.Grid, + searchQuery: '', + dataSourcesCount: 0, + dataSourceTypes: [] as Plugin[], + dataSourceTypeSearchQuery: '', +}; + +export const dataSourcesReducer = (state = initialState, action: Action): DataSourcesState => { + switch (action.type) { + case ActionTypes.LoadDataSources: + return { ...state, dataSources: action.payload, dataSourcesCount: action.payload.length }; + + case ActionTypes.SetDataSourcesSearchQuery: + return { ...state, searchQuery: action.payload }; + + case ActionTypes.SetDataSourcesLayoutMode: + return { ...state, layoutMode: action.payload }; + + case ActionTypes.LoadDataSourceTypes: + return { ...state, dataSourceTypes: action.payload }; + + case ActionTypes.SetDataSourceTypeSearchQuery: + return { ...state, dataSourceTypeSearchQuery: action.payload }; + } + + return state; +}; + +export default { + dataSources: dataSourcesReducer, +}; diff --git a/public/app/features/datasources/state/selectors.ts b/public/app/features/datasources/state/selectors.ts new file mode 100644 index 00000000000..80e1400114f --- /dev/null +++ b/public/app/features/datasources/state/selectors.ts @@ -0,0 +1,19 @@ +export const getDataSources = state => { + const regex = new RegExp(state.searchQuery, 'i'); + + return state.dataSources.filter(dataSource => { + return regex.test(dataSource.name) || regex.test(dataSource.database); + }); +}; + +export const getDataSourceTypes = state => { + const regex = new RegExp(state.dataSourceTypeSearchQuery, 'i'); + + return state.dataSourceTypes.filter(type => { + return regex.test(type.name); + }); +}; + +export const getDataSourcesSearchQuery = state => state.searchQuery; +export const getDataSourcesLayoutMode = state => state.layoutMode; +export const getDataSourcesCount = state => state.dataSourcesCount; diff --git a/public/app/features/explore/Explore.tsx b/public/app/features/explore/Explore.tsx index 187d68133cd..6e34f631408 100644 --- a/public/app/features/explore/Explore.tsx +++ b/public/app/features/explore/Explore.tsx @@ -2,19 +2,20 @@ import React from 'react'; import { hot } from 'react-hot-loader'; import Select from 'react-select'; +import { ExploreState, ExploreUrlState, Query } from 'app/types/explore'; import kbn from 'app/core/utils/kbn'; import colors from 'app/core/utils/colors'; import store from 'app/core/store'; import TimeSeries from 'app/core/time_series2'; -import { decodePathComponent } from 'app/core/utils/location_util'; import { parse as parseDate } from 'app/core/utils/datemath'; +import { DEFAULT_RANGE } from 'app/core/utils/explore'; import ElapsedTime from './ElapsedTime'; import QueryRows from './QueryRows'; import Graph from './Graph'; import Logs from './Logs'; import Table from './Table'; -import TimePicker, { DEFAULT_RANGE } from './TimePicker'; +import TimePicker from './TimePicker'; import { ensureQueries, generateQueryKey, hasQuery } from './utils/query'; const 
MAX_HISTORY_ITEMS = 100; @@ -47,101 +48,90 @@ function makeTimeSeriesList(dataList, options) { }); } -function parseUrlState(initial: string | undefined) { - if (initial) { - try { - const parsed = JSON.parse(decodePathComponent(initial)); - return { - datasource: parsed.datasource, - queries: parsed.queries.map(q => q.query), - range: parsed.range, - }; - } catch (e) { - console.error(e); - } - } - return { datasource: null, queries: [], range: DEFAULT_RANGE }; +interface ExploreProps { + datasourceSrv: any; + onChangeSplit: (split: boolean, state?: ExploreState) => void; + onSaveState: (key: string, state: ExploreState) => void; + position: string; + split: boolean; + splitState?: ExploreState; + stateKey: string; + urlState: ExploreUrlState; } -interface ExploreState { - datasource: any; - datasourceError: any; - datasourceLoading: boolean | null; - datasourceMissing: boolean; - graphResult: any; - history: any[]; - initialDatasource?: string; - latency: number; - loading: any; - logsResult: any; - queries: any[]; - queryErrors: any[]; - queryHints: any[]; - range: any; - requestOptions: any; - showingGraph: boolean; - showingLogs: boolean; - showingTable: boolean; - supportsGraph: boolean | null; - supportsLogs: boolean | null; - supportsTable: boolean | null; - tableResult: any; -} - -export class Explore extends React.Component { +export class Explore extends React.PureComponent { el: any; + /** + * Current query expressions of the rows including their modifications, used for running queries. + * Not kept in component state to prevent edit-render roundtrips. + */ + queryExpressions: string[]; constructor(props) { super(props); - const initialState: ExploreState = props.initialState; - const { datasource, queries, range } = parseUrlState(props.routeParams.state); - this.state = { - datasource: null, - datasourceError: null, - datasourceLoading: null, - datasourceMissing: false, - graphResult: null, - initialDatasource: datasource, - history: [], - latency: 0, - loading: false, - logsResult: null, - queries: ensureQueries(queries), - queryErrors: [], - queryHints: [], - range: range || { ...DEFAULT_RANGE }, - requestOptions: null, - showingGraph: true, - showingLogs: true, - showingTable: true, - supportsGraph: null, - supportsLogs: null, - supportsTable: null, - tableResult: null, - ...initialState, - }; + const splitState: ExploreState = props.splitState; + let initialQueries: Query[]; + if (splitState) { + // Split state overrides everything + this.state = splitState; + initialQueries = splitState.queries; + } else { + const { datasource, queries, range } = props.urlState as ExploreUrlState; + initialQueries = ensureQueries(queries); + this.state = { + datasource: null, + datasourceError: null, + datasourceLoading: null, + datasourceMissing: false, + datasourceName: datasource, + exploreDatasources: [], + graphResult: null, + history: [], + latency: 0, + loading: false, + logsResult: null, + queries: initialQueries, + queryErrors: [], + queryHints: [], + range: range || { ...DEFAULT_RANGE }, + requestOptions: null, + showingGraph: true, + showingLogs: true, + showingTable: true, + supportsGraph: null, + supportsLogs: null, + supportsTable: null, + tableResult: null, + }; + } + this.queryExpressions = initialQueries.map(q => q.query); } async componentDidMount() { const { datasourceSrv } = this.props; - const { initialDatasource } = this.state; + const { datasourceName } = this.state; if (!datasourceSrv) { throw new Error('No datasource service passed as props.'); } const datasources 
= datasourceSrv.getExploreSources(); + const exploreDatasources = datasources.map(ds => ({ + value: ds.name, + label: ds.name, + })); + if (datasources.length > 0) { - this.setState({ datasourceLoading: true }); + this.setState({ datasourceLoading: true, exploreDatasources }); // Priority: datasource in url, default datasource, first explore datasource let datasource; - if (initialDatasource) { - datasource = await datasourceSrv.get(initialDatasource); + if (datasourceName) { + datasource = await datasourceSrv.get(datasourceName); } else { datasource = await datasourceSrv.get(); } if (!datasource.meta.explore) { datasource = await datasourceSrv.get(datasources[0].name); } - this.setDatasource(datasource); + await this.setDatasource(datasource); } else { this.setState({ datasourceMissing: true }); } @@ -174,9 +164,10 @@ export class Explore extends React.Component { } // Keep queries but reset edit state - const nextQueries = this.state.queries.map(q => ({ + const nextQueries = this.state.queries.map((q, i) => ({ ...q, - edited: false, + key: generateQueryKey(i), + query: this.queryExpressions[i], })); this.setState( @@ -188,9 +179,14 @@ export class Explore extends React.Component { supportsLogs, supportsTable, datasourceLoading: false, + datasourceName: datasource.name, queries: nextQueries, }, - () => datasourceError === null && this.onSubmit() + () => { + if (datasourceError === null) { + this.onSubmit(); + } + } ); } @@ -200,6 +196,7 @@ export class Explore extends React.Component { onAddQueryRow = index => { const { queries } = this.state; + this.queryExpressions[index + 1] = ''; const nextQueries = [ ...queries.slice(0, index + 1), { query: '', key: generateQueryKey() }, @@ -220,34 +217,34 @@ export class Explore extends React.Component { queryHints: [], tableResult: null, }); - const datasource = await this.props.datasourceSrv.get(option.value); + const datasourceName = option.value; + const datasource = await this.props.datasourceSrv.get(datasourceName); this.setDatasource(datasource); }; onChangeQuery = (value: string, index: number, override?: boolean) => { - const { queries } = this.state; - let { queryErrors, queryHints } = this.state; - const prevQuery = queries[index]; - const edited = override ? false : prevQuery.query !== value; - const nextQuery = { - ...queries[index], - edited, - query: value, - }; - const nextQueries = [...queries]; - nextQueries[index] = nextQuery; + // Keep current value in local cache + this.queryExpressions[index] = value; + + // Replace query row on override if (override) { - queryErrors = []; - queryHints = []; + const { queries } = this.state; + const nextQuery: Query = { + key: generateQueryKey(index), + query: value, + }; + const nextQueries = [...queries]; + nextQueries[index] = nextQuery; + + this.setState( + { + queryErrors: [], + queryHints: [], + queries: nextQueries, + }, + this.onSubmit + ); } - this.setState( - { - queryErrors, - queryHints, - queries: nextQueries, - }, - override ? 
() => this.onSubmit() : undefined - ); }; onChangeTime = nextRange => { @@ -259,21 +256,26 @@ export class Explore extends React.Component { }; onClickClear = () => { - this.setState({ - graphResult: null, - logsResult: null, - latency: 0, - queries: ensureQueries(), - queryErrors: [], - queryHints: [], - tableResult: null, - }); + this.queryExpressions = ['']; + this.setState( + { + graphResult: null, + logsResult: null, + latency: 0, + queries: ensureQueries(), + queryErrors: [], + queryHints: [], + tableResult: null, + }, + this.saveState + ); }; onClickCloseSplit = () => { const { onChangeSplit } = this.props; if (onChangeSplit) { onChangeSplit(false); + this.saveState(); } }; @@ -287,10 +289,10 @@ export class Explore extends React.Component { onClickSplit = () => { const { onChangeSplit } = this.props; - const state = { ...this.state }; - state.queries = state.queries.map(({ edited, ...rest }) => rest); if (onChangeSplit) { + const state = this.cloneState(); onChangeSplit(true, state); + this.saveState(); } }; @@ -308,23 +310,22 @@ export class Explore extends React.Component { let nextQueries; if (index === undefined) { // Modify all queries - nextQueries = queries.map(q => ({ - ...q, - edited: false, - query: datasource.modifyQuery(q.query, action), + nextQueries = queries.map((q, i) => ({ + key: generateQueryKey(i), + query: datasource.modifyQuery(this.queryExpressions[i], action), })); } else { // Modify query only at index nextQueries = [ ...queries.slice(0, index), { - ...queries[index], - edited: false, - query: datasource.modifyQuery(queries[index].query, action), + key: generateQueryKey(index), + query: datasource.modifyQuery(this.queryExpressions[index], action), }, ...queries.slice(index + 1), ]; } + this.queryExpressions = nextQueries.map(q => q.query); this.setState({ queries: nextQueries }, () => this.onSubmit()); } }; @@ -335,6 +336,7 @@ export class Explore extends React.Component { return; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; + this.queryExpressions = nextQueries.map(q => q.query); this.setState({ queries: nextQueries }, () => this.onSubmit()); }; @@ -349,9 +351,10 @@ export class Explore extends React.Component { if (showingLogs && supportsLogs) { this.runLogsQuery(); } + this.saveState(); }; - onQuerySuccess(datasourceId: string, queries: any[]): void { + onQuerySuccess(datasourceId: string, queries: string[]): void { // save queries to history let { history } = this.state; const { datasource } = this.state; @@ -362,8 +365,7 @@ export class Explore extends React.Component { } const ts = Date.now(); - queries.forEach(q => { - const { query } = q; + queries.forEach(query => { history = [{ query, ts }, ...history]; }); @@ -378,16 +380,16 @@ export class Explore extends React.Component { } buildQueryOptions(targetOptions: { format: string; hinting?: boolean; instant?: boolean }) { - const { datasource, queries, range } = this.state; + const { datasource, range } = this.state; const resolution = this.el.offsetWidth; const absoluteRange = { from: parseDate(range.from, false), to: parseDate(range.to, true), }; const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval); - const targets = queries.map(q => ({ + const targets = this.queryExpressions.map(q => ({ ...targetOptions, - expr: q.query, + expr: q, })); return { interval, @@ -397,7 +399,8 @@ export class Explore extends React.Component { } async runGraphQuery() { - const { datasource, queries } = this.state; + const { datasource } = 
this.state; + const queries = [...this.queryExpressions]; if (!hasQuery(queries)) { return; } @@ -419,7 +422,8 @@ export class Explore extends React.Component { } async runTableQuery() { - const { datasource, queries } = this.state; + const queries = [...this.queryExpressions]; + const { datasource } = this.state; if (!hasQuery(queries)) { return; } @@ -443,7 +447,8 @@ export class Explore extends React.Component { } async runLogsQuery() { - const { datasource, queries } = this.state; + const queries = [...this.queryExpressions]; + const { datasource } = this.state; if (!hasQuery(queries)) { return; } @@ -471,13 +476,27 @@ export class Explore extends React.Component { return datasource.metadataRequest(url); }; + cloneState(): ExploreState { + // Copy state, but copy queries including modifications + return { + ...this.state, + queries: ensureQueries(this.queryExpressions.map(query => ({ query }))), + }; + } + + saveState = () => { + const { stateKey, onSaveState } = this.props; + onSaveState(stateKey, this.cloneState()); + }; + render() { - const { datasourceSrv, position, split } = this.props; + const { position, split } = this.props; const { datasource, datasourceError, datasourceLoading, datasourceMissing, + exploreDatasources, graphResult, history, latency, @@ -502,10 +521,6 @@ export class Explore extends React.Component { const logsButtonActive = showingLogs ? 'active' : ''; const tableButtonActive = showingBoth || showingTable ? 'active' : ''; const exploreClass = split ? 'explore explore-split' : 'explore'; - const datasources = datasourceSrv.getExploreSources().map(ds => ({ - value: ds.name, - label: ds.name, - })); const selectedDatasource = datasource ? datasource.name : undefined; return ( @@ -528,10 +543,11 @@ export class Explore extends React.Component { {!datasourceMissing ? (
    -
    -
    - Role - - - -
    -
    - -
    -
    - - -

    Existing Keys

    - - - - - - - - - - - - - - - -
    NameRole
    {{t.name}}{{t.role}} - - - -
    -
    - - - diff --git a/public/app/features/org/partials/orgUsers.html b/public/app/features/org/partials/orgUsers.html deleted file mode 100644 index 697879c6ac2..00000000000 --- a/public/app/features/org/partials/orgUsers.html +++ /dev/null @@ -1,96 +0,0 @@ - - -
    -
    - - -
    - - - - - - Invite - - - - - {{ctrl.externalUserMngLinkName}} - -
    - -
    - -
    - -
    - - - - - - - - - - - - - - - - - - - -
    LoginEmail - Seen - Time since user was seen using Grafana - Role
    - - {{user.login}}{{user.email}}{{user.lastSeenAtAge}} -
    - -
    -
    - - - -
    -
    - -
    - - - - - - - - - - - - - - - -
    EmailName
    {{invite.email}}{{invite.name}} - -   - - -
    -
    -
    - diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 5eecf6036d8..b42b06f1238 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -6,7 +6,7 @@ import kbn from 'app/core/utils/kbn'; import { PanelCtrl } from 'app/features/panel/panel_ctrl'; import * as rangeUtil from 'app/core/utils/rangeutil'; import * as dateMath from 'app/core/utils/datemath'; -import { encodePathComponent } from 'app/core/utils/location_util'; +import { getExploreUrl } from 'app/core/utils/explore'; import { metricsTabDirective } from './metrics_tab'; @@ -314,7 +314,12 @@ class MetricsPanelCtrl extends PanelCtrl { getAdditionalMenuItems() { const items = []; - if (config.exploreEnabled && this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) { + if ( + config.exploreEnabled && + this.contextSrv.isEditor && + this.datasource && + (this.datasource.meta.explore || this.datasource.meta.id === 'mixed') + ) { items.push({ text: 'Explore', click: 'ctrl.explore();', @@ -325,14 +330,11 @@ class MetricsPanelCtrl extends PanelCtrl { return items; } - explore() { - const range = this.timeSrv.timeRangeForUrl(); - const state = { - ...this.datasource.getExploreState(this.panel), - range, - }; - const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore?state=${exploreState}`); + async explore() { + const url = await getExploreUrl(this.panel, this.panel.targets, this.datasource, this.datasourceSrv, this.timeSrv); + if (url) { + this.$timeout(() => this.$location.url(url)); + } } addQuery(target) { diff --git a/public/app/features/panel/specs/metrics_panel_ctrl.test.ts b/public/app/features/panel/specs/metrics_panel_ctrl.test.ts index a28bf92e63b..913a2461fd0 100644 --- a/public/app/features/panel/specs/metrics_panel_ctrl.test.ts +++ b/public/app/features/panel/specs/metrics_panel_ctrl.test.ts @@ -38,7 +38,7 @@ describe('MetricsPanelCtrl', () => { describe('and has datasource set that supports explore and user has powers', () => { beforeEach(() => { ctrl.contextSrv = { isEditor: true }; - ctrl.datasource = { supportsExplore: true }; + ctrl.datasource = { meta: { explore: true } }; additionalItems = ctrl.getAdditionalMenuItems(); }); diff --git a/public/app/features/plugins/PluginList.test.tsx b/public/app/features/plugins/PluginList.test.tsx new file mode 100644 index 00000000000..201dd69b9db --- /dev/null +++ b/public/app/features/plugins/PluginList.test.tsx @@ -0,0 +1,25 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import PluginList from './PluginList'; +import { getMockPlugins } from './__mocks__/pluginMocks'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +const setup = (propOverrides?: object) => { + const props = Object.assign( + { + plugins: getMockPlugins(5), + layoutMode: LayoutModes.Grid, + }, + propOverrides + ); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/plugins/PluginList.tsx b/public/app/features/plugins/PluginList.tsx new file mode 100644 index 00000000000..0074839e754 --- /dev/null +++ b/public/app/features/plugins/PluginList.tsx @@ -0,0 +1,32 @@ +import React, { SFC } from 'react'; +import classNames from 'classnames/bind'; +import PluginListItem from './PluginListItem'; +import { Plugin } from 
'app/types';
+import { LayoutMode, LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
+
+interface Props {
+  plugins: Plugin[];
+  layoutMode: LayoutMode;
+}
+
+const PluginList: SFC<Props> = props => {
+  const { plugins, layoutMode } = props;
+
+  const listStyle = classNames({
+    'card-section': true,
+    'card-list-layout-grid': layoutMode === LayoutModes.Grid,
+    'card-list-layout-list': layoutMode === LayoutModes.List,
+  });
+
+  return (
+
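+    // plugin cards rendered as a grid or a list depending on the selected layout mode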
    +
      + {plugins.map((plugin, index) => { + return ; + })} +
    +
    + ); +}; + +export default PluginList; diff --git a/public/app/features/plugins/PluginListItem.test.tsx b/public/app/features/plugins/PluginListItem.test.tsx new file mode 100644 index 00000000000..175911c5e05 --- /dev/null +++ b/public/app/features/plugins/PluginListItem.test.tsx @@ -0,0 +1,33 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import PluginListItem from './PluginListItem'; +import { getMockPlugin } from './__mocks__/pluginMocks'; + +const setup = (propOverrides?: object) => { + const props = Object.assign( + { + plugin: getMockPlugin(), + }, + propOverrides + ); + + return shallow(); +}; + +describe('Render', () => { + it('should render component', () => { + const wrapper = setup(); + + expect(wrapper).toMatchSnapshot(); + }); + + it('should render has plugin section', () => { + const mockPlugin = getMockPlugin(); + mockPlugin.hasUpdate = true; + const wrapper = setup({ + plugin: mockPlugin, + }); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/plugins/PluginListItem.tsx b/public/app/features/plugins/PluginListItem.tsx new file mode 100644 index 00000000000..05eac614fd5 --- /dev/null +++ b/public/app/features/plugins/PluginListItem.tsx @@ -0,0 +1,39 @@ +import React, { SFC } from 'react'; +import { Plugin } from 'app/types'; + +interface Props { + plugin: Plugin; +} + +const PluginListItem: SFC = props => { + const { plugin } = props; + + return ( +
  • + +
    +
    + + {plugin.type} +
    + {plugin.hasUpdate && ( +
    + Update available! +
    + )} +
    +
    +
    + +
    +
    +
    {plugin.name}
    +
    {`By ${plugin.info.author.name}`}
    +
    +
    +
    +
  • + ); +}; + +export default PluginListItem; diff --git a/public/app/features/plugins/PluginListPage.test.tsx b/public/app/features/plugins/PluginListPage.test.tsx new file mode 100644 index 00000000000..b173ef51a2a --- /dev/null +++ b/public/app/features/plugins/PluginListPage.test.tsx @@ -0,0 +1,35 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { PluginListPage, Props } from './PluginListPage'; +import { NavModel, Plugin } from '../../types'; +import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector'; + +const setup = (propOverrides?: object) => { + const props: Props = { + navModel: {} as NavModel, + plugins: [] as Plugin[], + searchQuery: '', + setPluginsSearchQuery: jest.fn(), + setPluginsLayoutMode: jest.fn(), + layoutMode: LayoutModes.Grid, + loadPlugins: jest.fn(), + }; + + Object.assign(props, propOverrides); + + const wrapper = shallow(); + const instance = wrapper.instance() as PluginListPage; + + return { + wrapper, + instance, + }; +}; + +describe('Render', () => { + it('should render component', () => { + const { wrapper } = setup(); + + expect(wrapper).toMatchSnapshot(); + }); +}); diff --git a/public/app/features/plugins/PluginListPage.tsx b/public/app/features/plugins/PluginListPage.tsx new file mode 100644 index 00000000000..d654ebd7cff --- /dev/null +++ b/public/app/features/plugins/PluginListPage.tsx @@ -0,0 +1,72 @@ +import React, { PureComponent } from 'react'; +import { hot } from 'react-hot-loader'; +import { connect } from 'react-redux'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import OrgActionBar from 'app/core/components/OrgActionBar/OrgActionBar'; +import PluginList from './PluginList'; +import { NavModel, Plugin } from 'app/types'; +import { loadPlugins, setPluginsLayoutMode, setPluginsSearchQuery } from './state/actions'; +import { getNavModel } from '../../core/selectors/navModel'; +import { getLayoutMode, getPlugins, getPluginsSearchQuery } from './state/selectors'; +import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector'; + +export interface Props { + navModel: NavModel; + plugins: Plugin[]; + layoutMode: LayoutMode; + searchQuery: string; + loadPlugins: typeof loadPlugins; + setPluginsLayoutMode: typeof setPluginsLayoutMode; + setPluginsSearchQuery: typeof setPluginsSearchQuery; +} + +export class PluginListPage extends PureComponent { + componentDidMount() { + this.fetchPlugins(); + } + + async fetchPlugins() { + await this.props.loadPlugins(); + } + + render() { + const { navModel, plugins, layoutMode, setPluginsLayoutMode, setPluginsSearchQuery, searchQuery } = this.props; + + const linkButton = { + href: 'https://grafana.com/plugins?utm_source=grafana_plugin_list', + title: 'Find more plugins on Grafana.com', + }; + return ( +
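+      // header, action bar linking to grafana.com, then the plugin list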
diff --git a/public/app/features/plugins/PluginListPage.tsx b/public/app/features/plugins/PluginListPage.tsx
new file mode 100644
index 00000000000..d654ebd7cff
--- /dev/null
+++ b/public/app/features/plugins/PluginListPage.tsx
@@ -0,0 +1,72 @@
+import React, { PureComponent } from 'react';
+import { hot } from 'react-hot-loader';
+import { connect } from 'react-redux';
+import PageHeader from 'app/core/components/PageHeader/PageHeader';
+import OrgActionBar from 'app/core/components/OrgActionBar/OrgActionBar';
+import PluginList from './PluginList';
+import { NavModel, Plugin } from 'app/types';
+import { loadPlugins, setPluginsLayoutMode, setPluginsSearchQuery } from './state/actions';
+import { getNavModel } from '../../core/selectors/navModel';
+import { getLayoutMode, getPlugins, getPluginsSearchQuery } from './state/selectors';
+import { LayoutMode } from '../../core/components/LayoutSelector/LayoutSelector';
+
+export interface Props {
+  navModel: NavModel;
+  plugins: Plugin[];
+  layoutMode: LayoutMode;
+  searchQuery: string;
+  loadPlugins: typeof loadPlugins;
+  setPluginsLayoutMode: typeof setPluginsLayoutMode;
+  setPluginsSearchQuery: typeof setPluginsSearchQuery;
+}
+
+export class PluginListPage extends PureComponent<Props> {
+  componentDidMount() {
+    this.fetchPlugins();
+  }
+
+  async fetchPlugins() {
+    await this.props.loadPlugins();
+  }
+
+  render() {
+    const { navModel, plugins, layoutMode, setPluginsLayoutMode, setPluginsSearchQuery, searchQuery } = this.props;
+
+    const linkButton = {
+      href: 'https://grafana.com/plugins?utm_source=grafana_plugin_list',
+      title: 'Find more plugins on Grafana.com',
+    };
+    return (
+      <div>
+        <PageHeader model={navModel} />
+        <div className="page-container page-body">
+          <OrgActionBar
+            searchQuery={searchQuery}
+            layoutMode={layoutMode}
+            onSetLayoutMode={mode => setPluginsLayoutMode(mode)}
+            setSearchQuery={query => setPluginsSearchQuery(query)}
+            linkButton={linkButton}
+          />
+          {plugins && <PluginList plugins={plugins} layoutMode={layoutMode} />}
+        </div>
+      </div>
+    );
+  }
+}
+
+function mapStateToProps(state) {
+  return {
+    navModel: getNavModel(state.navIndex, 'plugins'),
+    plugins: getPlugins(state.plugins),
+    layoutMode: getLayoutMode(state.plugins),
+    searchQuery: getPluginsSearchQuery(state.plugins),
+  };
+}
+
+const mapDispatchToProps = {
+  loadPlugins,
+  setPluginsLayoutMode,
+  setPluginsSearchQuery,
+};
+
+export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(PluginListPage));
diff --git a/public/app/features/plugins/__mocks__/pluginMocks.ts b/public/app/features/plugins/__mocks__/pluginMocks.ts
new file mode 100644
index 00000000000..d8dd67d5b61
--- /dev/null
+++ b/public/app/features/plugins/__mocks__/pluginMocks.ts
@@ -0,0 +1,59 @@
+import { Plugin } from 'app/types';
+
+export const getMockPlugins = (amount: number): Plugin[] => {
+  const plugins = [];
+
+  for (let i = 0; i < amount; i++) {
+    plugins.push({
+      defaultNavUrl: 'some/url',
+      enabled: false,
+      hasUpdate: false,
+      id: `${i}`,
+      info: {
+        author: {
+          name: 'Grafana Labs',
+          url: 'url/to/GrafanaLabs',
+        },
+        description: 'pretty decent plugin',
+        links: ['one link'],
+        logos: { small: 'small/logo', large: 'large/logo' },
+        screenshots: `screenshot/${i}`,
+        updated: '2018-09-26',
+        version: '1',
+      },
+      latestVersion: `1.${i}`,
+      name: `pretty cool plugin-${i}`,
+      pinned: false,
+      state: '',
+      type: '',
+    });
+  }
+
+  return plugins;
+};
+
+export const getMockPlugin = () => {
+  return {
+    defaultNavUrl: 'some/url',
+    enabled: false,
+    hasUpdate: false,
+    id: '1',
+    info: {
+      author: {
+        name: 'Grafana Labs',
+        url: 'url/to/GrafanaLabs',
+      },
+      description: 'pretty decent plugin',
+      links: ['one link'],
+      logos: { small: 'small/logo', large: 'large/logo' },
+      screenshots: 'screenshot/1',
+      updated: '2018-09-26',
+      version: '1',
+    },
+    latestVersion: '1',
+    name: 'pretty cool plugin 1',
+    pinned: false,
+    state: '',
+    type: '',
+  };
+};
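The mock factory pairs with the selectors added later in this diff. A quick sketch of how the pieces compose; the inline state literal mirrors the PluginsState shape:

import { getMockPlugins } from 'app/features/plugins/__mocks__/pluginMocks';
import { getPlugins } from 'app/features/plugins/state/selectors';

// Names run pretty cool plugin-0 … plugin-4, so 'plugin-3' matches exactly one.
const filtered = getPlugins({ plugins: getMockPlugins(5), searchQuery: 'plugin-3' });
console.log(filtered.length); // 1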
diff --git a/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap
new file mode 100644
index 00000000000..176304b7b11
--- /dev/null
+++ b/public/app/features/plugins/__snapshots__/PluginList.test.tsx.snap
@@ -0,0 +1,210 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+[snapshot markup lost in extraction: the plugin list section with its mock plugin cards]
+`;
diff --git a/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap
new file mode 100644
index 00000000000..fc0cc68c522
--- /dev/null
+++ b/public/app/features/plugins/__snapshots__/PluginListItem.test.tsx.snap
@@ -0,0 +1,106 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+[snapshot markup lost in extraction: the card for "pretty cool plugin 1", "By Grafana Labs"]
+`;
+
+exports[`Render should render has plugin section 1`] = `
+[snapshot markup lost in extraction: the same card plus the "Update available!" notice]
+`;
diff --git a/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap b/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap
new file mode 100644
index 00000000000..43d9f45883d
--- /dev/null
+++ b/public/app/features/plugins/__snapshots__/PluginListPage.test.tsx.snap
@@ -0,0 +1,29 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Render should render component 1`] = `
+[snapshot markup lost in extraction: PageHeader plus the page body with OrgActionBar]
+`;
diff --git a/public/app/features/plugins/all.ts b/public/app/features/plugins/all.ts
index fd19ea963b6..d164a6d4255 100644
--- a/public/app/features/plugins/all.ts
+++ b/public/app/features/plugins/all.ts
@@ -1,9 +1,7 @@
 import './plugin_edit_ctrl';
 import './plugin_page_ctrl';
-import './plugin_list_ctrl';
 import './import_list/import_list';
 import './ds_edit_ctrl';
 import './ds_dashboards_ctrl';
-import './ds_list_ctrl';
 import './datasource_srv';
 import './plugin_component';
diff --git a/public/app/features/plugins/built_in_plugins.ts b/public/app/features/plugins/built_in_plugins.ts
index 2c5bf459eda..e29e1709ccf 100644
--- a/public/app/features/plugins/built_in_plugins.ts
+++ b/public/app/features/plugins/built_in_plugins.ts
@@ -11,6 +11,7 @@ import * as postgresPlugin from 'app/plugins/datasource/postgres/module';
 import * as prometheusPlugin from 'app/plugins/datasource/prometheus/module';
 import * as mssqlPlugin from 'app/plugins/datasource/mssql/module';
 import * as testDataDSPlugin from 'app/plugins/datasource/testdata/module';
+import * as stackdriverPlugin from 'app/plugins/datasource/stackdriver/module';
 
 import * as textPanel from 'app/plugins/panel/text/module';
 import * as graphPanel from 'app/plugins/panel/graph/module';
@@ -36,6 +37,7 @@
   'app/plugins/datasource/mssql/module': mssqlPlugin,
   'app/plugins/datasource/prometheus/module': prometheusPlugin,
   'app/plugins/datasource/testdata/module': testDataDSPlugin,
+  'app/plugins/datasource/stackdriver/module': stackdriverPlugin,
 
   'app/plugins/panel/text/module': textPanel,
   'app/plugins/panel/graph/module': graphPanel,
diff --git a/public/app/features/plugins/ds_list_ctrl.ts b/public/app/features/plugins/ds_list_ctrl.ts
deleted file mode 100644
index 71c1a516842..00000000000
--- a/public/app/features/plugins/ds_list_ctrl.ts
+++ /dev/null
@@ -1,61 +0,0 @@
-import coreModule from '../../core/core_module';
-import _ from 'lodash';
-
-export class DataSourcesCtrl {
-  datasources: any;
-  unfiltered: any;
-  navModel: any;
-  searchQuery: string;
-
-  /** @ngInject */
-  constructor(private $scope, private backendSrv, private datasourceSrv, private navModelSrv) {
-    this.navModel = this.navModelSrv.getNav('cfg', 'datasources', 0);
-    backendSrv.get('/api/datasources').then(result => {
-      this.datasources = result;
-      this.unfiltered = result;
-    });
-  }
-
-  onQueryUpdated() {
-    const regex = new RegExp(this.searchQuery, 'ig');
-    this.datasources = _.filter(this.unfiltered, item => {
-      regex.lastIndex = 0;
-      return regex.test(item.name) || regex.test(item.type);
-    });
-  }
-
-  removeDataSourceConfirmed(ds) {
-    this.backendSrv
-      .delete('/api/datasources/' + ds.id)
-      .then(
-        () => {
-          this.$scope.appEvent('alert-success', ['Datasource deleted', '']);
-        },
-        () => {
-          this.$scope.appEvent('alert-error', ['Unable to delete datasource', '']);
-        }
-      )
-      .then(() => {
-        this.backendSrv.get('/api/datasources').then(result => {
-          this.datasources = result;
-        });
-        this.backendSrv.get('/api/frontend/settings').then(settings => {
-          this.datasourceSrv.init(settings.datasources);
-        });
-      });
-  }
-
-  removeDataSource(ds) {
-    this.$scope.appEvent('confirm-modal', {
-      title: 'Delete',
-      text: 'Are you sure you want to delete datasource ' + ds.name + '?',
-      yesText: 'Delete',
-      icon: 'fa-trash',
-      onConfirm: () => {
-        this.removeDataSourceConfirmed(ds);
-      },
-    });
-  }
-}
-
-coreModule.controller('DataSourcesCtrl', DataSourcesCtrl);
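Both deleted controllers implement search as a case-insensitive RegExp over name and type, resetting lastIndex because the g flag makes test() stateful; the getPlugins selector introduced later in this diff keeps the idea, drops g, and also matches author and description. A distilled sketch of the shared predicate (names here are illustrative, not from the diff):

// Both versions reduce to: does any field match the query, case-insensitively?
const matches = (query: string, fields: string[]): boolean => {
  const regex = new RegExp(query, 'i'); // no 'g' flag, so no lastIndex bookkeeping
  return fields.some(field => regex.test(field));
};

// old PluginListCtrl:  matches(query, [item.name, item.type])
// new getPlugins:      matches(query, [item.name, item.info.author.name, item.info.description])
console.log(matches('Graph', ['graphite', 'datasource'])); // true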
diff --git a/public/app/features/plugins/partials/ds_edit.html b/public/app/features/plugins/partials/ds_edit.html
index f0bb8867f83..0b83e69c7d2 100644
--- a/public/app/features/plugins/partials/ds_edit.html
+++ b/public/app/features/plugins/partials/ds_edit.html
@@ -1,18 +1,13 @@
[template markup lost in extraction. Across this and the following hunks (@@ -22,20 +17,18 @@ and @@ -61,17 +54,19 @@): the "added by config" disclaimer ("This datasource was added by config and cannot be modified using the UI. Please contact your server admin to update this datasource.") moves from the top of the form down to the footer; the Name field keeps its help text ("The name is used when you select the data source in panels.") while the old Type row is reworked; the alpha-state notice is kept ("This plugin is marked as being in alpha state, which means it is in early development phase and updates will include breaking changes.") and a beta-state notice is added ("This plugin is marked as being in a beta development state. This means it is currently in active development and could be missing important features."); the Back button and footer controls are regrouped.]
diff --git a/public/app/features/plugins/partials/ds_http_settings.html b/public/app/features/plugins/partials/ds_http_settings.html
index 6d014af567c..17aedd48afd 100644
--- a/public/app/features/plugins/partials/ds_http_settings.html
+++ b/public/app/features/plugins/partials/ds_http_settings.html
@@ -1,9 +1,9 @@
[template markup lost in extraction. The hunks re-indent and regroup the HTTP form: the URL field and the Access selector with its help text ("Access mode controls how requests to the data source will be handled. Server should be the preferred way if nothing else stated.", plus the "Server access mode (Default)" and "Browser access mode" CORS explanations); a "Whitelisted Cookies" control moves out of "Advanced HTTP Settings" into the Auth area ("Grafana Proxy deletes forwarded cookies by default. Specify cookies by name that should be forwarded to the data source."); the Auth checkboxes, "Basic Auth Details" (User/Password), and "TLS Auth Details" ("TLS Certs are encrypted and stored in the Grafana database.") with their reset buttons are rewritten in place.]
diff --git a/public/app/features/plugins/partials/ds_list.html b/public/app/features/plugins/partials/ds_list.html
deleted file mode 100644
index fd537fc47d4..00000000000
--- a/public/app/features/plugins/partials/ds_list.html
+++ /dev/null
@@ -1,63 +0,0 @@
[deleted Angular data source list template; its markup did not survive extraction]
diff --git a/public/app/features/plugins/partials/plugin_list.html b/public/app/features/plugins/partials/plugin_list.html
deleted file mode 100644
index 04b5bf9c791..00000000000
--- a/public/app/features/plugins/partials/plugin_list.html
+++ /dev/null
@@ -1,45 +0,0 @@
[deleted Angular plugin list template; its markup did not survive extraction]
diff --git a/public/app/features/plugins/plugin_list_ctrl.ts b/public/app/features/plugins/plugin_list_ctrl.ts
deleted file mode 100644
index 315252364cc..00000000000
--- a/public/app/features/plugins/plugin_list_ctrl.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import angular from 'angular';
-import _ from 'lodash';
-
-export class PluginListCtrl {
-  plugins: any[];
-  tabIndex: number;
-  navModel: any;
-  searchQuery: string;
-  allPlugins: any[];
-
-  /** @ngInject */
-  constructor(private backendSrv: any, $location, navModelSrv) {
-    this.tabIndex = 0;
-    this.navModel = navModelSrv.getNav('cfg', 'plugins', 0);
-
-    this.backendSrv.get('api/plugins', { embedded: 0 }).then(plugins => {
-      this.plugins = plugins;
-      this.allPlugins = plugins;
-    });
-  }
-
-  onQueryUpdated() {
-    const regex = new RegExp(this.searchQuery, 'ig');
-    this.plugins = _.filter(this.allPlugins, item => {
-      return regex.test(item.name) || regex.test(item.type);
-    });
-  }
-}
-
-angular.module('grafana.controllers').controller('PluginListCtrl', PluginListCtrl);
diff --git a/public/app/features/plugins/state/actions.ts b/public/app/features/plugins/state/actions.ts
new file mode 100644
index 00000000000..dcfd510ffa0
--- /dev/null
+++ b/public/app/features/plugins/state/actions.ts
@@ -0,0 +1,51 @@
+import { Plugin, StoreState } from 'app/types';
+import { ThunkAction } from 'redux-thunk';
+import { getBackendSrv } from '../../../core/services/backend_srv';
+import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector';
+
+export enum ActionTypes {
+  LoadPlugins = 'LOAD_PLUGINS',
+  SetPluginsSearchQuery = 'SET_PLUGIN_SEARCH_QUERY',
+  SetLayoutMode = 'SET_LAYOUT_MODE',
+}
+
+export interface LoadPluginsAction {
+  type: ActionTypes.LoadPlugins;
+  payload: Plugin[];
+}
+
+export interface SetPluginsSearchQueryAction {
+  type: ActionTypes.SetPluginsSearchQuery;
+  payload: string;
+}
+
+export interface SetLayoutModeAction {
+  type: ActionTypes.SetLayoutMode;
+  payload: LayoutMode;
+}
+
+export const setPluginsLayoutMode = (mode: LayoutMode): SetLayoutModeAction => ({
+  type: ActionTypes.SetLayoutMode,
+  payload: mode,
+});
+
+export const setPluginsSearchQuery = (query: string): SetPluginsSearchQueryAction => ({
+  type: ActionTypes.SetPluginsSearchQuery,
+  payload: query,
+});
+
+const pluginsLoaded = (plugins: Plugin[]): LoadPluginsAction => ({
+  type: ActionTypes.LoadPlugins,
+  payload: plugins,
+});
+
+export type Action = LoadPluginsAction | SetPluginsSearchQueryAction | SetLayoutModeAction;
+
+type ThunkResult<R> = ThunkAction<R, StoreState, undefined, Action>;
+
+export function loadPlugins(): ThunkResult<void> {
+  return async dispatch => {
+    const result = await getBackendSrv().get('api/plugins', { embedded: 0 });
+    dispatch(pluginsLoaded(result));
+  };
+}
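How the thunk plays out at runtime, as a sketch; `store` stands in for the app's configured Redux store, which is set up outside this diff and must include the redux-thunk middleware:

import { loadPlugins, setPluginsSearchQuery } from 'app/features/plugins/state/actions';

// Assumed: the application's store instance.
declare const store: { dispatch: (action: any) => any };

store.dispatch(setPluginsSearchQuery('stackdriver')); // sync: SET_PLUGIN_SEARCH_QUERY
store.dispatch(loadPlugins()); // async: GET api/plugins, then LOAD_PLUGINS with the result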
diff --git a/public/app/features/plugins/state/reducers.ts b/public/app/features/plugins/state/reducers.ts
new file mode 100644
index 00000000000..1ca2880282c
--- /dev/null
+++ b/public/app/features/plugins/state/reducers.ts
@@ -0,0 +1,27 @@
+import { Action, ActionTypes } from './actions';
+import { Plugin, PluginsState } from 'app/types';
+import { LayoutModes } from '../../../core/components/LayoutSelector/LayoutSelector';
+
+export const initialState: PluginsState = {
+  plugins: [] as Plugin[],
+  searchQuery: '',
+  layoutMode: LayoutModes.Grid,
+};
+
+export const pluginsReducer = (state = initialState, action: Action): PluginsState => {
+  switch (action.type) {
+    case ActionTypes.LoadPlugins:
+      return { ...state, plugins: action.payload };
+
+    case ActionTypes.SetPluginsSearchQuery:
+      return { ...state, searchQuery: action.payload };
+
+    case ActionTypes.SetLayoutMode:
+      return { ...state, layoutMode: action.payload };
+  }
+  return state;
+};
+
+export default {
+  plugins: pluginsReducer,
+};
diff --git a/public/app/features/plugins/state/selectors.test.ts b/public/app/features/plugins/state/selectors.test.ts
new file mode 100644
index 00000000000..09b1ce4c259
--- /dev/null
+++ b/public/app/features/plugins/state/selectors.test.ts
@@ -0,0 +1,31 @@
+import { getPlugins, getPluginsSearchQuery } from './selectors';
+import { initialState } from './reducers';
+import { getMockPlugins } from '../__mocks__/pluginMocks';
+
+describe('Selectors', () => {
+  const mockState = initialState;
+
+  it('should return search query', () => {
+    mockState.searchQuery = 'test';
+    const query = getPluginsSearchQuery(mockState);
+
+    expect(query).toEqual(mockState.searchQuery);
+  });
+
+  it('should return plugins', () => {
+    mockState.plugins = getMockPlugins(5);
+    mockState.searchQuery = '';
+
+    const plugins = getPlugins(mockState);
+
+    expect(plugins).toEqual(mockState.plugins);
+  });
+
+  it('should filter plugins', () => {
+    mockState.searchQuery = 'plugin-1';
+
+    const plugins = getPlugins(mockState);
+
+    expect(plugins.length).toEqual(1);
+  });
+});
diff --git a/public/app/features/plugins/state/selectors.ts b/public/app/features/plugins/state/selectors.ts
new file mode 100644
index 00000000000..e1d16462527
--- /dev/null
+++ b/public/app/features/plugins/state/selectors.ts
@@ -0,0 +1,10 @@
+export const getPlugins = state => {
+  const regex = new RegExp(state.searchQuery, 'i');
+
+  return state.plugins.filter(item => {
+    return regex.test(item.name) || regex.test(item.info.author.name) || regex.test(item.info.description);
+  });
+};
+
+export const getPluginsSearchQuery = state => state.searchQuery;
+export const getLayoutMode = state => state.layoutMode;
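A round-trip sketch of the new state slice: the reducer records the query, the selector applies it as a case-insensitive RegExp over name, author, and description:

import { pluginsReducer, initialState } from 'app/features/plugins/state/reducers';
import { ActionTypes } from 'app/features/plugins/state/actions';
import { getPlugins } from 'app/features/plugins/state/selectors';

const next = pluginsReducer(initialState, {
  type: ActionTypes.SetPluginsSearchQuery,
  payload: 'graph',
});
console.log(next.searchQuery); // 'graph'
console.log(getPlugins(next).length); // 0, since no plugins are loaded yet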
diff --git a/public/app/features/teams/TeamMembers.tsx b/public/app/features/teams/TeamMembers.tsx
index cda175f4395..588745eea37 100644
--- a/public/app/features/teams/TeamMembers.tsx
+++ b/public/app/features/teams/TeamMembers.tsx
@@ -1,10 +1,10 @@
 import React, { PureComponent } from 'react';
 import { connect } from 'react-redux';
 import SlideDown from 'app/core/components/Animations/SlideDown';
-import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
+import { UserPicker } from 'app/core/components/Picker/UserPicker';
 import DeleteButton from 'app/core/components/DeleteButton/DeleteButton';
 import { TagBadge } from 'app/core/components/TagFilter/TagBadge';
-import { TeamMember } from '../../types';
+import { TeamMember, User } from 'app/types';
 import { loadTeamMembers, addTeamMember, removeTeamMember, setSearchMemberQuery } from './state/actions';
 import { getSearchMemberQuery, getTeamMembers } from './state/selectors';
diff --git a/public/app/features/teams/__mocks__/teamMocks.ts b/public/app/features/teams/__mocks__/teamMocks.ts
index 34fa06b2d09..339f227c081 100644
--- a/public/app/features/teams/__mocks__/teamMocks.ts
+++ b/public/app/features/teams/__mocks__/teamMocks.ts
@@ -1,4 +1,4 @@
-import { Team, TeamGroup, TeamMember } from '../../../types';
+import { Team, TeamGroup, TeamMember } from 'app/types';
 
 export const getMultipleMockTeams = (numberOfTeams: number): Team[] => {
   const teams: Team[] = [];
diff --git a/public/app/features/templating/TextBoxVariable.ts b/public/app/features/templating/TextBoxVariable.ts
new file mode 100644
index 00000000000..331ff4f95b8
--- /dev/null
+++ b/public/app/features/templating/TextBoxVariable.ts
@@ -0,0 +1,58 @@
+import { Variable, assignModelProperties, variableTypes } from './variable';
+
+export class TextBoxVariable implements Variable {
+  query: string;
+  current: any;
+  options: any[];
+  skipUrlSync: boolean;
+
+  defaults = {
+    type: 'textbox',
+    name: '',
+    hide: 2,
+    label: '',
+    query: '',
+    current: {},
+    options: [],
+    skipUrlSync: false,
+  };
+
+  /** @ngInject */
+  constructor(private model, private variableSrv) {
+    assignModelProperties(this, model, this.defaults);
+  }
+
+  getSaveModel() {
+    assignModelProperties(this.model, this, this.defaults);
+    return this.model;
+  }
+
+  setValue(option) {
+    this.variableSrv.setOptionAsCurrent(this, option);
+  }
+
+  updateOptions() {
+    this.options = [{ text: this.query.trim(), value: this.query.trim() }];
+    this.current = this.options[0];
+    return Promise.resolve();
+  }
+
+  dependsOn(variable) {
+    return false;
+  }
+
+  setValueFromUrl(urlValue) {
+    this.query = urlValue;
+    return this.variableSrv.setOptionFromUrl(this, urlValue);
+  }
+
+  getValueForUrl() {
+    return this.current.value;
+  }
+}
+
+variableTypes['textbox'] = {
+  name: 'Text box',
+  ctor: TextBoxVariable,
+  description: 'Define a textbox variable, where users can enter any arbitrary string',
+};
diff --git a/public/app/features/templating/all.ts b/public/app/features/templating/all.ts
index 16465740642..b872fa6cd4a 100644
--- a/public/app/features/templating/all.ts
+++ b/public/app/features/templating/all.ts
@@ -9,6 +9,7 @@ import { DatasourceVariable } from './datasource_variable';
 import { CustomVariable } from './custom_variable';
 import { ConstantVariable } from './constant_variable';
 import { AdhocVariable } from './adhoc_variable';
+import { TextBoxVariable } from './TextBoxVariable';
 
 coreModule.factory('templateSrv', () => {
   return templateSrv;
@@ -22,4 +23,5 @@
   CustomVariable,
   ConstantVariable,
   AdhocVariable,
+  TextBoxVariable,
 };
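The textbox variable's contract in isolation, as a sketch; variableSrv is stubbed here (an assumption: the real one is Grafana's Angular variableSrv), just enough to exercise the class:

import { TextBoxVariable } from 'app/features/templating/TextBoxVariable';

// Minimal stub standing in for the Angular variableSrv.
const variableSrv = {
  setOptionAsCurrent: (variable: any, option: any) => {
    variable.current = option;
  },
  setOptionFromUrl: (variable: any, urlValue: any) => Promise.resolve(),
};

const v = new TextBoxVariable({ type: 'textbox', name: 'env', query: ' prod ' }, variableSrv);
v.updateOptions();
console.log(v.current); // { text: 'prod', value: 'prod' }, since the query is trimmed
console.log(v.getValueForUrl()); // 'prod'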
diff --git a/public/app/features/templating/partials/editor.html b/public/app/features/templating/partials/editor.html
index 0d8b0ace327..ac4450c20a2 100644
--- a/public/app/features/templating/partials/editor.html
+++ b/public/app/features/templating/partials/editor.html
@@ -155,6 +155,14 @@
[template markup lost in extraction: the hunk adds a "Text options" section for the new textbox variable type, with a "Default value" input, just above the existing "Query Options" section]
 Query Options
diff --git a/public/app/features/users/InviteesTable.test.tsx b/public/app/features/users/InviteesTable.test.tsx
new file mode 100644
index 00000000000..e40ad033c57
--- /dev/null
+++ b/public/app/features/users/InviteesTable.test.tsx
@@ -0,0 +1,32 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import InviteesTable, { Props } from './InviteesTable';
+import { Invitee } from 'app/types';
+import { getMockInvitees } from './__mocks__/userMocks';
+
+const setup = (propOverrides?: object) => {
+  const props: Props = {
+    invitees: [] as Invitee[],
+    onRevokeInvite: jest.fn(),
+  };
+
+  Object.assign(props, propOverrides);
+
+  return shallow(<InviteesTable {...props} />);
+};
+
+describe('Render', () => {
+  it('should render component', () => {
+    const wrapper = setup();
+
+    expect(wrapper).toMatchSnapshot();
+  });
+
+  it('should render invitees', () => {
+    const wrapper = setup({
+      invitees: getMockInvitees(5),
+    });
+
+    expect(wrapper).toMatchSnapshot();
+  });
+});
diff --git a/public/app/features/users/InviteesTable.tsx b/public/app/features/users/InviteesTable.tsx
new file mode 100644
index 00000000000..2521fbd09e2
--- /dev/null
+++ b/public/app/features/users/InviteesTable.tsx
@@ -0,0 +1,64 @@
+import React, { createRef, PureComponent } from 'react';
+import { Invitee } from 'app/types';
+
+export interface Props {
+  invitees: Invitee[];
+  onRevokeInvite: (code: string) => void;
+}
+
+export default class InviteesTable extends PureComponent<Props> {
+  private copyUrlRef = createRef<HTMLTextAreaElement>();
+
+  copyToClipboard = () => {
+    const node = this.copyUrlRef.current;
+
+    if (node) {
+      node.select();
+      document.execCommand('copy');
+    }
+  };
+
+  render() {
+    const { invitees, onRevokeInvite } = this.props;
+
+    return (
+      [table markup lost in extraction: a header row ("Email", "Name") followed by one row per invitee rendering {invitee.email} and {invitee.name}; the diff is cut off at this point]
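The copy button above leans on document.execCommand('copy'): select the node's contents, then issue the command. The same pattern outside React, as a sketch (execCommand is deprecated in current browsers; navigator.clipboard.writeText is the modern route):

const copyFrom = (node: HTMLTextAreaElement | HTMLInputElement) => {
  node.select(); // execCommand('copy') copies whatever is currently selected
  document.execCommand('copy');
};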