diff --git a/.circleci/config.yml b/.circleci/config.yml index f1d161c3cac..44f34d42926 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -246,7 +246,7 @@ workflows: test-and-build: jobs: - build-all: - filters: *filter-not-release + filters: *filter-only-master - build-enterprise: filters: *filter-only-master - codespell: @@ -270,9 +270,7 @@ workflows: - gometalinter - mysql-integration-test - postgres-integration-test - filters: - branches: - only: master + filters: *filter-only-master - deploy-enterprise-master: requires: - build-all diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ebb038546e..ad1b63234e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ * **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano) * **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon) +* **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622) +* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda) ### Minor @@ -11,18 +13,28 @@ * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) +* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) * **Variables**: Skip 
unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) +* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) +* **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) +* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731) -# 5.2.2 (unreleased) +# 5.2.2 (2018-07-25) ### Minor * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated 
when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) +* **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) +* **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) +* **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) +* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533), thx [@mtanda](https://github.com/mtanda) # 5.2.1 (2018-06-29) diff --git a/Gopkg.lock b/Gopkg.lock index 5acaf2a542c..6f08e208ecd 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -32,6 +32,7 @@ "aws/credentials/ec2rolecreds", "aws/credentials/endpointcreds", "aws/credentials/stscreds", + "aws/csm", "aws/defaults", "aws/ec2metadata", "aws/endpoints", @@ -43,6 +44,8 @@ "internal/shareddefaults", "private/protocol", "private/protocol/ec2query", + "private/protocol/eventstream", + "private/protocol/eventstream/eventstreamapi", "private/protocol/query", "private/protocol/query/queryutil", "private/protocol/rest", @@ -54,8 +57,8 @@ "service/s3", "service/sts" ] - revision = "c7cd1ebe87257cde9b65112fc876b0339ea0ac30" - version = "v1.13.49" + revision = "fde4ded7becdeae4d26bf1212916aabba79349b4" + version = "v1.14.12" [[projects]] branch = "master" @@ -424,6 +427,12 @@ revision = "1744e2970ca51c86172c8190fadad617561ed6e7" version = "v1.0.0" +[[projects]] + branch = "master" + name = "github.com/shurcooL/sanitized_anchor_name" + packages = ["."] + revision = "86672fcb3f950f35f2e675df2240550f2a50762f" + [[projects]] name = "github.com/smartystreets/assertions" packages = [ @@ -670,6 +679,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "85cc057e0cc074ab5b43bd620772d63d51e07b04e8782fcfe55e6929d2fc40f7" + inputs-digest = 
"cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 1768059f0b8..6c91ec37221 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -36,7 +36,7 @@ ignored = [ [[constraint]] name = "github.com/aws/aws-sdk-go" - version = "1.12.65" + version = "1.13.56" [[constraint]] branch = "master" diff --git a/build.go b/build.go index 77cbde50c41..bcb9b2ddf7d 100644 --- a/build.go +++ b/build.go @@ -330,6 +330,7 @@ func createPackage(options linuxPackageOptions) { name := "grafana" if enterprise { name += "-enterprise" + args = append(args, "--replaces", "grafana") } args = append(args, "--name", name) diff --git a/conf/ldap.toml b/conf/ldap.toml index 166d85eabb1..a74b2b6cc2c 100644 --- a/conf/ldap.toml +++ b/conf/ldap.toml @@ -72,6 +72,8 @@ email = "email" [[servers.group_mappings]] group_dn = "cn=admins,dc=grafana,dc=org" org_role = "Admin" +# To make user an instance admin (Grafana Admin) uncomment line below +# grafana_admin = true # The Grafana organization database id, optional, if left out the default org (id 1) will be used # org_id = 1 diff --git a/devenv/README.md b/devenv/README.md index 4ec6f672f25..9abf3596776 100644 --- a/devenv/README.md +++ b/devenv/README.md @@ -1,11 +1,16 @@ This folder contains useful scripts and configuration for... -* Configuring datasources in Grafana -* Provision example dashboards in Grafana -* Run preconfiured datasources as docker containers - -want to know more? run setup! +* Configuring dev datasources in Grafana +* Configuring dev & test scenarios dashboards. ```bash ./setup.sh ``` + +After restarting grafana server there should now be a number of datasources named `gdev-` provisioned as well as a dashboard folder named `gdev dashboards`. This folder contains dashboard & panel features tests dashboards. 
+ +# Dev dashboards + +Please update these dashboards or make new ones as new panels & dashboards features are developed or new bugs are found. The dashboards are located in the `devenv/dev-dashboards` folder. + + diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index e93c0217f27..241381097b1 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -14,6 +14,9 @@ datasources: isDefault: true url: http://localhost:9090 + - name: gdev-testdata + type: testdata + - name: gdev-influxdb type: influxdb access: proxy @@ -60,7 +63,8 @@ datasources: url: localhost:5432 database: grafana user: grafana - password: password + secureJsonData: + password: password jsonData: sslmode: "disable" @@ -71,3 +75,4 @@ datasources: authType: credentials defaultRegion: eu-west-2 + diff --git a/devenv/dev-dashboards/dashboard_with_rows.json b/devenv/dev-dashboards/dashboard_with_rows.json deleted file mode 100644 index 335c27bc80a..00000000000 --- a/devenv/dev-dashboards/dashboard_with_rows.json +++ /dev/null @@ -1,592 +0,0 @@ -{ - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": "-- Grafana --", - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "id": 59, - "links": [], - "panels": [ - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 9, - "panels": [], - "title": "Row title", - "type": "row" - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 1 - }, - "id": 12, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - 
"renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 1 - }, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - 
"show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 5 - }, - "id": 7, - "panels": [], - "title": "Row", - "type": "row" - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 6 - }, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 6 - }, - "id": 13, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - 
"seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "collapsed": false, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 10 - }, - "id": 11, - "panels": [], - "title": "Row title", - "type": "row" - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 0, - "y": 11 - }, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - 
"max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "Prometheus", - "fill": 1, - "gridPos": { - "h": 4, - "w": 12, - "x": 12, - "y": 11 - }, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "expr": "go_goroutines", - "format": "time_series", - "intervalFactor": 1, - "refId": "A" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Panel Title", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - } - ], - "schemaVersion": 16, - "style": "dark", - "tags": [], - "templating": { - "list": [] - }, - "time": { - "from": "now-30m", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "timezone": "", - "title": "Dashboard with rows", - "uid": "1DdOzBNmk", - "version": 5 -} diff --git 
a/devenv/dev-dashboards/panel_tests_graph.json b/devenv/dev-dashboards/panel_tests_graph.json new file mode 100644 index 00000000000..8a1770f0fa6 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_graph.json @@ -0,0 +1,1558 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 1, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "no_data_points", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "No Data Points Warning", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + 
"datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "datapoints_outside_range", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Datapoints Outside Range Warning", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 0 + }, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + 
"scenarioId": "random_walk", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Random walk series", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 7 + }, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "random_walk", + "target": "" + } + ], + "thresholds": [], + "timeFrom": "2s", + "timeShift": null, + "title": "Millisecond res x-axis and tooltip", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + 
"align": false, + "alignLevel": null + } + }, + { + "content": "Just verify that the tooltip time has millisecond resolution ", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 6, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 9, + "w": 16, + "x": 0, + "y": 14 + }, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "yaxis": 2 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "2000,3000,4000,1000,3000,10000", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "2 yaxis and axis labels", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": "Perecent", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": "Pressure", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Verify that axis labels look ok", + "editable": true, + "error": false, + "gridPos": { + "h": 9, + 
"w": 8, + "x": 16, + "y": 14 + }, + "id": 7, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 23 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "null value connected", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 23 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null as zero", + 
"percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "null value null as zero", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Should be a long line connecting the null region in the `connected` mode, and in zero it should just be a line with zero value at the null points. 
", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 23 + }, + "id": 13, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 30 + }, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "zindex": -3 + } + ], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "hide": false, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Stacking value ontop of nulls", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + 
"alignLevel": null + } + }, + { + "content": "Stacking values on top of nulls, should treat the null values as zero. ", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 30 + }, + "id": 14, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 37 + }, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "zindex": -3 + } + ], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "alias": "", + "hide": false, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Stacking all series null segment", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, 
+ "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Stacking when all values are null should leave a gap in the graph", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 37 + }, + "id": 15, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 44 + }, + "id": 20, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table Single Series Should Take Minimum Height", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": 
{ + "h": 7, + "w": 12, + "x": 0, + "y": 51 + }, + "id": 16, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 51 + }, + "id": 17, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + 
"percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "E", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "F", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "G", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "I", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "J", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table Should Scroll", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + 
"datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 58 + }, + "id": 18, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 58 + }, + "id": 19, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "total": 
true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "E", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "F", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "G", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "I", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "J", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "K", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "L", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": 
null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Graph", + "uid": "5SdHCadmz", + "version": 3 +} diff --git a/devenv/dev-dashboards/panel_tests_singlestat.json b/devenv/dev-dashboards/panel_tests_singlestat.json new file mode 100644 index 00000000000..2d69f27bcb6 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_singlestat.json @@ -0,0 +1,574 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 2, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 
100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "postfix", + "postfixFontSize": "50%", + "prefix": "prefix", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "prefix 3 ms (green) postfixt + sparkline", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#d44a3a", + "rgba(237, 129, 40, 0.89)", + "#299c46" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 3, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": 
"1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "3 ms (red) + full height sparkline", + "type": "singlestat", + "valueFontSize": "200%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorPrefix": false, + "colorValue": false, + "colors": [ + "#d44a3a", + "rgba(237, 129, 40, 0.89)", + "#299c46" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 0 + }, + "id": 4, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "3 ms + red background", + "type": "singlestat", + "valueFontSize": "200%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + 
"show": true, + "thresholdLabels": true, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 7 + }, + "id": 5, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 7 + }, + "id": 6, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" 
+ } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90, no labels", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": false + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 7, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90, no markers or labels", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + 
"value": "null" + } + ], + "valueName": "current" + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Singlestat", + "uid": "singlestat", + "version": 14 +} diff --git a/devenv/dev-dashboards/panel_tests_table.json b/devenv/dev-dashboards/panel_tests_table.json new file mode 100644 index 00000000000..8337e9cd746 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_table.json @@ -0,0 +1,453 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 11, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 3, + "links": [], + "pageSize": 10, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", 
+ "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "server1", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "server2", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "title": "Time series to rows (2 pages)", + "transform": "timeseries_to_rows", + "type": "table" + }, + { + "columns": [ + { + "text": "Avg", + "value": "avg" + }, + { + "text": "Max", + "value": "max" + }, + { + "text": "Current", + "value": "current" + } + ], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 11, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 4, + "links": [], + "pageSize": 10, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + 
"thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "server1", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "server2", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "title": "Time series aggregations", + "transform": "timeseries_aggregations", + "type": "table" + }, + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 11 + }, + "id": 5, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "row", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "/Color/", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "ColorValue", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + } + ], + "title": "color row by threshold", + "transform": "timeseries_to_columns", + 
"type": "table" + }, + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 2, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "ColorValue", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "ColorCell", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "5,1,2,3,4,5,10,20" + } + ], + "title": "Column style thresholds & units", + "transform": "timeseries_to_columns", + "type": "table" + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + 
"refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Table", + "uid": "pttable", + "version": 1 +} diff --git a/public/app/plugins/app/testdata/dashboards/alerts.json b/devenv/dev-dashboards/testdata_alerts.json similarity index 98% rename from public/app/plugins/app/testdata/dashboards/alerts.json rename to devenv/dev-dashboards/testdata_alerts.json index 159df0f458b..8c2edebf155 100644 --- a/public/app/plugins/app/testdata/dashboards/alerts.json +++ b/devenv/dev-dashboards/testdata_alerts.json @@ -1,6 +1,6 @@ { "revision": 2, - "title": "TestData - Alerts", + "title": "Alerting with TestData", "tags": [ "grafana-test" ], @@ -48,7 +48,7 @@ }, "aliasColors": {}, "bars": false, - "datasource": "Grafana TestData", + "datasource": "gdev-testdata", "editable": true, "error": false, "fill": 1, @@ -161,7 +161,7 @@ }, "aliasColors": {}, "bars": false, - "datasource": "Grafana TestData", + "datasource": "gdev-testdata", "editable": true, "error": false, "fill": 1, diff --git a/devenv/setup.sh b/devenv/setup.sh index 78dbfc1a366..6412bbc98ea 100755 --- a/devenv/setup.sh +++ b/devenv/setup.sh @@ -1,4 +1,4 @@ -#/bin/bash +#!/bin/bash bulkDashboard() { @@ -22,31 +22,37 @@ requiresJsonnet() { fi } -defaultDashboards() { +devDashboards() { + echo -e "\xE2\x9C\x94 Setting up all dev dashboards using provisioning" ln -s -f ../../../devenv/dashboards.yaml ../conf/provisioning/dashboards/dev.yaml } -defaultDatasources() { - echo "setting up all default datasources using provisioning" +devDatasources() { + echo -e "\xE2\x9C\x94 Setting up all dev datasources using provisioning" ln -s -f ../../../devenv/datasources.yaml ../conf/provisioning/datasources/dev.yaml } usage() { - echo -e "install.sh\n\tThis script setups dev provision for datasources and dashboards" + echo -e "\n" 
echo "Usage:" echo " bulk-dashboards - create and provisioning 400 dashboards" echo " no args - provisiong core datasources and dev dashboards" } main() { + echo -e "------------------------------------------------------------------" + echo -e "This script setups provisioning for dev datasources and dashboards" + echo -e "------------------------------------------------------------------" + echo -e "\n" + local cmd=$1 if [[ $cmd == "bulk-dashboards" ]]; then bulkDashboard else - defaultDashboards - defaultDatasources + devDashboards + devDatasources fi if [[ -z "$cmd" ]]; then diff --git a/docker/blocks/nginx_proxy/Dockerfile b/docker/blocks/nginx_proxy/Dockerfile index 9ded20dfdda..04de507499d 100644 --- a/docker/blocks/nginx_proxy/Dockerfile +++ b/docker/blocks/nginx_proxy/Dockerfile @@ -1,3 +1,4 @@ FROM nginx:alpine -COPY nginx.conf /etc/nginx/nginx.conf \ No newline at end of file +COPY nginx.conf /etc/nginx/nginx.conf +COPY htpasswd /etc/nginx/htpasswd diff --git a/docker/blocks/nginx_proxy/htpasswd b/docker/blocks/nginx_proxy/htpasswd new file mode 100755 index 00000000000..e2c5eeeff7b --- /dev/null +++ b/docker/blocks/nginx_proxy/htpasswd @@ -0,0 +1,3 @@ +user1:$apr1$1odeeQb.$kwV8D/VAAGUDU7pnHuKoV0 +user2:$apr1$A2kf25r.$6S0kp3C7vIuixS5CL0XA9. 
+admin:$apr1$IWn4DoRR$E2ol7fS/dkI18eU4bXnBO1 diff --git a/docker/blocks/nginx_proxy/nginx.conf b/docker/blocks/nginx_proxy/nginx.conf index 18e27b3fb01..860d3d0b89f 100644 --- a/docker/blocks/nginx_proxy/nginx.conf +++ b/docker/blocks/nginx_proxy/nginx.conf @@ -13,7 +13,26 @@ http { listen 10080; location /grafana/ { + ################################################################ + # Enable these settings to test with basic auth and an auth proxy header + # the htpasswd file contains an admin user with password admin and + # user1: grafana and user2: grafana + ################################################################ + + # auth_basic "Restricted Content"; + # auth_basic_user_file /etc/nginx/htpasswd; + + ################################################################ + # To use the auth proxy header, set the following in custom.ini: + # [auth.proxy] + # enabled = true + # header_name = X-WEBAUTH-USER + # header_property = username + ################################################################ + + # proxy_set_header X-WEBAUTH-USER $remote_user; + proxy_pass http://localhost:3000/; } } -} \ No newline at end of file +} diff --git a/docker/blocks/openldap/ldap_dev.toml b/docker/blocks/openldap/ldap_dev.toml new file mode 100644 index 00000000000..e79771b57de --- /dev/null +++ b/docker/blocks/openldap/ldap_dev.toml @@ -0,0 +1,85 @@ +# To troubleshoot and get more log info enable ldap debug logging in grafana.ini +# [log] +# filters = ldap:debug + +[[servers]] +# Ldap server host (specify multiple hosts space separated) +host = "127.0.0.1" +# Default port is 389 or 636 if use_ssl = true +port = 389 +# Set to true if ldap server supports TLS +use_ssl = false +# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS) +start_tls = false +# set to true if you want to skip ssl cert validation +ssl_skip_verify = false +# set to the path to your root CA certificate or leave unset to use 
system defaults +# root_ca_cert = "/path/to/certificate.crt" + +# Search user bind dn +bind_dn = "cn=admin,dc=grafana,dc=org" +# Search user bind password +# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;""" +bind_password = 'grafana' + +# User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)" +search_filter = "(cn=%s)" + +# An array of base dns to search through +search_base_dns = ["dc=grafana,dc=org"] + +# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups. +# This is done by enabling group_search_filter below. You must also set member_of= "cn" +# in [servers.attributes] below. + +# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN +# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of +# below in such a way that the user's recursive group membership is considered. +# +# Nested Groups + Active Directory (AD) Example: +# +# AD groups store the Distinguished Names (DNs) of members, so your filter must +# recursively search your groups for the authenticating user's DN. For example: +# +# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)" +# group_search_filter_user_attribute = "distinguishedName" +# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] +# +# [servers.attributes] +# ... +# member_of = "distinguishedName" + +## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available) +# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))" +## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter. 
+## Defaults to the value of username in [server.attributes] +## Valid options are any of your values in [servers.attributes] +## If you are using nested groups you probably want to set this and member_of in +## [servers.attributes] to "distinguishedName" +# group_search_filter_user_attribute = "distinguishedName" +## An array of the base DNs to search through for groups. Typically uses ou=groups +# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"] + +# Specify names of the ldap attributes your ldap uses +[servers.attributes] +name = "givenName" +surname = "sn" +username = "cn" +member_of = "memberOf" +email = "email" + +# Map ldap groups to grafana org roles +[[servers.group_mappings]] +group_dn = "cn=admins,ou=groups,dc=grafana,dc=org" +org_role = "Admin" +# The Grafana organization database id, optional, if left out the default org (id 1) will be used +# org_id = 1 + +[[servers.group_mappings]] +group_dn = "cn=editors,ou=groups,dc=grafana,dc=org" +org_role = "Editor" + +[[servers.group_mappings]] +# If you want to match all (or no ldap groups) then you can use wildcard +group_dn = "*" +org_role = "Viewer" diff --git a/docker/blocks/openldap/notes.md b/docker/blocks/openldap/notes.md index 8de23d5ccf2..65155423616 100644 --- a/docker/blocks/openldap/notes.md +++ b/docker/blocks/openldap/notes.md @@ -14,12 +14,12 @@ After adding ldif files to `prepopulate`: ## Enabling LDAP in Grafana -The default `ldap.toml` file in `conf` has host set to `127.0.0.1` and port to set to 389 so all you need to do is enable it in the .ini file to get Grafana to use this block: +Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). 
To enable it in the .ini file to get Grafana to use this block: ```ini [auth.ldap] enabled = true -config_file = conf/ldap.toml +config_file = conf/ldap_dev.toml ; allow_sign_up = true ``` @@ -43,6 +43,3 @@ editors no groups ldap-viewer - - - diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 4ff0baee108..3a04ef92e31 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -75,6 +75,32 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). + +#### Using interval and range variables + +> Support for `$__range` and `$__range_ms` only available from Grafana v5.3 + +It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since +`label_values` function doesn't support queries. + +Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard. 
+
+**Example usage:**
+
+Populate a variable with the busiest 5 request instances based on average QPS over the time range shown in the dashboard:
+
+```
+Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instance)))
+Regex: /"([^"]+)"/
+```
+
+Populate a variable with the instances having a certain state over the time range shown in the dashboard:
+
+```
+Query: query_result(max_over_time(<metric>[$__range]) != <state>)
+Regex:
+```
+
 ### Using variables in queries
 
 There are two syntaxes:
diff --git a/docs/sources/http_api/playlist.md b/docs/sources/http_api/playlist.md
new file mode 100644
index 00000000000..7c33900969b
--- /dev/null
+++ b/docs/sources/http_api/playlist.md
@@ -0,0 +1,286 @@
++++
+title = "Playlist HTTP API"
+description = "Playlist Admin HTTP API"
+keywords = ["grafana", "http", "documentation", "api", "playlist"]
+aliases = ["/http_api/playlist/"]
+type = "docs"
+[menu.docs]
+name = "Playlist"
+parent = "http_api"
++++
+
+# Playlist API
+
+## Search Playlist
+
+`GET /api/playlists`
+
+Get all existing playlists for the current organization using pagination
+
+**Example Request**:
+
+```bash
+GET /api/playlists HTTP/1.1
+Accept: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+  Querystring Parameters:
+
+  These parameters are used as querystring parameters.
+
+  - **query** - Limit response to playlists having a name like this value.
+  - **limit** - Limit response to *X* number of playlists.
+ +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +[ + { + "id": 1, + "name": "my playlist", + "interval": "5m" + } +] +``` + +## Get one playlist + +`GET /api/playlists/:id` + +**Example Request**: + +```bash +GET /api/playlists/1 HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{ + "id" : 1, + "name": "my playlist", + "interval": "5m", + "orgId": "my org", + "items": [ + { + "id": 1, + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "id": 2, + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] +} +``` + +## Get Playlist items + +`GET /api/playlists/:id/items` + +**Example Request**: + +```bash +GET /api/playlists/1/items HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +[ + { + "id": 1, + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "id": 2, + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } +] +``` + +## Get Playlist dashboards + +`GET /api/playlists/:id/dashboards` + +**Example Request**: + +```bash +GET /api/playlists/1/dashboards HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +[ + { + "id": 3, + "title": "my third dasboard", + "order": 1, + }, + { + "id": 5, + "title":"my other dasboard" + "order": 2, + + } +] +``` + +## Create a playlist + +`POST /api/playlists/` + +**Example Request**: + +```bash +PUT /api/playlists/1 
HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + { + "name": "my playlist", + "interval": "5m", + "items": [ + { + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] + } +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json + { + "id": 1, + "name": "my playlist", + "interval": "5m" + } +``` + +## Update a playlist + +`PUT /api/playlists/:id` + +**Example Request**: + +```bash +PUT /api/playlists/1 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + { + "name": "my playlist", + "interval": "5m", + "items": [ + { + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] + } +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{ + "id" : 1, + "name": "my playlist", + "interval": "5m", + "orgId": "my org", + "items": [ + { + "id": 1, + "playlistId": 1, + "type": "dashboard_by_id", + "value": "3", + "order": 1, + "title":"my third dasboard" + }, + { + "id": 2, + "playlistId": 1, + "type": "dashboard_by_tag", + "value": "myTag", + "order": 2, + "title":"my other dasboard" + } + ] +} +``` + +## Delete a playlist + +`DELETE /api/playlists/:id` + +**Example Request**: + +```bash +DELETE /api/playlists/1 HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{} +``` diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md 
index 668a44fcb2b..2a799b044b3 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -15,6 +15,8 @@ weight = 1
 The Grafana back-end has a number of configuration options that can be
 specified in a `.ini` configuration file or specified using environment variables.
 
+> **Note.** Grafana needs to be restarted for any configuration changes to take effect.
+
 ## Comments In .ini Files
 
 Semicolons (the `;` char) are the standard way to comment out lines in a `.ini` file.
@@ -296,6 +298,12 @@ Set to `true` to automatically add new users to the main organization
 (id 1). When set to `false`, new users will automatically cause a new
 organization to be created for that new user.
 
+### auto_assign_org_id
+
+Set this value to automatically add new users to the provided org.
+This requires `auto_assign_org` to be set to `true`. Please make sure
+that this organization already exists.
+
 ### auto_assign_org_role
 
 The role new users will be assigned for the main organization (if the
@@ -857,7 +865,7 @@ Secret key. e.g. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
 Url to where Grafana will send PUT request with images
 
 ### public_url
-Optional parameter. Url to send to users in notifications, directly appended with the resulting uploaded file name.
+Optional parameter. Url to send to users in notifications. If the string contains the sequence ${file}, it will be replaced with the uploaded filename. Otherwise, the file name will be appended to the path part of the url, leaving any query string unchanged.
 
 ### username
 basic auth username
diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md
index 85501e51d85..9a381b9e467 100644
--- a/docs/sources/installation/ldap.md
+++ b/docs/sources/installation/ldap.md
@@ -23,8 +23,9 @@ specific configuration file (default: `/etc/grafana/ldap.toml`).
### Example config ```toml -# Set to true to log user information returned from LDAP -verbose_logging = false +# To troubleshoot and get more log info enable ldap debug logging in grafana.ini +# [log] +# filters = ldap:debug [[servers]] # Ldap server host (specify multiple hosts space separated) @@ -73,6 +74,8 @@ email = "email" [[servers.group_mappings]] group_dn = "cn=admins,dc=grafana,dc=org" org_role = "Admin" +# To make user an instance admin (Grafana Admin) uncomment line below +# grafana_admin = true # The Grafana organization database id, optional, if left out the default org (id 1) will be used. Setting this allows for multiple group_dn's to be assigned to the same org_role provided the org_id differs # org_id = 1 @@ -132,6 +135,10 @@ Users page, this change will be reset the next time the user logs in. If you change the LDAP groups of a user, the change will take effect the next time the user logs in. +### Grafana Admin +with a servers.group_mappings section you can set grafana_admin = true or false to sync Grafana Admin permission. A Grafana server admin has admin access over all orgs & +users. + ### Priority The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the TOML config will be used. diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 8341b9770bd..ce1a1299d26 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -11,7 +11,7 @@ weight = 1 # Variables Variables allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application -and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of +and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. 
These dropdowns make it easy to change the data being displayed in your dashboard. {{< docs-imagebox img="/img/docs/v50/variables_dashboard.png" >}} @@ -273,6 +273,12 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias. +### The $__range Variable + +> Only available in Grafana v5.3+ + +Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`. + ## Repeating Panels Template variables can be very useful to dynamically change your queries across a whole dashboard. If you want diff --git a/package.json b/package.json index c26438230cc..c0581c1de43 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,7 @@ "expose-loader": "^0.7.3", "extract-text-webpack-plugin": "^4.0.0-beta.0", "file-loader": "^1.1.11", - "fork-ts-checker-webpack-plugin": "^0.4.1", + "fork-ts-checker-webpack-plugin": "^0.4.2", "gaze": "^1.1.2", "glob": "~7.0.0", "grunt": "1.0.1", @@ -71,12 +71,14 @@ "karma-webpack": "^3.0.0", "lint-staged": "^6.0.0", "load-grunt-tasks": "3.5.2", + "mini-css-extract-plugin": "^0.4.0", "mobx-react-devtools": "^4.2.15", "mocha": "^4.0.1", "ng-annotate-loader": "^0.6.1", "ng-annotate-webpack-plugin": "^0.2.1-pre", "ngtemplate-loader": "^2.0.1", "npm": "^5.4.2", + "optimize-css-assets-webpack-plugin": "^4.0.2", "phantomjs-prebuilt": "^2.1.15", "postcss-browser-reporter": "^0.5.0", "postcss-loader": "^2.0.6", @@ -90,15 +92,16 @@ "style-loader": "^0.21.0", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", - "ts-loader": "^4.3.0", "ts-jest": "^22.4.6", + "ts-loader": "^4.3.0", + "tslib": "^1.9.3", "tslint": "^5.8.0", "tslint-loader": "^3.5.3", "typescript": "^2.6.2", + "uglifyjs-webpack-plugin": "^1.2.7", "webpack": "^4.8.0", 
"webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", - "fork-ts-checker-webpack-plugin": "^0.4.2", "webpack-cli": "^2.1.4", "webpack-dev-server": "^3.1.0", "webpack-merge": "^4.1.0", @@ -155,14 +158,12 @@ "immutable": "^3.8.2", "jquery": "^3.2.1", "lodash": "^4.17.10", - "mini-css-extract-plugin": "^0.4.0", "mobx": "^3.4.1", "mobx-react": "^4.3.5", "mobx-state-tree": "^1.3.1", "moment": "^2.22.2", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", - "optimize-css-assets-webpack-plugin": "^4.0.2", "prismjs": "^1.6.0", "prop-types": "^15.6.0", "react": "^16.2.0", @@ -181,10 +182,9 @@ "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", - "tinycolor2": "^1.4.1", - "uglifyjs-webpack-plugin": "^1.2.7" + "tinycolor2": "^1.4.1" }, "resolutions": { "caniuse-db": "1.0.30000772" } -} +} \ No newline at end of file diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go index 9eba0e0d5b6..331beeef5e4 100644 --- a/pkg/api/alerting_test.go +++ b/pkg/api/alerting_test.go @@ -31,7 +31,7 @@ func TestAlertingApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go index 6590eb19ff2..08f3018c694 100644 --- a/pkg/api/annotations_test.go +++ b/pkg/api/annotations_test.go @@ -119,7 +119,7 @@ func TestAnnotationsApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/api.go b/pkg/api/api.go index 8870b9b095e..84425fdae3d 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -73,8 +73,7 @@ func (hs *HTTPServer) registerRoutes() { r.Get("/dashboards/", reqSignedIn, Index) r.Get("/dashboards/*", reqSignedIn, Index) - r.Get("/explore/", reqEditorRole, Index) - r.Get("/explore/*", 
reqEditorRole, Index) + r.Get("/explore", reqEditorRole, Index) r.Get("/playlists/", reqSignedIn, Index) r.Get("/playlists/*", reqSignedIn, Index) diff --git a/pkg/api/dashboard_snapshot_test.go b/pkg/api/dashboard_snapshot_test.go index 5e7637a24e1..e58f2c4712d 100644 --- a/pkg/api/dashboard_snapshot_test.go +++ b/pkg/api/dashboard_snapshot_test.go @@ -39,7 +39,7 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { return nil }) - teamResp := []*m.Team{} + teamResp := []*m.TeamDTO{} bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { query.Result = teamResp return nil diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go index 50a2e314f5c..283a9b5f12c 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -61,7 +61,7 @@ func TestDashboardApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) @@ -230,7 +230,7 @@ func TestDashboardApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index c1b8ffe595e..00ad25ab8c2 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -52,7 +52,7 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { if res.Error != nil { res.ErrorString = res.Error.Error() resp.Message = res.ErrorString - statusCode = 500 + statusCode = 400 } } diff --git a/pkg/api/playlist.go b/pkg/api/playlist.go index a90b6425cb6..0963df7d4c4 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -160,6 +160,7 @@ func CreatePlaylist(c *m.ReqContext, cmd m.CreatePlaylistCommand) Response { func UpdatePlaylist(c *m.ReqContext, cmd m.UpdatePlaylistCommand) Response { cmd.OrgId = c.OrgId + cmd.Id = c.ParamsInt64(":id") if err := bus.Dispatch(&cmd); err != nil { return Error(500, "Failed to save 
playlist", err) diff --git a/pkg/api/team.go b/pkg/api/team.go index 9919305881b..ebb426c4c82 100644 --- a/pkg/api/team.go +++ b/pkg/api/team.go @@ -93,5 +93,6 @@ func GetTeamByID(c *m.ReqContext) Response { return Error(500, "Failed to get Team", err) } + query.Result.AvatarUrl = dtos.GetGravatarUrlWithDefault(query.Result.Email, query.Result.Name) return JSON(200, &query.Result) } diff --git a/pkg/api/team_test.go b/pkg/api/team_test.go index 0bf06d723c8..a1984288870 100644 --- a/pkg/api/team_test.go +++ b/pkg/api/team_test.go @@ -13,7 +13,7 @@ import ( func TestTeamApiEndpoint(t *testing.T) { Convey("Given two teams", t, func() { mockResult := models.SearchTeamQueryResult{ - Teams: []*models.SearchTeamDto{ + Teams: []*models.TeamDTO{ {Name: "team1"}, {Name: "team2"}, }, diff --git a/pkg/components/imguploader/webdavuploader.go b/pkg/components/imguploader/webdavuploader.go index f5478ea8a2f..ed6b14725c0 100644 --- a/pkg/components/imguploader/webdavuploader.go +++ b/pkg/components/imguploader/webdavuploader.go @@ -9,6 +9,7 @@ import ( "net/http" "net/url" "path" + "strings" "time" "github.com/grafana/grafana/pkg/util" @@ -35,6 +36,16 @@ var netClient = &http.Client{ Transport: netTransport, } +func (u *WebdavUploader) PublicURL(filename string) string { + if strings.Contains(u.public_url, "${file}") { + return strings.Replace(u.public_url, "${file}", filename, -1) + } else { + publicURL, _ := url.Parse(u.public_url) + publicURL.Path = path.Join(publicURL.Path, filename) + return publicURL.String() + } +} + func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) { url, _ := url.Parse(u.url) filename := util.GetRandomString(20) + ".png" @@ -65,9 +76,7 @@ func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) } if u.public_url != "" { - publicURL, _ := url.Parse(u.public_url) - publicURL.Path = path.Join(publicURL.Path, filename) - return publicURL.String(), nil + return u.PublicURL(filename), nil } return 
url.String(), nil diff --git a/pkg/components/imguploader/webdavuploader_test.go b/pkg/components/imguploader/webdavuploader_test.go index 5a8abd0542d..0178c9cda6c 100644 --- a/pkg/components/imguploader/webdavuploader_test.go +++ b/pkg/components/imguploader/webdavuploader_test.go @@ -2,6 +2,7 @@ package imguploader import ( "context" + "net/url" "testing" . "github.com/smartystreets/goconvey/convey" @@ -26,3 +27,15 @@ func TestUploadToWebdav(t *testing.T) { So(path, ShouldStartWith, "http://publicurl:8888/webdav/") }) } + +func TestPublicURL(t *testing.T) { + Convey("Given a public URL with parameters, and no template", t, func() { + webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://cloudycloud.me/s/DOIFDOMV/download?files=") + parsed, _ := url.Parse(webdavUploader.PublicURL("fileyfile.png")) + So(parsed.Path, ShouldEndWith, "fileyfile.png") + }) + Convey("Given a public URL with parameters, and a template", t, func() { + webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://cloudycloud.me/s/DOIFDOMV/download?files=${file}") + So(webdavUploader.PublicURL("fileyfile.png"), ShouldEndWith, "fileyfile.png") + }) +} diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go index d6eaf9a975e..a421e3ebe0a 100644 --- a/pkg/login/ext_user.go +++ b/pkg/login/ext_user.go @@ -72,6 +72,13 @@ func UpsertUser(cmd *m.UpsertUserCommand) error { return err } + // Sync isGrafanaAdmin permission + if extUser.IsGrafanaAdmin != nil && *extUser.IsGrafanaAdmin != cmd.Result.IsAdmin { + if err := bus.Dispatch(&m.UpdateUserPermissionsCommand{UserId: cmd.Result.Id, IsGrafanaAdmin: *extUser.IsGrafanaAdmin}); err != nil { + return err + } + } + err = bus.Dispatch(&m.SyncTeamsCommand{ User: cmd.Result, ExternalUser: extUser, diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index bdf87b2db54..9e4918f0290 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -175,6 +175,7 @@ func (a 
*ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo if ldapUser.isMemberOf(group.GroupDN) { extUser.OrgRoles[group.OrgId] = group.OrgRole + extUser.IsGrafanaAdmin = group.IsGrafanaAdmin } } @@ -190,18 +191,18 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo } // add/update user in grafana - userQuery := &m.UpsertUserCommand{ + upsertUserCmd := &m.UpsertUserCommand{ ReqContext: ctx, ExternalUser: extUser, SignupAllowed: setting.LdapAllowSignup, } - err := bus.Dispatch(userQuery) + err := bus.Dispatch(upsertUserCmd) if err != nil { return nil, err } - return userQuery.Result, nil + return upsertUserCmd.Result, nil } func (a *ldapAuther) serverBind() error { diff --git a/pkg/login/ldap_settings.go b/pkg/login/ldap_settings.go index 497d8725e29..c4f5982b237 100644 --- a/pkg/login/ldap_settings.go +++ b/pkg/login/ldap_settings.go @@ -44,9 +44,10 @@ type LdapAttributeMap struct { } type LdapGroupToOrgRole struct { - GroupDN string `toml:"group_dn"` - OrgId int64 `toml:"org_id"` - OrgRole m.RoleType `toml:"org_role"` + GroupDN string `toml:"group_dn"` + OrgId int64 `toml:"org_id"` + IsGrafanaAdmin *bool `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatability) + OrgRole m.RoleType `toml:"org_role"` } var LdapCfg LdapConfig diff --git a/pkg/login/ldap_test.go b/pkg/login/ldap_test.go index 5080840704e..1cf98bd1e14 100644 --- a/pkg/login/ldap_test.go +++ b/pkg/login/ldap_test.go @@ -98,6 +98,10 @@ func TestLdapAuther(t *testing.T) { So(result.Login, ShouldEqual, "torkelo") }) + Convey("Should set isGrafanaAdmin to false by default", func() { + So(result.IsAdmin, ShouldBeFalse) + }) + }) }) @@ -223,8 +227,32 @@ func TestLdapAuther(t *testing.T) { So(sc.addOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) }) + + Convey("Should not update permissions unless specified", func() { + So(err, ShouldBeNil) + So(sc.updateUserPermissionsCmd, 
ShouldBeNil) + }) }) + ldapAutherScenario("given ldap groups with grafana_admin=true", func(sc *scenarioContext) { + trueVal := true + + ldapAuther := NewLdapAuthenticator(&LdapServerConf{ + LdapGroups: []*LdapGroupToOrgRole{ + {GroupDN: "cn=admins", OrgId: 1, OrgRole: "Admin", IsGrafanaAdmin: &trueVal}, + }, + }) + + sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ + MemberOf: []string{"cn=admins"}, + }) + + Convey("Should create user with admin set to true", func() { + So(err, ShouldBeNil) + So(sc.updateUserPermissionsCmd.IsGrafanaAdmin, ShouldBeTrue) + }) + }) }) Convey("When calling SyncUser", t, func() { @@ -332,6 +360,11 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { return nil }) + bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpdateUserPermissionsCommand) error { + sc.updateUserPermissionsCmd = cmd + return nil + }) + bus.AddHandler("test", func(cmd *m.GetUserByAuthInfoQuery) error { sc.getUserByAuthInfoQuery = cmd sc.getUserByAuthInfoQuery.Result = &m.User{Login: cmd.Login} @@ -379,14 +412,15 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { } type scenarioContext struct { - getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery - getUserOrgListQuery *m.GetUserOrgListQuery - createUserCmd *m.CreateUserCommand - addOrgUserCmd *m.AddOrgUserCommand - updateOrgUserCmd *m.UpdateOrgUserCommand - removeOrgUserCmd *m.RemoveOrgUserCommand - updateUserCmd *m.UpdateUserCommand - setUsingOrgCmd *m.SetUsingOrgCommand + getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery + getUserOrgListQuery *m.GetUserOrgListQuery + createUserCmd *m.CreateUserCommand + addOrgUserCmd *m.AddOrgUserCommand + updateOrgUserCmd *m.UpdateOrgUserCommand + removeOrgUserCmd *m.RemoveOrgUserCommand + updateUserCmd *m.UpdateUserCommand + setUsingOrgCmd *m.SetUsingOrgCommand + updateUserPermissionsCmd *m.UpdateUserPermissionsCommand } func (sc *scenarioContext) userQueryReturns(user *m.User) { diff --git 
a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 4dd84c12151..a8d9f7308fa 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -44,6 +44,7 @@ var ( M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter + M_Aws_CloudWatch_GetMetricData prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers @@ -218,6 +219,12 @@ func init() { Namespace: exporterName, }) + M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_get_metric_data_total", + Help: "counter for getting metric data time series from aws", + Namespace: exporterName, + }) + M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", @@ -307,6 +314,7 @@ func initMetricVars() { M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, + M_Aws_CloudWatch_GetMetricData, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, diff --git a/pkg/models/playlist.go b/pkg/models/playlist.go index 5c49bb9256c..c52da202293 100644 --- a/pkg/models/playlist.go +++ b/pkg/models/playlist.go @@ -63,7 +63,7 @@ type PlaylistDashboards []*PlaylistDashboard type UpdatePlaylistCommand struct { OrgId int64 `json:"-"` - Id int64 `json:"id" binding:"Required"` + Id int64 `json:"id"` Name string `json:"name" binding:"Required"` Interval string `json:"interval"` Items []PlaylistItemDTO `json:"items"` diff --git a/pkg/models/team.go b/pkg/models/team.go index 9c679a13394..61285db3a5f 100644 --- a/pkg/models/team.go +++ b/pkg/models/team.go @@ -49,13 +49,13 @@ type DeleteTeamCommand struct { type GetTeamByIdQuery struct { OrgId int64 Id int64 - Result *Team + Result *TeamDTO } type GetTeamsByUserQuery struct { OrgId int64 - UserId int64 `json:"userId"` - Result []*Team `json:"teams"` + UserId 
int64 `json:"userId"` + Result []*TeamDTO `json:"teams"` } type SearchTeamsQuery struct { @@ -68,7 +68,7 @@ type SearchTeamsQuery struct { Result SearchTeamQueryResult } -type SearchTeamDto struct { +type TeamDTO struct { Id int64 `json:"id"` OrgId int64 `json:"orgId"` Name string `json:"name"` @@ -78,8 +78,8 @@ type SearchTeamDto struct { } type SearchTeamQueryResult struct { - TotalCount int64 `json:"totalCount"` - Teams []*SearchTeamDto `json:"teams"` - Page int `json:"page"` - PerPage int `json:"perPage"` + TotalCount int64 `json:"totalCount"` + Teams []*TeamDTO `json:"teams"` + Page int `json:"page"` + PerPage int `json:"perPage"` } diff --git a/pkg/models/user_auth.go b/pkg/models/user_auth.go index 162a4d867a9..28189005737 100644 --- a/pkg/models/user_auth.go +++ b/pkg/models/user_auth.go @@ -13,14 +13,15 @@ type UserAuth struct { } type ExternalUserInfo struct { - AuthModule string - AuthId string - UserId int64 - Email string - Login string - Name string - Groups []string - OrgRoles map[int64]RoleType + AuthModule string + AuthId string + UserId int64 + Email string + Login string + Name string + Groups []string + OrgRoles map[int64]RoleType + IsGrafanaAdmin *bool // This is a pointer to know if we should sync this or not (nil = ignore sync) } // --------------------- diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 2fec6acbf54..ff44805e35f 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -17,11 +17,14 @@ import ( plugin "github.com/hashicorp/go-plugin" ) +// DataSourcePlugin contains all metadata about a datasource plugin type DataSourcePlugin struct { FrontendPluginBase Annotations bool `json:"annotations"` Metrics bool `json:"metrics"` Alerting bool `json:"alerting"` + Explore bool `json:"explore"` + Logs bool `json:"logs"` QueryOptions map[string]bool `json:"queryOptions,omitempty"` BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` diff --git 
a/pkg/services/guardian/guardian.go b/pkg/services/guardian/guardian.go index cfd8f5c3a6e..7506338c5f0 100644 --- a/pkg/services/guardian/guardian.go +++ b/pkg/services/guardian/guardian.go @@ -30,7 +30,7 @@ type dashboardGuardianImpl struct { dashId int64 orgId int64 acl []*m.DashboardAclInfoDTO - groups []*m.Team + teams []*m.TeamDTO log log.Logger } @@ -186,15 +186,15 @@ func (g *dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) { return g.acl, nil } -func (g *dashboardGuardianImpl) getTeams() ([]*m.Team, error) { - if g.groups != nil { - return g.groups, nil +func (g *dashboardGuardianImpl) getTeams() ([]*m.TeamDTO, error) { + if g.teams != nil { + return g.teams, nil } query := m.GetTeamsByUserQuery{OrgId: g.orgId, UserId: g.user.UserId} err := bus.Dispatch(&query) - g.groups = query.Result + g.teams = query.Result return query.Result, err } diff --git a/pkg/services/guardian/guardian_util_test.go b/pkg/services/guardian/guardian_util_test.go index 3d839e71b74..d85548ecb8c 100644 --- a/pkg/services/guardian/guardian_util_test.go +++ b/pkg/services/guardian/guardian_util_test.go @@ -19,7 +19,7 @@ type scenarioContext struct { givenUser *m.SignedInUser givenDashboardID int64 givenPermissions []*m.DashboardAclInfoDTO - givenTeams []*m.Team + givenTeams []*m.TeamDTO updatePermissions []*m.DashboardAcl expectedFlags permissionFlags callerFile string @@ -84,11 +84,11 @@ func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, per return nil }) - teams := []*m.Team{} + teams := []*m.TeamDTO{} for _, p := range permissions { if p.TeamId > 0 { - teams = append(teams, &m.Team{Id: p.TeamId}) + teams = append(teams, &m.TeamDTO{Id: p.TeamId}) } } diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index 531a70b2101..af911dc22e6 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -73,6 +73,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error { alert.name, alert.state, 
alert.new_state_date, + alert.eval_data, alert.eval_date, alert.execution_error, dashboard.uid as dashboard_uid, diff --git a/pkg/services/sqlstore/alert_test.go b/pkg/services/sqlstore/alert_test.go index 79fa99864e7..d97deb45f0e 100644 --- a/pkg/services/sqlstore/alert_test.go +++ b/pkg/services/sqlstore/alert_test.go @@ -13,7 +13,7 @@ func mockTimeNow() { var timeSeed int64 timeNow = func() time.Time { fakeNow := time.Unix(timeSeed, 0) - timeSeed += 1 + timeSeed++ return fakeNow } } @@ -30,7 +30,7 @@ func TestAlertingDataAccess(t *testing.T) { InitTestDB(t) testDash := insertTestDashboard("dashboard with alerts", 1, 0, false, "alert") - + evalData, _ := simplejson.NewJson([]byte(`{"test": "test"}`)) items := []*m.Alert{ { PanelId: 1, @@ -40,6 +40,7 @@ func TestAlertingDataAccess(t *testing.T) { Message: "Alerting message", Settings: simplejson.New(), Frequency: 1, + EvalData: evalData, }, } @@ -104,8 +105,18 @@ func TestAlertingDataAccess(t *testing.T) { alert := alertQuery.Result[0] So(err2, ShouldBeNil) + So(alert.Id, ShouldBeGreaterThan, 0) + So(alert.DashboardId, ShouldEqual, testDash.Id) + So(alert.PanelId, ShouldEqual, 1) So(alert.Name, ShouldEqual, "Alerting title") So(alert.State, ShouldEqual, "pending") + So(alert.NewStateDate, ShouldNotBeNil) + So(alert.EvalData, ShouldNotBeNil) + So(alert.EvalData.Get("test").MustString(), ShouldEqual, "test") + So(alert.EvalDate, ShouldNotBeNil) + So(alert.ExecutionError, ShouldEqual, "") + So(alert.DashboardUid, ShouldNotBeNil) + So(alert.DashboardSlug, ShouldEqual, "dashboard-with-alerts") }) Convey("Viewer cannot read alerts", func() { diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index e4aecf0391d..8ff78c4a0ff 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -181,7 +181,7 @@ func TestDashboardDataAccess(t *testing.T) { So(err, ShouldBeNil) So(query.Result.FolderId, ShouldEqual, 0) So(query.Result.CreatedBy, 
ShouldEqual, savedDash.CreatedBy) - So(query.Result.Created, ShouldEqual, savedDash.Created.Truncate(time.Second)) + So(query.Result.Created, ShouldHappenWithin, 3*time.Second, savedDash.Created) So(query.Result.UpdatedBy, ShouldEqual, 100) So(query.Result.Updated.IsZero(), ShouldBeFalse) }) @@ -387,6 +387,7 @@ func insertTestDashboardForPlugin(title string, orgId int64, folderId int64, isF func createUser(name string, role string, isAdmin bool) m.User { setting.AutoAssignOrg = true + setting.AutoAssignOrgId = 1 setting.AutoAssignOrgRole = role currentUserCmd := m.CreateUserCommand{Login: name, Email: name + "@test.com", Name: "a " + name, IsAdmin: isAdmin} diff --git a/pkg/services/sqlstore/org_test.go b/pkg/services/sqlstore/org_test.go index 521a2a11c05..af8500707d5 100644 --- a/pkg/services/sqlstore/org_test.go +++ b/pkg/services/sqlstore/org_test.go @@ -17,6 +17,7 @@ func TestAccountDataAccess(t *testing.T) { Convey("Given single org mode", func() { setting.AutoAssignOrg = true + setting.AutoAssignOrgId = 1 setting.AutoAssignOrgRole = "Viewer" Convey("Users should be added to default organization", func() { diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index 9378ca37f60..72955df9a6a 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -22,6 +22,16 @@ func init() { bus.AddHandler("sql", GetTeamMembers) } +func getTeamSelectSqlBase() string { + return `SELECT + team.id as id, + team.org_id, + team.name as name, + team.email as email, + (SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count + FROM team as team ` +} + func CreateTeam(cmd *m.CreateTeamCommand) error { return inTransaction(func(sess *DBSession) error { @@ -130,21 +140,15 @@ func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession func SearchTeams(query *m.SearchTeamsQuery) error { query.Result = m.SearchTeamQueryResult{ - Teams: make([]*m.SearchTeamDto, 0), + Teams: make([]*m.TeamDTO, 
0), } queryWithWildcards := "%" + query.Query + "%" var sql bytes.Buffer params := make([]interface{}, 0) - sql.WriteString(`select - team.id as id, - team.org_id, - team.name as name, - team.email as email, - (select count(*) from team_member where team_member.team_id = team.id) as member_count - from team as team - where team.org_id = ?`) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ?`) params = append(params, query.OrgId) @@ -186,8 +190,14 @@ func SearchTeams(query *m.SearchTeamsQuery) error { } func GetTeamById(query *m.GetTeamByIdQuery) error { - var team m.Team - exists, err := x.Where("org_id=? and id=?", query.OrgId, query.Id).Get(&team) + var sql bytes.Buffer + + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ? and team.id = ?`) + + var team m.TeamDTO + exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team) + if err != nil { return err } @@ -202,13 +212,15 @@ func GetTeamById(query *m.GetTeamByIdQuery) error { // GetTeamsByUser is used by the Guardian when checking a users' permissions func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { - query.Result = make([]*m.Team, 0) + query.Result = make([]*m.TeamDTO, 0) - sess := x.Table("team") - sess.Join("INNER", "team_member", "team.id=team_member.team_id") - sess.Where("team.org_id=? and team_member.user_id=?", query.OrgId, query.UserId) + var sql bytes.Buffer - err := sess.Find(&query.Result) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`) + sql.WriteString(` WHERE team.org_id = ? 
and team_member.user_id = ?`) + + err := x.Sql(sql.String(), query.OrgId, query.UserId).Find(&query.Result) return err } diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 5e9a085b26d..0ec1a947870 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -42,16 +42,23 @@ func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error var org m.Org if setting.AutoAssignOrg { - // right now auto assign to org with id 1 - has, err := sess.Where("id=?", 1).Get(&org) + has, err := sess.Where("id=?", setting.AutoAssignOrgId).Get(&org) if err != nil { return 0, err } if has { return org.Id, nil + } else { + if setting.AutoAssignOrgId == 1 { + org.Name = "Main Org." + org.Id = int64(setting.AutoAssignOrgId) + } else { + sqlog.Info("Could not create user: organization id %v does not exist", + setting.AutoAssignOrgId) + return 0, fmt.Errorf("Could not create user: organization id %v does not exist", + setting.AutoAssignOrgId) + } } - org.Name = "Main Org." 
- org.Id = 1 } else { org.Name = cmd.OrgName if len(org.Name) == 0 { diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index d8c8e6431c0..eb61568261d 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -100,6 +100,7 @@ var ( AllowUserSignUp bool AllowUserOrgCreate bool AutoAssignOrg bool + AutoAssignOrgId int AutoAssignOrgRole string VerifyEmailEnabled bool LoginHint string @@ -592,6 +593,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { AllowUserSignUp = users.Key("allow_sign_up").MustBool(true) AllowUserOrgCreate = users.Key("allow_org_create").MustBool(true) AutoAssignOrg = users.Key("auto_assign_org").MustBool(true) + AutoAssignOrgId = users.Key("auto_assign_org_id").MustInt(1) AutoAssignOrgRole = users.Key("auto_assign_org_role").In("Editor", []string{"Editor", "Admin", "Viewer"}) VerifyEmailEnabled = users.Key("verify_email_enabled").MustBool(false) LoginHint = users.Key("login_hint").String() diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 8af97575ae9..92352a51315 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -14,8 +14,10 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" + "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" @@ -88,48 +90,80 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo Results: make(map[string]*tsdb.QueryResult), } - errCh := make(chan error, 1) - resCh := make(chan *tsdb.QueryResult, 1) + eg, ectx := errgroup.WithContext(ctx) - currentlyExecuting := 0 + getMetricDataQueries := make(map[string]map[string]*CloudWatchQuery) for i, model := range queryContext.Queries { queryType := model.Model.Get("type").MustString() if 
queryType != "timeSeriesQuery" && queryType != "" { continue } - currentlyExecuting++ - go func(refId string, index int) { - queryRes, err := e.executeQuery(ctx, queryContext.Queries[index].Model, queryContext) - currentlyExecuting-- - if err != nil { - errCh <- err - } else { - queryRes.RefId = refId - resCh <- queryRes + + RefId := queryContext.Queries[i].RefId + query, err := parseQuery(queryContext.Queries[i].Model) + if err != nil { + result.Results[RefId] = &tsdb.QueryResult{ + Error: err, } - }(model.RefId, i) + return result, nil + } + query.RefId = RefId + + if query.Id != "" { + if _, ok := getMetricDataQueries[query.Region]; !ok { + getMetricDataQueries[query.Region] = make(map[string]*CloudWatchQuery) + } + getMetricDataQueries[query.Region][query.Id] = query + continue + } + + if query.Id == "" && query.Expression != "" { + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: fmt.Errorf("Invalid query: id should be set if using expression"), + } + return result, nil + } + + eg.Go(func() error { + queryRes, err := e.executeQuery(ectx, query, queryContext) + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { + return err + } + result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } + return nil + }) } - for currentlyExecuting != 0 { - select { - case res := <-resCh: - result.Results[res.RefId] = res - case err := <-errCh: - return result, err - case <-ctx.Done(): - return result, ctx.Err() + if len(getMetricDataQueries) > 0 { + for region, getMetricDataQuery := range getMetricDataQueries { + q := getMetricDataQuery + eg.Go(func() error { + queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { + return err + } + for _, queryRes := range queryResponses { + result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } + } + return nil + }) } } + if err := 
eg.Wait(); err != nil { + return nil, err + } + return result, nil } -func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { - query, err := parseQuery(parameters) - if err != nil { - return nil, err - } - +func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatchQuery, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { client, err := e.getClient(query.Region) if err != nil { return nil, err @@ -201,6 +235,139 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl return queryRes, nil } +func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, region string, queries map[string]*CloudWatchQuery, queryContext *tsdb.TsdbQuery) ([]*tsdb.QueryResult, error) { + queryResponses := make([]*tsdb.QueryResult, 0) + + // validate query + for _, query := range queries { + if !(len(query.Statistics) == 1 && len(query.ExtendedStatistics) == 0) && + !(len(query.Statistics) == 0 && len(query.ExtendedStatistics) == 1) { + return queryResponses, errors.New("Statistics count should be 1") + } + } + + client, err := e.getClient(region) + if err != nil { + return queryResponses, err + } + + startTime, err := queryContext.TimeRange.ParseFrom() + if err != nil { + return queryResponses, err + } + + endTime, err := queryContext.TimeRange.ParseTo() + if err != nil { + return queryResponses, err + } + + params := &cloudwatch.GetMetricDataInput{ + StartTime: aws.Time(startTime), + EndTime: aws.Time(endTime), + ScanBy: aws.String("TimestampAscending"), + } + for _, query := range queries { + // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { + return nil, errors.New("too long query period") + } + + mdq := &cloudwatch.MetricDataQuery{ + Id: aws.String(query.Id), + ReturnData: 
aws.Bool(query.ReturnData), + } + if query.Expression != "" { + mdq.Expression = aws.String(query.Expression) + } else { + mdq.MetricStat = &cloudwatch.MetricStat{ + Metric: &cloudwatch.Metric{ + Namespace: aws.String(query.Namespace), + MetricName: aws.String(query.MetricName), + }, + Period: aws.Int64(int64(query.Period)), + } + for _, d := range query.Dimensions { + mdq.MetricStat.Metric.Dimensions = append(mdq.MetricStat.Metric.Dimensions, + &cloudwatch.Dimension{ + Name: d.Name, + Value: d.Value, + }) + } + if len(query.Statistics) == 1 { + mdq.MetricStat.Stat = query.Statistics[0] + } else { + mdq.MetricStat.Stat = query.ExtendedStatistics[0] + } + } + params.MetricDataQueries = append(params.MetricDataQueries, mdq) + } + + nextToken := "" + mdr := make(map[string]*cloudwatch.MetricDataResult) + for { + if nextToken != "" { + params.NextToken = aws.String(nextToken) + } + resp, err := client.GetMetricDataWithContext(ctx, params) + if err != nil { + return queryResponses, err + } + metrics.M_Aws_CloudWatch_GetMetricData.Add(float64(len(params.MetricDataQueries))) + + for _, r := range resp.MetricDataResults { + if _, ok := mdr[*r.Id]; !ok { + mdr[*r.Id] = r + } else { + mdr[*r.Id].Timestamps = append(mdr[*r.Id].Timestamps, r.Timestamps...) + mdr[*r.Id].Values = append(mdr[*r.Id].Values, r.Values...) 
+ } + } + + if resp.NextToken == nil || *resp.NextToken == "" { + break + } + nextToken = *resp.NextToken + } + + for i, r := range mdr { + if *r.StatusCode != "Complete" { + return queryResponses, fmt.Errorf("Part of query is failed: %s", *r.StatusCode) + } + + queryRes := tsdb.NewQueryResult() + queryRes.RefId = queries[i].RefId + query := queries[*r.Id] + + series := tsdb.TimeSeries{ + Tags: map[string]string{}, + Points: make([]tsdb.TimePoint, 0), + } + for _, d := range query.Dimensions { + series.Tags[*d.Name] = *d.Value + } + s := "" + if len(query.Statistics) == 1 { + s = *query.Statistics[0] + } else { + s = *query.ExtendedStatistics[0] + } + series.Name = formatAlias(query, s, series.Tags) + + for j, t := range r.Timestamps { + expectedTimestamp := r.Timestamps[j].Add(time.Duration(query.Period) * time.Second) + if j > 0 && expectedTimestamp.Before(*t) { + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(expectedTimestamp.Unix()*1000))) + } + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(*r.Values[j]), float64((*t).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &series) + queryResponses = append(queryResponses, queryRes) + } + + return queryResponses, nil +} + func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) { var result []*cloudwatch.Dimension @@ -257,6 +424,9 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { return nil, err } + id := model.Get("id").MustString("") + expression := model.Get("expression").MustString("") + dimensions, err := parseDimensions(model) if err != nil { return nil, err @@ -295,6 +465,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { alias = "{{metric}}_{{stat}}" } + returnData := model.Get("returnData").MustBool(false) highResolution := model.Get("highResolution").MustBool(false) return &CloudWatchQuery{ @@ -306,11 +477,18 @@ func parseQuery(model *simplejson.Json) 
(*CloudWatchQuery, error) { ExtendedStatistics: aws.StringSlice(extendedStatistics), Period: period, Alias: alias, + Id: id, + Expression: expression, + ReturnData: returnData, HighResolution: highResolution, }, nil } func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string { + if len(query.Id) > 0 && len(query.Expression) > 0 { + return query.Id + } + data := map[string]string{} data["region"] = query.Region data["namespace"] = query.Namespace @@ -338,6 +516,7 @@ func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]stri func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) { queryRes := tsdb.NewQueryResult() + queryRes.RefId = query.RefId var value float64 for _, s := range append(query.Statistics, query.ExtendedStatistics...) { series := tsdb.TimeSeries{ diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index 0737b64686d..1225fb9b31b 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -5,6 +5,7 @@ import ( ) type CloudWatchQuery struct { + RefId string Region string Namespace string MetricName string @@ -13,5 +14,8 @@ type CloudWatchQuery struct { ExtendedStatistics []*string Period int Alias string + Id string + Expression string + ReturnData bool HighResolution bool } diff --git a/pkg/tsdb/elasticsearch/client/index_pattern.go b/pkg/tsdb/elasticsearch/client/index_pattern.go index 8391e902ea4..952b5c4f806 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern.go @@ -248,13 +248,28 @@ var datePatternReplacements = map[string]string{ func formatDate(t time.Time, pattern string) string { var datePattern string - parts := strings.Split(strings.TrimLeft(pattern, "["), "]") - base := parts[0] - if len(parts) == 2 { - datePattern = parts[1] - } else { - datePattern = base - base = "" + base := "" + ltr := false + + if strings.HasPrefix(pattern, "[") 
{ + parts := strings.Split(strings.TrimLeft(pattern, "["), "]") + base = parts[0] + if len(parts) == 2 { + datePattern = parts[1] + } else { + datePattern = base + base = "" + } + ltr = true + } else if strings.HasSuffix(pattern, "]") { + parts := strings.Split(strings.TrimRight(pattern, "]"), "[") + datePattern = parts[0] + if len(parts) == 2 { + base = parts[1] + } else { + base = "" + } + ltr = false } formatted := t.Format(patternToLayout(datePattern)) @@ -293,7 +308,11 @@ func formatDate(t time.Time, pattern string) string { formatted = strings.Replace(formatted, "", fmt.Sprintf("%d", t.Hour()), -1) } - return base + formatted + if ltr { + return base + formatted + } + + return formatted + base } func patternToLayout(pattern string) string { diff --git a/pkg/tsdb/elasticsearch/client/index_pattern_test.go b/pkg/tsdb/elasticsearch/client/index_pattern_test.go index 3bd823d8c87..ca20b39d532 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern_test.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern_test.go @@ -28,29 +28,54 @@ func TestIndexPattern(t *testing.T) { to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) { - //So(indices, ShouldHaveLength, 1) + So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15.17") }) + indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15.17-data") + }) + indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15") }) + indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + 
So(indices[0], ShouldEqual, "2018.05.15-data") + }) + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.20") }) + indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.20-data") + }) + indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05") }) + indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05-data") + }) + indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018") }) + + indexPatternScenario(intervalYearly, "YYYY[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018-data") + }) }) Convey("Hourly interval", t, func() { diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index 12270da2a48..8a7e15f9697 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -53,7 +53,13 @@ func generateConnectionString(datasource *models.DataSource) string { } sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full") - u := &url.URL{Scheme: "postgres", User: url.UserPassword(datasource.User, password), Host: datasource.Url, Path: datasource.Database, RawQuery: "sslmode=" + sslmode} + u := &url.URL{ + Scheme: "postgres", + User: url.UserPassword(datasource.User, password), + Host: datasource.Url, Path: datasource.Database, + RawQuery: "sslmode=" + url.QueryEscape(sslmode), + } + return u.String() } diff --git 
a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 82a9b8f0d88..ec908aeb9de 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -68,6 +68,7 @@ func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSour engine.SetMaxOpenConns(10) engine.SetMaxIdleConns(10) + engineCache.versions[dsInfo.Id] = dsInfo.Version engineCache.cache[dsInfo.Id] = engine e.XormEngine = engine diff --git a/pkg/tsdb/testdata/testdata.go b/pkg/tsdb/testdata/testdata.go index a1ab250ad37..c2c2ea3f696 100644 --- a/pkg/tsdb/testdata/testdata.go +++ b/pkg/tsdb/testdata/testdata.go @@ -21,7 +21,7 @@ func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, err } func init() { - tsdb.RegisterTsdbQueryEndpoint("grafana-testdata-datasource", NewTestDataExecutor) + tsdb.RegisterTsdbQueryEndpoint("testdata", NewTestDataExecutor) } func (e *TestDataExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx index deebe84f2c8..178e53198d4 100644 --- a/public/app/containers/Explore/Explore.tsx +++ b/public/app/containers/Explore/Explore.tsx @@ -1,16 +1,20 @@ import React from 'react'; import { hot } from 'react-hot-loader'; +import Select from 'react-select'; + +import kbn from 'app/core/utils/kbn'; import colors from 'app/core/utils/colors'; import TimeSeries from 'app/core/time_series2'; +import { decodePathComponent } from 'app/core/utils/location_util'; +import { parse as parseDate } from 'app/core/utils/datemath'; import ElapsedTime from './ElapsedTime'; import QueryRows from './QueryRows'; import Graph from './Graph'; +import Logs from './Logs'; import Table from './Table'; import TimePicker, { DEFAULT_RANGE } from './TimePicker'; -import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; -import { buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from 
'./utils/query'; -import { decodePathComponent } from 'app/core/utils/location_util'; +import { ensureQueries, generateQueryKey, hasQuery } from './utils/query'; function makeTimeSeriesList(dataList, options) { return dataList.map((seriesData, index) => { @@ -30,74 +34,136 @@ function makeTimeSeriesList(dataList, options) { }); } -function parseInitialState(initial) { - try { - const parsed = JSON.parse(decodePathComponent(initial)); - return { - queries: parsed.queries.map(q => q.query), - range: parsed.range, - }; - } catch (e) { - console.error(e); - return { queries: [], range: DEFAULT_RANGE }; +function parseInitialState(initial: string | undefined) { + if (initial) { + try { + const parsed = JSON.parse(decodePathComponent(initial)); + return { + datasource: parsed.datasource, + queries: parsed.queries.map(q => q.query), + range: parsed.range, + }; + } catch (e) { + console.error(e); + } } + return { datasource: null, queries: [], range: DEFAULT_RANGE }; } interface IExploreState { datasource: any; datasourceError: any; - datasourceLoading: any; + datasourceLoading: boolean | null; + datasourceMissing: boolean; graphResult: any; + initialDatasource?: string; latency: number; loading: any; + logsResult: any; queries: any; queryError: any; range: any; requestOptions: any; showingGraph: boolean; + showingLogs: boolean; showingTable: boolean; + supportsGraph: boolean | null; + supportsLogs: boolean | null; + supportsTable: boolean | null; tableResult: any; } -// @observer export class Explore extends React.Component { - datasourceSrv: DatasourceSrv; + el: any; constructor(props) { super(props); - const { range, queries } = parseInitialState(props.routeParams.initial); + const { datasource, queries, range } = parseInitialState(props.routeParams.state); this.state = { datasource: null, datasourceError: null, - datasourceLoading: true, + datasourceLoading: null, + datasourceMissing: false, graphResult: null, + initialDatasource: datasource, latency: 0, loading: 
false, + logsResult: null, queries: ensureQueries(queries), queryError: null, range: range || { ...DEFAULT_RANGE }, requestOptions: null, showingGraph: true, + showingLogs: true, showingTable: true, + supportsGraph: null, + supportsLogs: null, + supportsTable: null, tableResult: null, ...props.initialState, }; } async componentDidMount() { - const datasource = await this.props.datasourceSrv.get(); - const testResult = await datasource.testDatasource(); - if (testResult.status === 'success') { - this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + const { datasourceSrv } = this.props; + const { initialDatasource } = this.state; + if (!datasourceSrv) { + throw new Error('No datasource service passed as props.'); + } + const datasources = datasourceSrv.getExploreSources(); + if (datasources.length > 0) { + this.setState({ datasourceLoading: true }); + // Priority: datasource in url, default datasource, first explore datasource + let datasource; + if (initialDatasource) { + datasource = await datasourceSrv.get(initialDatasource); + } else { + datasource = await datasourceSrv.get(); + } + if (!datasource.meta.explore) { + datasource = await datasourceSrv.get(datasources[0].name); + } + this.setDatasource(datasource); } else { - this.setState({ datasource: null, datasourceError: testResult.message, datasourceLoading: false }); + this.setState({ datasourceMissing: true }); } } componentDidCatch(error) { + this.setState({ datasourceError: error }); console.error(error); } + async setDatasource(datasource) { + const supportsGraph = datasource.meta.metrics; + const supportsLogs = datasource.meta.logs; + const supportsTable = datasource.meta.metrics; + let datasourceError = null; + + try { + const testResult = await datasource.testDatasource(); + datasourceError = testResult.status === 'success' ? 
null : testResult.message; + } catch (error) { + datasourceError = (error && error.statusText) || error; + } + + this.setState( + { + datasource, + datasourceError, + supportsGraph, + supportsLogs, + supportsTable, + datasourceLoading: false, + }, + () => datasourceError === null && this.handleSubmit() + ); + } + + getRef = el => { + this.el = el; + }; + handleAddQueryRow = index => { const { queries } = this.state; const nextQueries = [ @@ -108,6 +174,19 @@ export class Explore extends React.Component { this.setState({ queries: nextQueries }); }; + handleChangeDatasource = async option => { + this.setState({ + datasource: null, + datasourceError: null, + datasourceLoading: true, + graphResult: null, + logsResult: null, + tableResult: null, + }); + const datasource = await this.props.datasourceSrv.get(option.value); + this.setDatasource(datasource); + }; + handleChangeQuery = (query, index) => { const { queries } = this.state; const nextQuery = { @@ -138,6 +217,10 @@ export class Explore extends React.Component { this.setState(state => ({ showingGraph: !state.showingGraph })); }; + handleClickLogsButton = () => { + this.setState(state => ({ showingLogs: !state.showingLogs })); + }; + handleClickSplit = () => { const { onChangeSplit } = this.props; if (onChangeSplit) { @@ -159,29 +242,45 @@ export class Explore extends React.Component { }; handleSubmit = () => { - const { showingGraph, showingTable } = this.state; - if (showingTable) { + const { showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable } = this.state; + if (showingTable && supportsTable) { this.runTableQuery(); } - if (showingGraph) { + if (showingGraph && supportsGraph) { this.runGraphQuery(); } + if (showingLogs && supportsLogs) { + this.runLogsQuery(); + } }; - async runGraphQuery() { + buildQueryOptions(targetOptions: { format: string; instant?: boolean }) { const { datasource, queries, range } = this.state; + const resolution = this.el.offsetWidth; + const 
absoluteRange = { + from: parseDate(range.from, false), + to: parseDate(range.to, true), + }; + const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval); + const targets = queries.map(q => ({ + ...targetOptions, + expr: q.query, + })); + return { + interval, + range, + targets, + }; + } + + async runGraphQuery() { + const { datasource, queries } = this.state; if (!hasQuery(queries)) { return; } this.setState({ latency: 0, loading: true, graphResult: null, queryError: null }); const now = Date.now(); - const options = buildQueryOptions({ - format: 'time_series', - interval: datasource.interval, - instant: false, - range, - queries: queries.map(q => q.query), - }); + const options = this.buildQueryOptions({ format: 'time_series', instant: false }); try { const res = await datasource.query(options); const result = makeTimeSeriesList(res.data, options); @@ -195,18 +294,15 @@ export class Explore extends React.Component { } async runTableQuery() { - const { datasource, queries, range } = this.state; + const { datasource, queries } = this.state; if (!hasQuery(queries)) { return; } this.setState({ latency: 0, loading: true, queryError: null, tableResult: null }); const now = Date.now(); - const options = buildQueryOptions({ + const options = this.buildQueryOptions({ format: 'table', - interval: datasource.interval, instant: true, - range, - queries: queries.map(q => q.query), }); try { const res = await datasource.query(options); @@ -220,35 +316,71 @@ export class Explore extends React.Component { } } + async runLogsQuery() { + const { datasource, queries } = this.state; + if (!hasQuery(queries)) { + return; + } + this.setState({ latency: 0, loading: true, queryError: null, logsResult: null }); + const now = Date.now(); + const options = this.buildQueryOptions({ + format: 'logs', + }); + + try { + const res = await datasource.query(options); + const logsData = res.data; + const latency = Date.now() - now; + this.setState({ latency, 
loading: false, logsResult: logsData, requestOptions: options }); + } catch (response) { + console.error(response); + const queryError = response.data ? response.data.error : response; + this.setState({ loading: false, queryError }); + } + } + request = url => { const { datasource } = this.state; return datasource.metadataRequest(url); }; render() { - const { position, split } = this.props; + const { datasourceSrv, position, split } = this.props; const { datasource, datasourceError, datasourceLoading, + datasourceMissing, graphResult, latency, loading, + logsResult, queries, queryError, range, requestOptions, showingGraph, + showingLogs, showingTable, + supportsGraph, + supportsLogs, + supportsTable, tableResult, } = this.state; const showingBoth = showingGraph && showingTable; const graphHeight = showingBoth ? '200px' : '400px'; const graphButtonActive = showingBoth || showingGraph ? 'active' : ''; + const logsButtonActive = showingLogs ? 'active' : ''; const tableButtonActive = showingBoth || showingTable ? 'active' : ''; const exploreClass = split ? 'explore explore-split' : 'explore'; + const datasources = datasourceSrv.getExploreSources().map(ds => ({ + value: ds.name, + label: ds.name, + })); + const selectedDatasource = datasource ? datasource.name : undefined; + return ( -
+
{position === 'left' ? (
@@ -264,6 +396,18 @@ export class Explore extends React.Component {
)} + {!datasourceMissing ? ( +
+ +
+ +
+ +
+
+
+ + + {groups.length === 0 && + !isAdding && ( +
+
There are no external groups to sync with
+ +
+ {headerTooltip} + + Learn more + +
+
+ )} + + {groups.length > 0 && ( +
+ + + + + + + {groups.map(group => this.renderGroup(group))} +
External Group ID +
+
+ )} +
+ ); + } +} + +export default hot(module)(TeamGroupSync); diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx new file mode 100644 index 00000000000..4429764b1cc --- /dev/null +++ b/public/app/containers/Teams/TeamList.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { BackendSrv } from 'app/core/services/backend_srv'; +import appEvents from 'app/core/app_events'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + backendSrv: BackendSrv; +} + +@inject('nav', 'teams') +@observer +export class TeamList extends React.Component { + constructor(props) { + super(props); + + this.props.nav.load('cfg', 'teams'); + this.fetchTeams(); + } + + fetchTeams() { + this.props.teams.loadTeams(); + } + + deleteTeam(team: ITeam) { + appEvents.emit('confirm-modal', { + title: 'Delete', + text: 'Are you sure you want to delete Team ' + team.name + '?', + yesText: 'Delete', + icon: 'fa-warning', + onConfirm: () => { + this.deleteTeamConfirmed(team); + }, + }); + } + + deleteTeamConfirmed(team) { + this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this)); + } + + onSearchQueryChange = evt => { + this.props.teams.setSearchQuery(evt.target.value); + }; + + renderTeamMember(team: ITeam): JSX.Element { + let teamUrl = `org/teams/edit/${team.id}`; + + return ( + + + + + + + + {team.name} + + + {team.email} + + + {team.memberCount} + + + this.deleteTeam(team)} className="btn btn-danger btn-small"> + + + + + ); + } + + render() { + const { nav, teams } = this.props; + return ( +
+ +
+
+
+ +
+ + + +
+ + + + + + + + + {teams.filteredTeams.map(team => this.renderTeamMember(team))} +
+ NameEmailMembers +
+
+
+
+ ); + } +} + +export default hot(module)(TeamList); diff --git a/public/app/containers/Teams/TeamMembers.tsx b/public/app/containers/Teams/TeamMembers.tsx new file mode 100644 index 00000000000..0d0762469a0 --- /dev/null +++ b/public/app/containers/Teams/TeamMembers.tsx @@ -0,0 +1,144 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamMember } from 'app/stores/TeamsStore/TeamsStore'; +import appEvents from 'app/core/app_events'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newTeamMember?: User; +} + +@observer +export class TeamMembers extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newTeamMember: null }; + } + + componentDidMount() { + this.props.team.loadMembers(); + } + + onSearchQueryChange = evt => { + this.props.team.setSearchQuery(evt.target.value); + }; + + removeMember(member: ITeamMember) { + appEvents.emit('confirm-modal', { + title: 'Remove Member', + text: 'Are you sure you want to remove ' + member.login + ' from this group?', + yesText: 'Remove', + icon: 'fa-warning', + onConfirm: () => { + this.removeMemberConfirmed(member); + }, + }); + } + + removeMemberConfirmed(member: ITeamMember) { + this.props.team.removeMember(member); + } + + renderMember(member: ITeamMember) { + return ( + + + + + {member.login} + {member.email} + + this.removeMember(member)} className="btn btn-danger btn-mini"> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onUserSelected = (user: User) => { + this.setState({ newTeamMember: user }); + }; + + onAddUserToTeam = async () => { + await this.props.team.addMember(this.state.newTeamMember.id); + await this.props.team.loadMembers(); + this.setState({ 
newTeamMember: null }); + }; + + render() { + const { newTeamMember, isAdding } = this.state; + const members = this.props.team.members.values(); + const newTeamMemberValue = newTeamMember && newTeamMember.id.toString(); + + return ( +
+
+
+ +
+ +
+ + +
+ + +
+ +
Add Team Member
+
+ + + {this.state.newTeamMember && ( + + )} +
+
+
+ +
+ + + + + + + + {members.map(member => this.renderMember(member))} +
+ NameEmail +
+
+
+ ); + } +} + +export default hot(module)(TeamMembers); diff --git a/public/app/containers/Teams/TeamPages.tsx b/public/app/containers/Teams/TeamPages.tsx new file mode 100644 index 00000000000..500a7cbe5e8 --- /dev/null +++ b/public/app/containers/Teams/TeamPages.tsx @@ -0,0 +1,77 @@ +import React from 'react'; +import _ from 'lodash'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import config from 'app/core/config'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { ViewStore } from 'app/stores/ViewStore/ViewStore'; +import TeamMembers from './TeamMembers'; +import TeamSettings from './TeamSettings'; +import TeamGroupSync from './TeamGroupSync'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + view: typeof ViewStore.Type; +} + +@inject('nav', 'teams', 'view') +@observer +export class TeamPages extends React.Component { + isSyncEnabled: boolean; + currentPage: string; + + constructor(props) { + super(props); + + this.isSyncEnabled = config.buildInfo.isEnterprise; + this.currentPage = this.getCurrentPage(); + + this.loadTeam(); + } + + async loadTeam() { + const { teams, nav, view } = this.props; + + await teams.loadById(view.routeParams.get('id')); + + nav.initTeamPage(this.getCurrentTeam(), this.currentPage, this.isSyncEnabled); + } + + getCurrentTeam(): ITeam { + const { teams, view } = this.props; + return teams.map.get(view.routeParams.get('id')); + } + + getCurrentPage() { + const pages = ['members', 'settings', 'groupsync']; + const currentPage = this.props.view.routeParams.get('page'); + return _.includes(pages, currentPage) ? currentPage : pages[0]; + } + + render() { + const { nav } = this.props; + const currentTeam = this.getCurrentTeam(); + + if (!nav.main) { + return null; + } + + return ( +
+ + {currentTeam && ( +
+ {this.currentPage === 'members' && } + {this.currentPage === 'settings' && } + {this.currentPage === 'groupsync' && this.isSyncEnabled && } +
+ )} +
+ ); + } +} + +export default hot(module)(TeamPages); diff --git a/public/app/containers/Teams/TeamSettings.tsx b/public/app/containers/Teams/TeamSettings.tsx new file mode 100644 index 00000000000..142088a5d1e --- /dev/null +++ b/public/app/containers/Teams/TeamSettings.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { Label } from 'app/core/components/Forms/Forms'; + +interface Props { + team: ITeam; +} + +@observer +export class TeamSettings extends React.Component { + constructor(props) { + super(props); + } + + onChangeName = evt => { + this.props.team.setName(evt.target.value); + }; + + onChangeEmail = evt => { + this.props.team.setEmail(evt.target.value); + }; + + onUpdate = evt => { + evt.preventDefault(); + this.props.team.update(); + }; + + render() { + return ( +
+

Team Settings

+
+
+ + +
+
+ + +
+ +
+ +
+
+
+ ); + } +} + +export default hot(module)(TeamSettings); diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts index ace0eb00b07..a4439509f8e 100644 --- a/public/app/core/angular_wrappers.ts +++ b/public/app/core/angular_wrappers.ts @@ -5,7 +5,6 @@ import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; import LoginBackground from './components/Login/LoginBackground'; import { SearchResult } from './components/search/SearchResult'; import { TagFilter } from './components/TagFilter/TagFilter'; -import UserPicker from './components/Picker/UserPicker'; import DashboardPermissions from './components/Permissions/DashboardPermissions'; export function registerAngularDirectives() { @@ -19,6 +18,5 @@ export function registerAngularDirectives() { ['onSelect', { watchDepth: 'reference' }], ['tagOptions', { watchDepth: 'reference' }], ]); - react2AngularDirective('selectUserPicker', UserPicker, ['backendSrv', 'handlePicked']); react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']); } diff --git a/public/app/core/components/Forms/Forms.tsx b/public/app/core/components/Forms/Forms.tsx new file mode 100644 index 00000000000..4b74d48ba08 --- /dev/null +++ b/public/app/core/components/Forms/Forms.tsx @@ -0,0 +1,21 @@ +import React, { SFC, ReactNode } from 'react'; +import Tooltip from '../Tooltip/Tooltip'; + +interface Props { + tooltip?: string; + for?: string; + children: ReactNode; +} + +export const Label: SFC = props => { + return ( + + {props.children} + {props.tooltip && ( + + + + )} + + ); +}; diff --git a/public/app/core/components/Permissions/AddPermissions.jest.tsx b/public/app/core/components/Permissions/AddPermissions.jest.tsx index fe97c4c7e62..513a22ddea4 100644 --- a/public/app/core/components/Permissions/AddPermissions.jest.tsx +++ b/public/app/core/components/Permissions/AddPermissions.jest.tsx @@ -1,32 +1,32 @@ -import React from 'react'; +import React from 
'react'; +import { shallow } from 'enzyme'; import AddPermissions from './AddPermissions'; import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv } from 'test/mocks/common'; -import { shallow } from 'enzyme'; +import { getBackendSrv } from 'app/core/services/backend_srv'; + +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([ + { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, + { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, + ]); + }, + post: jest.fn(() => Promise.resolve({})), + }; + }, +})); describe('AddPermissions', () => { let wrapper; let store; let instance; + let backendSrv: any = getBackendSrv(); beforeAll(() => { - backendSrv.get.mockReturnValue( - Promise.resolve([ - { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, - { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, - ]) - ); - - backendSrv.post = jest.fn(() => Promise.resolve({})); - - store = RootStore.create( - {}, - { - backendSrv: backendSrv, - } - ); - - wrapper = shallow(); + store = RootStore.create({}, { backendSrv: backendSrv }); + wrapper = shallow(); instance = wrapper.instance(); return store.permissions.load(1, true, false); }); @@ -43,8 +43,8 @@ describe('AddPermissions', () => { login: 'user2', }; - instance.typeChanged(evt); - instance.userPicked(userItem); + instance.onTypeChanged(evt); + instance.onUserSelected(userItem); wrapper.update(); @@ -70,8 +70,8 @@ describe('AddPermissions', () => { name: 'ug1', }; - instance.typeChanged(evt); - instance.teamPicked(teamItem); + instance.onTypeChanged(evt); + instance.onTeamSelected(teamItem); wrapper.update(); diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx index 4dcd07ffb48..289e27aa731 100644 --- 
a/public/app/core/components/Permissions/AddPermissions.tsx +++ b/public/app/core/components/Permissions/AddPermissions.tsx @@ -1,24 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import { observer } from 'mobx-react'; import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore'; -import UserPicker, { User } from 'app/core/components/Picker/UserPicker'; -import TeamPicker, { Team } from 'app/core/components/Picker/TeamPicker'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; -export interface IProps { +export interface Props { permissions: any; - backendSrv: any; } + @observer -class AddPermissions extends Component { +class AddPermissions extends Component { constructor(props) { super(props); - this.userPicked = this.userPicked.bind(this); - this.teamPicked = this.teamPicked.bind(this); - this.permissionPicked = this.permissionPicked.bind(this); - this.typeChanged = this.typeChanged.bind(this); - this.handleSubmit = this.handleSubmit.bind(this); } componentWillMount() { @@ -26,49 +21,49 @@ class AddPermissions extends Component { permissions.resetNewType(); } - typeChanged(evt) { + onTypeChanged = evt => { const { value } = evt.target; const { permissions } = this.props; permissions.setNewType(value); - } + }; - userPicked(user: User) { + onUserSelected = (user: User) => { const { permissions } = this.props; if (!user) { permissions.newItem.setUser(null, null); return; } return permissions.newItem.setUser(user.id, user.login, user.avatarUrl); - } + }; - teamPicked(team: Team) { + onTeamSelected = (team: Team) => { const { permissions } = this.props; if (!team) { permissions.newItem.setTeam(null, null); return; } return 
permissions.newItem.setTeam(team.id, team.name, team.avatarUrl); - } + }; - permissionPicked(permission: OptionWithDescription) { + onPermissionChanged = (permission: OptionWithDescription) => { const { permissions } = this.props; return permissions.newItem.setPermission(permission.value); - } + }; resetNewType() { const { permissions } = this.props; return permissions.resetNewType(); } - handleSubmit(evt) { + onSubmit = evt => { evt.preventDefault(); const { permissions } = this.props; permissions.addStoreItem(); - } + }; render() { - const { permissions, backendSrv } = this.props; + const { permissions } = this.props; const newItem = permissions.newItem; const pickerClassName = 'width-20'; @@ -79,12 +74,12 @@ class AddPermissions extends Component { -
-
Add Permission For
+ +
Add Permission For
- {aclTypes.map((option, idx) => { return (
- + {
{}} + onSelected={() => {}} value={item.permission} disabled={true} className={'gf-form-input--form-dropdown-right'} diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx index b0158525d52..a17aa8c04df 100644 --- a/public/app/core/components/Permissions/PermissionsListItem.tsx +++ b/public/app/core/components/Permissions/PermissionsListItem.tsx @@ -68,7 +68,7 @@ export default observer(({ item, removeItem, permissionChanged, itemIndex, folde
void; + onSelected: (permission) => void; value: number; disabled: boolean; className?: string; @@ -16,14 +16,14 @@ export interface OptionWithDescription { description: string; } -class DescriptionPicker extends Component { +class DescriptionPicker extends Component { constructor(props) { super(props); this.state = {}; } render() { - const { optionsWithDesc, handlePicked, value, disabled, className } = this.props; + const { optionsWithDesc, onSelected, value, disabled, className } = this.props; return (
@@ -34,7 +34,7 @@ class DescriptionPicker extends Component { clearable={false} labelKey="label" options={optionsWithDesc} - onChange={handlePicked} + onChange={onSelected} className={`width-7 gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={DescriptionOption} placeholder="Choose" diff --git a/public/app/core/components/Picker/TeamPicker.jest.tsx b/public/app/core/components/Picker/TeamPicker.jest.tsx index 20b7620e0ac..3db9f7bb4eb 100644 --- a/public/app/core/components/Picker/TeamPicker.jest.tsx +++ b/public/app/core/components/Picker/TeamPicker.jest.tsx @@ -1,19 +1,23 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import TeamPicker from './TeamPicker'; +import { TeamPicker } from './TeamPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('TeamPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const props = { + onSelected: () => {}, + }; + const tree = renderer.create().toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/TeamPicker.tsx b/public/app/core/components/Picker/TeamPicker.tsx index 2dfff1850dd..04f108ff8da 100644 --- a/public/app/core/components/Picker/TeamPicker.tsx +++ b/public/app/core/components/Picker/TeamPicker.tsx @@ -1,18 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - isLoading: boolean; - toggleLoading: any; - 
handlePicked: (user) => void; +export interface Props { + onSelected: (team: Team) => void; value?: string; className?: string; } +export interface State { + isLoading; +} + export interface Team { id: number; label: string; @@ -20,13 +21,12 @@ export interface Team { avatarUrl: string; } -class TeamPicker extends Component { +export class TeamPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,9 +36,9 @@ class TeamPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); + this.setState({ isLoading: true }); - toggleLoading(true); return backendSrv.get(`/api/teams/search?perpage=10&page=1&query=${query}`).then(result => { const teams = result.teams.map(team => { return { @@ -49,18 +49,18 @@ class TeamPicker extends Component { }; }); - toggleLoading(false); + this.setState({ isLoading: false }); return { options: teams }; }); } render() { - const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { onSelected, value, className } = this.props; + const { isLoading } = this.state; return (
- { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No teams found" - onChange={handlePicked} + onChange={onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select a team" value={value} autosize={true} /> @@ -80,5 +80,3 @@ class TeamPicker extends Component { ); } } - -export default withPicker(TeamPicker); diff --git a/public/app/core/components/Picker/UserPicker.jest.tsx b/public/app/core/components/Picker/UserPicker.jest.tsx index 756fa2d9801..054ca643700 100644 --- a/public/app/core/components/Picker/UserPicker.jest.tsx +++ b/public/app/core/components/Picker/UserPicker.jest.tsx @@ -1,19 +1,20 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import UserPicker from './UserPicker'; +import { UserPicker } from './UserPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('UserPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const tree = renderer.create( {}} />).toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx index 77bf6c1fe15..e50513c44e1 100644 --- a/public/app/core/components/Picker/UserPicker.tsx +++ b/public/app/core/components/Picker/UserPicker.tsx @@ -1,18 +1,19 @@ import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - 
isLoading: boolean; - toggleLoading: any; - handlePicked: (user) => void; +export interface Props { + onSelected: (user: User) => void; value?: string; className?: string; } +export interface State { + isLoading: boolean; +} + export interface User { id: number; label: string; @@ -20,13 +21,12 @@ export interface User { login: string; } -class UserPicker extends Component { +export class UserPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,29 +36,34 @@ class UserPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); - toggleLoading(true); - return backendSrv.get(`/api/org/users?query=${query}&limit=10`).then(result => { - const users = result.map(user => { + this.setState({ isLoading: true }); + + return backendSrv + .get(`/api/org/users?query=${query}&limit=10`) + .then(result => { return { - id: user.userId, - label: `${user.login} - ${user.email}`, - avatarUrl: user.avatarUrl, - login: user.login, + options: result.map(user => ({ + id: user.userId, + label: `${user.login} - ${user.email}`, + avatarUrl: user.avatarUrl, + login: user.login, + })), }; + }) + .finally(() => { + this.setState({ isLoading: false }); }); - toggleLoading(false); - return { options: users }; - }); } render() { - const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { value, className } = this.props; + const { isLoading } = this.state; + return (
- { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No users found" - onChange={handlePicked} + onChange={this.props.onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select user" value={value} autosize={true} /> @@ -78,5 +83,3 @@ class UserPicker extends Component { ); } } - -export default withPicker(UserPicker); diff --git a/public/app/core/components/Picker/withPicker.tsx b/public/app/core/components/Picker/withPicker.tsx deleted file mode 100644 index 838ef927c30..00000000000 --- a/public/app/core/components/Picker/withPicker.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, { Component } from 'react'; - -export interface IProps { - backendSrv: any; - handlePicked: (data) => void; - value?: string; - className?: string; -} - -export default function withPicker(WrappedComponent) { - return class WithPicker extends Component { - constructor(props) { - super(props); - this.toggleLoading = this.toggleLoading.bind(this); - - this.state = { - isLoading: false, - }; - } - - toggleLoading(isLoading) { - this.setState(prevState => { - return { - ...prevState, - isLoading: isLoading, - }; - }); - } - - render() { - return ; - } - }; -} diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index fd2e32db3a7..bd6b6975006 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -8,7 +8,7 @@ import appEvents from 'app/core/app_events'; import Drop from 'tether-drop'; import { createStore } from 'app/stores/store'; import colors from 'app/core/utils/colors'; -import { BackendSrv } from 'app/core/services/backend_srv'; +import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; export class GrafanaCtrl { @@ -24,6 +24,8 @@ export class GrafanaCtrl { backendSrv: 
BackendSrv, datasourceSrv: DatasourceSrv ) { + // sets singleston instances for angular services so react components can access them + setBackendSrv(backendSrv); createStore({ backendSrv, datasourceSrv }); $scope.init = function() { diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts index e6db344a4d6..b6603f06175 100644 --- a/public/app/core/components/scroll/page_scroll.ts +++ b/public/app/core/components/scroll/page_scroll.ts @@ -29,11 +29,13 @@ export function pageScrollbar() { scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; - elem[0].focus(); + // Focus page to enable scrolling by keyboard + elem[0].focus({ preventScroll: true }); }); elem[0].tabIndex = -1; - elem[0].focus(); + // Focus page to enable scrolling by keyboard + elem[0].focus({ preventScroll: true }); }, }; } diff --git a/public/app/core/components/team_picker.ts b/public/app/core/components/team_picker.ts deleted file mode 100644 index 228767a76c4..00000000000 --- a/public/app/core/components/team_picker.ts +++ /dev/null @@ -1,64 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class TeamPickerCtrl { - group: any; - teamPicked: any; - debouncedSearchGroups: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.debouncedSearchGroups = _.debounce(this.searchGroups, 500, { - leading: true, - trailing: false, - }); - this.reset(); - } - - reset() { - this.group = { text: 'Choose', value: null }; - } - - searchGroups(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/teams/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.teams, ug => { - return { text: ug.name, value: ug }; - }); - }) - ); - } - - onChange(option) { - this.teamPicked({ $group: option.value }); - } -} - -export function teamPicker() { - return { - restrict: 'E', - template: template, - controller: TeamPickerCtrl, - 
bindToController: true, - controllerAs: 'ctrl', - scope: { - teamPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('team-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('teamPicker', teamPicker); diff --git a/public/app/core/components/user_picker.ts b/public/app/core/components/user_picker.ts deleted file mode 100644 index 606ded09885..00000000000 --- a/public/app/core/components/user_picker.ts +++ /dev/null @@ -1,71 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class UserPickerCtrl { - user: any; - debouncedSearchUsers: any; - userPicked: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.reset(); - this.debouncedSearchUsers = _.debounce(this.searchUsers, 500, { - leading: true, - trailing: false, - }); - } - - searchUsers(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/users/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.users, user => { - return { text: user.login + ' - ' + user.email, value: user }; - }); - }) - ); - } - - onChange(option) { - this.userPicked({ $user: option.value }); - } - - reset() { - this.user = { text: 'Choose', value: null }; - } -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export function userPicker() { - return { - restrict: 'E', - template: template, - controller: UserPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - userPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('user-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('userPicker', userPicker); diff --git a/public/app/core/core.ts b/public/app/core/core.ts index 2bfe84a74df..bff98e1fbb3 100644 --- a/public/app/core/core.ts +++ b/public/app/core/core.ts @@ -45,8 +45,6 @@ import { KeybindingSrv } from './services/keybindingSrv'; import { 
helpModal } from './components/help/help'; import { JsonExplorer } from './components/json_explorer/json_explorer'; import { NavModelSrv, NavModel } from './nav_model_srv'; -import { userPicker } from './components/user_picker'; -import { teamPicker } from './components/team_picker'; import { geminiScrollbar } from './components/scroll/scroll'; import { pageScrollbar } from './components/scroll/page_scroll'; import { gfPageDirective } from './components/gf_page'; @@ -85,8 +83,6 @@ export { JsonExplorer, NavModelSrv, NavModel, - userPicker, - teamPicker, geminiScrollbar, pageScrollbar, gfPageDirective, diff --git a/public/app/core/logs_model.ts b/public/app/core/logs_model.ts new file mode 100644 index 00000000000..46e95a471ce --- /dev/null +++ b/public/app/core/logs_model.ts @@ -0,0 +1,29 @@ +export enum LogLevel { + crit = 'crit', + warn = 'warn', + err = 'error', + error = 'error', + info = 'info', + debug = 'debug', + trace = 'trace', +} + +export interface LogSearchMatch { + start: number; + length: number; + text?: string; +} + +export interface LogRow { + key: string; + entry: string; + logLevel: LogLevel; + timestamp: string; + timeFromNow: string; + timeLocal: string; + searchMatches?: LogSearchMatch[]; +} + +export interface LogsModel { + rows: LogRow[]; +} diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index d582b6a3b18..1aeeedef4dd 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -368,3 +368,17 @@ export class BackendSrv { } coreModule.service('backendSrv', BackendSrv); + +// +// Code below is to expore the service to react components +// + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 
cbc7871fbbd..672ae29740b 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -191,7 +191,7 @@ export class KeybindingSrv { range, }; const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore/${exploreState}`); + this.$location.url(`/explore?state=${exploreState}`); } } }); diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 4302e62e3e0..4fc4829811f 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -449,6 +449,7 @@ kbn.valueFormats.currencyNOK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencySEK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencyCZK = kbn.formatBuilders.currency('czk'); kbn.valueFormats.currencyCHF = kbn.formatBuilders.currency('CHF'); +kbn.valueFormats.currencyPLN = kbn.formatBuilders.currency('zł'); // Data (Binary) kbn.valueFormats.bits = kbn.formatBuilders.binarySIPrefix('b'); @@ -880,6 +881,7 @@ kbn.getUnitFormats = function() { { text: 'Swedish Krona (kr)', value: 'currencySEK' }, { text: 'Czech koruna (czk)', value: 'currencyCZK' }, { text: 'Swiss franc (CHF)', value: 'currencyCHF' }, + { text: 'Polish Złoty (PLN)', value: 'currencyPLN' }, ], }, { @@ -957,7 +959,7 @@ kbn.getUnitFormats = function() { text: 'throughput', submenu: [ { text: 'ops/sec (ops)', value: 'ops' }, - { text: 'requets/sec (rps)', value: 'reqps' }, + { text: 'requests/sec (rps)', value: 'reqps' }, { text: 'reads/sec (rps)', value: 'rps' }, { text: 'writes/sec (wps)', value: 'wps' }, { text: 'I/O ops/sec (iops)', value: 'iops' }, diff --git a/public/app/features/dashboard/dashnav/dashnav.html b/public/app/features/dashboard/dashnav/dashnav.html index 269d4b0bada..6ec272b5ca4 100644 --- a/public/app/features/dashboard/dashnav/dashnav.html +++ b/public/app/features/dashboard/dashnav/dashnav.html @@ -3,7 +3,7 @@ diff --git a/public/app/features/dashboard/folder_picker/folder_picker.ts 
b/public/app/features/dashboard/folder_picker/folder_picker.ts index 28338c29d33..352b29d27a0 100644 --- a/public/app/features/dashboard/folder_picker/folder_picker.ts +++ b/public/app/features/dashboard/folder_picker/folder_picker.ts @@ -104,10 +104,7 @@ export class FolderPickerCtrl { appEvents.emit('alert-success', ['Folder Created', 'OK']); this.closeCreateFolder(); - this.folder = { - text: result.title, - value: result.id, - }; + this.folder = { text: result.title, value: result.id }; this.onFolderChange(this.folder); }); } @@ -149,17 +146,14 @@ export class FolderPickerCtrl { folder = result.length > 0 ? result[0] : resetFolder; } } - this.folder = folder; - this.onFolderLoad(); - }); - } - private onFolderLoad() { - if (this.onLoad) { - this.onLoad({ - $folder: { id: this.folder.value, title: this.folder.text }, - }); - } + this.folder = folder; + + // if this is not the same as our initial value notify parent + if (this.folder.id !== this.initialFolderId) { + this.onChange({ $folder: { id: this.folder.value, title: this.folder.text } }); + } + }); } } @@ -176,7 +170,6 @@ export function folderPicker() { labelClass: '@', rootName: '@', onChange: '&', - onLoad: '&', onCreateFolder: '&', enterFolderCreation: '&', exitFolderCreation: '&', diff --git a/public/app/features/org/all.ts b/public/app/features/org/all.ts index 97e01c53fe3..8872450e3ab 100644 --- a/public/app/features/org/all.ts +++ b/public/app/features/org/all.ts @@ -5,8 +5,6 @@ import './select_org_ctrl'; import './change_password_ctrl'; import './new_org_ctrl'; import './user_invite_ctrl'; -import './teams_ctrl'; -import './team_details_ctrl'; import './create_team_ctrl'; import './org_api_keys_ctrl'; import './org_details_ctrl'; diff --git a/public/app/features/org/partials/team_details.html b/public/app/features/org/partials/team_details.html deleted file mode 100644 index 3ce851d5546..00000000000 --- a/public/app/features/org/partials/team_details.html +++ /dev/null @@ -1,105 +0,0 @@ - - -
-

Team Details

- - -
- Name - -
-
- - Email - - This is optional and is primarily used for allowing custom team avatars. - - - -
- -
- -
- - -
- -

Team Members

-
-
- Add member - - -
-
- - - - - - - - - - - - - - - - -
UsernameEmail
{{member.login}}{{member.email}} - - - -
-
- - This team has no members yet. - -
- -
- -
- -

Mappings to external groups

-
-
- Add group - -
-
- -
-
- - - - - - - - - - - - -
Group
{{group.groupId}} - - - -
-
- - This team has no associated groups yet. - -
- -
diff --git a/public/app/features/org/partials/teams.html b/public/app/features/org/partials/teams.html deleted file mode 100755 index e15a15cf573..00000000000 --- a/public/app/features/org/partials/teams.html +++ /dev/null @@ -1,68 +0,0 @@ - - -
-
- -
- - - - Add Team - -
- -
- - - - - - - - - - - - - - - - - - - -
NameEmailMembers
- - - -
-
- -
-
    -
  1. - -
  2. -
-
- - - No Teams found. - -
diff --git a/public/app/features/org/specs/team_details_ctrl.jest.ts b/public/app/features/org/specs/team_details_ctrl.jest.ts deleted file mode 100644 index c636de7ec56..00000000000 --- a/public/app/features/org/specs/team_details_ctrl.jest.ts +++ /dev/null @@ -1,42 +0,0 @@ -import '../team_details_ctrl'; -import TeamDetailsCtrl from '../team_details_ctrl'; - -describe('TeamDetailsCtrl', () => { - var backendSrv = { - searchUsers: jest.fn(() => Promise.resolve([])), - get: jest.fn(() => Promise.resolve([])), - post: jest.fn(() => Promise.resolve([])), - }; - - //Team id - var routeParams = { - id: 1, - }; - - var navModelSrv = { - getNav: jest.fn(), - }; - - var teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv); - - describe('when user is chosen to be added to team', () => { - beforeEach(() => { - teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv); - const userItem = { - id: 2, - login: 'user2', - }; - teamDetailsCtrl.userPicked(userItem); - }); - - it('should parse the result and save to db', () => { - expect(backendSrv.post.mock.calls[0][0]).toBe('/api/teams/1/members'); - expect(backendSrv.post.mock.calls[0][1].userId).toBe(2); - }); - - it('should refresh the list after saving.', () => { - expect(backendSrv.get.mock.calls[0][0]).toBe('/api/teams/1'); - expect(backendSrv.get.mock.calls[1][0]).toBe('/api/teams/1/members'); - }); - }); -}); diff --git a/public/app/features/org/team_details_ctrl.ts b/public/app/features/org/team_details_ctrl.ts deleted file mode 100644 index 6e0fddafa9d..00000000000 --- a/public/app/features/org/team_details_ctrl.ts +++ /dev/null @@ -1,108 +0,0 @@ -import coreModule from 'app/core/core_module'; -import config from 'app/core/config'; - -export default class TeamDetailsCtrl { - team: Team; - teamMembers: User[] = []; - navModel: any; - teamGroups: TeamGroup[] = []; - newGroupId: string; - isMappingsEnabled: boolean; - - /** 
@ngInject **/ - constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.userPicked = this.userPicked.bind(this); - this.get = this.get.bind(this); - this.newGroupId = ''; - this.isMappingsEnabled = config.buildInfo.isEnterprise; - this.get(); - } - - get() { - if (this.$routeParams && this.$routeParams.id) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}`).then(result => { - this.team = result; - }); - - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/members`).then(result => { - this.teamMembers = result; - }); - - if (this.isMappingsEnabled) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/groups`).then(result => { - this.teamGroups = result; - }); - } - } - } - - removeTeamMember(teamMember: TeamMember) { - this.$scope.appEvent('confirm-modal', { - title: 'Remove Member', - text: 'Are you sure you want to remove ' + teamMember.login + ' from this group?', - yesText: 'Remove', - icon: 'fa-warning', - onConfirm: () => { - this.removeMemberConfirmed(teamMember); - }, - }); - } - - removeMemberConfirmed(teamMember: TeamMember) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/members/${teamMember.userId}`).then(this.get); - } - - update() { - if (!this.$scope.teamDetailsForm.$valid) { - return; - } - - this.backendSrv.put('/api/teams/' + this.team.id, { - name: this.team.name, - email: this.team.email, - }); - } - - userPicked(user) { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/members`, { userId: user.id }).then(() => { - this.$scope.$broadcast('user-picker-reset'); - this.get(); - }); - } - - addGroup() { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/groups`, { groupId: this.newGroupId }).then(() => { - this.get(); - }); - } - - removeGroup(group: TeamGroup) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/groups/${group.groupId}`).then(this.get); - } -} - -export interface TeamGroup { - 
groupId: string; -} - -export interface Team { - id: number; - name: string; - email: string; -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export interface TeamMember { - userId: number; - name: string; - login: string; -} - -coreModule.controller('TeamDetailsCtrl', TeamDetailsCtrl); diff --git a/public/app/features/org/teams_ctrl.ts b/public/app/features/org/teams_ctrl.ts deleted file mode 100644 index 29317e73d3b..00000000000 --- a/public/app/features/org/teams_ctrl.ts +++ /dev/null @@ -1,66 +0,0 @@ -import coreModule from 'app/core/core_module'; -import appEvents from 'app/core/app_events'; - -export class TeamsCtrl { - teams: any; - pages = []; - perPage = 50; - page = 1; - totalPages: number; - showPaging = false; - query: any = ''; - navModel: any; - - /** @ngInject */ - constructor(private backendSrv, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.get(); - } - - get() { - this.backendSrv - .get(`/api/teams/search?perpage=${this.perPage}&page=${this.page}&query=${this.query}`) - .then(result => { - this.teams = result.teams; - this.page = result.page; - this.perPage = result.perPage; - this.totalPages = Math.ceil(result.totalCount / result.perPage); - this.showPaging = this.totalPages > 1; - this.pages = []; - - for (var i = 1; i < this.totalPages + 1; i++) { - this.pages.push({ page: i, current: i === this.page }); - } - }); - } - - navigateToPage(page) { - this.page = page.page; - this.get(); - } - - deleteTeam(team) { - appEvents.emit('confirm-modal', { - title: 'Delete', - text: 'Are you sure you want to delete Team ' + team.name + '?', - yesText: 'Delete', - icon: 'fa-warning', - onConfirm: () => { - this.deleteTeamConfirmed(team); - }, - }); - } - - deleteTeamConfirmed(team) { - this.backendSrv.delete('/api/teams/' + team.id).then(this.get.bind(this)); - } - - openTeamModal() { - appEvents.emit('show-modal', { - templateHtml: '', - modalClass: 'modal--narrow', - }); - 
} -} - -coreModule.controller('TeamsCtrl', TeamsCtrl); diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 75c0de3bc6e..3d8a4ed3736 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -332,7 +332,7 @@ class MetricsPanelCtrl extends PanelCtrl { range, }; const exploreState = encodePathComponent(JSON.stringify(state)); - this.$location.url(`/explore/${exploreState}`); + this.$location.url(`/explore?state=${exploreState}`); } addQuery(target) { diff --git a/public/app/features/plugins/built_in_plugins.ts b/public/app/features/plugins/built_in_plugins.ts index 6998321dd75..2c5bf459eda 100644 --- a/public/app/features/plugins/built_in_plugins.ts +++ b/public/app/features/plugins/built_in_plugins.ts @@ -4,11 +4,13 @@ import * as elasticsearchPlugin from 'app/plugins/datasource/elasticsearch/modul import * as opentsdbPlugin from 'app/plugins/datasource/opentsdb/module'; import * as grafanaPlugin from 'app/plugins/datasource/grafana/module'; import * as influxdbPlugin from 'app/plugins/datasource/influxdb/module'; +import * as loggingPlugin from 'app/plugins/datasource/logging/module'; import * as mixedPlugin from 'app/plugins/datasource/mixed/module'; import * as mysqlPlugin from 'app/plugins/datasource/mysql/module'; import * as postgresPlugin from 'app/plugins/datasource/postgres/module'; import * as prometheusPlugin from 'app/plugins/datasource/prometheus/module'; import * as mssqlPlugin from 'app/plugins/datasource/mssql/module'; +import * as testDataDSPlugin from 'app/plugins/datasource/testdata/module'; import * as textPanel from 'app/plugins/panel/text/module'; import * as graphPanel from 'app/plugins/panel/graph/module'; @@ -20,9 +22,6 @@ import * as tablePanel from 'app/plugins/panel/table/module'; import * as singlestatPanel from 'app/plugins/panel/singlestat/module'; import * as gettingStartedPanel from 
'app/plugins/panel/gettingstarted/module'; -import * as testDataAppPlugin from 'app/plugins/app/testdata/module'; -import * as testDataDSPlugin from 'app/plugins/app/testdata/datasource/module'; - const builtInPlugins = { 'app/plugins/datasource/graphite/module': graphitePlugin, 'app/plugins/datasource/cloudwatch/module': cloudwatchPlugin, @@ -30,13 +29,13 @@ const builtInPlugins = { 'app/plugins/datasource/opentsdb/module': opentsdbPlugin, 'app/plugins/datasource/grafana/module': grafanaPlugin, 'app/plugins/datasource/influxdb/module': influxdbPlugin, + 'app/plugins/datasource/logging/module': loggingPlugin, 'app/plugins/datasource/mixed/module': mixedPlugin, 'app/plugins/datasource/mysql/module': mysqlPlugin, 'app/plugins/datasource/postgres/module': postgresPlugin, 'app/plugins/datasource/mssql/module': mssqlPlugin, 'app/plugins/datasource/prometheus/module': prometheusPlugin, - 'app/plugins/app/testdata/module': testDataAppPlugin, - 'app/plugins/app/testdata/datasource/module': testDataDSPlugin, + 'app/plugins/datasource/testdata/module': testDataDSPlugin, 'app/plugins/panel/text/module': textPanel, 'app/plugins/panel/graph/module': graphPanel, diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index bff6f8b9f6a..df743640062 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -34,13 +34,13 @@ export class DatasourceSrv { } loadDatasource(name) { - var dsConfig = config.datasources[name]; + const dsConfig = config.datasources[name]; if (!dsConfig) { return this.$q.reject({ message: 'Datasource named ' + name + ' was not found' }); } - var deferred = this.$q.defer(); - var pluginDef = dsConfig.meta; + const deferred = this.$q.defer(); + const pluginDef = dsConfig.meta; importPluginModule(pluginDef.module) .then(plugin => { @@ -55,7 +55,7 @@ export class DatasourceSrv { throw new Error('Plugin module is missing Datasource constructor'); } - var 
instance = this.$injector.instantiate(plugin.Datasource, { instanceSettings: dsConfig }); + const instance = this.$injector.instantiate(plugin.Datasource, { instanceSettings: dsConfig }); instance.meta = pluginDef; instance.name = name; this.datasources[name] = instance; @@ -73,7 +73,7 @@ export class DatasourceSrv { } getAnnotationSources() { - var sources = []; + const sources = []; this.addDataSourceVariables(sources); @@ -86,6 +86,14 @@ export class DatasourceSrv { return sources; } + getExploreSources() { + const { datasources } = config; + const es = Object.keys(datasources) + .map(name => datasources[name]) + .filter(ds => ds.meta && ds.meta.explore); + return _.sortBy(es, ['name']); + } + getMetricSources(options) { var metricSources = []; @@ -155,3 +163,4 @@ export class DatasourceSrv { } coreModule.service('datasourceSrv', DatasourceSrv); +export default DatasourceSrv; diff --git a/public/app/features/plugins/ds_list_ctrl.ts b/public/app/features/plugins/ds_list_ctrl.ts index 577b931551a..89c760ae253 100644 --- a/public/app/features/plugins/ds_list_ctrl.ts +++ b/public/app/features/plugins/ds_list_ctrl.ts @@ -19,6 +19,7 @@ export class DataSourcesCtrl { onQueryUpdated() { let regex = new RegExp(this.searchQuery, 'ig'); this.datasources = _.filter(this.unfiltered, item => { + regex.lastIndex = 0; return regex.test(item.name) || regex.test(item.type); }); } diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index f999ee7e2ff..cce494d0a60 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -56,7 +56,7 @@ System.config({ css: 'vendor/plugin-css/css.js', }, meta: { - 'plugin*': { + '/*': { esModule: true, authorization: true, loader: 'plugin-loader', @@ -126,6 +126,7 @@ import 'vendor/flot/jquery.flot.stackpercent'; import 'vendor/flot/jquery.flot.fillbelow'; import 'vendor/flot/jquery.flot.crosshair'; import 'vendor/flot/jquery.flot.dashes'; 
+import 'vendor/flot/jquery.flot.gauge'; const flotDeps = [ 'jquery.flot', @@ -137,6 +138,7 @@ const flotDeps = [ 'jquery.flot.selection', 'jquery.flot.stackpercent', 'jquery.flot.events', + 'jquery.flot.gauge', ]; for (let flotDep of flotDeps) { exposeToPlugin(flotDep, { fakeDep: 1 }); diff --git a/public/app/features/plugins/specs/datasource_srv.jest.ts b/public/app/features/plugins/specs/datasource_srv.jest.ts index 5458662ef9b..b63e8537837 100644 --- a/public/app/features/plugins/specs/datasource_srv.jest.ts +++ b/public/app/features/plugins/specs/datasource_srv.jest.ts @@ -17,9 +17,35 @@ const templateSrv = { describe('datasource_srv', function() { let _datasourceSrv = new DatasourceSrv({}, {}, {}, templateSrv); - let metricSources; + + describe('when loading explore sources', () => { + beforeEach(() => { + config.datasources = { + explore1: { + name: 'explore1', + meta: { explore: true, metrics: true }, + }, + explore2: { + name: 'explore2', + meta: { explore: true, metrics: false }, + }, + nonExplore: { + name: 'nonExplore', + meta: { explore: false, metrics: true }, + }, + }; + }); + + it('should return list of explore sources', () => { + const exploreSources = _datasourceSrv.getExploreSources(); + expect(exploreSources.length).toBe(2); + expect(exploreSources[0].name).toBe('explore1'); + expect(exploreSources[1].name).toBe('explore2'); + }); + }); describe('when loading metric sources', () => { + let metricSources; let unsortedDatasources = { mmm: { type: 'test-db', diff --git a/public/app/features/templating/adhoc_variable.ts b/public/app/features/templating/adhoc_variable.ts index babeaf1f34e..9f8bd4c39a7 100644 --- a/public/app/features/templating/adhoc_variable.ts +++ b/public/app/features/templating/adhoc_variable.ts @@ -3,6 +3,7 @@ import { Variable, assignModelProperties, variableTypes } from './variable'; export class AdhocVariable implements Variable { filters: any[]; + skipUrlSync: boolean; defaults = { type: 'adhoc', @@ -11,6 +12,7 @@ export 
class AdhocVariable implements Variable { hide: 0, datasource: null, filters: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/constant_variable.ts b/public/app/features/templating/constant_variable.ts index f2fb4294537..e727c6e98af 100644 --- a/public/app/features/templating/constant_variable.ts +++ b/public/app/features/templating/constant_variable.ts @@ -4,6 +4,7 @@ export class ConstantVariable implements Variable { query: string; options: any[]; current: any; + skipUrlSync: boolean; defaults = { type: 'constant', @@ -13,6 +14,7 @@ export class ConstantVariable implements Variable { query: '', current: {}, options: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/custom_variable.ts b/public/app/features/templating/custom_variable.ts index c15178f6644..4490a41a38f 100644 --- a/public/app/features/templating/custom_variable.ts +++ b/public/app/features/templating/custom_variable.ts @@ -7,6 +7,7 @@ export class CustomVariable implements Variable { includeAll: boolean; multi: boolean; current: any; + skipUrlSync: boolean; defaults = { type: 'custom', @@ -19,6 +20,7 @@ export class CustomVariable implements Variable { includeAll: false, multi: false, allValue: null, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/datasource_variable.ts b/public/app/features/templating/datasource_variable.ts index 4c326a94e3b..519ce21e4d4 100644 --- a/public/app/features/templating/datasource_variable.ts +++ b/public/app/features/templating/datasource_variable.ts @@ -7,6 +7,7 @@ export class DatasourceVariable implements Variable { options: any; current: any; refresh: any; + skipUrlSync: boolean; defaults = { type: 'datasource', @@ -18,6 +19,7 @@ export class DatasourceVariable implements Variable { options: [], query: '', refresh: 1, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/interval_variable.ts 
b/public/app/features/templating/interval_variable.ts index 3faac316f98..b932819a7b7 100644 --- a/public/app/features/templating/interval_variable.ts +++ b/public/app/features/templating/interval_variable.ts @@ -11,6 +11,7 @@ export class IntervalVariable implements Variable { query: string; refresh: number; current: any; + skipUrlSync: boolean; defaults = { type: 'interval', @@ -24,6 +25,7 @@ export class IntervalVariable implements Variable { auto: false, auto_min: '10s', auto_count: 30, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/query_variable.ts b/public/app/features/templating/query_variable.ts index 54bd7bb660c..5ddd6d32864 100644 --- a/public/app/features/templating/query_variable.ts +++ b/public/app/features/templating/query_variable.ts @@ -22,6 +22,7 @@ export class QueryVariable implements Variable { tagsQuery: string; tagValuesQuery: string; tags: any[]; + skipUrlSync: boolean; defaults = { type: 'query', @@ -42,6 +43,7 @@ export class QueryVariable implements Variable { useTags: false, tagsQuery: '', tagValuesQuery: '', + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/specs/template_srv.jest.ts b/public/app/features/templating/specs/template_srv.jest.ts index 59915776b4f..86b6aa7ec99 100644 --- a/public/app/features/templating/specs/template_srv.jest.ts +++ b/public/app/features/templating/specs/template_srv.jest.ts @@ -345,6 +345,49 @@ describe('templateSrv', function() { }); }); + describe('fillVariableValuesForUrl skip url sync', function() { + beforeEach(function() { + initTemplateSrv([ + { + name: 'test', + skipUrlSync: true, + current: { value: 'value' }, + getValueForUrl: function() { + return this.current.value; + }, + }, + ]); + }); + + it('should not include template variable value in url', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params); + expect(params['var-test']).toBe(undefined); + }); + }); + + 
describe('fillVariableValuesForUrl with multi value with skip url sync', function() { + beforeEach(function() { + initTemplateSrv([ + { + type: 'query', + name: 'test', + skipUrlSync: true, + current: { value: ['val1', 'val2'] }, + getValueForUrl: function() { + return this.current.value; + }, + }, + ]); + }); + + it('should not include template variable value in url', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params); + expect(params['var-test']).toBe(undefined); + }); + }); + describe('fillVariableValuesForUrl with multi value and scopedVars', function() { beforeEach(function() { initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]); @@ -359,6 +402,20 @@ describe('templateSrv', function() { }); }); + describe('fillVariableValuesForUrl with multi value, scopedVars and skip url sync', function() { + beforeEach(function() { + initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]); + }); + + it('should not set scoped value as url params', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params, { + test: { name: 'test', value: 'val1', skipUrlSync: true }, + }); + expect(params['var-test']).toBe(undefined); + }); + }); + describe('replaceWithText', function() { beforeEach(function() { initTemplateSrv([ diff --git a/public/app/features/templating/specs/variable_srv_init_specs.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts similarity index 56% rename from public/app/features/templating/specs/variable_srv_init_specs.ts rename to public/app/features/templating/specs/variable_srv_init.jest.ts index 11639c6aa8f..ea8689f528b 100644 --- a/public/app/features/templating/specs/variable_srv_init_specs.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -1,36 +1,31 @@ -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import '../all'; import _ from 'lodash'; -import helpers 
from 'test/specs/helpers'; -import { Emitter } from 'app/core/core'; +import { VariableSrv } from '../variable_srv'; +import $q from 'q'; describe('VariableSrv init', function() { - var ctx = new helpers.ControllerTestContext(); + let templateSrv = { + init: vars => { + this.variables = vars; + }, + variableInitialized: () => {}, + updateTemplateData: () => {}, + replace: str => + str.replace(this.regex, match => { + return match; + }), + }; - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); + let $injector = {}; + let $rootscope = { + $on: () => {}, + }; - beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - beforeEach( - angularMocks.inject(($rootScope, $q, $location, $injector) => { - ctx.$q = $q; - ctx.$rootScope = $rootScope; - ctx.$location = $location; - ctx.variableSrv = $injector.get('variableSrv'); - ctx.$rootScope.$digest(); - }) - ); + let ctx = {}; function describeInitScenario(desc, fn) { - describe(desc, function() { + describe(desc, () => { var scenario: any = { urlParams: {}, setup: setupFn => { @@ -38,22 +33,34 @@ describe('VariableSrv init', function() { }, }; - beforeEach(function() { + beforeEach(async () => { scenario.setupFn(); - ctx.datasource = {}; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when(scenario.queryResult)); - - ctx.datasourceSrv.get = sinon.stub().returns(ctx.$q.when(ctx.datasource)); - ctx.datasourceSrv.getMetricSources = sinon.stub().returns(scenario.metricSources); - - ctx.$location.search = sinon.stub().returns(scenario.urlParams); - ctx.dashboard = { - templating: { list: scenario.variables }, - events: new Emitter(), + ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve(scenario.queryResult)), + }, + 
datasourceSrv: { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => scenario.metricSources, + }, + templateSrv, }; - ctx.variableSrv.init(ctx.dashboard); - ctx.$rootScope.$digest(); + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); + + $injector.instantiate = (variable, model) => { + return getVarMockConstructor(variable, model, ctx); + }; + + ctx.variableSrv.datasource = ctx.datasource; + ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; + + ctx.variableSrv.$location.search = () => scenario.urlParams; + ctx.variableSrv.dashboard = { + templating: { list: scenario.variables }, + }; + + await ctx.variableSrv.init(ctx.variableSrv.dashboard); scenario.variables = ctx.variableSrv.variables; }); @@ -78,8 +85,8 @@ describe('VariableSrv init', function() { }); it('should update current value', () => { - expect(scenario.variables[0].current.value).to.be('new'); - expect(scenario.variables[0].current.text).to.be('new'); + expect(scenario.variables[0].current.value).toBe('new'); + expect(scenario.variables[0].current.text).toBe('new'); }); }); }); @@ -111,12 +118,12 @@ describe('VariableSrv init', function() { }); it('should update child variable', () => { - expect(scenario.variables[1].options.length).to.be(2); - expect(scenario.variables[1].current.text).to.be('google-server1'); + expect(scenario.variables[1].options.length).toBe(2); + expect(scenario.variables[1].current.text).toBe('google-server1'); }); it('should only update it once', () => { - expect(ctx.datasource.metricFindQuery.callCount).to.be(1); + expect(ctx.variableSrv.datasource.metricFindQuery).toHaveBeenCalledTimes(1); }); }); }); @@ -140,9 +147,9 @@ describe('VariableSrv init', function() { ]; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; - expect(variable.options.length).to.be(2); + expect(variable.options.length).toBe(2); }); }); @@ -164,19 +171,19 @@ 
describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('val2 + val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('val2 + val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); + expect(variable.options[2].selected).toBe(false); }); }); @@ -198,19 +205,34 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('Val2 + Val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('Val2 + Val1'); + expect(variable.options[0].selected).toBe(true); + 
expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); + expect(variable.options[2].selected).toBe(false); }); }); }); + +function getVarMockConstructor(variable, model, ctx) { + switch (model.model.type) { + case 'datasource': + return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); + case 'query': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'interval': + return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + case 'custom': + return new variable(model.model, ctx.variableSrv); + default: + return new variable(model.model); + } +} diff --git a/public/app/features/templating/template_srv.ts b/public/app/features/templating/template_srv.ts index cdabe577f96..fc79d12ff9e 100644 --- a/public/app/features/templating/template_srv.ts +++ b/public/app/features/templating/template_srv.ts @@ -250,8 +250,14 @@ export class TemplateSrv { fillVariableValuesForUrl(params, scopedVars) { _.each(this.variables, function(variable) { if (scopedVars && scopedVars[variable.name] !== void 0) { + if (scopedVars[variable.name].skipUrlSync) { + return; + } params['var-' + variable.name] = scopedVars[variable.name].value; } else { + if (variable.skipUrlSync) { + return; + } params['var-' + variable.name] = variable.getValueForUrl(); } }); diff --git a/public/app/plugins/app/testdata/dashboards/graph_last_1h.json b/public/app/plugins/app/testdata/dashboards/graph_last_1h.json deleted file mode 100644 index 5a4459cd62c..00000000000 --- a/public/app/plugins/app/testdata/dashboards/graph_last_1h.json +++ /dev/null @@ -1,1448 +0,0 @@ -{ - "annotations": { - "list": [] - }, - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "hideControls": 
false, - "links": [], - "refresh": false, - "revision": 8, - "rows": [ - { - "collapse": false, - "height": "250px", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 1, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenario": "random_walk", - "scenarioId": "no_data_points", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "No Data Points Warning", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenario": "random_walk", - "scenarioId": "datapoints_outside_range", - "target": "" - } - ], - "thresholds": [], - 
"timeFrom": null, - "timeShift": null, - "title": "Datapoints Outside Range Warning", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 3, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenario": "random_walk", - "scenarioId": "random_walk", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Random walk series", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "New row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": "250px", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - 
"editable": true, - "error": false, - "fill": 1, - "id": 4, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 8, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenario": "random_walk", - "scenarioId": "random_walk", - "target": "" - } - ], - "thresholds": [], - "timeFrom": "2s", - "timeShift": null, - "title": "Millisecond res x-axis and tooltip", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "content": "Just verify that the tooltip time has millisecond resolution ", - "editable": true, - "error": false, - "id": 6, - "links": [], - "mode": "markdown", - "span": 4, - "title": "", - "type": "text" - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "New row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": 336, - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 5, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": 
[ - { - "alias": "B-series", - "yaxis": 2 - } - ], - "span": 8, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "2000,3000,4000,1000,3000,10000", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "2 yaxis and axis labels", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "percent", - "label": "Perecent", - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": "Pressure", - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "content": "Verify that axis labels look ok", - "editable": true, - "error": false, - "id": 7, - "links": [], - "mode": "markdown", - "span": 4, - "title": "", - "type": "text" - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "New row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": "250px", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 8, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", - "target": "" 
- } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "null value connected", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 10, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "null as zero", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "null value null as zero", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "content": "Should be a long line connecting the null region in the `connected` mode, and in zero it should just be a line with zero value at the null points. 
", - "editable": true, - "error": false, - "id": 13, - "links": [], - "mode": "markdown", - "span": 4, - "title": "", - "type": "text" - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "New row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": 250, - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 9, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [ - { - "alias": "B-series", - "zindex": -3 - } - ], - "span": 8, - "stack": true, - "steppedLine": false, - "targets": [ - { - "hide": false, - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", - "target": "" - }, - { - "alias": "", - "hide": false, - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", - "target": "" - }, - { - "alias": "", - "hide": false, - "refId": "C", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Stacking value ontop of nulls", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - 
"content": "Stacking values on top of nulls, should treat the null values as zero. ", - "editable": true, - "error": false, - "id": 14, - "links": [], - "mode": "markdown", - "span": 4, - "title": "", - "type": "text" - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "Dashboard Row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": 250, - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "editable": true, - "error": false, - "fill": 1, - "id": 12, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [ - { - "alias": "B-series", - "zindex": -3 - } - ], - "span": 8, - "stack": true, - "steppedLine": false, - "targets": [ - { - "alias": "", - "hide": false, - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", - "target": "" - }, - { - "alias": "", - "hide": false, - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", - "target": "" - }, - { - "alias": "", - "hide": false, - "refId": "C", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Stacking all series null segment", - "tooltip": { - "msResolution": false, - "shared": true, - "sort": 0, - "value_type": "cumulative" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - 
"show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "content": "Stacking when all values are null should leave a gap in the graph", - "editable": true, - "error": false, - "id": 15, - "links": [], - "mode": "markdown", - "span": 4, - "title": "", - "type": "text" - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "Dashboard Row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": 250, - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "decimals": 3, - "fill": 1, - "id": 20, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "show": true, - "total": true, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 12, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Legend Table Single Series Should Take Minimum Height", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "Dashboard Row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": 250, - "panels": [ - { - "aliasColors": {}, - "bars": 
false, - "datasource": "Grafana TestData", - "decimals": 3, - "fill": 1, - "id": 16, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "show": true, - "total": true, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "C", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "D", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Legend Table No Scroll Visible", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "decimals": 3, - "fill": 1, - "id": 17, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "show": true, - "total": true, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - 
"targets": [ - { - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "C", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "D", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "E", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "F", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "G", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "I", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "J", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Legend Table Should Scroll", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "Dashboard Row", - "titleSize": "h6" - }, - { - "collapse": false, - "height": 250, - "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "decimals": 3, - "fill": 1, - "id": 18, - "legend": { - 
"alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "rightSide": true, - "show": true, - "total": true, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "C", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "D", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Legend Table No Scroll Visible", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": "Grafana TestData", - "decimals": 3, - "fill": 1, - "id": 19, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "rightSide": true, - "show": true, - "total": true, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 6, - "stack": false, - "steppedLine": false, - "targets": [ - { - "refId": "A", - "scenarioId": 
"csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "B", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "C", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "D", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "E", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "F", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "G", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "H", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "I", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "J", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "K", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - }, - { - "refId": "L", - "scenarioId": "csv_metric_values", - "stringInput": "1,20,90,30,5,0", - "target": "" - } - ], - "thresholds": [], - "timeFrom": null, - "timeShift": null, - "title": "Legend Table No Scroll Visible", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ] - } - ], - "repeat": null, - "repeatIteration": null, - "repeatRowId": null, - "showTitle": false, - "title": "Dashboard Row", - "titleSize": "h6" - } - ], - 
"schemaVersion": 14, - "style": "dark", - "tags": [ - "grafana-test" - ], - "templating": { - "list": [] - }, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": { - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "timezone": "browser", - "title": "TestData - Graph Panel Last 1h", - "version": 2 -} diff --git a/public/app/plugins/app/testdata/module.ts b/public/app/plugins/app/testdata/module.ts deleted file mode 100644 index 812aba20464..00000000000 --- a/public/app/plugins/app/testdata/module.ts +++ /dev/null @@ -1,34 +0,0 @@ -export class ConfigCtrl { - static template = ''; - - appEditCtrl: any; - - /** @ngInject **/ - constructor(private backendSrv) { - this.appEditCtrl.setPreUpdateHook(this.initDatasource.bind(this)); - } - - initDatasource() { - return this.backendSrv.get('/api/datasources').then(res => { - var found = false; - for (let ds of res) { - if (ds.type === 'grafana-testdata-datasource') { - found = true; - } - } - - if (!found) { - var dsInstance = { - name: 'Grafana TestData', - type: 'grafana-testdata-datasource', - access: 'direct', - jsonData: {}, - }; - - return this.backendSrv.post('/api/datasources', dsInstance); - } - - return Promise.resolve(); - }); - } -} diff --git a/public/app/plugins/app/testdata/plugin.json b/public/app/plugins/app/testdata/plugin.json deleted file mode 100644 index 3efcd687453..00000000000 --- a/public/app/plugins/app/testdata/plugin.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "type": "app", - "name": "Grafana TestData", - "id": "testdata", - - "info": { - "description": "Grafana test data app", - "author": { - "name": "Grafana Project", - "url": "https://grafana.com" - }, - "version": "1.0.17", - "updated": "2016-09-26" - }, - - "includes": [ - { - "type": "dashboard", - "name": "TestData - Graph Last 1h", - "path": 
"dashboards/graph_last_1h.json" - }, - { - "type": "dashboard", - "name": "TestData - Alerts", - "path": "dashboards/alerts.json" - } - ], - - "dependencies": { - "grafanaVersion": "4.x.x" - } -} diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 391f65bd7ae..087bd19da71 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -30,7 +30,9 @@ export default class CloudWatchDatasource { var queries = _.filter(options.targets, item => { return ( - item.hide !== true && !!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics) + (item.id !== '' || item.hide !== true) && + ((!!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics)) || + item.expression.length > 0) ); }).map(item => { item.region = this.templateSrv.replace(this.getActualRegion(item.region), options.scopedVars); @@ -38,6 +40,17 @@ export default class CloudWatchDatasource { item.metricName = this.templateSrv.replace(item.metricName, options.scopedVars); item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars); item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting + item.id = this.templateSrv.replace(item.id, options.scopedVars); + item.expression = this.templateSrv.replace(item.expression, options.scopedVars); + item.returnData = typeof item.hide === 'undefined' ? 
true : !item.hide; + + // valid ExtendedStatistics is like p90.00, check the pattern + let hasInvalidStatistics = item.statistics.some(s => { + return s.indexOf('p') === 0 && !/p\d{2}\.\d{2}/.test(s); + }); + if (hasInvalidStatistics) { + throw { message: 'Invalid extended statistics' }; + } return _.extend( { @@ -399,6 +412,11 @@ export default class CloudWatchDatasource { scopedVar[variable.name] = v; t.refId = target.refId + '_' + v.value; t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar); + if (variable.multi && target.id) { + t.id = target.id + window.btoa(v.value).replace(/=/g, '0'); // generate unique id + } else { + t.id = target.id; + } return t; }); } diff --git a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html index 81bad39e23a..7da6e7d2a83 100644 --- a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html +++ b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html @@ -1,4 +1,4 @@ -
+
@@ -20,7 +20,7 @@
-
+
@@ -31,18 +31,35 @@
-
+
-
-
- - - +
+ + +
+
+ +
+
+ + +
+
+ + + Alias replacement variables:
  • {{metric}}
  • @@ -54,12 +71,12 @@
-
- +
+
-
-
+
+
diff --git a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts index 0b47ebd7069..689cf270feb 100644 --- a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts +++ b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts @@ -27,6 +27,9 @@ export class CloudWatchQueryParameterCtrl { target.dimensions = target.dimensions || {}; target.period = target.period || ''; target.region = target.region || 'default'; + target.id = target.id || ''; + target.expression = target.expression || ''; + target.returnData = target.returnData || false; target.highResolution = target.highResolution || false; $scope.regionSegment = uiSegmentSrv.getSegmentForValue($scope.target.region, 'select region'); diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index 2dc6e57b1aa..a8968008661 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -121,6 +121,26 @@ describe('CloudWatchDatasource', function() { }); }); + it('should cancel query for invalid extended statistics', function () { + var query = { + range: { from: 'now-1h', to: 'now' }, + rangeRaw: { from: 1483228800, to: 1483232400 }, + targets: [ + { + region: 'us-east-1', + namespace: 'AWS/EC2', + metricName: 'CPUUtilization', + dimensions: { + InstanceId: 'i-12345678', + }, + statistics: ['pNN.NN'], + period: '60s', + }, + ], + }; + expect(ctx.ds.query.bind(ctx.ds, query)).toThrow(/Invalid extended statistics/); + }); + it('should return series list', function(done) { ctx.ds.query(query).then(function(result) { expect(result.data[0].target).toBe(response.results.A.series[0].name); diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts new file mode 100644 
index 00000000000..b38ad56427b --- /dev/null +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -0,0 +1,324 @@ +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +import gfunc from '../gfunc'; +import { GraphiteQueryCtrl } from '../query_ctrl'; + +describe('GraphiteQueryCtrl', () => { + let ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + }, + target: { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }, + panelCtrl: { + refresh: jest.fn(), + }, + }; + + ctx.panelCtrl.panel = { + targets: [ctx.target], + }; + + beforeEach(() => { + GraphiteQueryCtrl.prototype.target = ctx.target; + GraphiteQueryCtrl.prototype.datasource = ctx.datasource; + + GraphiteQueryCtrl.prototype.panelCtrl = ctx.panelCtrl; + + ctx.ctrl = new GraphiteQueryCtrl( + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }), + {}, + {} + ); + }); + + describe('init', () => { + it('should validate metric key exists', () => { + expect(ctx.datasource.metricFindQuery.mock.calls[0][0]).toBe('test.prod.*'); + }); + + it('should delete last segment if no metrics are found', () => { + expect(ctx.ctrl.segments[2].value).toBe('select metric'); + }); + + it('should parse expression and build function model', () => { + expect(ctx.ctrl.queryModel.functions.length).toBe(2); + }); + }); + + describe('when adding function', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'test.prod.*.count'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode')); + }); + + it('should add function with correct node number', () => { + 
expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(2); + }); + + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(test.prod.*.count, 2)'); + }); + + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); + }); + }); + + describe('when adding function before any metric segment', () => { + beforeEach(() => { + ctx.ctrl.target.target = ''; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: true }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent')); + }); + + it('should add function and remove select metric link', () => { + expect(ctx.ctrl.segments.length).toBe(0); + }); + }); + + describe('when initializing target without metric expression and only function', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'asPercent(#A, #B)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); + ctx.ctrl.parseTarget(); + }); + + it('should not add select metric segment', () => { + expect(ctx.ctrl.segments.length).toBe(1); + }); + + it('should add second series ref as param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + }); + }); + + describe('when initializing a target with single param func using variable', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'movingAverage(prod.count, $var)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); + ctx.ctrl.parseTarget(); + }); + + it('should add 2 segments', () => { + expect(ctx.ctrl.segments.length).toBe(2); + }); + + it('should add function param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + }); + }); + + describe('when initializing target without metric expression and function with series-ref', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); + ctx.ctrl.parseTarget(); + }); + + it('should add 
segments', () => { + expect(ctx.ctrl.segments.length).toBe(3); + }); + + it('should have correct func params', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + }); + }); + + describe('when getting altSegments and metricFindQuery returns empty array', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'test.count'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); + ctx.ctrl.parseTarget(); + ctx.ctrl.getAltSegments(1).then(function(results) { + ctx.altSegments = results; + }); + }); + + it('should have no segments', () => { + expect(ctx.altSegments.length).toBe(0); + }); + }); + + describe('targetChanged', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.target.target = ''; + ctx.ctrl.targetChanged(); + }); + + it('should rebuild target after expression model', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); + }); + + it('should call panelCtrl.refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); + }); + }); + + describe('when updating targets with nested query', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + }); + + it('should add function params', () => { + expect(ctx.ctrl.queryModel.segments.length).toBe(1); + expect(ctx.ctrl.queryModel.segments[0].value).toBe('#A'); + + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(60); + }); + + it('target should remain the same', () => { + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); + }); + + it('targetFull should include nested queries', () => { + ctx.ctrl.panelCtrl.panel.targets = [ + { + 
target: 'nested.query.count', + refId: 'A', + }, + ]; + + ctx.ctrl.updateModelTarget(); + + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); + + expect(ctx.ctrl.target.targetFull).toBe('scaleToSeconds(nested.query.count, 60)'); + }); + }); + + describe('when updating target used in other query', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'metrics.a.count'; + ctx.ctrl.target.refId = 'A'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + + ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }]; + + ctx.ctrl.updateModelTarget(); + }); + + it('targetFull of other query should update', () => { + expect(ctx.ctrl.panel.targets[1].targetFull).toBe('sumSeries(metrics.a.count)'); + }); + }); + + describe('when adding seriesByTag function', () => { + beforeEach(() => { + ctx.ctrl.target.target = ''; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag')); + }); + + it('should update functions', () => { + expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).toBe(0); + }); + + it('should update seriesByTagUsed flag', () => { + expect(ctx.ctrl.queryModel.seriesByTagUsed).toBe(true); + }); + + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('seriesByTag()'); + }); + + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); + }); + }); + + describe('when parsing seriesByTag function', () => { + beforeEach(() => { + ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + }); + + it('should add tags', () => { + const expected = [ + { key: 'tag1', operator: '=', value: 'value1' }, + { key: 'tag2', operator: '!=~', value: 'value2' }, + ]; + 
expect(ctx.ctrl.queryModel.tags).toEqual(expected); + }); + + it('should add plus button', () => { + expect(ctx.ctrl.addTagSegments.length).toBe(1); + }); + }); + + describe('when tag added', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'seriesByTag()'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.addNewTag({ value: 'tag1' }); + }); + + it('should update tags with default value', () => { + const expected = [{ key: 'tag1', operator: '=', value: '' }]; + expect(ctx.ctrl.queryModel.tags).toEqual(expected); + }); + + it('should update target', () => { + const expected = "seriesByTag('tag1=')"; + expect(ctx.ctrl.target.target).toEqual(expected); + }); + }); + + describe('when tag changed', () => { + beforeEach(() => { + ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0); + }); + + it('should update tags', () => { + const expected = [ + { key: 'tag1', operator: '=', value: 'new_value' }, + { key: 'tag2', operator: '!=~', value: 'value2' }, + ]; + expect(ctx.ctrl.queryModel.tags).toEqual(expected); + }); + + it('should update target', () => { + const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')"; + expect(ctx.ctrl.target.target).toEqual(expected); + }); + }); + + describe('when tag removed', () => { + beforeEach(() => { + ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); + ctx.ctrl.parseTarget(); + ctx.ctrl.removeTag(0); + }); + + it('should update tags', () => { + const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }]; + expect(ctx.ctrl.queryModel.tags).toEqual(expected); + }); + + it('should update target', () => { + const expected = 
"seriesByTag('tag2!=~value2')"; + expect(ctx.ctrl.target.target).toEqual(expected); + }); + }); +}); diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts deleted file mode 100644 index b4f7718930f..00000000000 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,338 +0,0 @@ -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import gfunc from '../gfunc'; -import helpers from 'test/specs/helpers'; -import { GraphiteQueryCtrl } from '../query_ctrl'; - -describe('GraphiteQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - ctx.datasource.getFuncDef = gfunc.getFuncDef; - ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - ctx.panelCtrl = { panel: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - - ctx.ctrl = $controller( - GraphiteQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - datasource: ctx.datasource, - target: ctx.target, - } - ); - ctx.scope.$digest(); - }) - ); - - describe('init', 
function() { - it('should validate metric key exists', function() { - expect(ctx.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*'); - }); - - it('should delete last segment if no metrics are found', function() { - expect(ctx.ctrl.segments[2].value).to.be('select metric'); - }); - - it('should parse expression and build function model', function() { - expect(ctx.ctrl.queryModel.functions.length).to.be(2); - }); - }); - - describe('when adding function', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'test.prod.*.count'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode')); - }); - - it('should add function with correct node number', function() { - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(2); - }); - - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(test.prod.*.count, 2)'); - }); - - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); - }); - }); - - describe('when adding function before any metric segment', function() { - beforeEach(function() { - ctx.ctrl.target.target = ''; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([{ expandable: true }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent')); - }); - - it('should add function and remove select metric link', function() { - expect(ctx.ctrl.segments.length).to.be(0); - }); - }); - - describe('when initializing target without metric expression and only function', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'asPercent(#A, #B)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); - ctx.ctrl.parseTarget(); - ctx.scope.$digest(); - }); - - it('should not add select metric segment', function() { - expect(ctx.ctrl.segments.length).to.be(1); - }); - - it('should add second series ref as 
param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - }); - }); - - describe('when initializing a target with single param func using variable', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'movingAverage(prod.count, $var)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); - ctx.ctrl.parseTarget(); - }); - - it('should add 2 segments', function() { - expect(ctx.ctrl.segments.length).to.be(2); - }); - - it('should add function param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - }); - }); - - describe('when initializing target without metric expression and function with series-ref', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); - ctx.ctrl.parseTarget(); - }); - - it('should add segments', function() { - expect(ctx.ctrl.segments.length).to.be(3); - }); - - it('should have correct func params', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - }); - }); - - describe('when getting altSegments and metricFindQuery returns empty array', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'test.count'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); - ctx.ctrl.parseTarget(); - ctx.ctrl.getAltSegments(1).then(function(results) { - ctx.altSegments = results; - }); - ctx.scope.$digest(); - }); - - it('should have no segments', function() { - expect(ctx.altSegments.length).to.be(0); - }); - }); - - describe('targetChanged', function() { - beforeEach(function() { - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.target.target = ''; - ctx.ctrl.targetChanged(); - }); - - it('should rebuld target after expression model', function() { - 
expect(ctx.ctrl.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); - }); - - it('should call panelCtrl.refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); - }); - }); - - describe('when updating targets with nested query', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - }); - - it('should add function params', function() { - expect(ctx.ctrl.queryModel.segments.length).to.be(1); - expect(ctx.ctrl.queryModel.segments[0].value).to.be('#A'); - - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(60); - }); - - it('target should remain the same', function() { - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); - }); - - it('targetFull should include nested queries', function() { - ctx.ctrl.panelCtrl.panel.targets = [ - { - target: 'nested.query.count', - refId: 'A', - }, - ]; - - ctx.ctrl.updateModelTarget(); - - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); - - expect(ctx.ctrl.target.targetFull).to.be('scaleToSeconds(nested.query.count, 60)'); - }); - }); - - describe('when updating target used in other query', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'metrics.a.count'; - ctx.ctrl.target.refId = 'A'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - - ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }]; - - ctx.ctrl.updateModelTarget(); - }); - - it('targetFull of other query should update', function() { - expect(ctx.ctrl.panel.targets[1].targetFull).to.be('sumSeries(metrics.a.count)'); - }); - }); - - describe('when adding seriesByTag function', function() { - beforeEach(function() { - ctx.ctrl.target.target = 
''; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag')); - }); - - it('should update functions', function() { - expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).to.be(0); - }); - - it('should update seriesByTagUsed flag', function() { - expect(ctx.ctrl.queryModel.seriesByTagUsed).to.be(true); - }); - - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('seriesByTag()'); - }); - - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); - }); - }); - - describe('when parsing seriesByTag function', function() { - beforeEach(function() { - ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - }); - - it('should add tags', function() { - const expected = [ - { key: 'tag1', operator: '=', value: 'value1' }, - { key: 'tag2', operator: '!=~', value: 'value2' }, - ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); - }); - - it('should add plus button', function() { - expect(ctx.ctrl.addTagSegments.length).to.be(1); - }); - }); - - describe('when tag added', function() { - beforeEach(function() { - ctx.ctrl.target.target = 'seriesByTag()'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.addNewTag({ value: 'tag1' }); - }); - - it('should update tags with default value', function() { - const expected = [{ key: 'tag1', operator: '=', value: '' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); - }); - - it('should update target', function() { - const expected = "seriesByTag('tag1=')"; - expect(ctx.ctrl.target.target).to.eql(expected); - }); - }); - - describe('when tag changed', function() { - beforeEach(function() { - ctx.ctrl.target.target 
= "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0); - }); - - it('should update tags', function() { - const expected = [ - { key: 'tag1', operator: '=', value: 'new_value' }, - { key: 'tag2', operator: '!=~', value: 'value2' }, - ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); - }); - - it('should update target', function() { - const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); - }); - }); - - describe('when tag removed', function() { - beforeEach(function() { - ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); - ctx.ctrl.parseTarget(); - ctx.ctrl.removeTag(0); - }); - - it('should update tags', function() { - const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); - }); - - it('should update target', function() { - const expected = "seriesByTag('tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); - }); - }); -}); diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f458..2be1ecc7bff 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -22,7 +22,6 @@ export class InfluxQueryCtrl extends QueryCtrl { /** @ngInject **/ constructor($scope, $injector, private templateSrv, private $q, private uiSegmentSrv) { super($scope, $injector); - this.target = this.target; this.queryModel = new InfluxQuery(this.target, templateSrv, this.panel.scopedVars); this.queryBuilder = new InfluxQueryBuilder(this.target, this.datasource.database); diff --git 
a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 00000000000..4e3fc47a5fd --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,178 @@ +import '../query_ctrl'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', () => { + let ctx = {}; + + beforeEach(() => { + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: () => Promise.resolve([]), + }; + InfluxQueryCtrl.prototype.target = { target: {} }; + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [InfluxQueryCtrl.prototype.target], + }, + refresh: () => {}, + }; + + ctx.ctrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }) + ); + }); + + describe('init', () => { + it('should init tagSegments', () => { + expect(ctx.ctrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', () => { + expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', () => { + expect(ctx.ctrl.target.tags[0].key).toBe('asd'); + expect(ctx.ctrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', () => { + expect(ctx.ctrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', () => { + expect(ctx.ctrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', () => { + 
expect(ctx.ctrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', () => { + expect(ctx.ctrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', () => { + expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); + expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', () => { + expect(ctx.ctrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', () => { + expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', () => { + expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', () => { + expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); + expect(ctx.ctrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + 
ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', () => { + expect(ctx.ctrl.target.tags.length).toBe(0); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(ctx.ctrl.tagSegments.length).toBe(1); + expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', () => { + beforeEach(() => { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + 
ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d..00000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - expect(ctx.ctrl.measurementSegment.value).to.be('select 
measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - describe('when second tag key is added', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - 
describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - 
beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); diff --git a/public/app/plugins/datasource/logging/README.md b/public/app/plugins/datasource/logging/README.md new file mode 100644 index 00000000000..33372605973 --- /dev/null +++ b/public/app/plugins/datasource/logging/README.md @@ -0,0 +1,3 @@ +# Grafana Logging Datasource - Native Plugin + +This is a **built in** datasource that allows you to connect to Grafana's logging service. 
\ No newline at end of file diff --git a/public/app/plugins/datasource/logging/datasource.jest.ts b/public/app/plugins/datasource/logging/datasource.jest.ts new file mode 100644 index 00000000000..212d352dfca --- /dev/null +++ b/public/app/plugins/datasource/logging/datasource.jest.ts @@ -0,0 +1,38 @@ +import { parseQuery } from './datasource'; + +describe('parseQuery', () => { + it('returns empty for empty string', () => { + expect(parseQuery('')).toEqual({ + query: '', + regexp: '', + }); + }); + + it('returns regexp for strings without query', () => { + expect(parseQuery('test')).toEqual({ + query: '', + regexp: 'test', + }); + }); + + it('returns query for strings without regexp', () => { + expect(parseQuery('{foo="bar"}')).toEqual({ + query: '{foo="bar"}', + regexp: '', + }); + }); + + it('returns query for strings with query and search string', () => { + expect(parseQuery('x {foo="bar"}')).toEqual({ + query: '{foo="bar"}', + regexp: 'x', + }); + }); + + it('returns query for strings with query and regexp', () => { + expect(parseQuery('{foo="bar"} x|y')).toEqual({ + query: '{foo="bar"}', + regexp: 'x|y', + }); + }); +}); diff --git a/public/app/plugins/datasource/logging/datasource.ts b/public/app/plugins/datasource/logging/datasource.ts new file mode 100644 index 00000000000..22edba5807a --- /dev/null +++ b/public/app/plugins/datasource/logging/datasource.ts @@ -0,0 +1,134 @@ +import _ from 'lodash'; + +import * as dateMath from 'app/core/utils/datemath'; + +import { processStreams } from './result_transformer'; + +const DEFAULT_LIMIT = 100; + +const DEFAULT_QUERY_PARAMS = { + direction: 'BACKWARD', + limit: DEFAULT_LIMIT, + regexp: '', + query: '', +}; + +const QUERY_REGEXP = /({\w+="[^"]+"})?\s*(\w[^{]+)?\s*({\w+="[^"]+"})?/; +export function parseQuery(input: string) { + const match = input.match(QUERY_REGEXP); + let query = ''; + let regexp = ''; + + if (match) { + if (match[1]) { + query = match[1]; + } + if (match[2]) { + regexp = match[2].trim(); + } + 
if (match[3]) { + if (match[1]) { + query = `${match[1].slice(0, -1)},${match[3].slice(1)}`; + } else { + query = match[3]; + } + } + } + + return { query, regexp }; +} + +function serializeParams(data: any) { + return Object.keys(data) + .map(k => { + const v = data[k]; + return encodeURIComponent(k) + '=' + encodeURIComponent(v); + }) + .join('&'); +} + +export default class LoggingDatasource { + /** @ngInject */ + constructor(private instanceSettings, private backendSrv, private templateSrv) {} + + _request(apiUrl: string, data?, options?: any) { + const baseUrl = this.instanceSettings.url; + const params = data ? serializeParams(data) : ''; + const url = `${baseUrl}${apiUrl}?${params}`; + const req = { + ...options, + url, + }; + return this.backendSrv.datasourceRequest(req); + } + + prepareQueryTarget(target, options) { + const interpolated = this.templateSrv.replace(target.expr); + const start = this.getTime(options.range.from, false); + const end = this.getTime(options.range.to, true); + return { + ...DEFAULT_QUERY_PARAMS, + ...parseQuery(interpolated), + start, + end, + }; + } + + query(options) { + const queryTargets = options.targets + .filter(target => target.expr) + .map(target => this.prepareQueryTarget(target, options)); + if (queryTargets.length === 0) { + return Promise.resolve({ data: [] }); + } + + const queries = queryTargets.map(target => this._request('/api/prom/query', target)); + + return Promise.all(queries).then((results: any[]) => { + // Flatten streams from multiple queries + const allStreams = results.reduce((acc, response, i) => { + const streams = response.data.streams || []; + // Inject search for match highlighting + const search = queryTargets[i].regexp; + streams.forEach(s => { + s.search = search; + }); + return [...acc, ...streams]; + }, []); + const model = processStreams(allStreams, DEFAULT_LIMIT); + return { data: model }; + }); + } + + metadataRequest(url) { + // HACK to get label values for {job=|}, will be replaced when 
implementing LoggingQueryField + const apiUrl = url.replace('v1', 'prom'); + return this._request(apiUrl, { silent: true }).then(res => { + const data = { data: { data: res.data.values || [] } }; + return data; + }); + } + + getTime(date, roundUp) { + if (_.isString(date)) { + date = dateMath.parse(date, roundUp); + } + return Math.ceil(date.valueOf() * 1e6); + } + + testDatasource() { + return this._request('/api/prom/label') + .then(res => { + if (res && res.data && res.data.values && res.data.values.length > 0) { + return { status: 'success', message: 'Data source connected and labels found.' }; + } + return { + status: 'error', + message: 'Data source connected, but no labels received. Verify that logging is configured properly.', + }; + }) + .catch(err => { + return { status: 'error', message: err.message }; + }); + } +} diff --git a/public/app/plugins/datasource/logging/img/grafana_icon.svg b/public/app/plugins/datasource/logging/img/grafana_icon.svg new file mode 100644 index 00000000000..72702223dc7 --- /dev/null +++ b/public/app/plugins/datasource/logging/img/grafana_icon.svg @@ -0,0 +1,57 @@ + + + + + + + + + + + + diff --git a/public/app/plugins/datasource/logging/module.ts b/public/app/plugins/datasource/logging/module.ts new file mode 100644 index 00000000000..5e3ffb3282a --- /dev/null +++ b/public/app/plugins/datasource/logging/module.ts @@ -0,0 +1,7 @@ +import Datasource from './datasource'; + +export class LoggingConfigCtrl { + static templateUrl = 'partials/config.html'; +} + +export { Datasource, LoggingConfigCtrl as ConfigCtrl }; diff --git a/public/app/plugins/datasource/logging/partials/config.html b/public/app/plugins/datasource/logging/partials/config.html new file mode 100644 index 00000000000..8e79cc0adcc --- /dev/null +++ b/public/app/plugins/datasource/logging/partials/config.html @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/public/app/plugins/datasource/logging/plugin.json 
b/public/app/plugins/datasource/logging/plugin.json new file mode 100644 index 00000000000..9aa844f21cb --- /dev/null +++ b/public/app/plugins/datasource/logging/plugin.json @@ -0,0 +1,28 @@ +{ + "type": "datasource", + "name": "Grafana Logging", + "id": "logging", + "metrics": false, + "alerting": false, + "annotations": false, + "logs": true, + "explore": true, + "info": { + "description": "Grafana Logging Data Source for Grafana", + "author": { + "name": "Grafana Project", + "url": "https://grafana.com" + }, + "logos": { + "small": "img/grafana_icon.svg", + "large": "img/grafana_icon.svg" + }, + "links": [ + { + "name": "Grafana Logging", + "url": "https://grafana.com/" + } + ], + "version": "5.3.0" + } +} \ No newline at end of file diff --git a/public/app/plugins/datasource/logging/result_transformer.jest.ts b/public/app/plugins/datasource/logging/result_transformer.jest.ts new file mode 100644 index 00000000000..0d203f748ba --- /dev/null +++ b/public/app/plugins/datasource/logging/result_transformer.jest.ts @@ -0,0 +1,45 @@ +import { LogLevel } from 'app/core/logs_model'; + +import { getLogLevel, getSearchMatches } from './result_transformer'; + +describe('getSearchMatches()', () => { + it('gets no matches for when search and or line are empty', () => { + expect(getSearchMatches('', '')).toEqual([]); + expect(getSearchMatches('foo', '')).toEqual([]); + expect(getSearchMatches('', 'foo')).toEqual([]); + }); + + it('gets no matches for unmatched search string', () => { + expect(getSearchMatches('foo', 'bar')).toEqual([]); + }); + + it('gets matches for matched search string', () => { + expect(getSearchMatches('foo', 'foo')).toEqual([{ length: 3, start: 0, text: 'foo' }]); + expect(getSearchMatches(' foo ', 'foo')).toEqual([{ length: 3, start: 1, text: 'foo' }]); + }); + + expect(getSearchMatches(' foo foo bar ', 'foo|bar')).toEqual([ + { length: 3, start: 1, text: 'foo' }, + { length: 3, start: 5, text: 'foo' }, + { length: 3, start: 9, text: 'bar' }, + ]); 
+}); + +describe('getLoglevel()', () => { + it('returns no log level on empty line', () => { + expect(getLogLevel('')).toBe(undefined); + }); + + it('returns no log level on when level is part of a word', () => { + expect(getLogLevel('this is a warning')).toBe(undefined); + }); + + it('returns log level on line contains a log level', () => { + expect(getLogLevel('warn: it is looking bad')).toBe(LogLevel.warn); + expect(getLogLevel('2007-12-12 12:12:12 [WARN]: it is looking bad')).toBe(LogLevel.warn); + }); + + it('returns first log level found', () => { + expect(getLogLevel('WARN this could be a debug message')).toBe(LogLevel.warn); + }); +}); diff --git a/public/app/plugins/datasource/logging/result_transformer.ts b/public/app/plugins/datasource/logging/result_transformer.ts new file mode 100644 index 00000000000..e238778614c --- /dev/null +++ b/public/app/plugins/datasource/logging/result_transformer.ts @@ -0,0 +1,71 @@ +import _ from 'lodash'; +import moment from 'moment'; + +import { LogLevel, LogsModel, LogRow } from 'app/core/logs_model'; + +export function getLogLevel(line: string): LogLevel { + if (!line) { + return undefined; + } + let level: LogLevel; + Object.keys(LogLevel).forEach(key => { + if (!level) { + const regexp = new RegExp(`\\b${key}\\b`, 'i'); + if (regexp.test(line)) { + level = LogLevel[key]; + } + } + }); + return level; +} + +export function getSearchMatches(line: string, search: string) { + // Empty search can send re.exec() into infinite loop, exit early + if (!line || !search) { + return []; + } + const regexp = new RegExp(`(?:${search})`, 'g'); + const matches = []; + let match; + while ((match = regexp.exec(line))) { + matches.push({ + text: match[0], + start: match.index, + length: match[0].length, + }); + } + return matches; +} + +export function processEntry(entry: { line: string; timestamp: string }, stream): LogRow { + const { line, timestamp } = entry; + const { labels } = stream; + const key = `EK${timestamp}${labels}`; + 
const time = moment(timestamp); + const timeFromNow = time.fromNow(); + const timeLocal = time.format('YYYY-MM-DD HH:mm:ss'); + const searchMatches = getSearchMatches(line, stream.search); + const logLevel = getLogLevel(line); + + return { + key, + logLevel, + searchMatches, + timeFromNow, + timeLocal, + entry: line, + timestamp: timestamp, + }; +} + +export function processStreams(streams, limit?: number): LogsModel { + const combinedEntries = streams.reduce((acc, stream) => { + return [...acc, ...stream.entries.map(entry => processEntry(entry, stream))]; + }, []); + const sortedEntries = _.chain(combinedEntries) + .sortBy('timestamp') + .reverse() + .slice(0, limit || combinedEntries.length) + .value(); + return { rows: sortedEntries }; +} diff --git a/public/app/plugins/datasource/opentsdb/datasource.ts b/public/app/plugins/datasource/opentsdb/datasource.ts index 39ad6c64e11..07ec4a794ec 100644 --- a/public/app/plugins/datasource/opentsdb/datasource.ts +++ b/public/app/plugins/datasource/opentsdb/datasource.ts @@ -480,17 +480,17 @@ export default class OpenTsDatasource { mapMetricsToTargets(metrics, options, tsdbVersion) { var interpolatedTagValue, arrTagV; - return _.map(metrics, function(metricData) { + return _.map(metrics, metricData => { if (tsdbVersion === 3) { return metricData.query.index; } else { - return _.findIndex(options.targets, function(target) { + return _.findIndex(options.targets, target => { if (target.filters && target.filters.length > 0) { return target.metric === metricData.metric; } else { return ( target.metric === metricData.metric && - _.every(target.tags, function(tagV, tagK) { + _.every(target.tags, (tagV, tagK) => { interpolatedTagValue = this.templateSrv.replace(tagV, options.scopedVars, 'pipe'); arrTagV = interpolatedTagValue.split('|'); return _.includes(arrTagV, metricData.tags[tagK]) || interpolatedTagValue === '*'; diff --git a/public/app/plugins/datasource/prometheus/datasource.ts 
b/public/app/plugins/datasource/prometheus/datasource.ts index d7d33264c99..75a946d6f36 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -17,11 +17,17 @@ export function alignRange(start, end, step) { } export function prometheusRegularEscape(value) { - return value.replace(/'/g, "\\\\'"); + if (typeof value === 'string') { + return value.replace(/'/g, "\\\\'"); + } + return value; } export function prometheusSpecialRegexEscape(value) { - return prometheusRegularEscape(value.replace(/\\/g, '\\\\\\\\').replace(/[$^*{}\[\]+?.()]/g, '\\\\$&')); + if (typeof value === 'string') { + return prometheusRegularEscape(value.replace(/\\/g, '\\\\\\\\').replace(/[$^*{}\[\]+?.()]/g, '\\\\$&')); + } + return value; } export class PrometheusDatasource { @@ -190,13 +196,14 @@ export class PrometheusDatasource { var intervalFactor = target.intervalFactor || 1; // Adjust the interval to take into account any specified minimum and interval factor plus Prometheus limits var adjustedInterval = this.adjustInterval(interval, minInterval, range, intervalFactor); - var scopedVars = options.scopedVars; + var scopedVars = { ...options.scopedVars, ...this.getRangeScopedVars() }; // If the interval was adjusted, make a shallow copy of scopedVars with updated interval vars if (interval !== adjustedInterval) { interval = adjustedInterval; scopedVars = Object.assign({}, options.scopedVars, { __interval: { text: interval + 's', value: interval + 's' }, __interval_ms: { text: interval * 1000, value: interval * 1000 }, + ...this.getRangeScopedVars(), }); } query.step = interval; @@ -279,11 +286,26 @@ export class PrometheusDatasource { return this.$q.when([]); } - let interpolated = this.templateSrv.replace(query, {}, this.interpolateQueryExpr); + let scopedVars = { + __interval: { text: this.interval, value: this.interval }, + __interval_ms: { text: kbn.interval_to_ms(this.interval), value: 
kbn.interval_to_ms(this.interval) }, + ...this.getRangeScopedVars(), + }; + let interpolated = this.templateSrv.replace(query, scopedVars, this.interpolateQueryExpr); var metricFindQuery = new PrometheusMetricFindQuery(this, interpolated, this.timeSrv); return metricFindQuery.process(); } + getRangeScopedVars() { + let range = this.timeSrv.timeRange(); + let msRange = range.to.diff(range.from); + let regularRange = kbn.secondsToHms(msRange / 1000); + return { + __range_ms: { text: msRange, value: msRange }, + __range: { text: regularRange, value: regularRange }, + }; + } + annotationQuery(options) { var annotation = options.annotation; var expr = annotation.expr || ''; @@ -357,6 +379,7 @@ export class PrometheusDatasource { state = { ...state, queries, + datasource: this.name, }; } return state; diff --git a/public/app/plugins/datasource/prometheus/plugin.json b/public/app/plugins/datasource/prometheus/plugin.json index 88847765159..2b723fd0b9d 100644 --- a/public/app/plugins/datasource/prometheus/plugin.json +++ b/public/app/plugins/datasource/prometheus/plugin.json @@ -2,21 +2,30 @@ "type": "datasource", "name": "Prometheus", "id": "prometheus", - "includes": [ - {"type": "dashboard", "name": "Prometheus Stats", "path": "dashboards/prometheus_stats.json"}, - {"type": "dashboard", "name": "Prometheus 2.0 Stats", "path": "dashboards/prometheus_2_stats.json"}, - {"type": "dashboard", "name": "Grafana Stats", "path": "dashboards/grafana_stats.json"} + { + "type": "dashboard", + "name": "Prometheus Stats", + "path": "dashboards/prometheus_stats.json" + }, + { + "type": "dashboard", + "name": "Prometheus 2.0 Stats", + "path": "dashboards/prometheus_2_stats.json" + }, + { + "type": "dashboard", + "name": "Grafana Stats", + "path": "dashboards/grafana_stats.json" + } ], - "metrics": true, "alerting": true, "annotations": true, - + "explore": true, "queryOptions": { "minInterval": true }, - "info": { "description": "Prometheus Data Source for Grafana", "author": { @@ 
-28,8 +37,11 @@ "large": "img/prometheus_logo.svg" }, "links": [ - {"name": "Prometheus", "url": "https://prometheus.io/"} + { + "name": "Prometheus", + "url": "https://prometheus.io/" + } ], "version": "5.0.0" } -} +} \ No newline at end of file diff --git a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts similarity index 79% rename from public/app/plugins/datasource/prometheus/specs/completer_specs.ts rename to public/app/plugins/datasource/prometheus/specs/completer.jest.ts index 84694834089..b29e4d27233 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,47 +1,40 @@ -import { describe, it, sinon, expect } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; - import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; +import { BackendSrv } from 'app/core/services/backend_srv'; +jest.mock('../datasource'); +jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - var ctx = new helpers.ServiceTestContext(); - beforeEach(ctx.providePhase(['templateSrv'])); - function getSessionStub(data) { return { - getTokenAt: sinon.stub().returns(data.currentToken), - getTokens: sinon.stub().returns(data.tokens), - getLine: sinon.stub().returns(data.line), + getTokenAt: jest.fn(() => data.currentToken), + getTokens: jest.fn(() => data.tokens), + getLine: jest.fn(() => data.line), }; } let editor = {}; - let datasourceStub = { - performInstantQuery: sinon - .stub() - .withArgs({ expr: '{__name__="node_cpu"' }) - .returns( - Promise.resolve({ - data: { - data: { - result: [ - { - metric: { - job: 'node', - instance: 'localhost:9100', - }, - }, - ], + + let backendSrv = {}; + let datasourceStub = new PrometheusDatasource({}, {}, backendSrv, {}, {}); + + datasourceStub.performInstantQuery = jest.fn(() 
=> + Promise.resolve({ + data: { + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', + }, }, - }, - }) - ), - performSuggestQuery: sinon - .stub() - .withArgs('node', true) - .returns(Promise.resolve(['node_cpu'])), - }; + ], + }, + }, + }) + ); + datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); let templateSrv = { variables: [ @@ -62,9 +55,9 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, '[', (s, res) => { - expect(res[0].caption).to.eql('$__interval'); - expect(res[0].value).to.eql('[$__interval'); - expect(res[0].meta).to.eql('range vector'); + expect(res[0].caption).toEqual('$__interval'); + expect(res[0].value).toEqual('[$__interval'); + expect(res[0].meta).toEqual('range vector'); }); }); }); @@ -93,7 +86,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -125,7 +118,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -156,7 +149,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 15 }, 'n', (s, res) => { - expect(res[0].meta).to.eql('label value'); + expect(res[0].meta).toEqual('label value'); }); }); }); @@ -192,7 +185,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'm', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); diff --git 
a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts index 219b990e5dd..b8b2b50f590 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts @@ -2,6 +2,7 @@ import _ from 'lodash'; import moment from 'moment'; import q from 'q'; import { alignRange, PrometheusDatasource, prometheusSpecialRegexEscape, prometheusRegularEscape } from '../datasource'; +jest.mock('../metric_find_query'); describe('PrometheusDatasource', () => { let ctx: any = {}; @@ -18,7 +19,14 @@ describe('PrometheusDatasource', () => { ctx.templateSrvMock = { replace: a => a, }; - ctx.timeSrvMock = {}; + ctx.timeSrvMock = { + timeRange: () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }, + }; beforeEach(() => { ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); @@ -166,6 +174,9 @@ describe('PrometheusDatasource', () => { }); describe('Prometheus regular escaping', function() { + it('should not escape non-string', function() { + expect(prometheusRegularEscape(12)).toEqual(12); + }); it('should not escape simple string', function() { expect(prometheusRegularEscape('cryptodepression')).toEqual('cryptodepression'); }); @@ -201,4 +212,37 @@ describe('PrometheusDatasource', () => { expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?'); }); }); + + describe('metricFindQuery', () => { + beforeEach(() => { + let query = 'query_result(topk(5,rate(http_request_duration_microseconds_count[$__interval])))'; + ctx.templateSrvMock.replace = jest.fn(); + ctx.timeSrvMock.timeRange = () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }; + ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); + ctx.ds.metricFindQuery(query); 
+ }); + + it('should call templateSrv.replace with scopedVars', () => { + expect(ctx.templateSrvMock.replace.mock.calls[0][1]).toBeDefined(); + }); + + it('should have the correct range and range_ms', () => { + let range = ctx.templateSrvMock.replace.mock.calls[0][1].__range; + let rangeMs = ctx.templateSrvMock.replace.mock.calls[0][1].__range_ms; + expect(range).toEqual({ text: '21s', value: '21s' }); + expect(rangeMs).toEqual({ text: 21031, value: 21031 }); + }); + + it('should pass the default interval value', () => { + let interval = ctx.templateSrvMock.replace.mock.calls[0][1].__interval; + let intervalMs = ctx.templateSrvMock.replace.mock.calls[0][1].__interval_ms; + expect(interval).toEqual({ text: '15s', value: '15s' }); + expect(intervalMs).toEqual({ text: 15000, value: 15000 }); + }); + }); }); diff --git a/public/app/plugins/app/testdata/datasource/datasource.ts b/public/app/plugins/datasource/testdata/datasource.ts similarity index 100% rename from public/app/plugins/app/testdata/datasource/datasource.ts rename to public/app/plugins/datasource/testdata/datasource.ts diff --git a/public/app/plugins/app/testdata/datasource/module.ts b/public/app/plugins/datasource/testdata/module.ts similarity index 100% rename from public/app/plugins/app/testdata/datasource/module.ts rename to public/app/plugins/datasource/testdata/module.ts diff --git a/public/app/plugins/app/testdata/partials/query.editor.html b/public/app/plugins/datasource/testdata/partials/query.editor.html similarity index 99% rename from public/app/plugins/app/testdata/partials/query.editor.html rename to public/app/plugins/datasource/testdata/partials/query.editor.html index 247918bce1f..fc16f2a8b44 100644 --- a/public/app/plugins/app/testdata/partials/query.editor.html +++ b/public/app/plugins/datasource/testdata/partials/query.editor.html @@ -37,4 +37,3 @@
- diff --git a/public/app/plugins/app/testdata/datasource/plugin.json b/public/app/plugins/datasource/testdata/plugin.json similarity index 60% rename from public/app/plugins/app/testdata/datasource/plugin.json rename to public/app/plugins/datasource/testdata/plugin.json index 80445dfb3bc..774603982e0 100644 --- a/public/app/plugins/app/testdata/datasource/plugin.json +++ b/public/app/plugins/datasource/testdata/plugin.json @@ -1,7 +1,7 @@ { "type": "datasource", - "name": "Grafana TestDataDB", - "id": "grafana-testdata-datasource", + "name": "TestData DB", + "id": "testdata", "metrics": true, "alerting": true, @@ -13,8 +13,8 @@ "url": "https://grafana.com" }, "logos": { - "small": "", - "large": "" + "small": "../../../../img/grafana_icon.svg", + "large": "../../../../img/grafana_icon.svg" } } } diff --git a/public/app/plugins/app/testdata/datasource/query_ctrl.ts b/public/app/plugins/datasource/testdata/query_ctrl.ts similarity index 100% rename from public/app/plugins/app/testdata/datasource/query_ctrl.ts rename to public/app/plugins/datasource/testdata/query_ctrl.ts diff --git a/public/app/plugins/panel/graph/jquery.flot.events.js b/public/app/plugins/panel/graph/jquery.flot.events.js deleted file mode 100644 index 3ea3ca8f330..00000000000 --- a/public/app/plugins/panel/graph/jquery.flot.events.js +++ /dev/null @@ -1,604 +0,0 @@ -define([ - 'jquery', - 'lodash', - 'angular', - 'tether-drop', -], -function ($, _, angular, Drop) { - 'use strict'; - - function createAnnotationToolip(element, event, plot) { - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var eventManager = plot.getOptions().events.manager; - var tmpScope = $rootScope.$new(true); - tmpScope.event = event; - tmpScope.onEdit = function() { - eventManager.editEvent(event); - }; - - $compile(content)(tmpScope); - tmpScope.$digest(); - 
tmpScope.$destroy(); - - var drop = new Drop({ - target: element[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--annotation', - openOn: 'hover', - hoverCloseDelay: 200, - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - - drop.on('close', function() { - setTimeout(function() { - drop.destroy(); - }); - }); - }]); - } - - var markerElementToAttachTo = null; - - function createEditPopover(element, event, plot) { - var eventManager = plot.getOptions().events.manager; - if (eventManager.editorOpen) { - // update marker element to attach to (needed in case of legend on the right - // when there is a double render pass and the initial marker element is removed) - markerElementToAttachTo = element; - return; - } - - // mark as openend - eventManager.editorOpened(); - // set marker element to attache to - markerElementToAttachTo = element; - - // wait for element to be attached and positioned - setTimeout(function() { - - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var scope = $rootScope.$new(true); - var drop; - - scope.event = event; - scope.panelCtrl = eventManager.panelCtrl; - scope.close = function() { - drop.close(); - }; - - $compile(content)(scope); - scope.$digest(); - - drop = new Drop({ - target: markerElementToAttachTo[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--form', - openOn: 'click', - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - eventManager.editorOpened(); - - drop.on('close', function() { - // need timeout here in order call drop.destroy - setTimeout(function() { - eventManager.editorClosed(); - scope.$destroy(); - drop.destroy(); - }); - }); - }]); - - }, 100); - } - - /* - * 
jquery.flot.events - * - * description: Flot plugin for adding events/markers to the plot - * version: 0.2.5 - * authors: - * Alexander Wunschik - * Joel Oughton - * Nicolas Joseph - * - * website: https://github.com/mojoaxel/flot-events - * - * released under MIT License and GPLv2+ - */ - - /** - * A class that allows for the drawing an remove of some object - */ - var DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { - var _object = object; - var _drawFunc = drawFunc; - var _clearFunc = clearFunc; - var _moveFunc = moveFunc; - var _position = { left: left, top: top }; - var _width = width; - var _height = height; - - this.width = function() { return _width; }; - this.height = function() { return _height; }; - this.position = function() { return _position; }; - this.draw = function() { _drawFunc(_object); }; - this.clear = function() { _clearFunc(_object); }; - this.getObject = function() { return _object; }; - this.moveTo = function(position) { - _position = position; - _moveFunc(_object, _position); - }; - }; - - /** - * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
- */ - var VisualEvent = function(options, drawableEvent) { - var _parent; - var _options = options; - var _drawableEvent = drawableEvent; - var _hidden = false; - - this.visual = function() { return _drawableEvent; }; - this.getOptions = function() { return _options; }; - this.getParent = function() { return _parent; }; - this.isHidden = function() { return _hidden; }; - this.hide = function() { _hidden = true; }; - this.unhide = function() { _hidden = false; }; - }; - - /** - * A Class that handles the event-markers inside the given plot - */ - var EventMarkers = function(plot) { - var _events = []; - - this._types = []; - this._plot = plot; - this.eventsEnabled = false; - - this.getEvents = function() { - return _events; - }; - - this.setTypes = function(types) { - return this._types = types; - }; - - /** - * create internal objects for the given events - */ - this.setupEvents = function(events) { - var that = this; - var parts = _.partition(events, 'isRegion'); - var regions = parts[0]; - events = parts[1]; - - $.each(events, function(index, event) { - var ve = new VisualEvent(event, that._buildDiv(event)); - _events.push(ve); - }); - - $.each(regions, function (index, event) { - var vre = new VisualEvent(event, that._buildRegDiv(event)); - _events.push(vre); - }); - - _events.sort(function(a, b) { - var ao = a.getOptions(), bo = b.getOptions(); - if (ao.min > bo.min) { return 1; } - if (ao.min < bo.min) { return -1; } - return 0; - }); - }; - - /** - * draw the events to the plot - */ - this.drawEvents = function() { - var that = this; - // var o = this._plot.getPlotOffset(); - - $.each(_events, function(index, event) { - // check event is inside the graph range - if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { - event.visual().draw(); - } else { - event.visual().getObject().hide(); - } - }); - }; - - /** - * update the position of the event-markers (e.g. 
after scrolling or zooming) - */ - this.updateEvents = function() { - var that = this; - var o = this._plot.getPlotOffset(), left, top; - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - - $.each(_events, function(index, event) { - top = o.top + that._plot.height() - event.visual().height(); - left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; - event.visual().moveTo({ top: top, left: left }); - }); - }; - - /** - * remove all events from the plot - */ - this._clearEvents = function() { - $.each(_events, function(index, val) { - val.visual().clear(); - }); - _events = []; - }; - - /** - * create a DOM element for the given event - */ - this._buildDiv = function(event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; - var markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { - markerSize = 8; //default marker size - } else { - markerSize = this._types[eventTypeId].markerSize; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { - markerShow = true; - } else { - markerShow = this._types[eventTypeId].markerShow; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - 
markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - var topOffset = xaxis.options.eventSectionHeight || 0; - topOffset = topOffset / 3; - - top = o.top + this._plot.height() + topOffset; - left = xaxis.p2c(event.min) + o.left; - - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": left + 'px', - "top": 8, - "width": lineWidth + "px", - "height": this._plot.height() + topOffset * 0.8, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }) - .appendTo(container); - - if (markerShow) { - var marker = $('
').css({ - "position": "absolute", - "left": (-markerSize - Math.round(lineWidth / 2)) + "px", - "font-size": 0, - "line-height": 0, - "width": 0, - "height": 0, - "border-left": markerSize+"px solid transparent", - "border-right": markerSize+"px solid transparent" - }); - - marker.appendTo(line); - - if (this._types[eventTypeId] && this._types[eventTypeId].position && this._types[eventTypeId].position.toUpperCase() === 'BOTTOM') { - marker.css({ - "top": top-markerSize-8 +"px", - "border-top": "none", - "border-bottom": markerSize+"px solid " + color - }); - } else { - marker.css({ - "top": "0px", - "border-top": markerSize+"px solid " + color, - "border-bottom": "none" - }); - } - - marker.data({ - "event": event - }); - - var mouseenter = function() { - createAnnotationToolip(marker, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(marker, event.editModel, that._plot); - } - - var mouseleave = function() { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - marker.css({ "cursor": "help" }); - marker.hover(mouseenter, mouseleave); - } - } - - var drawableEvent = new DrawableEvent( - line, - function drawFunc(obj) { obj.show(); }, - function(obj) { obj.remove(); }, - function(obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - line.width(), - line.height() - ); - - return drawableEvent; - }; - - /** - * create a DOM element for the given region - */ - this._buildRegDiv = function (event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to 
a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - var topOffset = 2; - top = o.top + this._plot.height() + topOffset; - - var timeFrom = Math.min(event.min, event.timeEnd); - var timeTo = Math.max(event.min, event.timeEnd); - left = xaxis.p2c(timeFrom) + o.left; - var right = xaxis.p2c(timeTo) + o.left; - regionWidth = right - left; - - _.each([left, right], function(position) { - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": position + 'px', - "top": 8, - "width": lineWidth + "px", - "height": that._plot.height() + topOffset, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }); - line.appendTo(container); - }); - - var region = $('
').css({ - "position": "absolute", - "opacity": 0.5, - "left": left + 'px', - "top": top, - "width": Math.round(regionWidth + lineWidth) + "px", - "height": "0.5rem", - "border-left-color": color, - "color": color, - "background-color": color - }); - region.appendTo(container); - - region.data({ - "event": event - }); - - var mouseenter = function () { - createAnnotationToolip(region, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(region, event.editModel, that._plot); - } - - var mouseleave = function () { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - region.css({ "cursor": "help" }); - region.hover(mouseenter, mouseleave); - } - - var drawableEvent = new DrawableEvent( - region, - function drawFunc(obj) { obj.show(); }, - function (obj) { obj.remove(); }, - function (obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - region.width(), - region.height() - ); - - return drawableEvent; - }; - - /** - * check if the event is inside visible range - */ - this._insidePlot = function(x) { - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var xc = xaxis.p2c(x); - return xc > 0 && xc < xaxis.p2c(xaxis.max); - }; - }; - - /** - * initialize the plugin for the given plot - */ - function init(plot) { - /*jshint validthis:true */ - var that = this; - var eventMarkers = new EventMarkers(plot); - - plot.getEvents = function() { - return eventMarkers._events; - }; - - plot.hideEvents = function() { - $.each(eventMarkers._events, function(index, event) { - event.visual().getObject().hide(); - }); - }; - - plot.showEvents = function() { - plot.hideEvents(); - $.each(eventMarkers._events, function(index, event) { - event.hide(); - }); - - that.eventMarkers.drawEvents(); - }; - - // change events on an existing plot - plot.setEvents = function(events) { - if (eventMarkers.eventsEnabled) { - eventMarkers.setupEvents(events); - } - }; - - 
plot.hooks.processOptions.push(function(plot, options) { - // enable the plugin - if (options.events.data != null) { - eventMarkers.eventsEnabled = true; - } - }); - - plot.hooks.draw.push(function(plot) { - var options = plot.getOptions(); - - if (eventMarkers.eventsEnabled) { - // check for first run - if (eventMarkers.getEvents().length < 1) { - eventMarkers.setTypes(options.events.types); - eventMarkers.setupEvents(options.events.data); - } else { - eventMarkers.updateEvents(); - } - } - - eventMarkers.drawEvents(); - }); - } - - var defaultOptions = { - events: { - data: null, - types: null, - xaxis: 1, - position: 'BOTTOM' - } - }; - - $.plot.plugins.push({ - init: init, - options: defaultOptions, - name: "events", - version: "0.2.5" - }); -}); diff --git a/public/app/plugins/panel/graph/jquery.flot.events.ts b/public/app/plugins/panel/graph/jquery.flot.events.ts new file mode 100644 index 00000000000..9dfe0a8573f --- /dev/null +++ b/public/app/plugins/panel/graph/jquery.flot.events.ts @@ -0,0 +1,671 @@ +import angular from 'angular'; +import $ from 'jquery'; +import _ from 'lodash'; +import Drop from 'tether-drop'; + +/** @ngInject */ +export function createAnnotationToolip(element, event, plot) { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let eventManager = plot.getOptions().events.manager; + let tmpScope = $rootScope.$new(true); + tmpScope.event = event; + tmpScope.onEdit = function() { + eventManager.editEvent(event); + }; + + $compile(content)(tmpScope); + tmpScope.$digest(); + tmpScope.$destroy(); + + let drop = new Drop({ + target: element[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--annotation', + openOn: 'hover', + hoverCloseDelay: 200, + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + 
+ drop.open(); + + drop.on('close', function() { + setTimeout(function() { + drop.destroy(); + }); + }); + }, + ]); +} + +let markerElementToAttachTo = null; + +/** @ngInject */ +export function createEditPopover(element, event, plot) { + let eventManager = plot.getOptions().events.manager; + if (eventManager.editorOpen) { + // update marker element to attach to (needed in case of legend on the right + // when there is a double render pass and the inital marker element is removed) + markerElementToAttachTo = element; + return; + } + + // mark as openend + eventManager.editorOpened(); + // set marker elment to attache to + markerElementToAttachTo = element; + + // wait for element to be attached and positioned + setTimeout(function() { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let scope = $rootScope.$new(true); + let drop; + + scope.event = event; + scope.panelCtrl = eventManager.panelCtrl; + scope.close = function() { + drop.close(); + }; + + $compile(content)(scope); + scope.$digest(); + + drop = new Drop({ + target: markerElementToAttachTo[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--form', + openOn: 'click', + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + + drop.open(); + eventManager.editorOpened(); + + drop.on('close', function() { + // need timeout here in order call drop.destroy + setTimeout(function() { + eventManager.editorClosed(); + scope.$destroy(); + drop.destroy(); + }); + }); + }, + ]); + }, 100); +} + +/* + * jquery.flot.events + * + * description: Flot plugin for adding events/markers to the plot + * version: 0.2.5 + * authors: + * Alexander Wunschik + * Joel Oughton + * Nicolas Joseph + * + * website: https://github.com/mojoaxel/flot-events + * + * released under MIT License and 
GPLv2+ + */ + +/** + * A class that allows for the drawing an remove of some object + */ +export class DrawableEvent { + _object: any; + _drawFunc: any; + _clearFunc: any; + _moveFunc: any; + _position: any; + _width: any; + _height: any; + + /** @ngInject */ + constructor(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { + this._object = object; + this._drawFunc = drawFunc; + this._clearFunc = clearFunc; + this._moveFunc = moveFunc; + this._position = { left: left, top: top }; + this._width = width; + this._height = height; + } + + width() { + return this._width; + } + height() { + return this._height; + } + position() { + return this._position; + } + draw() { + this._drawFunc(this._object); + } + clear() { + this._clearFunc(this._object); + } + getObject() { + return this._object; + } + moveTo(position) { + this._position = position; + this._moveFunc(this._object, this._position); + } +} + +/** + * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
+ */ +export class VisualEvent { + _parent: any; + _options: any; + _drawableEvent: any; + _hidden: any; + + /** @ngInject */ + constructor(options, drawableEvent) { + this._options = options; + this._drawableEvent = drawableEvent; + this._hidden = false; + } + + visual() { + return this._drawableEvent; + } + getOptions() { + return this._options; + } + getParent() { + return this._parent; + } + isHidden() { + return this._hidden; + } + hide() { + this._hidden = true; + } + unhide() { + this._hidden = false; + } +} + +/** + * A Class that handles the event-markers inside the given plot + */ +export class EventMarkers { + _events: any; + _types: any; + _plot: any; + eventsEnabled: any; + + /** @ngInject */ + constructor(plot) { + this._events = []; + this._types = []; + this._plot = plot; + this.eventsEnabled = false; + } + + getEvents() { + return this._events; + } + + setTypes(types) { + return (this._types = types); + } + + /** + * create internal objects for the given events + */ + setupEvents(events) { + let parts = _.partition(events, 'isRegion'); + let regions = parts[0]; + events = parts[1]; + + $.each(events, (index, event) => { + let ve = new VisualEvent(event, this._buildDiv(event)); + this._events.push(ve); + }); + + $.each(regions, (index, event) => { + let vre = new VisualEvent(event, this._buildRegDiv(event)); + this._events.push(vre); + }); + + this._events.sort((a, b) => { + let ao = a.getOptions(), + bo = b.getOptions(); + if (ao.min > bo.min) { + return 1; + } + if (ao.min < bo.min) { + return -1; + } + return 0; + }); + } + + /** + * draw the events to the plot + */ + drawEvents() { + // var o = this._plot.getPlotOffset(); + + $.each(this._events, (index, event) => { + // check event is inside the graph range + if (this._insidePlot(event.getOptions().min) && !event.isHidden()) { + event.visual().draw(); + } else { + event + .visual() + .getObject() + .hide(); + } + }); + } + + /** + * update the position of the event-markers (e.g. 
after scrolling or zooming) + */ + updateEvents() { + let o = this._plot.getPlotOffset(), + left, + top; + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + + $.each(this._events, (index, event) => { + top = o.top + this._plot.height() - event.visual().height(); + left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; + event.visual().moveTo({ top: top, left: left }); + }); + } + + /** + * remove all events from the plot + */ + _clearEvents() { + $.each(this._events, (index, val) => { + val.visual().clear(); + }); + this._events = []; + } + + /** + * create a DOM element for the given event + */ + _buildDiv(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let top, left, color, markerSize, markerShow, lineStyle, lineWidth; + let markerTooltip; + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { + markerSize = 8; //default marker size + } else { + markerSize = this._types[eventTypeId].markerSize; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { + markerShow = true; + } else { + markerShow = this._types[eventTypeId].markerShow; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = 
this._types[eventTypeId].lineStyle.toLowerCase(); + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + let topOffset = xaxis.options.eventSectionHeight || 0; + topOffset = topOffset / 3; + + top = o.top + this._plot.height() + topOffset; + left = xaxis.p2c(event.min) + o.left; + + let line = $('
') + .css({ + position: 'absolute', + opacity: 0.8, + left: left + 'px', + top: 8, + width: lineWidth + 'px', + height: this._plot.height() + topOffset * 0.8, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }) + .appendTo(container); + + if (markerShow) { + let marker = $('
').css({ + position: 'absolute', + left: -markerSize - Math.round(lineWidth / 2) + 'px', + 'font-size': 0, + 'line-height': 0, + width: 0, + height: 0, + 'border-left': markerSize + 'px solid transparent', + 'border-right': markerSize + 'px solid transparent', + }); + + marker.appendTo(line); + + if ( + this._types[eventTypeId] && + this._types[eventTypeId].position && + this._types[eventTypeId].position.toUpperCase() === 'BOTTOM' + ) { + marker.css({ + top: top - markerSize - 8 + 'px', + 'border-top': 'none', + 'border-bottom': markerSize + 'px solid ' + color, + }); + } else { + marker.css({ + top: '0px', + 'border-top': markerSize + 'px solid ' + color, + 'border-bottom': 'none', + }); + } + + marker.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(marker, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(marker, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + marker.css({ cursor: 'help' }); + marker.hover(mouseenter, mouseleave); + } + } + + let drawableEvent = new DrawableEvent( + line, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + line.width(), + line.height() + ); + + return drawableEvent; + } + + /** + * create a DOM element for the given region + */ + _buildRegDiv(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = 
this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); + } + + let topOffset = 2; + top = o.top + this._plot.height() + topOffset; + + let timeFrom = Math.min(event.min, event.timeEnd); + let timeTo = Math.max(event.min, event.timeEnd); + left = xaxis.p2c(timeFrom) + o.left; + let right = xaxis.p2c(timeTo) + o.left; + regionWidth = right - left; + + _.each([left, right], position => { + let line = $('
').css({ + position: 'absolute', + opacity: 0.8, + left: position + 'px', + top: 8, + width: lineWidth + 'px', + height: this._plot.height() + topOffset, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }); + line.appendTo(container); + }); + + let region = $('
').css({ + position: 'absolute', + opacity: 0.5, + left: left + 'px', + top: top, + width: Math.round(regionWidth + lineWidth) + 'px', + height: '0.5rem', + 'border-left-color': color, + color: color, + 'background-color': color, + }); + region.appendTo(container); + + region.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(region, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(region, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + region.css({ cursor: 'help' }); + region.hover(mouseenter, mouseleave); + } + + let drawableEvent = new DrawableEvent( + region, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + region.width(), + region.height() + ); + + return drawableEvent; + } + + /** + * check if the event is inside visible range + */ + _insidePlot(x) { + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let xc = xaxis.p2c(x); + return xc > 0 && xc < xaxis.p2c(xaxis.max); + } +} + +/** + * initialize the plugin for the given plot + */ + +/** @ngInject */ +export function init(plot) { + /*jshint validthis:true */ + let that = this; + let eventMarkers = new EventMarkers(plot); + + plot.getEvents = function() { + return eventMarkers._events; + }; + + plot.hideEvents = function() { + $.each(eventMarkers._events, (index, event) => { + event + .visual() + .getObject() + .hide(); + }); + }; + + plot.showEvents = function() { + plot.hideEvents(); + $.each(eventMarkers._events, (index, event) => { + event.hide(); + }); + + that.eventMarkers.drawEvents(); + }; + + // change events on an existing plot + plot.setEvents = function(events) { + if (eventMarkers.eventsEnabled) { + eventMarkers.setupEvents(events); + } + }; + + 
plot.hooks.processOptions.push(function(plot, options) { + // enable the plugin + if (options.events.data != null) { + eventMarkers.eventsEnabled = true; + } + }); + + plot.hooks.draw.push(function(plot) { + let options = plot.getOptions(); + + if (eventMarkers.eventsEnabled) { + // check for first run + if (eventMarkers.getEvents().length < 1) { + eventMarkers.setTypes(options.events.types); + eventMarkers.setupEvents(options.events.data); + } else { + eventMarkers.updateEvents(); + } + } + + eventMarkers.drawEvents(); + }); +} + +let defaultOptions = { + events: { + data: null, + types: null, + xaxis: 1, + position: 'BOTTOM', + }, +}; + +$.plot.plugins.push({ + init: init, + options: defaultOptions, + name: 'events', + version: '0.2.5', +}); diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts new file mode 100644 index 00000000000..3ebcf6cdf31 --- /dev/null +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -0,0 +1,94 @@ +import moment from 'moment'; +import { GraphCtrl } from '../module'; + +jest.mock('../graph', () => ({})); + +describe('GraphCtrl', () => { + let injector = { + get: () => { + return { + timeRange: () => { + return { + from: '', + to: '', + }; + }, + }; + }, + }; + + let scope = { + $on: () => {}, + }; + + GraphCtrl.prototype.panel = { + events: { + on: () => {}, + }, + gridPos: { + w: 100, + }, + }; + + let ctx = {}; + + beforeEach(() => { + ctx.ctrl = new GraphCtrl(scope, injector, {}); + ctx.ctrl.annotationsPromise = Promise.resolve({}); + ctx.ctrl.updateTimeRange(); + }); + + describe('when time series are outside range', () => { + beforeEach(() => { + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, 1234567890], [60, 1234567899]], + }, + ]; + + ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', () => { + 
expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range'); + }); + }); + + describe('when time series are inside range', () => { + beforeEach(() => { + var range = { + from: moment() + .subtract(1, 'days') + .valueOf(), + to: moment().valueOf(), + }; + + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, range.from + 1000], [60, range.from + 10000]], + }, + ]; + + ctx.ctrl.range = range; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', () => { + expect(ctx.ctrl.dataWarning).toBe(null); + }); + }); + + describe('datapointsCount given 2 series', () => { + beforeEach(() => { + var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsCount warning', () => { + expect(ctx.ctrl.dataWarning.title).toBe('No data points'); + }); + }); +}); diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts b/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts deleted file mode 100644 index d5cefb345cf..00000000000 --- a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; - -import moment from 'moment'; -import { GraphCtrl } from '../module'; -import helpers from '../../../../../test/specs/helpers'; - -describe('GraphCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach(ctx.createPanelController(GraphCtrl)); - beforeEach(() => { - ctx.ctrl.annotationsPromise = Promise.resolve({}); - ctx.ctrl.updateTimeRange(); - }); - - describe('when time series are outside range', function() { - 
beforeEach(function() { - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, 1234567890], [60, 1234567899]], - }, - ]; - - ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range'); - }); - }); - - describe('when time series are inside range', function() { - beforeEach(function() { - var range = { - from: moment() - .subtract(1, 'days') - .valueOf(), - to: moment().valueOf(), - }; - - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, range.from + 1000], [60, range.from + 10000]], - }, - ]; - - ctx.ctrl.range = range; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning).to.be(null); - }); - }); - - describe('datapointsCount given 2 series', function() { - beforeEach(function() { - var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsCount warning', function() { - expect(ctx.ctrl.dataWarning.title).to.be('No data points'); - }); - }); -}); diff --git a/public/app/plugins/panel/singlestat/editor.html b/public/app/plugins/panel/singlestat/editor.html index 96576fd3c41..dd9ca55a760 100644 --- a/public/app/plugins/panel/singlestat/editor.html +++ b/public/app/plugins/panel/singlestat/editor.html @@ -56,10 +56,10 @@
Coloring
- +
- +
diff --git a/public/app/routes/routes.ts b/public/app/routes/routes.ts index 568b3438b38..d12711aca5b 100644 --- a/public/app/routes/routes.ts +++ b/public/app/routes/routes.ts @@ -5,6 +5,8 @@ import ServerStats from 'app/containers/ServerStats/ServerStats'; import AlertRuleList from 'app/containers/AlertRuleList/AlertRuleList'; import FolderSettings from 'app/containers/ManageDashboards/FolderSettings'; import FolderPermissions from 'app/containers/ManageDashboards/FolderPermissions'; +import TeamPages from 'app/containers/Teams/TeamPages'; +import TeamList from 'app/containers/Teams/TeamList'; /** @ngInject **/ export function setupAngularRoutes($routeProvider, $locationProvider) { @@ -110,7 +112,7 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { controller: 'FolderDashboardsCtrl', controllerAs: 'ctrl', }) - .when('/explore/:initial?', { + .when('/explore', { template: '', resolve: { roles: () => ['Editor', 'Admin'], @@ -140,19 +142,23 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { controller: 'OrgApiKeysCtrl', }) .when('/org/teams', { - templateUrl: 'public/app/features/org/partials/teams.html', - controller: 'TeamsCtrl', - controllerAs: 'ctrl', + template: '', + resolve: { + roles: () => ['Editor', 'Admin'], + component: () => TeamList, + }, }) .when('/org/teams/new', { templateUrl: 'public/app/features/org/partials/create_team.html', controller: 'CreateTeamCtrl', controllerAs: 'ctrl', }) - .when('/org/teams/edit/:id', { - templateUrl: 'public/app/features/org/partials/team_details.html', - controller: 'TeamDetailsCtrl', - controllerAs: 'ctrl', + .when('/org/teams/edit/:id/:page?', { + template: '', + resolve: { + roles: () => ['Admin'], + component: () => TeamPages, + }, }) .when('/profile', { templateUrl: 'public/app/features/org/partials/profile.html', diff --git a/public/app/stores/NavStore/NavItem.ts b/public/app/stores/NavStore/NavItem.ts index 4521d4291aa..3e8a2a837b3 100644 --- 
a/public/app/stores/NavStore/NavItem.ts +++ b/public/app/stores/NavStore/NavItem.ts @@ -1,4 +1,4 @@ -import { types } from 'mobx-state-tree'; +import { types } from 'mobx-state-tree'; export const NavItem = types.model('NavItem', { id: types.identifier(types.string), @@ -8,6 +8,7 @@ export const NavItem = types.model('NavItem', { icon: types.optional(types.string, ''), img: types.optional(types.string, ''), active: types.optional(types.boolean, false), + hideFromTabs: types.optional(types.boolean, false), breadcrumbs: types.optional(types.array(types.late(() => Breadcrumb)), []), children: types.optional(types.array(types.late(() => NavItem)), []), }); diff --git a/public/app/stores/NavStore/NavStore.ts b/public/app/stores/NavStore/NavStore.ts index 86348c00487..c69c32befa8 100644 --- a/public/app/stores/NavStore/NavStore.ts +++ b/public/app/stores/NavStore/NavStore.ts @@ -1,6 +1,7 @@ import _ from 'lodash'; import { types, getEnv } from 'mobx-state-tree'; import { NavItem } from './NavItem'; +import { ITeam } from '../TeamsStore/TeamsStore'; export const NavStore = types .model('NavStore', { @@ -115,4 +116,43 @@ export const NavStore = types self.main = NavItem.create(main); }, + + initTeamPage(team: ITeam, tab: string, isSyncEnabled: boolean) { + let main = { + img: team.avatarUrl, + id: 'team-' + team.id, + subTitle: 'Manage members & settings', + url: '', + text: team.name, + breadcrumbs: [{ title: 'Teams', url: 'org/teams' }], + children: [ + { + active: tab === 'members', + icon: 'gicon gicon-team', + id: 'team-members', + text: 'Members', + url: `org/teams/edit/${team.id}/members`, + }, + { + active: tab === 'settings', + icon: 'fa fa-fw fa-sliders', + id: 'team-settings', + text: 'Settings', + url: `org/teams/edit/${team.id}/settings`, + }, + ], + }; + + if (isSyncEnabled) { + main.children.splice(1, 0, { + active: tab === 'groupsync', + icon: 'fa fa-fw fa-refresh', + id: 'team-settings', + text: 'External group sync', + url: 
`org/teams/edit/${team.id}/groupsync`, + }); + } + + self.main = NavItem.create(main); + }, })); diff --git a/public/app/stores/RootStore/RootStore.ts b/public/app/stores/RootStore/RootStore.ts index c3bfe75d59c..8a915d20ef1 100644 --- a/public/app/stores/RootStore/RootStore.ts +++ b/public/app/stores/RootStore/RootStore.ts @@ -6,6 +6,7 @@ import { AlertListStore } from './../AlertListStore/AlertListStore'; import { ViewStore } from './../ViewStore/ViewStore'; import { FolderStore } from './../FolderStore/FolderStore'; import { PermissionsStore } from './../PermissionsStore/PermissionsStore'; +import { TeamsStore } from './../TeamsStore/TeamsStore'; export const RootStore = types.model({ search: types.optional(SearchStore, { @@ -28,6 +29,9 @@ export const RootStore = types.model({ routeParams: {}, }), folder: types.optional(FolderStore, {}), + teams: types.optional(TeamsStore, { + map: {}, + }), }); type IRootStoreType = typeof RootStore.Type; diff --git a/public/app/stores/TeamsStore/TeamsStore.ts b/public/app/stores/TeamsStore/TeamsStore.ts new file mode 100644 index 00000000000..01cdca895d4 --- /dev/null +++ b/public/app/stores/TeamsStore/TeamsStore.ts @@ -0,0 +1,156 @@ +import { types, getEnv, flow } from 'mobx-state-tree'; + +export const TeamMember = types.model('TeamMember', { + userId: types.identifier(types.number), + teamId: types.number, + avatarUrl: types.string, + email: types.string, + login: types.string, +}); + +type TeamMemberType = typeof TeamMember.Type; +export interface ITeamMember extends TeamMemberType {} + +export const TeamGroup = types.model('TeamGroup', { + groupId: types.identifier(types.string), + teamId: types.number, +}); + +type TeamGroupType = typeof TeamGroup.Type; +export interface ITeamGroup extends TeamGroupType {} + +export const Team = types + .model('Team', { + id: types.identifier(types.number), + name: types.string, + avatarUrl: types.string, + email: types.string, + memberCount: types.number, + search: 
types.optional(types.string, ''), + members: types.optional(types.map(TeamMember), {}), + groups: types.optional(types.map(TeamGroup), {}), + }) + .views(self => ({ + get filteredMembers() { + let members = this.members.values(); + let regex = new RegExp(self.search, 'i'); + return members.filter(member => { + return regex.test(member.login) || regex.test(member.email); + }); + }, + })) + .actions(self => ({ + setName(name: string) { + self.name = name; + }, + + setEmail(email: string) { + self.email = email; + }, + + setSearchQuery(query: string) { + self.search = query; + }, + + update: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + + yield backendSrv.put(`/api/teams/${self.id}`, { + name: self.name, + email: self.email, + }); + }), + + loadMembers: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get(`/api/teams/${self.id}/members`); + self.members.clear(); + + for (let member of rsp) { + self.members.set(member.userId.toString(), TeamMember.create(member)); + } + }), + + removeMember: flow(function* load(member: ITeamMember) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.delete(`/api/teams/${self.id}/members/${member.userId}`); + // remove from store map + self.members.delete(member.userId.toString()); + }), + + addMember: flow(function* load(userId: number) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.post(`/api/teams/${self.id}/members`, { userId: userId }); + }), + + loadGroups: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get(`/api/teams/${self.id}/groups`); + self.groups.clear(); + + for (let group of rsp) { + self.groups.set(group.groupId, TeamGroup.create(group)); + } + }), + + addGroup: flow(function* load(groupId: string) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.post(`/api/teams/${self.id}/groups`, { groupId: groupId }); + self.groups.set( + groupId, + 
TeamGroup.create({ + teamId: self.id, + groupId: groupId, + }) + ); + }), + + removeGroup: flow(function* load(groupId: string) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.delete(`/api/teams/${self.id}/groups/${groupId}`); + self.groups.delete(groupId); + }), + })); + +type TeamType = typeof Team.Type; +export interface ITeam extends TeamType {} + +export const TeamsStore = types + .model('TeamsStore', { + map: types.map(Team), + search: types.optional(types.string, ''), + }) + .views(self => ({ + get filteredTeams() { + let teams = this.map.values(); + let regex = new RegExp(self.search, 'i'); + return teams.filter(team => { + return regex.test(team.name); + }); + }, + })) + .actions(self => ({ + loadTeams: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get('/api/teams/search/', { perpage: 50, page: 1 }); + self.map.clear(); + + for (let team of rsp.teams) { + self.map.set(team.id.toString(), Team.create(team)); + } + }), + + setSearchQuery(query: string) { + self.search = query; + }, + + loadById: flow(function* load(id: string) { + if (self.map.has(id)) { + return; + } + + const backendSrv = getEnv(self).backendSrv; + const team = yield backendSrv.get(`/api/teams/${id}`); + self.map.set(id, Team.create(team)); + }), + })); diff --git a/public/sass/_variables.dark.scss b/public/sass/_variables.dark.scss index 4907540815d..01590ace585 100644 --- a/public/sass/_variables.dark.scss +++ b/public/sass/_variables.dark.scss @@ -44,7 +44,6 @@ $brand-success: $green; $brand-warning: $brand-primary; $brand-danger: $red; -$query-blue: $blue; $query-red: $red; $query-green: $green; $query-purple: $purple; @@ -93,24 +92,14 @@ $headings-color: darken($white, 11%); $abbr-border-color: $gray-3 !default; $text-muted: $text-color-weak; -$blockquote-small-color: $gray-3 !default; -$blockquote-border-color: $gray-4 !default; - $hr-border-color: rgba(0, 0, 0, 0.1) !default; -// Components 
-$component-active-color: #fff !default; -$component-active-bg: $brand-primary !default; - // Panel // ------------------------- $panel-bg: #212124; $panel-border-color: $dark-1; $panel-border: solid 1px $panel-border-color; -$panel-drop-zone-bg: repeating-linear-gradient(-128deg, #111, #111 10px, #191919 10px, #222 20px); $panel-header-hover-bg: $dark-4; -$panel-header-menu-hover-bg: $dark-5; -$panel-edit-shadow: 0 -30px 30px -30px $black; // page header $page-header-bg: linear-gradient(90deg, #292a2d, black); @@ -205,7 +194,6 @@ $input-box-shadow-focus: rgba(102, 175, 233, 0.6); $input-color-placeholder: $gray-1 !default; $input-label-bg: $gray-blue; $input-label-border-color: $dark-3; -$input-invalid-border-color: lighten($red, 5%); // Search $search-shadow: 0 0 30px 0 $black; @@ -223,7 +211,6 @@ $dropdownBorder: rgba(0, 0, 0, 0.2); $dropdownDividerTop: transparent; $dropdownDividerBottom: #444; $dropdownDivider: $dropdownDividerBottom; -$dropdownTitle: $link-color-disabled; $dropdownLinkColor: $text-color; $dropdownLinkColorHover: $white; @@ -232,8 +219,6 @@ $dropdownLinkColorActive: $white; $dropdownLinkBackgroundActive: $dark-4; $dropdownLinkBackgroundHover: $dark-4; -$dropdown-link-color: $gray-3; - // COMPONENT VARIABLES // -------------------------------------------------- @@ -246,22 +231,13 @@ $horizontalComponentOffset: 180px; // Wells // ------------------------- -$wellBackground: #131517; $navbarHeight: 55px; -$navbarBackgroundHighlight: $dark-3; $navbarBackground: $panel-bg; $navbarBorder: 1px solid $dark-3; $navbarShadow: 0 0 20px black; -$navbarText: $gray-4; $navbarLinkColor: $gray-4; -$navbarLinkColorHover: $white; -$navbarLinkColorActive: $navbarLinkColorHover; -$navbarLinkBackgroundHover: transparent; -$navbarLinkBackgroundActive: $navbarBackground; -$navbarBrandColor: $link-color; -$navbarDropdownShadow: inset 0px 4px 10px -4px $body-bg; $navbarButtonBackground: $navbarBackground; $navbarButtonBackgroundHighlight: $body-bg; @@ -275,20 +251,15 
@@ $side-menu-bg-mobile: $side-menu-bg; $side-menu-item-hover-bg: $dark-2; $side-menu-shadow: 0 0 20px black; $side-menu-link-color: $link-color; -$breadcrumb-hover-hl: #111; // Menu dropdowns // ------------------------- $menu-dropdown-bg: $body-bg; $menu-dropdown-hover-bg: $dark-2; -$menu-dropdown-border-color: $dark-3; $menu-dropdown-shadow: 5px 5px 20px -5px $black; // Breadcrumb // ------------------------- -$page-nav-bg: $black; -$page-nav-shadow: 5px 5px 20px -5px $black; -$page-nav-breadcrumb-color: $gray-3; // Tabs // ------------------------- @@ -296,9 +267,6 @@ $tab-border-color: $dark-4; // Pagination // ------------------------- -$paginationBackground: $body-bg; -$paginationBorder: transparent; -$paginationActiveBackground: $blue; // Form states and alerts // ------------------------- @@ -343,10 +311,6 @@ $info-box-color: $gray-4; $footer-link-color: $gray-2; $footer-link-hover: $gray-4; -// collapse box -$collapse-box-body-border: $dark-5; -$collapse-box-body-error-border: $red; - // json-explorer $json-explorer-default-color: $text-color; $json-explorer-string-color: #23d662; @@ -357,7 +321,6 @@ $json-explorer-undefined-color: rgb(239, 143, 190); $json-explorer-function-color: #fd48cb; $json-explorer-rotate-time: 100ms; $json-explorer-toggler-opacity: 0.6; -$json-explorer-toggler-color: #45376f; $json-explorer-bracket-color: #9494ff; $json-explorer-key-color: #23a0db; $json-explorer-url-color: #027bff; @@ -383,3 +346,6 @@ $diff-json-changed-fg: $gray-5; $diff-json-changed-num: $text-color; $diff-json-icon: $gray-7; + +//Submenu +$variable-option-bg: $blue-dark; diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index 14716f6dfef..b6248da6a00 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -30,8 +30,8 @@ $white: #fff; // Accent colors // ------------------------- -$blue: #61c2f2; -$blue-dark: #0083b3; +$blue: #0083b3; +$blue-light: #00a8e6; $green: #3aa655; $red: #d44939; 
$yellow: #ff851b; @@ -45,7 +45,6 @@ $brand-success: $green; $brand-warning: $orange; $brand-danger: $red; -$query-blue: $blue-dark; $query-red: $red; $query-green: $green; $query-purple: $purple; @@ -82,7 +81,7 @@ $page-gradient: linear-gradient(-60deg, $gray-7, #f5f6f9 70%, $gray-7 98%); $link-color: $gray-1; $link-color-disabled: lighten($link-color, 30%); $link-hover-color: darken($link-color, 20%); -$external-link-color: $blue; +$external-link-color: $blue-light; // Typography // ------------------------- @@ -90,25 +89,15 @@ $headings-color: $text-color; $abbr-border-color: $gray-2 !default; $text-muted: $text-color-weak; -$blockquote-small-color: $gray-2 !default; -$blockquote-border-color: $gray-3 !default; - $hr-border-color: $dark-3 !default; -// Components -$component-active-color: $white !default; -$component-active-bg: $brand-primary !default; - // Panel // ------------------------- $panel-bg: $white; $panel-border-color: $gray-5; $panel-border: solid 1px $panel-border-color; -$panel-drop-zone-bg: repeating-linear-gradient(-128deg, $body-bg, $body-bg 10px, $gray-6 10px, $gray-6 20px); $panel-header-hover-bg: $gray-6; -$panel-header-menu-hover-bg: $gray-4; -$panel-edit-shadow: 0 0 30px 20px $black; // Page header $page-header-bg: linear-gradient(90deg, $white, $gray-7); @@ -160,8 +149,8 @@ $scrollbarBorder: $gray-4; $btn-primary-bg: $brand-primary; $btn-primary-bg-hl: lighten($brand-primary, 8%); -$btn-secondary-bg: $blue-dark; -$btn-secondary-bg-hl: lighten($blue-dark, 4%); +$btn-secondary-bg: $blue; +$btn-secondary-bg-hl: lighten($blue, 4%); $btn-success-bg: lighten($green, 3%); $btn-success-bg-hl: darken($green, 3%); @@ -178,7 +167,7 @@ $btn-inverse-text-color: $gray-1; $btn-inverse-text-shadow: 0 1px 0 rgba(255, 255, 255, 0.4); $btn-active-bg: $white; -$btn-active-text-color: $blue-dark; +$btn-active-text-color: $blue; $btn-link-color: $gray-1; @@ -201,7 +190,6 @@ $input-box-shadow-focus: $blue !default; $input-color-placeholder: $gray-4 !default; 
$input-label-bg: $gray-5; $input-label-border-color: $gray-5; -$input-invalid-border-color: lighten($red, 5%); // Sidemenu // ------------------------- @@ -215,15 +203,10 @@ $side-menu-link-color: $gray-6; // ------------------------- $menu-dropdown-bg: $gray-7; $menu-dropdown-hover-bg: $gray-6; -$menu-dropdown-border-color: $gray-4; $menu-dropdown-shadow: 5px 5px 10px -5px $gray-1; // Breadcrumb // ------------------------- -$page-nav-bg: $gray-5; -$page-nav-shadow: 5px 5px 20px -5px $gray-4; -$page-nav-breadcrumb-color: $black; -$breadcrumb-hover-hl: #d9dadd; // Tabs // ------------------------- @@ -235,8 +218,8 @@ $search-filter-box-bg: $gray-7; // Typeahead $typeahead-shadow: 0 5px 10px 0 $gray-5; -$typeahead-selected-bg: lighten($blue, 25%); -$typeahead-selected-color: $blue-dark; +$typeahead-selected-bg: lighten($blue, 57%); +$typeahead-selected-color: $blue; // Dropdowns // ------------------------- @@ -245,7 +228,6 @@ $dropdownBorder: $gray-4; $dropdownDividerTop: $gray-6; $dropdownDividerBottom: $white; $dropdownDivider: $dropdownDividerTop; -$dropdownTitle: $gray-3; $dropdownLinkColor: $dark-3; $dropdownLinkColorHover: $link-color; @@ -271,24 +253,16 @@ $horizontalComponentOffset: 180px; // Wells // ------------------------- -$wellBackground: $gray-3; // Navbar // ------------------------- $navbarHeight: 52px; -$navbarBackgroundHighlight: $white; $navbarBackground: $white; $navbarBorder: 1px solid $gray-4; $navbarShadow: 0 0 3px #c1c1c1; -$navbarText: #444; $navbarLinkColor: #444; -$navbarLinkColorHover: #000; -$navbarLinkColorActive: #333; -$navbarLinkBackgroundHover: transparent; -$navbarLinkBackgroundActive: darken($navbarBackground, 6.5%); -$navbarDropdownShadow: inset 0px 4px 7px -4px darken($body-bg, 20%); $navbarBrandColor: $navbarLinkColor; @@ -299,9 +273,6 @@ $navbar-button-border: $gray-4; // Pagination // ------------------------- -$paginationBackground: $gray-2; -$paginationBorder: transparent; -$paginationActiveBackground: $blue; // Form 
states and alerts // ------------------------- @@ -313,7 +284,7 @@ $info-text-color: $blue; $alert-error-bg: linear-gradient(90deg, #d44939, #e04d3d); $alert-success-bg: linear-gradient(90deg, #3aa655, #47b274); $alert-warning-bg: linear-gradient(90deg, #d44939, #e04d3d); -$alert-info-bg: $blue-dark; +$alert-info-bg: $blue; // popover $popover-bg: $page-bg; @@ -321,7 +292,7 @@ $popover-color: $text-color; $popover-border-color: $gray-5; $popover-shadow: 0 0 20px $white; -$popover-help-bg: $blue-dark; +$popover-help-bg: $blue; $popover-help-color: $gray-6; $popover-error-bg: $btn-danger-bg; @@ -338,7 +309,7 @@ $graph-tooltip-bg: $gray-5; $checkboxImageUrl: '../img/checkbox_white.png'; // info box -$info-box-background: linear-gradient(100deg, $blue-dark, darken($blue-dark, 5%)); +$info-box-background: linear-gradient(100deg, $blue, darken($blue, 5%)); $info-box-color: $gray-7; // footer @@ -346,8 +317,6 @@ $footer-link-color: $gray-3; $footer-link-hover: $dark-5; // collapse box -$collapse-box-body-border: $gray-4; -$collapse-box-body-error-border: $red; // json explorer $json-explorer-default-color: black; @@ -359,7 +328,6 @@ $json-explorer-undefined-color: rgb(202, 11, 105); $json-explorer-function-color: #ff20ed; $json-explorer-rotate-time: 100ms; $json-explorer-toggler-opacity: 0.6; -$json-explorer-toggler-color: #45376f; $json-explorer-bracket-color: blue; $json-explorer-key-color: #00008b; $json-explorer-url-color: blue; @@ -387,3 +355,6 @@ $diff-json-new: #664e33; $diff-json-changed-fg: $gray-6; $diff-json-changed-num: $gray-4; $diff-json-icon: $gray-4; + +//Submenu +$variable-option-bg: $blue-light; diff --git a/public/sass/_variables.scss b/public/sass/_variables.scss index f46cacb0dd1..636b60c65a7 100644 --- a/public/sass/_variables.scss +++ b/public/sass/_variables.scss @@ -3,13 +3,7 @@ // Quickly modify global styling by enabling or disabling optional features. 
$enable-flex: true !default; -$enable-rounded: true !default; -$enable-shadows: false !default; -$enable-gradients: false !default; -$enable-transitions: false !default; $enable-hover-media-query: false !default; -$enable-grid-classes: true !default; -$enable-print-styles: true !default; // Spacing // @@ -53,9 +47,9 @@ $enable-flex: true; // Typography // ------------------------- -$font-family-sans-serif: "Roboto", Helvetica, Arial, sans-serif; -$font-family-serif: Georgia, "Times New Roman", Times, serif; -$font-family-monospace: Menlo, Monaco, Consolas, "Courier New", monospace; +$font-family-sans-serif: 'Roboto', Helvetica, Arial, sans-serif; +$font-family-serif: Georgia, 'Times New Roman', Times, serif; +$font-family-monospace: Menlo, Monaco, Consolas, 'Courier New', monospace; $font-family-base: $font-family-sans-serif !default; $font-size-root: 14px !default; @@ -90,16 +84,12 @@ $lead-font-size: 1.25rem !default; $lead-font-weight: 300 !default; $headings-margin-bottom: ($spacer / 2) !default; -$headings-font-family: "Roboto", "Helvetica Neue", Helvetica, Arial, sans-serif; +$headings-font-family: 'Roboto', 'Helvetica Neue', Helvetica, Arial, sans-serif; $headings-font-weight: 400 !default; $headings-line-height: 1.1 !default; -$blockquote-font-size: ($font-size-base * 1.25) !default; -$blockquote-border-width: 0.25rem !default; - $hr-border-width: $border-width !default; $dt-font-weight: bold !default; -$list-inline-padding: 5px !default; // Components // @@ -112,9 +102,6 @@ $border-radius: 3px !default; $border-radius-lg: 5px !default; $border-radius-sm: 2px!default; -$caret-width: 0.3em !default; -$caret-width-lg: $caret-width !default; - // Page $page-sidebar-width: 11rem; @@ -130,7 +117,6 @@ $link-hover-decoration: none !default; // Customizes the `.table` component with basic values, each used across all table variations. 
$table-cell-padding: 4px 10px !default; -$table-sm-cell-padding: 0.3rem !default; // Forms $input-padding-x: 10px !default; @@ -139,31 +125,18 @@ $input-line-height: 18px !default; $input-btn-border-width: 1px; $input-border-radius: 0 $border-radius $border-radius 0 !default; -$input-border-radius-lg: 0 $border-radius-lg $border-radius-lg 0 !default; $input-border-radius-sm: 0 $border-radius-sm $border-radius-sm 0 !default; $label-border-radius: $border-radius 0 0 $border-radius !default; -$label-border-radius-lg: $border-radius-lg 0 0 $border-radius-lg !default; $label-border-radius-sm: $border-radius-sm 0 0 $border-radius-sm !default; -$input-padding-x-sm: 7px !default; $input-padding-y-sm: 4px !default; $input-padding-x-lg: 20px !default; $input-padding-y-lg: 10px !default; -$input-height: (($font-size-base * $line-height-base) + ($input-padding-y * 2)) - !default; -$input-height-lg: ( - ($font-size-lg * $line-height-lg) + ($input-padding-y-lg * 2) - ) - !default; -$input-height-sm: ( - ($font-size-sm * $line-height-sm) + ($input-padding-y-sm * 2) - ) - !default; +$input-height: (($font-size-base * $line-height-base) + ($input-padding-y * 2)) !default; -$form-group-margin-bottom: $spacer-y !default; $gf-form-margin: 0.2rem; $cursor-disabled: not-allowed !default; @@ -221,9 +194,9 @@ $panel-padding: 0px 10px 5px 10px; $tabs-padding: 10px 15px 9px; $external-services: ( - github: (bgColor: #464646, borderColor: #393939, icon: ""), - google: (bgColor: #e84d3c, borderColor: #b83e31, icon: ""), - grafanacom: (bgColor: inherit, borderColor: #393939, icon: ""), - oauth: (bgColor: inherit, borderColor: #393939, icon: "") + github: (bgColor: #464646, borderColor: #393939, icon: ''), + google: (bgColor: #e84d3c, borderColor: #b83e31, icon: ''), + grafanacom: (bgColor: inherit, borderColor: #393939, icon: ''), + oauth: (bgColor: inherit, borderColor: #393939, icon: '') ) !default; diff --git a/public/sass/base/_type.scss b/public/sass/base/_type.scss index 
1c3516c2828..2de8665f06a 100644 --- a/public/sass/base/_type.scss +++ b/public/sass/base/_type.scss @@ -24,7 +24,7 @@ small { font-size: 85%; } strong { - font-weight: bold; + font-weight: $font-weight-semi-bold; } em { font-style: italic; @@ -249,7 +249,7 @@ dd { line-height: $line-height-base; } dt { - font-weight: bold; + font-weight: $font-weight-semi-bold; } dd { margin-left: $line-height-base / 2; @@ -376,7 +376,7 @@ a.external-link { padding: $spacer*0.5 $spacer; } th { - font-weight: normal; + font-weight: $font-weight-semi-bold; background: $table-bg-accent; } } @@ -415,3 +415,7 @@ a.external-link { color: $yellow; padding: 0; } + +th { + font-weight: $font-weight-semi-bold; +} diff --git a/public/sass/components/_gf-form.scss b/public/sass/components/_gf-form.scss index 756d88ee935..0de386f3f68 100644 --- a/public/sass/components/_gf-form.scss +++ b/public/sass/components/_gf-form.scss @@ -403,9 +403,9 @@ select.gf-form-input ~ .gf-form-help-icon { .cta-form { position: relative; - padding: 1rem; + padding: 1.5rem; background-color: $empty-list-cta-bg; - margin-bottom: 1rem; + margin-bottom: 2rem; border-top: 3px solid $green; } diff --git a/public/sass/components/_navbar.scss b/public/sass/components/_navbar.scss index 1a453b15069..0b7e3a79dcd 100644 --- a/public/sass/components/_navbar.scss +++ b/public/sass/components/_navbar.scss @@ -85,6 +85,14 @@ // icon hidden on smaller screens display: none; } + + &--folder { + color: $text-color-weak; + + @include media-breakpoint-down(md) { + display: none; + } + } } .navbar-buttons { diff --git a/public/sass/components/_query_editor.scss b/public/sass/components/_query_editor.scss index 6b2860d57bf..9fcfdf719ba 100644 --- a/public/sass/components/_query_editor.scss +++ b/public/sass/components/_query_editor.scss @@ -1,11 +1,11 @@ .query-keyword { font-weight: $font-weight-semi-bold; - color: $query-blue; + color: $blue; } .gf-form-disabled { .query-keyword { - color: darken($query-blue, 20%); + color: 
darken($blue, 20%); } } @@ -63,7 +63,7 @@ } .gf-form-query-letter-cell-letter { font-weight: bold; - color: $query-blue; + color: $blue; } .gf-form-query-letter-cell-ds { color: $text-color-weak; diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss index de8a6e6d721..10b2238f4b8 100644 --- a/public/sass/components/_slate_editor.scss +++ b/public/sass/components/_slate_editor.scss @@ -71,6 +71,7 @@ .typeahead-item-hint { font-size: $font-size-xs; color: $text-color; + white-space: normal; } } } @@ -122,7 +123,7 @@ .token.attr-value, .token.keyword, .token.class-name { - color: $query-blue; + color: $blue; } .token.regex, diff --git a/public/sass/components/_submenu.scss b/public/sass/components/_submenu.scss index 0027e0b1999..1efd275bfad 100644 --- a/public/sass/components/_submenu.scss +++ b/public/sass/components/_submenu.scss @@ -138,7 +138,7 @@ .variable-option { &:hover, &.highlighted { - background-color: $blue-dark; + background-color: $variable-option-bg; } } diff --git a/public/sass/components/_timepicker.scss b/public/sass/components/_timepicker.scss index e4d8f4555e0..e12835d31c1 100644 --- a/public/sass/components/_timepicker.scss +++ b/public/sass/components/_timepicker.scss @@ -77,7 +77,7 @@ border: none; color: $text-color; &.active span { - color: $query-blue; + color: $blue; font-weight: bold; } .text-info { diff --git a/public/sass/pages/_dashboard.scss b/public/sass/pages/_dashboard.scss index 9b79279b99b..970b625c4f8 100644 --- a/public/sass/pages/_dashboard.scss +++ b/public/sass/pages/_dashboard.scss @@ -16,6 +16,7 @@ div.flot-text { height: 100%; &--solo { + margin: 0; .panel-container { border: none; z-index: $zindex-sidemenu + 1; diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss index 876260c4f76..158f0eb68ad 100644 --- a/public/sass/pages/_explore.scss +++ b/public/sass/pages/_explore.scss @@ -60,6 +60,10 @@ flex-wrap: wrap; } + .datasource-picker { + min-width: 
10rem; + } + .timepicker { display: flex; @@ -93,3 +97,40 @@ .query-row-tools { width: 4rem; } + +.explore { + .logs { + .logs-entries { + display: grid; + grid-column-gap: 1rem; + grid-row-gap: 0.1rem; + grid-template-columns: 4px minmax(100px, max-content) 1fr; + font-family: $font-family-monospace; + } + + .logs-row-match-highlight { + background-color: lighten($blue, 20%); + } + + .logs-row-level { + background-color: transparent; + margin: 6px 0; + border-radius: 2px; + opacity: 0.8; + } + + .logs-row-level-crit, + .logs-row-level-error, + .logs-row-level-err { + background-color: $red; + } + + .logs-row-level-warn { + background-color: $orange; + } + + .logs-row-level-info { + background-color: $green; + } + } +} diff --git a/public/test/jest-shim.ts b/public/test/jest-shim.ts index 80c4bb3d21b..dbf9ac4be50 100644 --- a/public/test/jest-shim.ts +++ b/public/test/jest-shim.ts @@ -1,6 +1,17 @@ declare var global: NodeJS.Global; -(global).requestAnimationFrame = (callback) => { +(global).requestAnimationFrame = callback => { setTimeout(callback, 0); }; +(Promise.prototype).finally = function(onFinally) { + return this.then( + /* onFulfilled */ + res => Promise.resolve(onFinally()).then(() => res), + /* onRejected */ + err => + Promise.resolve(onFinally()).then(() => { + throw err; + }) + ); +}; diff --git a/scripts/webpack/webpack.hot.js b/scripts/webpack/webpack.hot.js index 28c8cec504d..0305a6f465c 100644 --- a/scripts/webpack/webpack.hot.js +++ b/scripts/webpack/webpack.hot.js @@ -20,6 +20,7 @@ module.exports = merge(common, { path: path.resolve(__dirname, '../../public/build'), filename: '[name].[hash].js', publicPath: "/public/build/", + pathinfo: false, }, resolve: { @@ -37,6 +38,12 @@ module.exports = merge(common, { } }, + optimization: { + removeAvailableModules: false, + removeEmptyChunks: false, + splitChunks: false, + }, + module: { rules: [ { @@ -56,7 +63,8 @@ module.exports = merge(common, { { loader: 'ts-loader', options: { - transpileOnly: true + 
transpileOnly: true, + experimentalWatchApi: true }, }], }, diff --git a/tsconfig.json b/tsconfig.json index 3596930a62f..3ef1dd1b769 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,32 +1,43 @@ { - "compilerOptions": { - "moduleResolution": "node", - "outDir": "public/dist", - "target": "es5", - "lib": ["es6", "dom"], - "rootDir": "public/", - "jsx": "react", - "module": "esnext", - "declaration": false, - "allowSyntheticDefaultImports": true, - "inlineSourceMap": false, - "sourceMap": true, - "noEmitOnError": false, - "emitDecoratorMetadata": false, - "experimentalDecorators": true, - "noImplicitReturns": true, - "noImplicitThis": false, - "noImplicitUseStrict":false, - "noImplicitAny": false, - "noUnusedLocals": true, - "baseUrl": "public", - "paths": { - "app": ["app"] - } - }, - "include": [ - "public/app/**/*.ts", - "public/app/**/*.tsx", - "public/test/**/*.ts" - ] -} + "compilerOptions": { + "moduleResolution": "node", + "outDir": "public/dist", + "target": "es5", + "lib": [ + "es6", + "dom" + ], + "rootDir": "public/", + "jsx": "react", + "module": "esnext", + "declaration": false, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "importHelpers": true, // importing helper functions from tslib + "noEmitHelpers": true, // disable emitting inline helper functions + "removeComments": false, // comments are needed by angular injections + "inlineSourceMap": false, + "sourceMap": true, + "noEmitOnError": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": true, + "noImplicitReturns": true, + "noImplicitThis": false, + "noImplicitUseStrict": false, + "noImplicitAny": false, + "noUnusedLocals": true, + "baseUrl": "public", + "pretty": true, + "paths": { + "app": [ + "app" + ] + } + }, + "include": [ + "public/app/**/*.ts", + "public/app/**/*.tsx", + "public/test/**/*.ts" + ] +} \ No newline at end of file diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go 
b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go index 3271a18e80e..212fe25e71e 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go @@ -91,6 +91,6 @@ func (c *Client) AddDebugHandlers() { return } - c.Handlers.Send.PushFrontNamed(request.NamedHandler{Name: "awssdk.client.LogRequest", Fn: logRequest}) - c.Handlers.Send.PushBackNamed(request.NamedHandler{Name: "awssdk.client.LogResponse", Fn: logResponse}) + c.Handlers.Send.PushFrontNamed(LogHTTPRequestHandler) + c.Handlers.Send.PushBackNamed(LogHTTPResponseHandler) } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go index e223c54cc6c..ce9fb896d94 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go @@ -44,12 +44,22 @@ func (reader *teeReaderCloser) Close() error { return reader.Source.Close() } +// LogHTTPRequestHandler is a SDK request handler to log the HTTP request sent +// to a service. Will include the HTTP request body if the LogLevel of the +// request matches LogDebugWithHTTPBody. 
+var LogHTTPRequestHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequest", + Fn: logRequest, +} + func logRequest(r *request.Request) { logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) bodySeekable := aws.IsReaderSeekable(r.Body) - dumpedBody, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) + + b, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) if err != nil { - r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, r.ClientInfo.ServiceName, r.Operation.Name, err)) + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) return } @@ -63,7 +73,28 @@ func logRequest(r *request.Request) { r.ResetBody() } - r.Config.Logger.Log(fmt.Sprintf(logReqMsg, r.ClientInfo.ServiceName, r.Operation.Name, string(dumpedBody))) + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} + +// LogHTTPRequestHeaderHandler is a SDK request handler to log the HTTP request sent +// to a service. Will only log the HTTP request's headers. The request payload +// will not be read. +var LogHTTPRequestHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequestHeader", + Fn: logRequestHeader, +} + +func logRequestHeader(r *request.Request) { + b, err := httputil.DumpRequestOut(r.HTTPRequest, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) } const logRespMsg = `DEBUG: Response %s/%s Details: @@ -76,27 +107,44 @@ const logRespErrMsg = `DEBUG ERROR: Response %s/%s: %s -----------------------------------------------------` +// LogHTTPResponseHandler is a SDK request handler to log the HTTP response +// received from a service. Will include the HTTP response body if the LogLevel +// of the request matches LogDebugWithHTTPBody. 
+var LogHTTPResponseHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponse", + Fn: logResponse, +} + func logResponse(r *request.Request) { lw := &logWriter{r.Config.Logger, bytes.NewBuffer(nil)} - r.HTTPResponse.Body = &teeReaderCloser{ - Reader: io.TeeReader(r.HTTPResponse.Body, lw), - Source: r.HTTPResponse.Body, + + logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) + if logBody { + r.HTTPResponse.Body = &teeReaderCloser{ + Reader: io.TeeReader(r.HTTPResponse.Body, lw), + Source: r.HTTPResponse.Body, + } } handlerFn := func(req *request.Request) { - body, err := httputil.DumpResponse(req.HTTPResponse, false) + b, err := httputil.DumpResponse(req.HTTPResponse, false) if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) return } - b, err := ioutil.ReadAll(lw.buf) - if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) - return - } - lw.Logger.Log(fmt.Sprintf(logRespMsg, req.ClientInfo.ServiceName, req.Operation.Name, string(body))) - if req.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) { + lw.Logger.Log(fmt.Sprintf(logRespMsg, + req.ClientInfo.ServiceName, req.Operation.Name, string(b))) + + if logBody { + b, err := ioutil.ReadAll(lw.buf) + if err != nil { + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) + return + } + lw.Logger.Log(string(b)) } } @@ -110,3 +158,27 @@ func logResponse(r *request.Request) { Name: handlerName, Fn: handlerFn, }) } + +// LogHTTPResponseHeaderHandler is a SDK request handler to log the HTTP +// response received from a service. Will only log the HTTP response's headers. +// The response payload will not be read. 
+var LogHTTPResponseHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponseHeader", + Fn: logResponseHeader, +} + +func logResponseHeader(r *request.Request) { + if r.Config.Logger == nil { + return + } + + b, err := httputil.DumpResponse(r.HTTPResponse, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logRespErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logRespMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go index 4778056ddfd..920e9fddf87 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go @@ -3,6 +3,7 @@ package metadata // ClientInfo wraps immutable data from the client.Client structure. type ClientInfo struct { ServiceName string + ServiceID string APIVersion string Endpoint string SigningName string diff --git a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go index 42416fc2f0f..ed086992f62 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go @@ -178,7 +178,8 @@ func (e *Expiry) IsExpired() bool { type Credentials struct { creds Value forceRefresh bool - m sync.Mutex + + m sync.RWMutex provider Provider } @@ -201,6 +202,17 @@ func NewCredentials(provider Provider) *Credentials { // If Credentials.Expire() was called the credentials Value will be force // expired, and the next call to Get() will cause them to be refreshed. func (c *Credentials) Get() (Value, error) { + // Check the cached credentials first with just the read lock. 
+ c.m.RLock() + if !c.isExpired() { + creds := c.creds + c.m.RUnlock() + return creds, nil + } + c.m.RUnlock() + + // Credentials are expired need to retrieve the credentials taking the full + // lock. c.m.Lock() defer c.m.Unlock() @@ -234,8 +246,8 @@ func (c *Credentials) Expire() { // If the Credentials were forced to be expired with Expire() this will // reflect that override. func (c *Credentials) IsExpired() bool { - c.m.Lock() - defer c.m.Unlock() + c.m.RLock() + defer c.m.RUnlock() return c.isExpired() } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go new file mode 100644 index 00000000000..152d785b362 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go @@ -0,0 +1,46 @@ +// Package csm provides Client Side Monitoring (CSM) which enables sending metrics +// via UDP connection. Using the Start function will enable the reporting of +// metrics on a given port. If Start is called, with different parameters, again, +// a panic will occur. +// +// Pause can be called to pause any metrics publishing on a given port. Sessions +// that have had their handlers modified via InjectHandlers may still be used. +// However, the handlers will act as a no-op meaning no metrics will be published. 
+// +// Example: +// r, err := csm.Start("clientID", ":31000") +// if err != nil { +// panic(fmt.Errorf("failed starting CSM: %v", err)) +// } +// +// sess, err := session.NewSession(&aws.Config{}) +// if err != nil { +// panic(fmt.Errorf("failed loading session: %v", err)) +// } +// +// r.InjectHandlers(&sess.Handlers) +// +// client := s3.New(sess) +// resp, err := client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Will pause monitoring +// r.Pause() +// resp, err = client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Resume monitoring +// r.Continue() +// +// Start returns a Reporter that is used to enable or disable monitoring. If +// access to the Reporter is required later, calling Get will return the Reporter +// singleton. +// +// Example: +// r := csm.Get() +// r.Continue() +package csm diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go new file mode 100644 index 00000000000..2f0c6eac9a8 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go @@ -0,0 +1,67 @@ +package csm + +import ( + "fmt" + "sync" +) + +var ( + lock sync.Mutex +) + +// Client side metric handler names +const ( + APICallMetricHandlerName = "awscsm.SendAPICallMetric" + APICallAttemptMetricHandlerName = "awscsm.SendAPICallAttemptMetric" +) + +// Start will start the a long running go routine to capture +// client side metrics. Calling start multiple time will only +// start the metric listener once and will panic if a different +// client ID or port is passed in. 
+// +// Example: +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// sess := session.NewSession() +// r.InjectHandlers(sess.Handlers) +// +// svc := s3.New(sess) +// out, err := svc.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +func Start(clientID string, url string) (*Reporter, error) { + lock.Lock() + defer lock.Unlock() + + if sender == nil { + sender = newReporter(clientID, url) + } else { + if sender.clientID != clientID { + panic(fmt.Errorf("inconsistent client IDs. %q was expected, but received %q", sender.clientID, clientID)) + } + + if sender.url != url { + panic(fmt.Errorf("inconsistent URLs. %q was expected, but received %q", sender.url, url)) + } + } + + if err := connect(url); err != nil { + sender = nil + return nil, err + } + + return sender, nil +} + +// Get will return a reporter if one exists, if one does not exist, nil will +// be returned. 
+func Get() *Reporter { + lock.Lock() + defer lock.Unlock() + + return sender +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go new file mode 100644 index 00000000000..4b0d630e4c1 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go @@ -0,0 +1,51 @@ +package csm + +import ( + "strconv" + "time" +) + +type metricTime time.Time + +func (t metricTime) MarshalJSON() ([]byte, error) { + ns := time.Duration(time.Time(t).UnixNano()) + return []byte(strconv.FormatInt(int64(ns/time.Millisecond), 10)), nil +} + +type metric struct { + ClientID *string `json:"ClientId,omitempty"` + API *string `json:"Api,omitempty"` + Service *string `json:"Service,omitempty"` + Timestamp *metricTime `json:"Timestamp,omitempty"` + Type *string `json:"Type,omitempty"` + Version *int `json:"Version,omitempty"` + + AttemptCount *int `json:"AttemptCount,omitempty"` + Latency *int `json:"Latency,omitempty"` + + Fqdn *string `json:"Fqdn,omitempty"` + UserAgent *string `json:"UserAgent,omitempty"` + AttemptLatency *int `json:"AttemptLatency,omitempty"` + + SessionToken *string `json:"SessionToken,omitempty"` + Region *string `json:"Region,omitempty"` + AccessKey *string `json:"AccessKey,omitempty"` + HTTPStatusCode *int `json:"HttpStatusCode,omitempty"` + XAmzID2 *string `json:"XAmzId2,omitempty"` + XAmzRequestID *string `json:"XAmznRequestId,omitempty"` + + AWSException *string `json:"AwsException,omitempty"` + AWSExceptionMessage *string `json:"AwsExceptionMessage,omitempty"` + SDKException *string `json:"SdkException,omitempty"` + SDKExceptionMessage *string `json:"SdkExceptionMessage,omitempty"` + + DestinationIP *string `json:"DestinationIp,omitempty"` + ConnectionReused *int `json:"ConnectionReused,omitempty"` + + AcquireConnectionLatency *int `json:"AcquireConnectionLatency,omitempty"` + ConnectLatency *int `json:"ConnectLatency,omitempty"` + RequestLatency *int `json:"RequestLatency,omitempty"` + 
DNSLatency *int `json:"DnsLatency,omitempty"` + TCPLatency *int `json:"TcpLatency,omitempty"` + SSLLatency *int `json:"SslLatency,omitempty"` +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go new file mode 100644 index 00000000000..514fc3739a5 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go @@ -0,0 +1,54 @@ +package csm + +import ( + "sync/atomic" +) + +const ( + runningEnum = iota + pausedEnum +) + +var ( + // MetricsChannelSize of metrics to hold in the channel + MetricsChannelSize = 100 +) + +type metricChan struct { + ch chan metric + paused int64 +} + +func newMetricChan(size int) metricChan { + return metricChan{ + ch: make(chan metric, size), + } +} + +func (ch *metricChan) Pause() { + atomic.StoreInt64(&ch.paused, pausedEnum) +} + +func (ch *metricChan) Continue() { + atomic.StoreInt64(&ch.paused, runningEnum) +} + +func (ch *metricChan) IsPaused() bool { + v := atomic.LoadInt64(&ch.paused) + return v == pausedEnum +} + +// Push will push metrics to the metric channel if the channel +// is not paused +func (ch *metricChan) Push(m metric) bool { + if ch.IsPaused() { + return false + } + + select { + case ch.ch <- m: + return true + default: + return false + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go new file mode 100644 index 00000000000..1484c8fc5b1 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go @@ -0,0 +1,230 @@ +package csm + +import ( + "encoding/json" + "net" + "time" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/request" +) + +const ( + // DefaultPort is used when no port is specified + DefaultPort = "31000" +) + +// Reporter will gather metrics of API requests made and +// send those metrics to the CSM endpoint. 
+type Reporter struct { + clientID string + url string + conn net.Conn + metricsCh metricChan + done chan struct{} +} + +var ( + sender *Reporter +) + +func connect(url string) error { + const network = "udp" + if err := sender.connect(network, url); err != nil { + return err + } + + if sender.done == nil { + sender.done = make(chan struct{}) + go sender.start() + } + + return nil +} + +func newReporter(clientID, url string) *Reporter { + return &Reporter{ + clientID: clientID, + url: url, + metricsCh: newMetricChan(MetricsChannelSize), + } +} + +func (rep *Reporter) sendAPICallAttemptMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + creds, _ := r.Config.Credentials.Get() + + m := metric{ + ClientID: aws.String(rep.clientID), + API: aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + UserAgent: aws.String(r.HTTPRequest.Header.Get("User-Agent")), + Region: r.Config.Region, + Type: aws.String("ApiCallAttempt"), + Version: aws.Int(1), + + XAmzRequestID: aws.String(r.RequestID), + + AttemptCount: aws.Int(r.RetryCount + 1), + AttemptLatency: aws.Int(int(now.Sub(r.AttemptTime).Nanoseconds() / int64(time.Millisecond))), + AccessKey: aws.String(creds.AccessKeyID), + } + + if r.HTTPResponse != nil { + m.HTTPStatusCode = aws.Int(r.HTTPResponse.StatusCode) + } + + if r.Error != nil { + if awserr, ok := r.Error.(awserr.Error); ok { + setError(&m, awserr) + } + } + + rep.metricsCh.Push(m) +} + +func setError(m *metric, err awserr.Error) { + msg := err.Message() + code := err.Code() + + switch code { + case "RequestError", + "SerializationError", + request.CanceledErrorCode: + + m.SDKException = &code + m.SDKExceptionMessage = &msg + default: + m.AWSException = &code + m.AWSExceptionMessage = &msg + } +} + +func (rep *Reporter) sendAPICallMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + m := metric{ + ClientID: aws.String(rep.clientID), + API: 
aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + Type: aws.String("ApiCall"), + AttemptCount: aws.Int(r.RetryCount + 1), + Latency: aws.Int(int(time.Now().Sub(r.Time) / time.Millisecond)), + XAmzRequestID: aws.String(r.RequestID), + } + + // TODO: Probably want to figure something out for logging dropped + // metrics + rep.metricsCh.Push(m) +} + +func (rep *Reporter) connect(network, url string) error { + if rep.conn != nil { + rep.conn.Close() + } + + conn, err := net.Dial(network, url) + if err != nil { + return awserr.New("UDPError", "Could not connect", err) + } + + rep.conn = conn + + return nil +} + +func (rep *Reporter) close() { + if rep.done != nil { + close(rep.done) + } + + rep.metricsCh.Pause() +} + +func (rep *Reporter) start() { + defer func() { + rep.metricsCh.Pause() + }() + + for { + select { + case <-rep.done: + rep.done = nil + return + case m := <-rep.metricsCh.ch: + // TODO: What to do with this error? Probably should just log + b, err := json.Marshal(m) + if err != nil { + continue + } + + rep.conn.Write(b) + } + } +} + +// Pause will pause the metric channel preventing any new metrics from +// being added. +func (rep *Reporter) Pause() { + lock.Lock() + defer lock.Unlock() + + if rep == nil { + return + } + + rep.close() +} + +// Continue will reopen the metric channel and allow for monitoring +// to be resumed. +func (rep *Reporter) Continue() { + lock.Lock() + defer lock.Unlock() + if rep == nil { + return + } + + if !rep.metricsCh.IsPaused() { + return + } + + rep.metricsCh.Continue() +} + +// InjectHandlers will will enable client side metrics and inject the proper +// handlers to handle how metrics are sent. 
+// +// Example: +// // Start must be called in order to inject the correct handlers +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// +// sess := session.NewSession() +// r.InjectHandlers(&sess.Handlers) +// +// // create a new service client with our client side metric session +// svc := s3.New(sess) +func (rep *Reporter) InjectHandlers(handlers *request.Handlers) { + if rep == nil { + return + } + + apiCallHandler := request.NamedHandler{Name: APICallMetricHandlerName, Fn: rep.sendAPICallMetric} + handlers.Complete.PushFrontNamed(apiCallHandler) + + apiCallAttemptHandler := request.NamedHandler{Name: APICallAttemptMetricHandlerName, Fn: rep.sendAPICallAttemptMetric} + handlers.AfterRetry.PushFrontNamed(apiCallAttemptHandler) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go index 857f677dd10..c472a57fad2 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go @@ -48,6 +48,7 @@ const ( A4bServiceID = "a4b" // A4b. AcmServiceID = "acm" // Acm. AcmPcaServiceID = "acm-pca" // AcmPca. + ApiMediatailorServiceID = "api.mediatailor" // ApiMediatailor. ApiPricingServiceID = "api.pricing" // ApiPricing. ApigatewayServiceID = "apigateway" // Apigateway. ApplicationAutoscalingServiceID = "application-autoscaling" // ApplicationAutoscaling. @@ -130,6 +131,7 @@ const ( ModelsLexServiceID = "models.lex" // ModelsLex. MonitoringServiceID = "monitoring" // Monitoring. MturkRequesterServiceID = "mturk-requester" // MturkRequester. + NeptuneServiceID = "neptune" // Neptune. OpsworksServiceID = "opsworks" // Opsworks. OpsworksCmServiceID = "opsworks-cm" // OpsworksCm. OrganizationsServiceID = "organizations" // Organizations. 
@@ -307,6 +309,16 @@ var awsPartition = partition{ "us-west-2": endpoint{}, }, }, + "api.mediatailor": service{ + + Endpoints: endpoints{ + "ap-northeast-1": endpoint{}, + "ap-southeast-1": endpoint{}, + "ap-southeast-2": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + }, + }, "api.pricing": service{ Defaults: endpoint{ CredentialScope: credentialScope{ @@ -434,6 +446,7 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, "ap-northeast-2": endpoint{}, + "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "ca-central-1": endpoint{}, @@ -1046,6 +1059,7 @@ var awsPartition = partition{ "elasticfilesystem": service{ Endpoints: endpoints{ + "ap-northeast-2": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, @@ -1242,11 +1256,13 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, + "eu-west-2": endpoint{}, "us-east-1": endpoint{}, "us-east-2": endpoint{}, "us-west-2": endpoint{}, @@ -1509,8 +1525,10 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, + "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, "us-east-1": endpoint{}, "us-west-2": endpoint{}, @@ -1622,6 +1640,35 @@ var awsPartition = partition{ "us-east-1": endpoint{}, }, }, + "neptune": service{ + + Endpoints: endpoints{ + "eu-west-1": endpoint{ + Hostname: "rds.eu-west-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "eu-west-1", + }, + }, + "us-east-1": endpoint{ + Hostname: "rds.us-east-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-1", + }, + }, + "us-east-2": endpoint{ + Hostname: "rds.us-east-2.amazonaws.com", + 
CredentialScope: credentialScope{ + Region: "us-east-2", + }, + }, + "us-west-2": endpoint{ + Hostname: "rds.us-west-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-west-2", + }, + }, + }, + }, "opsworks": service{ Endpoints: endpoints{ @@ -1805,10 +1852,11 @@ var awsPartition = partition{ "runtime.sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "s3": service{ @@ -1873,10 +1921,11 @@ var awsPartition = partition{ "sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "sdb": service{ @@ -2081,6 +2130,10 @@ var awsPartition = partition{ "eu-west-1": endpoint{}, "eu-west-2": endpoint{}, "eu-west-3": endpoint{}, + "fips-us-east-1": endpoint{}, + "fips-us-east-2": endpoint{}, + "fips-us-west-1": endpoint{}, + "fips-us-west-2": endpoint{}, "sa-east-1": endpoint{}, "us-east-1": endpoint{ SSLCommonName: "queue.{dnsSuffix}", @@ -2507,13 +2560,15 @@ var awscnPartition = partition{ "ecr": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "ecs": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "elasticache": service{ diff --git a/vendor/github.com/aws/aws-sdk-go/aws/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/logger.go index 3babb5abdb6..6ed15b2ecc2 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/logger.go @@ -71,6 +71,12 @@ const ( // LogDebugWithRequestErrors states 
the SDK should log when service requests fail // to build, send, validate, or unmarshal. LogDebugWithRequestErrors + + // LogDebugWithEventStreamBody states the SDK should log EventStream + // request and response bodys. This should be used to log the EventStream + // wire unmarshaled message content of requests and responses made while + // using the SDK Will also enable LogDebug. + LogDebugWithEventStreamBody ) // A Logger is a minimalistic interface for the SDK to log messages to. Should diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go index 802ac88ad5c..605a72d3c94 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go @@ -14,6 +14,7 @@ type Handlers struct { Send HandlerList ValidateResponse HandlerList Unmarshal HandlerList + UnmarshalStream HandlerList UnmarshalMeta HandlerList UnmarshalError HandlerList Retry HandlerList @@ -30,6 +31,7 @@ func (h *Handlers) Copy() Handlers { Send: h.Send.copy(), ValidateResponse: h.ValidateResponse.copy(), Unmarshal: h.Unmarshal.copy(), + UnmarshalStream: h.UnmarshalStream.copy(), UnmarshalError: h.UnmarshalError.copy(), UnmarshalMeta: h.UnmarshalMeta.copy(), Retry: h.Retry.copy(), @@ -45,6 +47,7 @@ func (h *Handlers) Clear() { h.Send.Clear() h.Sign.Clear() h.Unmarshal.Clear() + h.UnmarshalStream.Clear() h.UnmarshalMeta.Clear() h.UnmarshalError.Clear() h.ValidateResponse.Clear() @@ -172,6 +175,21 @@ func (l *HandlerList) SwapNamed(n NamedHandler) (swapped bool) { return swapped } +// Swap will swap out all handlers matching the name passed in. The matched +// handlers will be swapped in. True is returned if the handlers were swapped. 
+func (l *HandlerList) Swap(name string, replace NamedHandler) bool { + var swapped bool + + for i := 0; i < len(l.list); i++ { + if l.list[i].Name == name { + l.list[i] = replace + swapped = true + } + } + + return swapped +} + // SetBackNamed will replace the named handler if it exists in the handler list. // If the handler does not exist the handler will be added to the end of the list. func (l *HandlerList) SetBackNamed(n NamedHandler) { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go index 69b7a01ad74..75f0fe07780 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go @@ -46,6 +46,7 @@ type Request struct { Handlers Handlers Retryer + AttemptTime time.Time Time time.Time Operation *Operation HTTPRequest *http.Request @@ -121,6 +122,7 @@ func New(cfg aws.Config, clientInfo metadata.ClientInfo, handlers Handlers, Handlers: handlers.Copy(), Retryer: retryer, + AttemptTime: time.Now(), Time: time.Now(), ExpireTime: 0, Operation: operation, @@ -368,9 +370,9 @@ func (r *Request) Build() error { return r.Error } -// Sign will sign the request returning error if errors are encountered. +// Sign will sign the request, returning error if errors are encountered. // -// Send will build the request prior to signing. All Sign Handlers will +// Sign will build the request prior to signing. All Sign Handlers will // be executed in the order they were set. func (r *Request) Sign() error { r.Build() @@ -440,7 +442,7 @@ func (r *Request) GetBody() io.ReadSeeker { return r.safeBody } -// Send will send the request returning error if errors are encountered. +// Send will send the request, returning error if errors are encountered. // // Send will sign the request prior to sending. All Send Handlers will // be executed in the order they were set. 
@@ -461,6 +463,7 @@ func (r *Request) Send() error { }() for { + r.AttemptTime = time.Now() if aws.BoolValue(r.Retryable) { if r.Config.LogLevel.Matches(aws.LogDebugWithRequestRetries) { r.Config.Logger.Log(fmt.Sprintf("DEBUG: Retrying Request %s/%s, attempt %d", diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go index 869b97a1a0f..e36e468b7c6 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go @@ -21,7 +21,7 @@ func (noBody) WriteTo(io.Writer) (int64, error) { return 0, nil } var NoBody = noBody{} // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. // // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go index c32fc69bc56..7c6a8000f67 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go @@ -11,7 +11,7 @@ import ( var NoBody = http.NoBody // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. 
// // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go index 159518a75cd..a633ed5acfa 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go @@ -35,8 +35,12 @@ type Pagination struct { // NewRequest should always be built from the same API operations. It is // undefined if different API operations are returned on subsequent calls. NewRequest func() (*Request, error) + // EndPageOnSameToken, when enabled, will allow the paginator to stop on + // token that are the same as its previous tokens. + EndPageOnSameToken bool started bool + prevTokens []interface{} nextTokens []interface{} err error @@ -49,7 +53,15 @@ type Pagination struct { // // Will always return true if Next has not been called yet. func (p *Pagination) HasNextPage() bool { - return !(p.started && len(p.nextTokens) == 0) + if !p.started { + return true + } + + hasNextPage := len(p.nextTokens) != 0 + if p.EndPageOnSameToken { + return hasNextPage && !awsutil.DeepEqual(p.nextTokens, p.prevTokens) + } + return hasNextPage } // Err returns the error Pagination encountered when retrieving the next page. 
@@ -96,6 +108,7 @@ func (p *Pagination) Next() bool { return false } + p.prevTokens = p.nextTokens p.nextTokens = req.nextPageTokens() p.curPage = req.Data diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go index 12b452177a8..82e04d76cde 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go @@ -96,9 +96,23 @@ type envConfig struct { // // AWS_CA_BUNDLE=$HOME/my_custom_ca_bundle CustomCABundle string + + csmEnabled string + CSMEnabled bool + CSMPort string + CSMClientID string } var ( + csmEnabledEnvKey = []string{ + "AWS_CSM_ENABLED", + } + csmPortEnvKey = []string{ + "AWS_CSM_PORT", + } + csmClientIDEnvKey = []string{ + "AWS_CSM_CLIENT_ID", + } credAccessEnvKey = []string{ "AWS_ACCESS_KEY_ID", "AWS_ACCESS_KEY", @@ -157,6 +171,12 @@ func envConfigLoad(enableSharedConfig bool) envConfig { setFromEnvVal(&cfg.Creds.SecretAccessKey, credSecretEnvKey) setFromEnvVal(&cfg.Creds.SessionToken, credSessionEnvKey) + // CSM environment variables + setFromEnvVal(&cfg.csmEnabled, csmEnabledEnvKey) + setFromEnvVal(&cfg.CSMPort, csmPortEnvKey) + setFromEnvVal(&cfg.CSMClientID, csmClientIDEnvKey) + cfg.CSMEnabled = len(cfg.csmEnabled) > 0 + // Require logical grouping of credentials if len(cfg.Creds.AccessKeyID) == 0 || len(cfg.Creds.SecretAccessKey) == 0 { cfg.Creds = credentials.Value{} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go index 259b5c0fecc..51f30556301 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go @@ -15,6 +15,7 @@ import ( "github.com/aws/aws-sdk-go/aws/corehandlers" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/stscreds" + "github.com/aws/aws-sdk-go/aws/csm" "github.com/aws/aws-sdk-go/aws/defaults" 
"github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/request" @@ -81,10 +82,16 @@ func New(cfgs ...*aws.Config) *Session { r.Error = err }) } + return s } - return deprecatedNewSession(cfgs...) + s := deprecatedNewSession(cfgs...) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } + + return s } // NewSession returns a new Session created from SDK defaults, config files, @@ -300,10 +307,22 @@ func deprecatedNewSession(cfgs ...*aws.Config) *Session { } initHandlers(s) - return s } +func enableCSM(handlers *request.Handlers, clientID string, port string, logger aws.Logger) { + logger.Log("Enabling CSM") + if len(port) == 0 { + port = csm.DefaultPort + } + + r, err := csm.Start(clientID, "127.0.0.1:"+port) + if err != nil { + return + } + r.InjectHandlers(handlers) +} + func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, error) { cfg := defaults.Config() handlers := defaults.Handlers() @@ -343,6 +362,9 @@ func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, } initHandlers(s) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } // Setup HTTP client with custom cert bundle if enabled if opts.CustomCABundle != nil { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go index 6e46376125b..f3586131538 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go @@ -135,6 +135,7 @@ var requiredSignedHeaders = rules{ "X-Amz-Server-Side-Encryption-Customer-Key-Md5": struct{}{}, "X-Amz-Storage-Class": struct{}{}, "X-Amz-Website-Redirect-Location": struct{}{}, + "X-Amz-Content-Sha256": struct{}{}, }, }, patterns{"X-Amz-Meta-"}, @@ -671,8 +672,15 @@ func (ctx *signingCtx) buildSignature() { func (ctx *signingCtx) buildBodyDigest() error { hash := 
ctx.Request.Header.Get("X-Amz-Content-Sha256") if hash == "" { - if ctx.unsignedPayload || (ctx.isPresign && ctx.ServiceName == "s3") { + includeSHA256Header := ctx.unsignedPayload || + ctx.ServiceName == "s3" || + ctx.ServiceName == "glacier" + + s3Presign := ctx.isPresign && ctx.ServiceName == "s3" + + if ctx.unsignedPayload || s3Presign { hash = "UNSIGNED-PAYLOAD" + includeSHA256Header = !s3Presign } else if ctx.Body == nil { hash = emptyStringSHA256 } else { @@ -681,7 +689,8 @@ func (ctx *signingCtx) buildBodyDigest() error { } hash = hex.EncodeToString(makeSha256Reader(ctx.Body)) } - if ctx.unsignedPayload || ctx.ServiceName == "s3" || ctx.ServiceName == "glacier" { + + if includeSHA256Header { ctx.Request.Header.Set("X-Amz-Content-Sha256", hash) } } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/version.go b/vendor/github.com/aws/aws-sdk-go/aws/version.go index befbff7df07..c108466609e 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/version.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.13.49" +const SDKVersion = "1.14.12" diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go new file mode 100644 index 00000000000..ecc7bf82fa2 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go @@ -0,0 +1,144 @@ +package eventstream + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "strconv" +) + +type decodedMessage struct { + rawMessage + Headers decodedHeaders `json:"headers"` +} +type jsonMessage struct { + Length json.Number `json:"total_length"` + HeadersLen json.Number `json:"headers_length"` + PreludeCRC json.Number `json:"prelude_crc"` + Headers decodedHeaders `json:"headers"` + Payload []byte `json:"payload"` + CRC json.Number `json:"message_crc"` +} + +func (d 
*decodedMessage) UnmarshalJSON(b []byte) (err error) { + var jsonMsg jsonMessage + if err = json.Unmarshal(b, &jsonMsg); err != nil { + return err + } + + d.Length, err = numAsUint32(jsonMsg.Length) + if err != nil { + return err + } + d.HeadersLen, err = numAsUint32(jsonMsg.HeadersLen) + if err != nil { + return err + } + d.PreludeCRC, err = numAsUint32(jsonMsg.PreludeCRC) + if err != nil { + return err + } + d.Headers = jsonMsg.Headers + d.Payload = jsonMsg.Payload + d.CRC, err = numAsUint32(jsonMsg.CRC) + if err != nil { + return err + } + + return nil +} + +func (d *decodedMessage) MarshalJSON() ([]byte, error) { + jsonMsg := jsonMessage{ + Length: json.Number(strconv.Itoa(int(d.Length))), + HeadersLen: json.Number(strconv.Itoa(int(d.HeadersLen))), + PreludeCRC: json.Number(strconv.Itoa(int(d.PreludeCRC))), + Headers: d.Headers, + Payload: d.Payload, + CRC: json.Number(strconv.Itoa(int(d.CRC))), + } + + return json.Marshal(jsonMsg) +} + +func numAsUint32(n json.Number) (uint32, error) { + v, err := n.Int64() + if err != nil { + return 0, fmt.Errorf("failed to get int64 json number, %v", err) + } + + return uint32(v), nil +} + +func (d decodedMessage) Message() Message { + return Message{ + Headers: Headers(d.Headers), + Payload: d.Payload, + } +} + +type decodedHeaders Headers + +func (hs *decodedHeaders) UnmarshalJSON(b []byte) error { + var jsonHeaders []struct { + Name string `json:"name"` + Type valueType `json:"type"` + Value interface{} `json:"value"` + } + + decoder := json.NewDecoder(bytes.NewReader(b)) + decoder.UseNumber() + if err := decoder.Decode(&jsonHeaders); err != nil { + return err + } + + var headers Headers + for _, h := range jsonHeaders { + value, err := valueFromType(h.Type, h.Value) + if err != nil { + return err + } + headers.Set(h.Name, value) + } + (*hs) = decodedHeaders(headers) + + return nil +} + +func valueFromType(typ valueType, val interface{}) (Value, error) { + switch typ { + case trueValueType: + return BoolValue(true), nil + 
case falseValueType: + return BoolValue(false), nil + case int8ValueType: + v, err := val.(json.Number).Int64() + return Int8Value(int8(v)), err + case int16ValueType: + v, err := val.(json.Number).Int64() + return Int16Value(int16(v)), err + case int32ValueType: + v, err := val.(json.Number).Int64() + return Int32Value(int32(v)), err + case int64ValueType: + v, err := val.(json.Number).Int64() + return Int64Value(v), err + case bytesValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return BytesValue(v), err + case stringValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return StringValue(string(v)), err + case timestampValueType: + v, err := val.(json.Number).Int64() + return TimestampValue(timeFromEpochMilli(v)), err + case uuidValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + var tv UUIDValue + copy(tv[:], v) + return tv, err + default: + panic(fmt.Sprintf("unknown type, %s, %T", typ.String(), val)) + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go new file mode 100644 index 00000000000..4b972b2d666 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go @@ -0,0 +1,199 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "encoding/hex" + "encoding/json" + "fmt" + "hash" + "hash/crc32" + "io" + + "github.com/aws/aws-sdk-go/aws" +) + +// Decoder provides decoding of an Event Stream messages. +type Decoder struct { + r io.Reader + logger aws.Logger +} + +// NewDecoder initializes and returns a Decoder for decoding event +// stream messages from the reader provided. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + r: r, + } +} + +// Decode attempts to decode a single message from the event stream reader. +// Will return the event stream message, or error if Decode fails to read +// the message from the stream. 
+func (d *Decoder) Decode(payloadBuf []byte) (m Message, err error) { + reader := d.r + if d.logger != nil { + debugMsgBuf := bytes.NewBuffer(nil) + reader = io.TeeReader(reader, debugMsgBuf) + defer func() { + logMessageDecode(d.logger, debugMsgBuf, m, err) + }() + } + + crc := crc32.New(crc32IEEETable) + hashReader := io.TeeReader(reader, crc) + + prelude, err := decodePrelude(hashReader, crc) + if err != nil { + return Message{}, err + } + + if prelude.HeadersLen > 0 { + lr := io.LimitReader(hashReader, int64(prelude.HeadersLen)) + m.Headers, err = decodeHeaders(lr) + if err != nil { + return Message{}, err + } + } + + if payloadLen := prelude.PayloadLen(); payloadLen > 0 { + buf, err := decodePayload(payloadBuf, io.LimitReader(hashReader, int64(payloadLen))) + if err != nil { + return Message{}, err + } + m.Payload = buf + } + + msgCRC := crc.Sum32() + if err := validateCRC(reader, msgCRC); err != nil { + return Message{}, err + } + + return m, nil +} + +// UseLogger specifies the Logger that that the decoder should use to log the +// message decode to. 
+func (d *Decoder) UseLogger(logger aws.Logger) { + d.logger = logger +} + +func logMessageDecode(logger aws.Logger, msgBuf *bytes.Buffer, msg Message, decodeErr error) { + w := bytes.NewBuffer(nil) + defer func() { logger.Log(w.String()) }() + + fmt.Fprintf(w, "Raw message:\n%s\n", + hex.Dump(msgBuf.Bytes())) + + if decodeErr != nil { + fmt.Fprintf(w, "Decode error: %v\n", decodeErr) + return + } + + rawMsg, err := msg.rawMessage() + if err != nil { + fmt.Fprintf(w, "failed to create raw message, %v\n", err) + return + } + + decodedMsg := decodedMessage{ + rawMessage: rawMsg, + Headers: decodedHeaders(msg.Headers), + } + + fmt.Fprintf(w, "Decoded message:\n") + encoder := json.NewEncoder(w) + if err := encoder.Encode(decodedMsg); err != nil { + fmt.Fprintf(w, "failed to generate decoded message, %v\n", err) + } +} + +func decodePrelude(r io.Reader, crc hash.Hash32) (messagePrelude, error) { + var p messagePrelude + + var err error + p.Length, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + p.HeadersLen, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + if err := p.ValidateLens(); err != nil { + return messagePrelude{}, err + } + + preludeCRC := crc.Sum32() + if err := validateCRC(r, preludeCRC); err != nil { + return messagePrelude{}, err + } + + p.PreludeCRC = preludeCRC + + return p, nil +} + +func decodePayload(buf []byte, r io.Reader) ([]byte, error) { + w := bytes.NewBuffer(buf[0:0]) + + _, err := io.Copy(w, r) + return w.Bytes(), err +} + +func decodeUint8(r io.Reader) (uint8, error) { + type byteReader interface { + ReadByte() (byte, error) + } + + if br, ok := r.(byteReader); ok { + v, err := br.ReadByte() + return uint8(v), err + } + + var b [1]byte + _, err := io.ReadFull(r, b[:]) + return uint8(b[0]), err +} +func decodeUint16(r io.Reader) (uint16, error) { + var b [2]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint16(bs), nil 
+} +func decodeUint32(r io.Reader) (uint32, error) { + var b [4]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint32(bs), nil +} +func decodeUint64(r io.Reader) (uint64, error) { + var b [8]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint64(bs), nil +} + +func validateCRC(r io.Reader, expect uint32) error { + msgCRC, err := decodeUint32(r) + if err != nil { + return err + } + + if msgCRC != expect { + return ChecksumError{} + } + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go new file mode 100644 index 00000000000..150a60981d8 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go @@ -0,0 +1,114 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash" + "hash/crc32" + "io" +) + +// Encoder provides EventStream message encoding. +type Encoder struct { + w io.Writer + + headersBuf *bytes.Buffer +} + +// NewEncoder initializes and returns an Encoder to encode Event Stream +// messages to an io.Writer. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + headersBuf: bytes.NewBuffer(nil), + } +} + +// Encode encodes a single EventStream message to the io.Writer the Encoder +// was created with. An error is returned if writing the message fails. 
+func (e *Encoder) Encode(msg Message) error { + e.headersBuf.Reset() + + err := encodeHeaders(e.headersBuf, msg.Headers) + if err != nil { + return err + } + + crc := crc32.New(crc32IEEETable) + hashWriter := io.MultiWriter(e.w, crc) + + headersLen := uint32(e.headersBuf.Len()) + payloadLen := uint32(len(msg.Payload)) + + if err := encodePrelude(hashWriter, crc, headersLen, payloadLen); err != nil { + return err + } + + if headersLen > 0 { + if _, err := io.Copy(hashWriter, e.headersBuf); err != nil { + return err + } + } + + if payloadLen > 0 { + if _, err := hashWriter.Write(msg.Payload); err != nil { + return err + } + } + + msgCRC := crc.Sum32() + return binary.Write(e.w, binary.BigEndian, msgCRC) +} + +func encodePrelude(w io.Writer, crc hash.Hash32, headersLen, payloadLen uint32) error { + p := messagePrelude{ + Length: minMsgLen + headersLen + payloadLen, + HeadersLen: headersLen, + } + if err := p.ValidateLens(); err != nil { + return err + } + + err := binaryWriteFields(w, binary.BigEndian, + p.Length, + p.HeadersLen, + ) + if err != nil { + return err + } + + p.PreludeCRC = crc.Sum32() + err = binary.Write(w, binary.BigEndian, p.PreludeCRC) + if err != nil { + return err + } + + return nil +} + +func encodeHeaders(w io.Writer, headers Headers) error { + for _, h := range headers { + hn := headerName{ + Len: uint8(len(h.Name)), + } + copy(hn.Name[:hn.Len], h.Name) + if err := hn.encode(w); err != nil { + return err + } + + if err := h.Value.encode(w); err != nil { + return err + } + } + + return nil +} + +func binaryWriteFields(w io.Writer, order binary.ByteOrder, vs ...interface{}) error { + for _, v := range vs { + if err := binary.Write(w, order, v); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go new file mode 100644 index 00000000000..5481ef30796 --- /dev/null +++ 
b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go @@ -0,0 +1,23 @@ +package eventstream + +import "fmt" + +// LengthError provides the error for items being larger than a maximum length. +type LengthError struct { + Part string + Want int + Have int + Value interface{} +} + +func (e LengthError) Error() string { + return fmt.Sprintf("%s length invalid, %d/%d, %v", + e.Part, e.Want, e.Have, e.Value) +} + +// ChecksumError provides the error for message checksum invalidation errors. +type ChecksumError struct{} + +func (e ChecksumError) Error() string { + return "message checksum mismatch" +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go new file mode 100644 index 00000000000..4a4e64c713e --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go @@ -0,0 +1,160 @@ +package eventstreamapi + +import ( + "fmt" + "io" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" +) + +// Unmarshaler provides the interface for unmarshaling a EventStream +// message into a SDK type. +type Unmarshaler interface { + UnmarshalEvent(protocol.PayloadUnmarshaler, eventstream.Message) error +} + +// EventStream headers with specific meaning to async API functionality. +const ( + MessageTypeHeader = `:message-type` // Identifies type of message. + EventMessageType = `event` + ErrorMessageType = `error` + ExceptionMessageType = `exception` + + // Message Events + EventTypeHeader = `:event-type` // Identifies message event type e.g. "Stats". + + // Message Error + ErrorCodeHeader = `:error-code` + ErrorMessageHeader = `:error-message` + + // Message Exception + ExceptionTypeHeader = `:exception-type` +) + +// EventReader provides reading from the EventStream of an reader. 
+type EventReader struct { + reader io.ReadCloser + decoder *eventstream.Decoder + + unmarshalerForEventType func(string) (Unmarshaler, error) + payloadUnmarshaler protocol.PayloadUnmarshaler + + payloadBuf []byte +} + +// NewEventReader returns a EventReader built from the reader and unmarshaler +// provided. Use ReadStream method to start reading from the EventStream. +func NewEventReader( + reader io.ReadCloser, + payloadUnmarshaler protocol.PayloadUnmarshaler, + unmarshalerForEventType func(string) (Unmarshaler, error), +) *EventReader { + return &EventReader{ + reader: reader, + decoder: eventstream.NewDecoder(reader), + payloadUnmarshaler: payloadUnmarshaler, + unmarshalerForEventType: unmarshalerForEventType, + payloadBuf: make([]byte, 10*1024), + } +} + +// UseLogger instructs the EventReader to use the logger and log level +// specified. +func (r *EventReader) UseLogger(logger aws.Logger, logLevel aws.LogLevelType) { + if logger != nil && logLevel.Matches(aws.LogDebugWithEventStreamBody) { + r.decoder.UseLogger(logger) + } +} + +// ReadEvent attempts to read a message from the EventStream and return the +// unmarshaled event value that the message is for. +// +// For EventStream API errors check if the returned error satisfies the +// awserr.Error interface to get the error's Code and Message components. +// +// EventUnmarshalers called with EventStream messages must take copies of the +// message's Payload. The payload will is reused between events read. +func (r *EventReader) ReadEvent() (event interface{}, err error) { + msg, err := r.decoder.Decode(r.payloadBuf) + if err != nil { + return nil, err + } + defer func() { + // Reclaim payload buffer for next message read. 
+ r.payloadBuf = msg.Payload[0:0] + }() + + typ, err := GetHeaderString(msg, MessageTypeHeader) + if err != nil { + return nil, err + } + + switch typ { + case EventMessageType: + return r.unmarshalEventMessage(msg) + case ErrorMessageType: + return nil, r.unmarshalErrorMessage(msg) + default: + return nil, fmt.Errorf("unknown eventstream message type, %v", typ) + } +} + +func (r *EventReader) unmarshalEventMessage( + msg eventstream.Message, +) (event interface{}, err error) { + eventType, err := GetHeaderString(msg, EventTypeHeader) + if err != nil { + return nil, err + } + + ev, err := r.unmarshalerForEventType(eventType) + if err != nil { + return nil, err + } + + err = ev.UnmarshalEvent(r.payloadUnmarshaler, msg) + if err != nil { + return nil, err + } + + return ev, nil +} + +func (r *EventReader) unmarshalErrorMessage(msg eventstream.Message) (err error) { + var msgErr messageError + + msgErr.code, err = GetHeaderString(msg, ErrorCodeHeader) + if err != nil { + return err + } + + msgErr.msg, err = GetHeaderString(msg, ErrorMessageHeader) + if err != nil { + return err + } + + return msgErr +} + +// Close closes the EventReader's EventStream reader. +func (r *EventReader) Close() error { + return r.reader.Close() +} + +// GetHeaderString returns the value of the header as a string. If the header +// is not set or the value is not a string an error will be returned. 
+func GetHeaderString(msg eventstream.Message, headerName string) (string, error) { + headerVal := msg.Headers.Get(headerName) + if headerVal == nil { + return "", fmt.Errorf("error header %s not present", headerName) + } + + v, ok := headerVal.Get().(string) + if !ok { + return "", fmt.Errorf("error header value is not a string, %T", headerVal) + } + + return v, nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go new file mode 100644 index 00000000000..5ea5a988b63 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go @@ -0,0 +1,24 @@ +package eventstreamapi + +import "fmt" + +type messageError struct { + code string + msg string +} + +func (e messageError) Code() string { + return e.code +} + +func (e messageError) Message() string { + return e.msg +} + +func (e messageError) Error() string { + return fmt.Sprintf("%s: %s", e.code, e.msg) +} + +func (e messageError) OrigErr() error { + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go new file mode 100644 index 00000000000..3b44dde2f32 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go @@ -0,0 +1,166 @@ +package eventstream + +import ( + "encoding/binary" + "fmt" + "io" +) + +// Headers are a collection of EventStream header values. +type Headers []Header + +// Header is a single EventStream Key Value header pair. +type Header struct { + Name string + Value Value +} + +// Set associates the name with a value. If the header name already exists in +// the Headers the value will be replaced with the new one. 
+func (hs *Headers) Set(name string, value Value) { + var i int + for ; i < len(*hs); i++ { + if (*hs)[i].Name == name { + (*hs)[i].Value = value + return + } + } + + *hs = append(*hs, Header{ + Name: name, Value: value, + }) +} + +// Get returns the Value associated with the header. Nil is returned if the +// value does not exist. +func (hs Headers) Get(name string) Value { + for i := 0; i < len(hs); i++ { + if h := hs[i]; h.Name == name { + return h.Value + } + } + return nil +} + +// Del deletes the value in the Headers if it exists. +func (hs *Headers) Del(name string) { + for i := 0; i < len(*hs); i++ { + if (*hs)[i].Name == name { + copy((*hs)[i:], (*hs)[i+1:]) + (*hs) = (*hs)[:len(*hs)-1] + } + } +} + +func decodeHeaders(r io.Reader) (Headers, error) { + hs := Headers{} + + for { + name, err := decodeHeaderName(r) + if err != nil { + if err == io.EOF { + // EOF while getting header name means no more headers + break + } + return nil, err + } + + value, err := decodeHeaderValue(r) + if err != nil { + return nil, err + } + + hs.Set(name, value) + } + + return hs, nil +} + +func decodeHeaderName(r io.Reader) (string, error) { + var n headerName + + var err error + n.Len, err = decodeUint8(r) + if err != nil { + return "", err + } + + name := n.Name[:n.Len] + if _, err := io.ReadFull(r, name); err != nil { + return "", err + } + + return string(name), nil +} + +func decodeHeaderValue(r io.Reader) (Value, error) { + var raw rawValue + + typ, err := decodeUint8(r) + if err != nil { + return nil, err + } + raw.Type = valueType(typ) + + var v Value + + switch raw.Type { + case trueValueType: + v = BoolValue(true) + case falseValueType: + v = BoolValue(false) + case int8ValueType: + var tv Int8Value + err = tv.decode(r) + v = tv + case int16ValueType: + var tv Int16Value + err = tv.decode(r) + v = tv + case int32ValueType: + var tv Int32Value + err = tv.decode(r) + v = tv + case int64ValueType: + var tv Int64Value + err = tv.decode(r) + v = tv + case bytesValueType: 
+ var tv BytesValue + err = tv.decode(r) + v = tv + case stringValueType: + var tv StringValue + err = tv.decode(r) + v = tv + case timestampValueType: + var tv TimestampValue + err = tv.decode(r) + v = tv + case uuidValueType: + var tv UUIDValue + err = tv.decode(r) + v = tv + default: + panic(fmt.Sprintf("unknown value type %d", raw.Type)) + } + + // Error could be EOF, let caller deal with it + return v, err +} + +const maxHeaderNameLen = 255 + +type headerName struct { + Len uint8 + Name [maxHeaderNameLen]byte +} + +func (v headerName) encode(w io.Writer) error { + if err := binary.Write(w, binary.BigEndian, v.Len); err != nil { + return err + } + + _, err := w.Write(v.Name[:v.Len]) + return err +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go new file mode 100644 index 00000000000..d7786f92ce5 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go @@ -0,0 +1,501 @@ +package eventstream + +import ( + "encoding/base64" + "encoding/binary" + "fmt" + "io" + "strconv" + "time" +) + +const maxHeaderValueLen = 1<<15 - 1 // 2^15-1 or 32KB - 1 + +// valueType is the EventStream header value type. 
+type valueType uint8 + +// Header value types +const ( + trueValueType valueType = iota + falseValueType + int8ValueType // Byte + int16ValueType // Short + int32ValueType // Integer + int64ValueType // Long + bytesValueType + stringValueType + timestampValueType + uuidValueType +) + +func (t valueType) String() string { + switch t { + case trueValueType: + return "bool" + case falseValueType: + return "bool" + case int8ValueType: + return "int8" + case int16ValueType: + return "int16" + case int32ValueType: + return "int32" + case int64ValueType: + return "int64" + case bytesValueType: + return "byte_array" + case stringValueType: + return "string" + case timestampValueType: + return "timestamp" + case uuidValueType: + return "uuid" + default: + return fmt.Sprintf("unknown value type %d", uint8(t)) + } +} + +type rawValue struct { + Type valueType + Len uint16 // Only set for variable length slices + Value []byte // byte representation of value, BigEndian encoding. +} + +func (r rawValue) encodeScalar(w io.Writer, v interface{}) error { + return binaryWriteFields(w, binary.BigEndian, + r.Type, + v, + ) +} + +func (r rawValue) encodeFixedSlice(w io.Writer, v []byte) error { + binary.Write(w, binary.BigEndian, r.Type) + + _, err := w.Write(v) + return err +} + +func (r rawValue) encodeBytes(w io.Writer, v []byte) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + err := binaryWriteFields(w, binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + _, err = w.Write(v) + return err +} + +func (r rawValue) encodeString(w io.Writer, v string) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + type stringWriter interface { + WriteString(string) (int, error) + } + + err := binaryWriteFields(w, 
binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + if sw, ok := w.(stringWriter); ok { + _, err = sw.WriteString(v) + } else { + _, err = w.Write([]byte(v)) + } + + return err +} + +func decodeFixedBytesValue(r io.Reader, buf []byte) error { + _, err := io.ReadFull(r, buf) + return err +} + +func decodeBytesValue(r io.Reader) ([]byte, error) { + var raw rawValue + var err error + raw.Len, err = decodeUint16(r) + if err != nil { + return nil, err + } + + buf := make([]byte, raw.Len) + _, err = io.ReadFull(r, buf) + if err != nil { + return nil, err + } + + return buf, nil +} + +func decodeStringValue(r io.Reader) (string, error) { + v, err := decodeBytesValue(r) + return string(v), err +} + +// Value represents the abstract header value. +type Value interface { + Get() interface{} + String() string + valueType() valueType + encode(io.Writer) error +} + +// An BoolValue provides eventstream encoding, and representation +// of a Go bool value. +type BoolValue bool + +// Get returns the underlying type +func (v BoolValue) Get() interface{} { + return bool(v) +} + +// valueType returns the EventStream header value type value. +func (v BoolValue) valueType() valueType { + if v { + return trueValueType + } + return falseValueType +} + +func (v BoolValue) String() string { + return strconv.FormatBool(bool(v)) +} + +// encode encodes the BoolValue into an eventstream binary value +// representation. +func (v BoolValue) encode(w io.Writer) error { + return binary.Write(w, binary.BigEndian, v.valueType()) +} + +// An Int8Value provides eventstream encoding, and representation of a Go +// int8 value. +type Int8Value int8 + +// Get returns the underlying value. +func (v Int8Value) Get() interface{} { + return int8(v) +} + +// valueType returns the EventStream header value type value. 
+func (Int8Value) valueType() valueType { + return int8ValueType +} + +func (v Int8Value) String() string { + return fmt.Sprintf("0x%02x", int8(v)) +} + +// encode encodes the Int8Value into an eventstream binary value +// representation. +func (v Int8Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeScalar(w, v) +} + +func (v *Int8Value) decode(r io.Reader) error { + n, err := decodeUint8(r) + if err != nil { + return err + } + + *v = Int8Value(n) + return nil +} + +// An Int16Value provides eventstream encoding, and representation of a Go +// int16 value. +type Int16Value int16 + +// Get returns the underlying value. +func (v Int16Value) Get() interface{} { + return int16(v) +} + +// valueType returns the EventStream header value type value. +func (Int16Value) valueType() valueType { + return int16ValueType +} + +func (v Int16Value) String() string { + return fmt.Sprintf("0x%04x", int16(v)) +} + +// encode encodes the Int16Value into an eventstream binary value +// representation. +func (v Int16Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int16Value) decode(r io.Reader) error { + n, err := decodeUint16(r) + if err != nil { + return err + } + + *v = Int16Value(n) + return nil +} + +// An Int32Value provides eventstream encoding, and representation of a Go +// int32 value. +type Int32Value int32 + +// Get returns the underlying value. +func (v Int32Value) Get() interface{} { + return int32(v) +} + +// valueType returns the EventStream header value type value. +func (Int32Value) valueType() valueType { + return int32ValueType +} + +func (v Int32Value) String() string { + return fmt.Sprintf("0x%08x", int32(v)) +} + +// encode encodes the Int32Value into an eventstream binary value +// representation. 
+func (v Int32Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int32Value) decode(r io.Reader) error { + n, err := decodeUint32(r) + if err != nil { + return err + } + + *v = Int32Value(n) + return nil +} + +// An Int64Value provides eventstream encoding, and representation of a Go +// int64 value. +type Int64Value int64 + +// Get returns the underlying value. +func (v Int64Value) Get() interface{} { + return int64(v) +} + +// valueType returns the EventStream header value type value. +func (Int64Value) valueType() valueType { + return int64ValueType +} + +func (v Int64Value) String() string { + return fmt.Sprintf("0x%016x", int64(v)) +} + +// encode encodes the Int64Value into an eventstream binary value +// representation. +func (v Int64Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int64Value) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = Int64Value(n) + return nil +} + +// An BytesValue provides eventstream encoding, and representation of a Go +// byte slice. +type BytesValue []byte + +// Get returns the underlying value. +func (v BytesValue) Get() interface{} { + return []byte(v) +} + +// valueType returns the EventStream header value type value. +func (BytesValue) valueType() valueType { + return bytesValueType +} + +func (v BytesValue) String() string { + return base64.StdEncoding.EncodeToString([]byte(v)) +} + +// encode encodes the BytesValue into an eventstream binary value +// representation. 
+func (v BytesValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeBytes(w, []byte(v)) +} + +func (v *BytesValue) decode(r io.Reader) error { + buf, err := decodeBytesValue(r) + if err != nil { + return err + } + + *v = BytesValue(buf) + return nil +} + +// An StringValue provides eventstream encoding, and representation of a Go +// string. +type StringValue string + +// Get returns the underlying value. +func (v StringValue) Get() interface{} { + return string(v) +} + +// valueType returns the EventStream header value type value. +func (StringValue) valueType() valueType { + return stringValueType +} + +func (v StringValue) String() string { + return string(v) +} + +// encode encodes the StringValue into an eventstream binary value +// representation. +func (v StringValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeString(w, string(v)) +} + +func (v *StringValue) decode(r io.Reader) error { + s, err := decodeStringValue(r) + if err != nil { + return err + } + + *v = StringValue(s) + return nil +} + +// An TimestampValue provides eventstream encoding, and representation of a Go +// timestamp. +type TimestampValue time.Time + +// Get returns the underlying value. +func (v TimestampValue) Get() interface{} { + return time.Time(v) +} + +// valueType returns the EventStream header value type value. +func (TimestampValue) valueType() valueType { + return timestampValueType +} + +func (v TimestampValue) epochMilli() int64 { + nano := time.Time(v).UnixNano() + msec := nano / int64(time.Millisecond) + return msec +} + +func (v TimestampValue) String() string { + msec := v.epochMilli() + return strconv.FormatInt(msec, 10) +} + +// encode encodes the TimestampValue into an eventstream binary value +// representation. 
+func (v TimestampValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + msec := v.epochMilli() + return raw.encodeScalar(w, msec) +} + +func (v *TimestampValue) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = TimestampValue(timeFromEpochMilli(int64(n))) + return nil +} + +func timeFromEpochMilli(t int64) time.Time { + secs := t / 1e3 + msec := t % 1e3 + return time.Unix(secs, msec*int64(time.Millisecond)) +} + +// An UUIDValue provides eventstream encoding, and representation of a UUID +// value. +type UUIDValue [16]byte + +// Get returns the underlying value. +func (v UUIDValue) Get() interface{} { + return v[:] +} + +// valueType returns the EventStream header value type value. +func (UUIDValue) valueType() valueType { + return uuidValueType +} + +func (v UUIDValue) String() string { + return fmt.Sprintf(`%X-%X-%X-%X-%X`, v[0:4], v[4:6], v[6:8], v[8:10], v[10:]) +} + +// encode encodes the UUIDValue into an eventstream binary value +// representation. 
+func (v UUIDValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeFixedSlice(w, v[:]) +} + +func (v *UUIDValue) decode(r io.Reader) error { + tv := (*v)[:] + return decodeFixedBytesValue(r, tv) +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go new file mode 100644 index 00000000000..2dc012a66e2 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go @@ -0,0 +1,103 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash/crc32" +) + +const preludeLen = 8 +const preludeCRCLen = 4 +const msgCRCLen = 4 +const minMsgLen = preludeLen + preludeCRCLen + msgCRCLen +const maxPayloadLen = 1024 * 1024 * 16 // 16MB +const maxHeadersLen = 1024 * 128 // 128KB +const maxMsgLen = minMsgLen + maxHeadersLen + maxPayloadLen + +var crc32IEEETable = crc32.MakeTable(crc32.IEEE) + +// A Message provides the eventstream message representation. +type Message struct { + Headers Headers + Payload []byte +} + +func (m *Message) rawMessage() (rawMessage, error) { + var raw rawMessage + + if len(m.Headers) > 0 { + var headers bytes.Buffer + if err := encodeHeaders(&headers, m.Headers); err != nil { + return rawMessage{}, err + } + raw.Headers = headers.Bytes() + raw.HeadersLen = uint32(len(raw.Headers)) + } + + raw.Length = raw.HeadersLen + uint32(len(m.Payload)) + minMsgLen + + hash := crc32.New(crc32IEEETable) + binaryWriteFields(hash, binary.BigEndian, raw.Length, raw.HeadersLen) + raw.PreludeCRC = hash.Sum32() + + binaryWriteFields(hash, binary.BigEndian, raw.PreludeCRC) + + if raw.HeadersLen > 0 { + hash.Write(raw.Headers) + } + + // Read payload bytes and update hash for it as well. 
+ if len(m.Payload) > 0 { + raw.Payload = m.Payload + hash.Write(raw.Payload) + } + + raw.CRC = hash.Sum32() + + return raw, nil +} + +type messagePrelude struct { + Length uint32 + HeadersLen uint32 + PreludeCRC uint32 +} + +func (p messagePrelude) PayloadLen() uint32 { + return p.Length - p.HeadersLen - minMsgLen +} + +func (p messagePrelude) ValidateLens() error { + if p.Length == 0 || p.Length > maxMsgLen { + return LengthError{ + Part: "message prelude", + Want: maxMsgLen, + Have: int(p.Length), + } + } + if p.HeadersLen > maxHeadersLen { + return LengthError{ + Part: "message headers", + Want: maxHeadersLen, + Have: int(p.HeadersLen), + } + } + if payloadLen := p.PayloadLen(); payloadLen > maxPayloadLen { + return LengthError{ + Part: "message payload", + Want: maxPayloadLen, + Have: int(payloadLen), + } + } + + return nil +} + +type rawMessage struct { + messagePrelude + + Headers []byte + Payload []byte + + CRC uint32 +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go new file mode 100644 index 00000000000..e21614a1250 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go @@ -0,0 +1,81 @@ +package protocol + +import ( + "io" + "io/ioutil" + "net/http" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/client/metadata" + "github.com/aws/aws-sdk-go/aws/request" +) + +// PayloadUnmarshaler provides the interface for unmarshaling a payload's +// reader into a SDK shape. +type PayloadUnmarshaler interface { + UnmarshalPayload(io.Reader, interface{}) error +} + +// HandlerPayloadUnmarshal implements the PayloadUnmarshaler from a +// HandlerList. This provides the support for unmarshaling a payload reader to +// a shape without needing a SDK request first. 
+type HandlerPayloadUnmarshal struct { + Unmarshalers request.HandlerList +} + +// UnmarshalPayload unmarshals the io.Reader payload into the SDK shape using +// the Unmarshalers HandlerList provided. Returns an error if unable +// unmarshaling fails. +func (h HandlerPayloadUnmarshal) UnmarshalPayload(r io.Reader, v interface{}) error { + req := &request.Request{ + HTTPRequest: &http.Request{}, + HTTPResponse: &http.Response{ + StatusCode: 200, + Header: http.Header{}, + Body: ioutil.NopCloser(r), + }, + Data: v, + } + + h.Unmarshalers.Run(req) + + return req.Error +} + +// PayloadMarshaler provides the interface for marshaling a SDK shape into and +// io.Writer. +type PayloadMarshaler interface { + MarshalPayload(io.Writer, interface{}) error +} + +// HandlerPayloadMarshal implements the PayloadMarshaler from a HandlerList. +// This provides support for marshaling a SDK shape into an io.Writer without +// needing a SDK request first. +type HandlerPayloadMarshal struct { + Marshalers request.HandlerList +} + +// MarshalPayload marshals the SDK shape into the io.Writer using the +// Marshalers HandlerList provided. Returns an error if unable if marshal +// fails. 
+func (h HandlerPayloadMarshal) MarshalPayload(w io.Writer, v interface{}) error { + req := request.New( + aws.Config{}, + metadata.ClientInfo{}, + request.Handlers{}, + nil, + &request.Operation{HTTPMethod: "GET"}, + v, + nil, + ) + + h.Marshalers.Run(req) + + if req.Error != nil { + return req.Error + } + + io.Copy(w, req.GetBody()) + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go index c405288d742..f761e0b3a5b 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go @@ -20,8 +20,10 @@ import ( "github.com/aws/aws-sdk-go/private/protocol" ) -// RFC822 returns an RFC822 formatted timestamp for AWS protocols -const RFC822 = "Mon, 2 Jan 2006 15:04:05 GMT" +// RFC1123GMT is a RFC1123 (RFC822) formated timestame. This format is not +// using the standard library's time.RFC1123 due to the desire to always use +// GMT as the timezone. 
+const RFC1123GMT = "Mon, 2 Jan 2006 15:04:05 GMT" // Whether the byte value can be sent without escaping in AWS URLs var noEscape [256]bool @@ -270,7 +272,7 @@ func convertType(v reflect.Value, tag reflect.StructTag) (str string, err error) case float64: str = strconv.FormatFloat(value, 'f', -1, 64) case time.Time: - str = value.UTC().Format(RFC822) + str = value.UTC().Format(RFC1123GMT) case aws.JSONValue: if len(value) == 0 { return "", errValueNotSet diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go index 823f045eed7..9d4e7626775 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go @@ -198,7 +198,7 @@ func unmarshalHeader(v reflect.Value, header string, tag reflect.StructTag) erro } v.Set(reflect.ValueOf(&f)) case *time.Time: - t, err := time.Parse(RFC822, header) + t, err := time.Parse(time.RFC1123, header) if err != nil { return err } diff --git a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go index 4b0aa76edcd..0d478662240 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "monitoring" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "monitoring" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "CloudWatch" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the CloudWatch client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go index 99d12a66e42..b48e40e205c 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go @@ -2268,11 +2268,7 @@ func (c *EC2) CancelSpotInstanceRequestsRequest(input *CancelSpotInstanceRequest // CancelSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. // -// Cancels one or more Spot Instance requests. Spot Instances are instances -// that Amazon EC2 starts on your behalf when the maximum price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Cancels one or more Spot Instance requests. // // Canceling a Spot Instance request does not terminate running Spot Instances // associated with the request. @@ -4179,8 +4175,8 @@ func (c *EC2) CreateNetworkInterfacePermissionRequest(input *CreateNetworkInterf // CreateNetworkInterfacePermission API operation for Amazon Elastic Compute Cloud. // -// Grants an AWS authorized partner account permission to attach the specified -// network interface to an instance in their account. +// Grants an AWS-authorized account permission to attach the specified network +// interface to an instance in their account. // // You can grant permission to a single AWS account only, and only one account // at a time. @@ -13675,11 +13671,7 @@ func (c *EC2) DescribeSpotInstanceRequestsRequest(input *DescribeSpotInstanceReq // DescribeSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. 
// -// Describes the Spot Instance requests that belong to your account. Spot Instances -// are instances that Amazon EC2 launches when the Spot price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Describes the specified Spot Instance requests. // // You can use DescribeSpotInstanceRequests to find a running Spot Instance // by examining the response. If the status of the Spot Instance is fulfilled, @@ -21367,9 +21359,9 @@ func (c *EC2) RequestSpotInstancesRequest(input *RequestSpotInstancesInput) (req // RequestSpotInstances API operation for Amazon Elastic Compute Cloud. // -// Creates a Spot Instance request. Spot Instances are instances that Amazon -// EC2 launches when the maximum price that you specify exceeds the current -// Spot price. For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) +// Creates a Spot Instance request. +// +// For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) // in the Amazon EC2 User Guide for Linux Instances. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions @@ -37615,7 +37607,7 @@ type DescribeInstancesInput struct { // The maximum number of results to return in a single call. To retrieve the // remaining results, make another call with the returned NextToken value. This // value can be between 5 and 1000. You cannot specify this parameter and the - // instance IDs parameter or tag filters in the same call. + // instance IDs parameter in the same call. MaxResults *int64 `locationName:"maxResults" type:"integer"` // The token to request the next page of results. @@ -66458,19 +66450,23 @@ type StateReason struct { // The message for the state change. 
// - // * Server.InsufficientInstanceCapacity: There was insufficient instance - // capacity to satisfy the launch request. + // * Server.InsufficientInstanceCapacity: There was insufficient capacity + // available to satisfy the launch request. // - // * Server.InternalError: An internal error occurred during instance launch, - // resulting in termination. + // * Server.InternalError: An internal error caused the instance to terminate + // during launch. // // * Server.ScheduledStop: The instance was stopped due to a scheduled retirement. // - // * Server.SpotInstanceTermination: A Spot Instance was terminated due to - // an increase in the Spot price. + // * Server.SpotInstanceShutdown: The instance was stopped because the number + // of Spot requests with a maximum price equal to or higher than the Spot + // price exceeded available capacity or because of an increase in the Spot + // price. // - // * Client.InternalError: A client error caused the instance to terminate - // on launch. + // * Server.SpotInstanceTermination: The instance was terminated because + // the number of Spot requests with a maximum price equal to or higher than + // the Spot price exceeded available capacity or because of an increase in + // the Spot price. // // * Client.InstanceInitiatedShutdown: The instance was shut down using the // shutdown -h command from the instance. @@ -66478,14 +66474,17 @@ type StateReason struct { // * Client.InstanceTerminated: The instance was terminated or rebooted during // AMI creation. // + // * Client.InternalError: A client error caused the instance to terminate + // during launch. + // + // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // // * Client.UserInitiatedShutdown: The instance was shut down using the Amazon // EC2 API. // // * Client.VolumeLimitExceeded: The limit on the number of EBS volumes or // total storage was exceeded. Decrease usage or request an increase in your - // limits. 
- // - // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // account limits. Message *string `locationName:"message" type:"string"` } @@ -66969,7 +66968,7 @@ type TagSpecification struct { _ struct{} `type:"structure"` // The type of resource to tag. Currently, the resource types that support tagging - // on creation are instance and volume. + // on creation are instance, snapshot, and volume. ResourceType *string `locationName:"resourceType" type:"string" enum:"ResourceType"` // The tags to apply to the resource. @@ -70694,6 +70693,9 @@ const ( // InstanceTypeI316xlarge is a InstanceType enum value InstanceTypeI316xlarge = "i3.16xlarge" + // InstanceTypeI3Metal is a InstanceType enum value + InstanceTypeI3Metal = "i3.metal" + // InstanceTypeHi14xlarge is a InstanceType enum value InstanceTypeHi14xlarge = "hi1.4xlarge" @@ -70754,6 +70756,24 @@ const ( // InstanceTypeC518xlarge is a InstanceType enum value InstanceTypeC518xlarge = "c5.18xlarge" + // InstanceTypeC5dLarge is a InstanceType enum value + InstanceTypeC5dLarge = "c5d.large" + + // InstanceTypeC5dXlarge is a InstanceType enum value + InstanceTypeC5dXlarge = "c5d.xlarge" + + // InstanceTypeC5d2xlarge is a InstanceType enum value + InstanceTypeC5d2xlarge = "c5d.2xlarge" + + // InstanceTypeC5d4xlarge is a InstanceType enum value + InstanceTypeC5d4xlarge = "c5d.4xlarge" + + // InstanceTypeC5d9xlarge is a InstanceType enum value + InstanceTypeC5d9xlarge = "c5d.9xlarge" + + // InstanceTypeC5d18xlarge is a InstanceType enum value + InstanceTypeC5d18xlarge = "c5d.18xlarge" + // InstanceTypeCc14xlarge is a InstanceType enum value InstanceTypeCc14xlarge = "cc1.4xlarge" @@ -70832,6 +70852,24 @@ const ( // InstanceTypeM524xlarge is a InstanceType enum value InstanceTypeM524xlarge = "m5.24xlarge" + // InstanceTypeM5dLarge is a InstanceType enum value + InstanceTypeM5dLarge = "m5d.large" + + // InstanceTypeM5dXlarge is a InstanceType enum value + InstanceTypeM5dXlarge = "m5d.xlarge" + + // 
InstanceTypeM5d2xlarge is a InstanceType enum value + InstanceTypeM5d2xlarge = "m5d.2xlarge" + + // InstanceTypeM5d4xlarge is a InstanceType enum value + InstanceTypeM5d4xlarge = "m5d.4xlarge" + + // InstanceTypeM5d12xlarge is a InstanceType enum value + InstanceTypeM5d12xlarge = "m5d.12xlarge" + + // InstanceTypeM5d24xlarge is a InstanceType enum value + InstanceTypeM5d24xlarge = "m5d.24xlarge" + // InstanceTypeH12xlarge is a InstanceType enum value InstanceTypeH12xlarge = "h1.2xlarge" diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go index ba4433d388e..6acbc43fe3d 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "ec2" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "ec2" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "EC2" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the EC2 client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go index a27823fdfb5..07fc06af1f9 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go @@ -3,14 +3,21 @@ package s3 import ( + "bytes" "fmt" "io" + "sync" + "sync/atomic" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awsutil" + "github.com/aws/aws-sdk-go/aws/client" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" + "github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi" + "github.com/aws/aws-sdk-go/private/protocol/rest" "github.com/aws/aws-sdk-go/private/protocol/restxml" ) @@ -6017,6 +6024,88 @@ func (c *S3) RestoreObjectWithContext(ctx aws.Context, input *RestoreObjectInput return out, req.Send() } +const opSelectObjectContent = "SelectObjectContent" + +// SelectObjectContentRequest generates a "aws/request.Request" representing the +// client's request for the SelectObjectContent operation. The "output" return +// value will be populated with the request's response once the request completes +// successfuly. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See SelectObjectContent for more information on using the SelectObjectContent +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. 
+// +// +// // Example sending a request using the SelectObjectContentRequest method. +// req, resp := client.SelectObjectContentRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContentRequest(input *SelectObjectContentInput) (req *request.Request, output *SelectObjectContentOutput) { + op := &request.Operation{ + Name: opSelectObjectContent, + HTTPMethod: "POST", + HTTPPath: "/{Bucket}/{Key+}?select&select-type=2", + } + + if input == nil { + input = &SelectObjectContentInput{} + } + + output = &SelectObjectContentOutput{} + req = c.newRequest(op, input, output) + req.Handlers.Send.Swap(client.LogHTTPResponseHandler.Name, client.LogHTTPResponseHeaderHandler) + req.Handlers.Unmarshal.Swap(restxml.UnmarshalHandler.Name, rest.UnmarshalHandler) + req.Handlers.Unmarshal.PushBack(output.runEventStreamLoop) + return +} + +// SelectObjectContent API operation for Amazon Simple Storage Service. +// +// This operation filters the contents of an Amazon S3 object based on a simple +// Structured Query Language (SQL) statement. In the request, along with the +// SQL expression, you must also specify a data serialization format (JSON or +// CSV) of the object. Amazon S3 uses this to parse object data into records, +// and returns only records that match the specified SQL expression. You must +// also specify the data serialization format for the response. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Simple Storage Service's +// API operation SelectObjectContent for usage and error information. 
+// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContent(input *SelectObjectContentInput) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + return out, req.Send() +} + +// SelectObjectContentWithContext is the same as SelectObjectContent with the addition of +// the ability to pass a context and additional request options. +// +// See SelectObjectContent for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *S3) SelectObjectContentWithContext(ctx aws.Context, input *SelectObjectContentInput, opts ...request.Option) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + const opUploadPart = "UploadPart" // UploadPartRequest generates a "aws/request.Request" representing the @@ -7474,6 +7563,32 @@ func (s *Condition) SetKeyPrefixEquals(v string) *Condition { return s } +type ContinuationEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s ContinuationEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ContinuationEvent) GoString() string { + return s.String() +} + +// The ContinuationEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ContinuationEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ContinuationEvent value. +// This method is only used internally within the SDK's EventStream handling. 
+func (s *ContinuationEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type CopyObjectInput struct { _ struct{} `type:"structure"` @@ -9919,6 +10034,32 @@ func (s *EncryptionConfiguration) SetReplicaKmsKeyID(v string) *EncryptionConfig return s } +type EndEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s EndEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s EndEvent) GoString() string { + return s.String() +} + +// The EndEvent is and event in the SelectObjectContentEventStream group of events. +func (s *EndEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the EndEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *EndEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type Error struct { _ struct{} `type:"structure"` @@ -16380,6 +16521,87 @@ func (s *Part) SetSize(v int64) *Part { return s } +type Progress struct { + _ struct{} `type:"structure"` + + // Current number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Current number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Current number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Progress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Progress) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Progress) SetBytesProcessed(v int64) *Progress { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. 
+func (s *Progress) SetBytesReturned(v int64) *Progress { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Progress) SetBytesScanned(v int64) *Progress { + s.BytesScanned = &v + return s +} + +type ProgressEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Progress event details. + Details *Progress `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s ProgressEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ProgressEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *ProgressEvent) SetDetails(v *Progress) *ProgressEvent { + s.Details = v + return s +} + +// The ProgressEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ProgressEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ProgressEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *ProgressEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type PutBucketAccelerateConfigurationInput struct { _ struct{} `type:"structure" payload:"AccelerateConfiguration"` @@ -18622,6 +18844,45 @@ func (s *QueueConfigurationDeprecated) SetQueue(v string) *QueueConfigurationDep return s } +type RecordsEvent struct { + _ struct{} `type:"structure" payload:"Payload"` + + // The byte array of partial, one or more result records. + // + // Payload is automatically base64 encoded/decoded by the SDK. 
+ Payload []byte `type:"blob"` +} + +// String returns the string representation +func (s RecordsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RecordsEvent) GoString() string { + return s.String() +} + +// SetPayload sets the Payload field's value. +func (s *RecordsEvent) SetPayload(v []byte) *RecordsEvent { + s.Payload = v + return s +} + +// The RecordsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *RecordsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the RecordsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *RecordsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + s.Payload = make([]byte, len(msg.Payload)) + copy(s.Payload, msg.Payload) + return nil +} + type Redirect struct { _ struct{} `type:"structure"` @@ -18939,6 +19200,30 @@ func (s *RequestPaymentConfiguration) SetPayer(v string) *RequestPaymentConfigur return s } +type RequestProgress struct { + _ struct{} `type:"structure"` + + // Specifies whether periodic QueryProgress frames should be sent. Valid values: + // TRUE, FALSE. Default value: FALSE. + Enabled *bool `type:"boolean"` +} + +// String returns the string representation +func (s RequestProgress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RequestProgress) GoString() string { + return s.String() +} + +// SetEnabled sets the Enabled field's value. 
+func (s *RequestProgress) SetEnabled(v bool) *RequestProgress { + s.Enabled = &v + return s +} + type RestoreObjectInput struct { _ struct{} `type:"structure" payload:"RestoreRequest"` @@ -19392,6 +19677,436 @@ func (s SSES3) GoString() string { return s.String() } +// SelectObjectContentEventStream provides handling of EventStreams for +// the SelectObjectContent API. +// +// Use this type to receive SelectObjectContentEventStream events. The events +// can be read from the Events channel member. +// +// The events that can be received are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStream struct { + // Reader is the EventStream reader for the SelectObjectContentEventStream + // events. This value is automatically set by the SDK when the API call is made + // Use this member when unit testing your code with the SDK to mock out the + // EventStream Reader. + // + // Must not be nil. + Reader SelectObjectContentEventStreamReader + + // StreamCloser is the io.Closer for the EventStream connection. For HTTP + // EventStream this is the response Body. The stream will be closed when + // the Close method of the EventStream is called. + StreamCloser io.Closer +} + +// Close closes the EventStream. This will also cause the Events channel to be +// closed. You can use the closing of the Events channel to terminate your +// application's read from the API's EventStream. +// +// Will close the underlying EventStream reader. For EventStream over HTTP +// connection this will also close the HTTP connection. +// +// Close must be called when done using the EventStream API. Not calling Close +// may result in resource leaks. +func (es *SelectObjectContentEventStream) Close() (err error) { + es.Reader.Close() + return es.Err() +} + +// Err returns any error that occurred while reading EventStream Events from +// the service API's response. Returns nil if there were no errors. 
+func (es *SelectObjectContentEventStream) Err() error { + if err := es.Reader.Err(); err != nil { + return err + } + es.StreamCloser.Close() + + return nil +} + +// Events returns a channel to read EventStream Events from the +// SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +func (es *SelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return es.Reader.Events() +} + +// SelectObjectContentEventStreamEvent groups together all EventStream +// events read from the SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamEvent interface { + eventSelectObjectContentEventStream() +} + +// SelectObjectContentEventStreamReader provides the interface for reading EventStream +// Events from the SelectObjectContent API. The +// default implementation for this interface will be SelectObjectContentEventStream. +// +// The reader's Close method must allow multiple concurrent calls. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamReader interface { + // Returns a channel of events as they are read from the event stream. + Events() <-chan SelectObjectContentEventStreamEvent + + // Close will close the underlying event stream reader. For event stream over + // HTTP this will also close the HTTP connection. + Close() error + + // Returns any error that has occured while reading from the event stream. 
+ Err() error +} + +type readSelectObjectContentEventStream struct { + eventReader *eventstreamapi.EventReader + stream chan SelectObjectContentEventStreamEvent + errVal atomic.Value + + done chan struct{} + closeOnce sync.Once +} + +func newReadSelectObjectContentEventStream( + reader io.ReadCloser, + unmarshalers request.HandlerList, + logger aws.Logger, + logLevel aws.LogLevelType, +) *readSelectObjectContentEventStream { + r := &readSelectObjectContentEventStream{ + stream: make(chan SelectObjectContentEventStreamEvent), + done: make(chan struct{}), + } + + r.eventReader = eventstreamapi.NewEventReader( + reader, + protocol.HandlerPayloadUnmarshal{ + Unmarshalers: unmarshalers, + }, + r.unmarshalerForEventType, + ) + r.eventReader.UseLogger(logger, logLevel) + + return r +} + +// Close will close the underlying event stream reader. For EventStream over +// HTTP this will also close the HTTP connection. +func (r *readSelectObjectContentEventStream) Close() error { + r.closeOnce.Do(r.safeClose) + + return r.Err() +} + +func (r *readSelectObjectContentEventStream) safeClose() { + close(r.done) + err := r.eventReader.Close() + if err != nil { + r.errVal.Store(err) + } +} + +func (r *readSelectObjectContentEventStream) Err() error { + if v := r.errVal.Load(); v != nil { + return v.(error) + } + + return nil +} + +func (r *readSelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return r.stream +} + +func (r *readSelectObjectContentEventStream) readEventStream() { + defer close(r.stream) + + for { + event, err := r.eventReader.ReadEvent() + if err != nil { + if err == io.EOF { + return + } + select { + case <-r.done: + // If closed already ignore the error + return + default: + } + r.errVal.Store(err) + return + } + + select { + case r.stream <- event.(SelectObjectContentEventStreamEvent): + case <-r.done: + return + } + } +} + +func (r *readSelectObjectContentEventStream) unmarshalerForEventType( + eventType string, +) 
(eventstreamapi.Unmarshaler, error) { + switch eventType { + case "Cont": + return &ContinuationEvent{}, nil + + case "End": + return &EndEvent{}, nil + + case "Progress": + return &ProgressEvent{}, nil + + case "Records": + return &RecordsEvent{}, nil + + case "Stats": + return &StatsEvent{}, nil + default: + return nil, fmt.Errorf( + "unknown event type name, %s, for SelectObjectContentEventStream", eventType) + } +} + +// Request to filter the contents of an Amazon S3 object based on a simple Structured +// Query Language (SQL) statement. In the request, along with the SQL expression, +// you must also specify a data serialization format (JSON or CSV) of the object. +// Amazon S3 uses this to parse object data into records, and returns only records +// that match the specified SQL expression. You must also specify the data serialization +// format for the response. For more information, go to S3Select API Documentation +// (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectSELECTContent.html) +type SelectObjectContentInput struct { + _ struct{} `locationName:"SelectObjectContentRequest" type:"structure" xmlURI:"http://s3.amazonaws.com/doc/2006-03-01/"` + + // The S3 Bucket. + // + // Bucket is a required field + Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"` + + // The expression that is used to query the object. + // + // Expression is a required field + Expression *string `type:"string" required:"true"` + + // The type of the provided expression (e.g., SQL). + // + // ExpressionType is a required field + ExpressionType *string `type:"string" required:"true" enum:"ExpressionType"` + + // Describes the format of the data in the object that is being queried. + // + // InputSerialization is a required field + InputSerialization *InputSerialization `type:"structure" required:"true"` + + // The Object Key. 
+ // + // Key is a required field + Key *string `location:"uri" locationName:"Key" min:"1" type:"string" required:"true"` + + // Describes the format of the data that you want Amazon S3 to return in response. + // + // OutputSerialization is a required field + OutputSerialization *OutputSerialization `type:"structure" required:"true"` + + // Specifies if periodic request progress information should be enabled. + RequestProgress *RequestProgress `type:"structure"` + + // The SSE Algorithm used to encrypt the object. For more information, go to + // Server-Side Encryption (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"` + + // The SSE Customer Key. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"` + + // The SSE Customer Key MD5. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"` +} + +// String returns the string representation +func (s SelectObjectContentInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. 
+func (s *SelectObjectContentInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "SelectObjectContentInput"} + if s.Bucket == nil { + invalidParams.Add(request.NewErrParamRequired("Bucket")) + } + if s.Expression == nil { + invalidParams.Add(request.NewErrParamRequired("Expression")) + } + if s.ExpressionType == nil { + invalidParams.Add(request.NewErrParamRequired("ExpressionType")) + } + if s.InputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("InputSerialization")) + } + if s.Key == nil { + invalidParams.Add(request.NewErrParamRequired("Key")) + } + if s.Key != nil && len(*s.Key) < 1 { + invalidParams.Add(request.NewErrParamMinLen("Key", 1)) + } + if s.OutputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("OutputSerialization")) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetBucket sets the Bucket field's value. +func (s *SelectObjectContentInput) SetBucket(v string) *SelectObjectContentInput { + s.Bucket = &v + return s +} + +func (s *SelectObjectContentInput) getBucket() (v string) { + if s.Bucket == nil { + return v + } + return *s.Bucket +} + +// SetExpression sets the Expression field's value. +func (s *SelectObjectContentInput) SetExpression(v string) *SelectObjectContentInput { + s.Expression = &v + return s +} + +// SetExpressionType sets the ExpressionType field's value. +func (s *SelectObjectContentInput) SetExpressionType(v string) *SelectObjectContentInput { + s.ExpressionType = &v + return s +} + +// SetInputSerialization sets the InputSerialization field's value. +func (s *SelectObjectContentInput) SetInputSerialization(v *InputSerialization) *SelectObjectContentInput { + s.InputSerialization = v + return s +} + +// SetKey sets the Key field's value. +func (s *SelectObjectContentInput) SetKey(v string) *SelectObjectContentInput { + s.Key = &v + return s +} + +// SetOutputSerialization sets the OutputSerialization field's value. 
+func (s *SelectObjectContentInput) SetOutputSerialization(v *OutputSerialization) *SelectObjectContentInput { + s.OutputSerialization = v + return s +} + +// SetRequestProgress sets the RequestProgress field's value. +func (s *SelectObjectContentInput) SetRequestProgress(v *RequestProgress) *SelectObjectContentInput { + s.RequestProgress = v + return s +} + +// SetSSECustomerAlgorithm sets the SSECustomerAlgorithm field's value. +func (s *SelectObjectContentInput) SetSSECustomerAlgorithm(v string) *SelectObjectContentInput { + s.SSECustomerAlgorithm = &v + return s +} + +// SetSSECustomerKey sets the SSECustomerKey field's value. +func (s *SelectObjectContentInput) SetSSECustomerKey(v string) *SelectObjectContentInput { + s.SSECustomerKey = &v + return s +} + +func (s *SelectObjectContentInput) getSSECustomerKey() (v string) { + if s.SSECustomerKey == nil { + return v + } + return *s.SSECustomerKey +} + +// SetSSECustomerKeyMD5 sets the SSECustomerKeyMD5 field's value. +func (s *SelectObjectContentInput) SetSSECustomerKeyMD5(v string) *SelectObjectContentInput { + s.SSECustomerKeyMD5 = &v + return s +} + +type SelectObjectContentOutput struct { + _ struct{} `type:"structure" payload:"Payload"` + + // Use EventStream to use the API's stream. + EventStream *SelectObjectContentEventStream `type:"structure"` +} + +// String returns the string representation +func (s SelectObjectContentOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentOutput) GoString() string { + return s.String() +} + +// SetEventStream sets the EventStream field's value. 
+func (s *SelectObjectContentOutput) SetEventStream(v *SelectObjectContentEventStream) *SelectObjectContentOutput { + s.EventStream = v + return s +} + +func (s *SelectObjectContentOutput) runEventStreamLoop(r *request.Request) { + if r.Error != nil { + return + } + reader := newReadSelectObjectContentEventStream( + r.HTTPResponse.Body, + r.Handlers.UnmarshalStream, + r.Config.Logger, + r.Config.LogLevel.Value(), + ) + go reader.readEventStream() + + eventStream := &SelectObjectContentEventStream{ + StreamCloser: r.HTTPResponse.Body, + Reader: reader, + } + s.EventStream = eventStream +} + // Describes the parameters for Select job types. type SelectParameters struct { _ struct{} `type:"structure"` @@ -19696,6 +20411,87 @@ func (s *SseKmsEncryptedObjects) SetStatus(v string) *SseKmsEncryptedObjects { return s } +type Stats struct { + _ struct{} `type:"structure"` + + // Total number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Total number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Total number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Stats) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Stats) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Stats) SetBytesProcessed(v int64) *Stats { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. +func (s *Stats) SetBytesReturned(v int64) *Stats { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Stats) SetBytesScanned(v int64) *Stats { + s.BytesScanned = &v + return s +} + +type StatsEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Stats event details. 
+ Details *Stats `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s StatsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s StatsEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *StatsEvent) SetDetails(v *Stats) *StatsEvent { + s.Details = v + return s +} + +// The StatsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *StatsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the StatsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *StatsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type StorageClassAnalysis struct { _ struct{} `type:"structure"` diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go index 614e477d3bb..20de53f29d7 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "s3" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "s3" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "S3" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the S3 client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, @@ -71,6 +73,8 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio svc.Handlers.UnmarshalMeta.PushBackNamed(restxml.UnmarshalMetaHandler) svc.Handlers.UnmarshalError.PushBackNamed(restxml.UnmarshalErrorHandler) + svc.Handlers.UnmarshalStream.PushBackNamed(restxml.UnmarshalHandler) + // Run custom client initialization if present if initClient != nil { initClient(svc.Client) diff --git a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go index 1ee5839e046..185c914d1b3 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "sts" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "sts" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "STS" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the STS client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE new file mode 100644 index 00000000000..c35c17af980 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2015 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/main.go b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go new file mode 100644 index 00000000000..6a77d124317 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go @@ -0,0 +1,29 @@ +// Package sanitized_anchor_name provides a func to create sanitized anchor names. 
+// +// Its logic can be reused by multiple packages to create interoperable anchor names +// and links to those anchors. +// +// At this time, it does not try to ensure that generated anchor names +// are unique, that responsibility falls on the caller. +package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name" + +import "unicode" + +// Create returns a sanitized anchor name for the given text. +func Create(text string) string { + var anchorName []rune + var futureDash = false + for _, r := range text { + switch { + case unicode.IsLetter(r) || unicode.IsNumber(r): + if futureDash && len(anchorName) > 0 { + anchorName = append(anchorName, '-') + } + futureDash = false + anchorName = append(anchorName, unicode.ToLower(r)) + default: + futureDash = true + } + } + return string(anchorName) +} diff --git a/yarn.lock b/yarn.lock index 6772d7c14a4..6e737e33348 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3101,7 +3101,7 @@ d3-request@1.0.6: d3-dsv "1" xmlhttprequest "1" -d3-scale-chromatic@^1.1.1: +d3-scale-chromatic@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz#7ee38ffcaa7ad55cfed83a6a668aac5570c653c4" dependencies: @@ -7974,7 +7974,7 @@ mocha@^4.0.1: mkdirp "0.5.1" supports-color "4.4.0" -moment@^2.18.1: +moment@^2.22.2: version "2.22.2" resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.2.tgz#3c257f9839fc0e93ff53149632239eb90783ff66" @@ -12029,6 +12029,10 @@ tslib@^1.8.0, tslib@^1.8.1, tslib@^1.9.0: version "1.9.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.2.tgz#8be0cc9a1f6dc7727c38deb16c2ebd1a2892988e" +tslib@^1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" + tslint-loader@^3.5.3: version "3.6.0" resolved "https://registry.yarnpkg.com/tslint-loader/-/tslint-loader-3.6.0.tgz#12ed4d5ef57d68be25cd12692fb2108b66469d76"