diff --git a/.bowerrc b/.bowerrc deleted file mode 100644 index 36643c6062c..00000000000 --- a/.bowerrc +++ /dev/null @@ -1,3 +0,0 @@ -{ - "directory": "public/vendor/" -} diff --git a/.gitignore b/.gitignore index 0e6affc6120..f14b48eb248 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,7 @@ public/css/*.min.css .idea/ *.iml *.tmp +.DS_Store .vscode/ /data/* diff --git a/.jsfmtrc b/.jsfmtrc deleted file mode 100644 index 3ca3b4a3b50..00000000000 --- a/.jsfmtrc +++ /dev/null @@ -1,21 +0,0 @@ -{ - "preset" : "default", - - "lineBreak" : { - "before" : { - "VariableDeclarationWithoutInit" : 0, - }, - - "after": { - "AssignmentOperator": -1, - "ArgumentListArrayExpression": ">=1" - } - }, - - "whiteSpace" : { - "before" : { - }, - "after" : { - } - } -} diff --git a/CHANGELOG.md b/CHANGELOG.md index eae246c4a64..4760ee163bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,11 +7,45 @@ - UX changes to nav & side menu - New dashboard grid layout system -# 4.5.0 (unreleased) +# 4.6.0 (unreleased) -## Enhancements +## New Features +* **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin) +* **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187) +* **Graph**: Add support for local formatting in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) +* **Jaeger**: Add support for OpenTracing using Jaeger in Grafana. [#9213](https://github.com/grafana/grafana/pull/9213) +* **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/2764) +* **CLI**: Make it possible to install plugins from any URL [#5873](https://github.com/grafana/grafana/issues/5873) +* **Prometheus**: Add support for instant queries [#5765](https://github.com/grafana/grafana/issues/5765), thx [@mtanda](https://github.com/mtanda) +## Minor +* **SMTP**: Make it possible to set a specific EHLO for the SMTP client. [#9319](https://github.com/grafana/grafana/issues/9319) +* **Dataproxy**: Allow Grafana to renegotiate TLS connections [#9250](https://github.com/grafana/grafana/issues/9250) + +# 4.5.2 (2017-09-22) + +## Fixes +* **Graphite**: Fix for issues with jsonData & graphiteVersion null errors [#9258](https://github.com/grafana/grafana/issues/9258) +* **Graphite**: Fix for Grafana internal metrics to Graphite sending NaN values [#9279](https://github.com/grafana/grafana/issues/9279) +* **HTTP API**: Fix for HEAD method requests [#9307](https://github.com/grafana/grafana/issues/9307) +* **Templating**: Fix for duplicate template variable queries when refresh is set to time range change [#9185](https://github.com/grafana/grafana/issues/9185) +* **Metrics**: don't write NaN values to Graphite [#9279](https://github.com/grafana/grafana/issues/9279) + +# 4.5.1 (2017-09-15) + +## Fixes +* **MySQL**: Fixed issue with query editor not showing [#9247](https://github.com/grafana/grafana/issues/9247) + +## Breaking changes +* **Metrics**: The metric structure for internal metrics about Grafana published to graphite has changed. This might break dashboards for internal metrics. + +# 4.5.0 (2017-09-14) + +## Fixes & Enhancements since beta1 +* **Security**: Security fix for API vulnerability (in multiple org setups).
* **Shortcuts**: Adds shortcut for creating new dashboard [#8876](https://github.com/grafana/grafana/pull/8876) thx [@mtanda](https://github.com/mtanda) +* **Graph**: Right Y-Axis label position fixed [#9172](https://github.com/grafana/grafana/pull/9172) +* **General**: Improve rounding of time intervals [#9197](https://github.com/grafana/grafana/pull/9197), thx [@alin-amana](https://github.com/alin-amana) # 4.5.0-beta1 (2017-09-05) @@ -32,6 +66,7 @@ ### Breaking change * **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit. +* **Elasticsearch**: Elasticsearch metric queries without date histogram now return table formatted data, making the table panel much easier to use for this use case. Should not break/change existing dashboards with stock panels but external panel plugins can be affected. ## Changes diff --git a/bower.json b/bower.json deleted file mode 100644 index 1f7136128aa..00000000000 --- a/bower.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "grafana", - "version": "2.0.2", - "homepage": "https://github.com/grafana/grafana", - "authors": [], - "license": "Apache 2.0", - "ignore": [ - "**/.*", - "node_modules", - "bower_components", - "public/vendor/", - "test", - "tests" - ], - "dependencies": { - "jquery": "3.1.0", - "lodash": "4.15.0", - "angular": "1.6.1", - "angular-route": "1.6.1", - "angular-mocks": "1.6.1", - "angular-sanitize": "1.6.1", - "angular-native-dragdrop": "1.2.2", - "angular-bindonce": "0.3.3", - "clipboard": "^1.5.16" - } -} diff --git a/conf/defaults.ini b/conf/defaults.ini index f0156b70511..45010a778df 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -318,6 +318,7 @@ key_file = skip_verify = false from_address = admin@grafana.localhost from_name = Grafana +ehlo_identity = [emails] welcome_email_on_sign_up = false @@ -452,9 +453,26 @@ url = https://grafana.com [grafana_com] url = https://grafana.com +#################################### Distributed tracing ############ +[tracing.jaeger] +# jaeger destination (ex localhost:6831) +address = +# tag that will always be included when creating new spans.
ex (tag1:value1,tag2:value2) +always_included_tag = +# Type specifies the type of the sampler: const, probabilistic, rateLimiting, or remote +sampler_type = const +# jaeger sampler config param +# for "const" sampler, 0 or 1 for always false/true respectively +# for "probabilistic" sampler, a probability between 0 and 1 +# for "rateLimiting" sampler, the number of spans per second +# for "remote" sampler, param is the same as for "probabilistic" +# and indicates the initial sampling rate before the actual one +# is received from the mothership +sampler_param = 1 + #################################### External Image Storage ############## [external_image_storage] -# You can choose between (s3, webdav) +# You can choose between (s3, webdav, gcs) provider = [external_image_storage.s3] @@ -467,3 +485,7 @@ url = username = password = public_url = + +[external_image_storage.gcs] +key_file = +bucket = diff --git a/conf/sample.ini b/conf/sample.ini index 80c7464f89c..752f40253c7 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -295,6 +295,8 @@ ;skip_verify = false ;from_address = admin@grafana.localhost ;from_name = Grafana +# EHLO identity in SMTP dialog (defaults to instance_name) +;ehlo_identity = dashboard.example.com [emails] ;welcome_email_on_sign_up = false @@ -391,6 +393,23 @@ ;address = ;prefix = prod.grafana.%(instance_name)s. +#################################### Distributed tracing ############ +[tracing.jaeger] +# Enable tracing by setting the address of the jaeger agent to send traces to (ex localhost:6831) +;address = localhost:6831 +# Tag that will always be included when creating new spans. ex (tag1:value1,tag2:value2) +;always_included_tag = tag1:value1 +# Type specifies the type of the sampler: const, probabilistic, rateLimiting, or remote +;sampler_type = const +# jaeger sampler config param +# for "const" sampler, 0 or 1 for always false/true respectively +# for "probabilistic" sampler, a probability between 0 and 1 +# for "rateLimiting" sampler, the number of spans per second +# for "remote" sampler, param is the same as for "probabilistic" +# and indicates the initial sampling rate before the actual one +# is received from the mothership +;sampler_param = 1 + #################################### Grafana.com integration ########################## # Url used to import dashboards directly from Grafana.com [grafana_com] @@ -399,7 +418,7 @@ #################################### External image storage ########################## [external_image_storage] # Used for uploading images to public servers so they can be included in slack/email messages.
-# you can choose between (s3, webdav) +# you can choose between (s3, webdav, gcs) ;provider = [external_image_storage.s3] @@ -412,3 +431,7 @@ ;public_url = ;username = ;password = + +[external_image_storage.gcs] +;key_file = +;bucket = diff --git a/docker/blocks/jaeger/fig b/docker/blocks/jaeger/fig new file mode 100644 index 00000000000..ab9e2ec599b --- /dev/null +++ b/docker/blocks/jaeger/fig @@ -0,0 +1,6 @@ +jaeger: + image: jaegertracing/all-in-one:latest + ports: + - "localhost:6831:6831/udp" + - "16686:16686" + diff --git a/docker/blocks/prometheus/Dockerfile b/docker/blocks/prometheus/Dockerfile index 0e07679ddd5..1ad28f524ff 100644 --- a/docker/blocks/prometheus/Dockerfile +++ b/docker/blocks/prometheus/Dockerfile @@ -1,2 +1,3 @@ FROM prom/prometheus ADD prometheus.yml /etc/prometheus/ +ADD alert.rules /etc/prometheus/ diff --git a/docker/blocks/prometheus/alert.rules b/docker/blocks/prometheus/alert.rules new file mode 100644 index 00000000000..563d1e89994 --- /dev/null +++ b/docker/blocks/prometheus/alert.rules @@ -0,0 +1,10 @@ +# Alert Rules + +ALERT AppCrash + IF process_open_fds > 0 + FOR 15s + LABELS { severity="critical" } + ANNOTATIONS { + summary = "Number of open fds > 0", + description = "Just testing" + } diff --git a/docker/blocks/prometheus/fig b/docker/blocks/prometheus/fig index 1c70c14fc37..7d9bea68046 100644 --- a/docker/blocks/prometheus/fig +++ b/docker/blocks/prometheus/fig @@ -18,3 +18,8 @@ fake-prometheus-data: environment: FD_DATASOURCE: prom +alertmanager: + image: quay.io/prometheus/alertmanager + net: host + ports: + - "9093:9093" diff --git a/docker/blocks/prometheus/prometheus.yml b/docker/blocks/prometheus/prometheus.yml index 35be33b80a7..ae40dfdf067 100644 --- a/docker/blocks/prometheus/prometheus.yml +++ b/docker/blocks/prometheus/prometheus.yml @@ -6,22 +6,30 @@ global: # Load and evaluate rules in this file every 'evaluation_interval' seconds. rule_files: + - "alert.rules" # - "first.rules" # - "second.rules" -# A scrape configuration containing exactly one endpoint to scrape: -# Here it's Prometheus itself. -scrape_configs: - # The job name is added as a label `job=` to any timeseries scraped from this config. - - job_name: 'prometheus' - - # Override the global default and scrape targets from this job every 5 seconds. - scrape_interval: 10s - scrape_timeout: 10s - - # metrics_path defaults to '/metrics' - # scheme defaults to 'http'. - +alerting: + alertmanagers: + - scheme: http static_configs: - #- targets: ['localhost:9090', '172.17.0.1:9091', '172.17.0.1:9100', '172.17.0.1:9150'] - - targets: ['localhost:9090', '127.0.0.1:9091', '127.0.0.1:9100', '127.0.0.1:9150'] + - targets: + - "127.0.0.1:9093" + +scrape_configs: + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + - job_name: 'node_exporter' + static_configs: + - targets: ['127.0.0.1:9100'] + + - job_name: 'fake-data-gen' + static_configs: + - targets: ['127.0.0.1:9091'] + + - job_name: 'grafana' + static_configs: + - targets: ['127.0.0.1:3000'] diff --git a/docs/sources/administration/metrics.md b/docs/sources/administration/metrics.md new file mode 100644 index 00000000000..56d0290bc82 --- /dev/null +++ b/docs/sources/administration/metrics.md @@ -0,0 +1,15 @@ ++++ +title = "Internal metrics" +description = "Internal metrics exposed by Grafana" +keywords = ["grafana", "metrics", "internal metrics"] +type = "docs" +[menu.docs] +parent = "admin" +weight = 8 ++++ + +# Internal metrics + +Grafana collects some metrics about itself internally.
Currently Grafana supports pushing metrics to Graphite and exposing them to be scraped by Prometheus. + +To enable internal metrics, configure the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file. If you want to push metrics to Graphite, you also have to configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section. diff --git a/docs/sources/features/datasources/graphite.md b/docs/sources/features/datasources/graphite.md index 5943de2aead..05a7df7fea8 100644 --- a/docs/sources/features/datasources/graphite.md +++ b/docs/sources/features/datasources/graphite.md @@ -41,7 +41,7 @@ Proxy access means that the Grafana backend will proxy all requests from the bro Click the ``Select metric`` link to start navigating the metric space. Once you start you can continue using the mouse or keyboard arrow keys. You can select a wildcard and still continue. -{{< docs-imagebox img="/img/docs/v45/graphite_query1_still.png" class="docs-image--center" +{{< docs-imagebox img="/img/docs/v45/graphite_query1_still.png" animated-gif="/img/docs/v45/graphite_query1.gif" >}} @@ -52,7 +52,7 @@ a function is selected it will be added and your focus will be in the text box o a parameter just click on it and it will turn into a text box. To delete a function click the function name followed by the x icon. -{{< docs-imagebox img="/img/docs/v45/graphite_query2_still.png" class="docs-image--center" +{{< docs-imagebox img="/img/docs/v45/graphite_query2_still.png" animated-gif="/img/docs/v45/graphite_query2.gif" >}} @@ -60,7 +60,7 @@ by the x icon. Some functions like aliasByNode support an optional second argument. To add this parameter specify for example 3,-2 as the first parameter and the function editor will adapt and move the -2 to a second parameter. To remove the second optional parameter just click on it and leave it blank and the editor will remove it. -{{< docs-imagebox img="/img/docs/v45/graphite_query3_still.png" class="docs-image--center" +{{< docs-imagebox img="/img/docs/v45/graphite_query3_still.png" animated-gif="/img/docs/v45/graphite_query3.gif" >}} @@ -68,6 +68,10 @@ You can reference queries by the row “letter” that they’re on (similar to Microsoft Excel). If you add a second query to a graph, you can reference the first query simply by typing in #A. This provides an easy and convenient way to build compounded queries. +{{< docs-imagebox img="/img/docs/v45/graphite_nested_queries_still.png" + animated-gif="/img/docs/v45/graphite_nested_queries.gif" >}} + + ## Point consolidation All Graphite metrics are consolidated so that Graphite doesn't return more data points than there are pixels in the graph.
By default, diff --git a/docs/sources/features/datasources/influxdb.md b/docs/sources/features/datasources/influxdb.md index 904b732bb3a..97e523cd151 100644 --- a/docs/sources/features/datasources/influxdb.md +++ b/docs/sources/features/datasources/influxdb.md @@ -41,9 +41,7 @@ mode is also more secure as the username & password will never reach the browser ## Query Editor -{{< docs-imagebox img="/img/docs/v45/influxdb_query_still.png" class="docs-image--center" - animated-gif="/img/docs/v45/influxdb_query.gif" >}} - +{{< docs-imagebox img="/img/docs/v45/influxdb_query_still.png" class="docs-image--no-shadow" animated-gif="/img/docs/v45/influxdb_query.gif" >}} You find the InfluxDB editor in the metrics tab in Graph or Singlestat panel's edit mode. You enter edit mode by clicking the panel title, then edit. The editor allows you to select metrics and tags. @@ -59,10 +57,8 @@ will automatically adjust the filter tag condition to use the InfluxDB regex mat ### Field & Aggregation functions In the `SELECT` row you can specify what fields and functions you want to use. If you have a -group by time you need an aggregation function. Some functions like derivative require an aggregation function. - -The editor tries simplify and unify this part of the query. For example: -![](/img/docs/influxdb/select_editor.png) +group by time you need an aggregation function. Some functions like derivative require an aggregation function. The editor tries to simplify and unify this part of the query. For example:
+![](/img/docs/influxdb/select_editor.png)
The above will generate the following InfluxDB `SELECT` clause: diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md index 02e821050fa..f277f45f3c8 100644 --- a/docs/sources/features/datasources/mysql.md +++ b/docs/sources/features/datasources/mysql.md @@ -11,8 +11,7 @@ weight = 7 # Using MySQL in Grafana -> Only available in Grafana v4.3+. This data source is not ready for -> production use, currently in development (alpha state). +> Only available in Grafana v4.3+. Grafana ships with a built-in MySQL data source plugin that allows you to query and visualize data from a MySQL compatible database. @@ -58,8 +57,7 @@ If the `Format as` query option is set to `Table` then you can basically do any Query editor with example query: -![](/img/docs/v43/mysql_table_query.png) - +{{< docs-imagebox img="/img/docs/v45/mysql_table_query.png" >}} The query: diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index a6f544914b8..dceb2254e41 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -39,7 +39,8 @@ Name | Description Open a graph in edit mode by clicking the title > Edit (or by pressing `e` key while hovering over panel). -![](/img/docs/v43/prometheus_query_editor.png) +{{< docs-imagebox img="/img/docs/v45/prometheus_query_editor_still.png" + animated-gif="/img/docs/v45/prometheus_query_editor.gif" >}} Name | Description ------- | -------- diff --git a/docs/sources/features/panels/dashlist.md b/docs/sources/features/panels/dashlist.md index b6f7d21c1c8..0599fa77ff3 100644 --- a/docs/sources/features/panels/dashlist.md +++ b/docs/sources/features/panels/dashlist.md @@ -12,42 +12,23 @@ weight = 4 # Dashboard List Panel -The dashboard list panel allows you to display dynamic links to other dashboards. The list can be configured to use starred dashboards, a search query and/or dashboard tags. +{{< docs-imagebox img="/img/docs/v45/dashboard-list-panels.png" max-width= "800px" >}} - +The dashboard list panel allows you to display dynamic links to other dashboards. The list can be configured to use starred dashboards, recently viewed dashboards, a search query and/or dashboard tags. > On each dashboard load, the dashlist panel will re-query the dashboard list, always providing the most up to date results. -## Mode: Starred Dashboards +## Dashboard List Options -The `starred` dashboard selection displays starred dashboards, up to the number specified in the `Limit Number to` field, in alphabetical order. On dashboard load, the dashlist panel will re-query the favorites to appear in dashboard list panel, always providing the most up to date results. +{{< docs-imagebox img="/img/docs/v45/dashboard-list-options.png" max-width="600px" class="docs-image--no-shadow">}} - - - -## Mode: Search Dashboards - -The panel may be configured to search by either string query or tag(s). On dashboard load, the dashlist panel will re-query the dashboard list, always providing the most up to date results. - -To configure dashboard list in this manner, select `search` from the Mode select box. When selected, the Search Options section will appear.
- - -Name | Description ------------ | ------------- -Mode | Set search or starred mode -Query | If in search mode specify the search query -Tags | if in search mode specify dashboard tags to search for -Limit number to | Specify the maximum number of dashboards - - -### Search by string - -To search by a string, enter a search query in the `Search Options: Query` field. Queries are case-insensitive, and partial values are accepted. - - -### Search by tag -To search by one or more tags, enter your selection in the `Search Options: Tags:` field. Note that existing tags will not appear as you type, and *are* case sensitive. To see a list of existing tags, you can always return to the dashboard, open the Dashboard Picker at the top and click `tags` link in the search bar. - +1. `Starred`: The starred dashboard selection displays starred dashboards in alphabetical order. +2. `Recently Viewed`: The recently viewed dashboard selection displays recently viewed dashboards in alphabetical order. +3. `Search`: The search dashboard selection displays dashboards by search query or tag(s). +4. `Show Headings`: When show headings is ticked, the chosen list selection (Starred, Recently Viewed, Search) is shown as a heading. +5. `Max Items`: Max items sets the maximum number of items in the list. +6. `Query`: Here is where you enter the query you want to search by. Queries are case-insensitive, and partial values are accepted. +7. `Tags`: Here is where you enter the tag(s) you want to search by. Note that existing tags will not appear as you type, and *are* case sensitive. To see a list of existing tags, you can always return to the dashboard, open the Dashboard Picker at the top and click `tags` link in the search bar. > When multiple tags and strings appear, the dashboard list will display those matching ALL conditions. diff --git a/docs/sources/features/panels/graph.md b/docs/sources/features/panels/graph.md index 5dbf307b484..95be74c607e 100644 --- a/docs/sources/features/panels/graph.md +++ b/docs/sources/features/panels/graph.md @@ -13,14 +13,15 @@ weight = 1 The main panel in Grafana is simply named Graph. It provides a very rich set of graphing options. - +{{< docs-imagebox img="/img/docs/v45/graph_overview.png" class="docs-image--no-shadow" max-width= "900px" >}} -Clicking the title for a panel exposes a menu. The `edit` option opens additional configuration -options for the panel. +1. Clicking the title for a panel exposes a menu. The `edit` option opens additional configuration options for the panel. +2. Click to open color & axis selection. +3. Click to only show this series. Shift/Ctrl + click to hide series. ## General -![](/img/docs/v43/graph_general.png) +{{< docs-imagebox img="/img/docs/v43/graph_general.png" max-width= "900px" >}} The general tab allows customization of a panel's appearance and menu options. @@ -50,15 +51,11 @@ populate the template variable to a desired value from the link. The metrics tab defines what series data and sources to render. Each datasource provides different options. -## Axes & Grid +## Axes -![](/img/docs/v43/graph_axes_grid_options.png) +{{< docs-imagebox img="/img/docs/v43/graph_axes_grid_options.png" max-width= "900px" >}} -The Axes & Grid tab controls the display of axes, grids and legend. - -### Axes - -The ``Left Y`` and ``Right Y`` can be customized using: +The Axes tab controls the display of axes, grids and legend. The ``Left Y`` and ``Right Y`` can be customized using: - ``Unit`` - The display unit for the Y value - ``Grid Max`` - The maximum Y value.
(default auto) @@ -105,7 +102,7 @@ It is just the sum of all data points received by Grafana. ## Display styles -![](/img/docs/v43/graph_display_styles.png) +{{< docs-imagebox img="/img/docs/v43/graph_display_styles.png" max-width= "900px" >}} Display styles control visual properties of the graph. @@ -160,4 +157,6 @@ There is an option under Series overrides to draw lines as dashes. Set Dashes to ## Time Range -![](/img/docs/v2/graph_time_range.png) +The time range tab allows you to override the dashboard time range and specify a panel-specific time, either through a relative "from now" time option or through a timeshift. + +{{< docs-imagebox img="/img/docs/v45/graph-time-range.png" max-width= "900px" >}} diff --git a/docs/sources/features/panels/singlestat.md b/docs/sources/features/panels/singlestat.md index 7447f6c9b84..0d186ec5f3c 100644 --- a/docs/sources/features/panels/singlestat.md +++ b/docs/sources/features/panels/singlestat.md @@ -12,7 +12,7 @@ weight = 2 # Singlestat Panel -![](/img/docs/v1/singlestat_panel2.png) +{{< docs-imagebox img="/img/docs/v45/singlestat-panel.png" max-width="900px" >}} The Singlestat Panel allows you to show the one main summary stat of a SINGLE series. It reduces the series into a single number (by looking at the max, min, average, or sum of values in the series). Singlestat also provides thresholds to color the stat or the Panel background. It can also translate the single number into a text value, and show a sparkline summary of the series. @@ -20,11 +20,9 @@ The singlestat panel has a normal query editor to allow you define your exact me The singlestat panel has a normal query editor to allow you define your exact metric queries like many other Panels. Through the Options tab, you can access the Singlestat-specific functionality. - +{{< docs-imagebox img="/img/docs/v45/singlestat-value-options.png" class="docs-image--no-shadow" max-width= "900px" >}} -1. `Big Value`: Big Value refers to how we display the main stat for the Singlestat Panel. This is always a single value that is displayed in the Panel in between two strings, `Prefix` and `Suffix`. The single number is calculated by choosing a function (min,max,average,current,total) of your metric query. This functions reduces your query into a single numeric value. -2. `Font Size`: You can use this section to select the font size of the different texts in the Singlestat Panel, i.e. prefix, value and postfix. -3. `Values`: The Value fields let you set the function (min, max, average, current, total, first, delta, range) that your entire query is reduced into a single value with. You can also set the font size of the Value field and font-size (as a %) of the metric query that the Panel is configured with. This reduces the entire query into a single summary value that is displayed. +1. `Stats`: The Stats field lets you set the function (min, max, average, current, total, first, delta, range) that your entire query is reduced into a single value with. This reduces the entire query into a single summary value that is displayed. * `min` - The smallest value in the series * `max` - The largest value in the series * `avg` - The average of all the non-null values in the series * `current` - The last value in the series. If the series ends on null the previous value will be used. * `total` - The sum of all the non-null values in the series * `first` - The first value in the series * `delta` - The total incremental increase (of a counter) in the series. An attempt is made to account for counter resets, but this will only be accurate for single instance metrics. Used to show total counter increase in time series.
* `diff` - The difference between 'current' (last value) and 'first'. +* `range` - The difference between 'min' and 'max'. Useful to show the range of change for a gauge. -4. `Prefix/Postfix`: The Prefix/Postfix fields let you define a custom label and font-size (as a %) to appear *before/after* the value. The `$__name` variable can be used here to use the series name or alias from the metric query. -5. `Units`: Units are appended to the the Singlestat within the panel, and will respect the color and threshold settings for the value. -6. `Decimals`: The Decimal field allows you to override the automatic decimal precision, and set it explicitly. +2. `Prefix/Postfix`: The Prefix/Postfix fields let you define a custom label to appear *before/after* the value. The `$__name` variable can be used here to use the series name or alias from the metric query. +3. `Units`: Units are appended to the Singlestat within the panel, and will respect the color and threshold settings for the value. +4. `Decimals`: The Decimal field allows you to override the automatic decimal precision, and set it explicitly. +5. `Font Size`: You can use this section to select the font size of the different texts in the Singlestat Panel, i.e. prefix, value and postfix. ### Coloring The coloring options of the Singlestat Panel config allow you to dynamically change the colors based on the Singlestat value. - +{{< docs-imagebox img="/img/docs/v45/singlestat-color-options.png" max-width="500px" class="docs-image--right docs-image--no-shadow">}} 1. `Background`: This checkbox applies the configured thresholds and colors to the entirety of the Singlestat Panel background. -2. `Value`: This checkbox applies the configured thresholds and colors to the summary stat. -3. `Thresholds`: Change the background and value colors dynamically within the panel, depending on the Singlestat value. The threshold field accepts **2 comma-separated** values which represent 3 ranges that correspond to the three colors directly to the right. For example: if the thresholds are 70, 90 then the first color represents < 70, the second color represents between 70 and 90 and the third color represents > 90. -4. `Colors`: Select a color and opacity +2. `Thresholds`: Change the background and value colors dynamically within the panel, depending on the Singlestat value. The threshold field accepts **2 comma-separated** values which represent 3 ranges that correspond to the three colors directly to the right. For example: if the thresholds are 70, 90 then the first color represents < 70, the second color represents between 70 and 90 and the third color represents > 90. +3. `Colors`: Select a color and opacity +4. `Value`: This checkbox applies the configured thresholds and colors to the summary stat. 5. `Invert order`: This link toggles the threshold color order.
For example: Green, Orange, Red () will become Red, Orange, Green (). ### Spark Lines Sparklines are a great way of seeing the historical data related to the summary stat, providing valuable context at a glance. Sparklines act differently than traditional Graph Panels and do not include x or y axis, coordinates, a legend, or ability to interact with the graph. - +{{< docs-imagebox img="/img/docs/v45/singlestat-spark-options.png" max-width="500px" class="docs-image--right docs-image--no-shadow">}} 1. `Show`: The show checkbox will toggle whether the spark line is shown in the Panel. When unselected, only the Singlestat value will appear. -2. `Background`: Check if you want the sparklines to take up the full panel width, or uncheck if they should be below the main Singlestat value. +2. `Full Height`: Check if you want the sparklines to take up the full panel height, or uncheck if they should be below the main Singlestat value. 3. `Line Color`: This color selection applies to the color of the sparkline itself. 4. `Fill Color`: This color selection applies to the area below the sparkline. +
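
If you manage dashboards as JSON, the sparkline options above map onto a `sparkline` object on the singlestat panel. A minimal sketch — the field names follow the 4.x singlestat panel model, so verify them against a dashboard you have exported yourself:

```json
{
  "type": "singlestat",
  "sparkline": {
    "show": true,
    "full": false,
    "lineColor": "rgb(31, 120, 193)",
    "fillColor": "rgba(31, 118, 189, 0.18)"
  }
}
```

Here `full` corresponds to the `Full Height` checkbox, and the two color fields accept any CSS color value.
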
+ > ***Pro-tip:*** Reduce the opacity on fill colors for nice looking panels. +### Gauge + +Gauges give a clear picture of how high a value is in its context. They're a great way to see if a value is close to the thresholds. The gauge uses the colors set in the color options. + +{{< docs-imagebox img="/img/docs/v45/singlestat-gauge-options.png" max-width="500px" class="docs-image--right docs-image--no-shadow">}} + +1. `Show`: The show checkbox will toggle whether the gauge is shown in the panel. When unselected, only the Singlestat value will appear. +2. `Min/Max`: This sets the start and end point for the gauge. +3. `Threshold Labels`: Check if you want to show the threshold labels. Thresholds are set in the color options. +4. `Threshold Markers`: Check if you want to have a second meter showing the thresholds. + +
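
As with the sparkline, the gauge options end up in the panel JSON as a `gauge` object, while the thresholds themselves come from the color options. A hedged sketch, with the same caveat about exact key names:

```json
{
  "type": "singlestat",
  "thresholds": "70,90",
  "gauge": {
    "show": true,
    "minValue": 0,
    "maxValue": 100,
    "thresholdLabels": false,
    "thresholdMarkers": true
  }
}
```
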
+ ### Value to text mapping +{{< docs-imagebox img="/img/docs/v45/singlestat-value-mapping.png" class="docs-image--right docs-image--no-shadow">}} + Value to text mapping allows you to translate the value of the summary stat into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the number of the main Singlestat value into a context-specific human-readable word or message. - +
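
In exported dashboard JSON these mappings are stored as a list of value/text pairs on the panel; a short sketch (field names assumed from the singlestat panel model):

```json
{
  "type": "singlestat",
  "valueMaps": [
    { "value": "null", "op": "=", "text": "N/A" },
    { "value": "1", "op": "=", "text": "UP" },
    { "value": "0", "op": "=", "text": "DOWN" }
  ]
}
```
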
## Troubleshooting ### Multiple Series Error - - +{{< docs-imagebox img="/img/docs/v45/singelstat-multiple-series-error.png" class="docs-image--right docs-image--no-shadow">}} Grafana 2.5 introduced stricter checking for multiple-series on singlestat panels. In previous versions, the panel logic did not verify that only a single series was used, and instead, displayed the first series encountered. Depending on your data source, this could have led to inconsistent data being shown and/or a general confusion about which metric was being displayed. diff --git a/docs/sources/features/panels/table_panel.md b/docs/sources/features/panels/table_panel.md index 69cd02fdbcc..9c112c81265 100644 --- a/docs/sources/features/panels/table_panel.md +++ b/docs/sources/features/panels/table_panel.md @@ -12,7 +12,7 @@ weight = 2 # Table Panel - + The new table panel is very flexible, supporting both multiple modes for time series as well as for table, annotation and raw JSON data. It also provides date formatting and value formatting and coloring options. @@ -22,55 +22,63 @@ To view table panels in action and test different configurations with sample dat ## Options overview The table panel has many ways to manipulate your data for optimal presentation. +{{< docs-imagebox img="/img/docs/v45/table_options.png" class="docs-image--no-shadow" max-width= "500px" >}} - 1. `Data`: Control how your query is transformed into a table. -2. `Table Display`: Table display options. -3. `Column Styles`: Column value formatting and display options. +2. `Paging`: Table display options. + ## Data to Table - +{{< docs-imagebox img="/img/docs/v45/table_data_options.png" max-width="500px" class="docs-image--right">}} The data section contains the **To Table Transform (1)**. This is the primary option for how your data/metric query should be transformed into a table format. The **Columns (2)** option allows you to select what columns you want in the table. Only applicable for some transforms. +
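
For reference, the selected transform is stored as a single string field in the table panel JSON. A hedged sketch — the ids below are taken from the transform dropdown and may differ slightly between versions, so check an exported dashboard:

```json
{
  "type": "table",
  "transform": "timeseries_to_rows",
  "columns": []
}
```

Other values seen in exported dashboards include `timeseries_to_columns`, `timeseries_aggregations`, `annotations`, `table` and `json`; `columns` only applies to transforms that let you pick columns.
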
+ ### Time series to rows - +{{< docs-imagebox img="/img/docs/v45/table_ts_to_rows.png" >}} In the simplest mode you can turn time series to rows. This means you get a `Time`, `Metric` and a `Value` column. Where `Metric` is the name of the time series. ### Time series to columns -![](/img/docs/v2/table_ts_to_columns2.png) +{{< docs-imagebox img="/img/docs/v45/table_ts_to_columns.png" >}} + This transform allows you to take multiple time series and group them by time, which will result in the primary column being `Time` and a column for each time series. ### Time series aggregations -![](/img/docs/v2/table_ts_to_aggregations2.png) +{{< docs-imagebox img="/img/docs/v45/table_ts_to_aggregations.png" >}} + This table transformation will lay out your table into rows by metric, allowing columns of `Avg`, `Min`, `Max`, `Total`, `Current` and `Count`. More than one column can be added. ### Annotations -![](/img/docs/v2/table_annotations.png) + +{{< docs-imagebox img="/img/docs/v45/table_annotations.png" >}} + If you have annotations enabled in the dashboard you can have the table show them. If you configure this mode then any queries you have in the metrics tab will be ignored. ### JSON Data -![](/img/docs/v2/table_json_data.png) + +{{< docs-imagebox img="/img/docs/v45/table_json_data.png" max-width="500px" >}} If you have an Elasticsearch **Raw Document** query or an Elasticsearch query without a `date histogram` use this transform mode and pick the columns using the **Columns** section. -![](/img/docs/v2/elastic_raw_doc.png) + +{{< docs-imagebox img="/img/docs/v45/elastic_raw_doc.png" >}} ## Table Display - +{{< docs-imagebox img="/img/docs/v45/table_paging.png" class="docs-image--no-shadow docs-image--right" max-width="350px" >}} 1. `Pagination (Page Size)`: The `Pagination` (page size) setting is the threshold at which the table rows will be broken into pages. For example, if your table had 95 records with a pagination value of 10, your table would be split across 10 pages. 2. `Scroll`: The `scroll bar` checkbox toggles the ability to scroll within the panel; when unchecked, the panel height will grow to display all rows. @@ -81,13 +89,11 @@ transform mode and pick the columns using the **Columns** section. ## Column Styles The column styles allow you to control how dates and numbers are formatted. - +{{< docs-imagebox img="/img/docs/v45/table_column_styles.png" class="docs-image--no-shadow" >}} 1. `Name or regex`: The Name or Regex field controls what columns the rule should be applied to. The regex or name filter will be matched against the column name not against column values. -2. `Type`: The three supported types of types are `Number`, `String` and `Date`. -3. `Title`: Title for the column, when using a Regex the title can include replacement strings like `$1`. -4. `Format`: Specify date format. Only available when `Type` is set to `Date`. -5. `Coloring` and `Thresholds`: Specify color mode and thresholds limits. -6. `Unit` and `Decimals`: Specify unit and decimal precision for numbers. -7. `Add column style rule`: Add new column rule. +2. `Column Header`: Title for the column, when using a Regex the title can include replacement strings like `$1`. +3. `Add column style rule`: Add new column rule. +4. `Thresholds` and `Coloring`: Specify color mode and thresholds limits. +5. `Type`: The three supported column types are `Number`, `String` and `Date`. `Unit` and `Decimals`: Specify unit and decimal precision for numbers. `Format`: Specify date format for dates.
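
Putting the display and column style options together, a table panel sketched in JSON (again, key names assumed from the 4.x table panel model — check an exported dashboard before relying on them):

```json
{
  "type": "table",
  "pageSize": 10,
  "scroll": true,
  "styles": [
    { "pattern": "Time", "type": "date", "dateFormat": "YYYY-MM-DD HH:mm:ss" },
    {
      "pattern": "/.*/",
      "type": "number",
      "unit": "short",
      "decimals": 2,
      "colorMode": "value",
      "thresholds": ["70", "90"]
    }
  ]
}
```

`pattern` is the Name-or-regex filter, and `colorMode` can also be `cell` or `row`.
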
diff --git a/docs/sources/features/shortcuts.md b/docs/sources/features/shortcuts.md index ec7250b7a5a..caad521446e 100644 --- a/docs/sources/features/shortcuts.md +++ b/docs/sources/features/shortcuts.md @@ -8,7 +8,7 @@ weight = 7 # Keyboard shortcuts -{{< docs-imagebox img="/img/docs/v4/shortcuts.png" max-width="20rem" >}} +{{< docs-imagebox img="/img/docs/v4/shortcuts.png" max-width="20rem" class="docs-image--right" >}} Grafana v4 introduces a number of really powerful keyboard shortcuts. You can now focus a panel by hovering over it with your mouse. With a panel focused you can simply hit `e` to toggle panel diff --git a/docs/sources/guides/whats-new-in-v4-5.md b/docs/sources/guides/whats-new-in-v4-5.md index abce3fc4095..b2de451308a 100644 --- a/docs/sources/guides/whats-new-in-v4-5.md +++ b/docs/sources/guides/whats-new-in-v4-5.md @@ -16,16 +16,13 @@ weight = -4 ### New prometheus query editor -The new query editor has full syntax highlighting. As well as auto complete for metrics, functions, and range vectors. +The new query editor has full syntax highlighting, as well as auto complete for metrics, functions, and range vectors. There are also integrated function docs right from the query editor! -![](/img/docs/v45/new_prom_editor_1.png) - -There is also integrated function docs right from the query editor! - -![](/img/docs/v45/new_prom_editor_2.png) +{{< docs-imagebox img="/img/docs/v45/prometheus_query_editor_still.png" class="docs-image--block" animated-gif="/img/docs/v45/prometheus_query_editor.gif" >}} ### Elasticsearch: Add ad-hoc filters from the table panel -![](/img/docs/v45/elastic_ad_hoc_filters.png) + +{{< docs-imagebox img="/img/docs/v45/elastic_ad_hoc_filters.png" class="docs-image--block" >}} ### Table cell links! Create column styles that turn cells into links that use the value in the cell (or other row values) to generate a url to another dashboard or system: diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md index d7e5d3f8b7c..55bec79c7f8 100644 --- a/docs/sources/http_api/admin.md +++ b/docs/sources/http_api/admin.md @@ -161,6 +161,7 @@ Only works with Basic Authentication (username and password). See [introduction] "enabled":"false", "from_address":"admin@grafana.localhost", "from_name":"Grafana", + "ehlo_identity":"dashboard.example.com", "host":"localhost:25", "key_file":"", "password":"************", diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index ae2541b4a7f..bd36a56a75e 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -308,15 +308,15 @@ options are `Editor` and `Admin`. ## [auth.github] -You need to create a GitHub application (you find this under the GitHub -profile page). When you create the application you will need to specify +You need to create a GitHub OAuth application (you find this under the GitHub +settings page). When you create the application you will need to specify a callback URL. Specify this as callback: http://:/login/github This callback URL must match the full HTTP address that you use in your browser to access Grafana, but with the prefix path of `/login/github`. -When the GitHub application is created you will get a Client ID and a +When the GitHub OAuth application is created you will get a Client ID and a Client Secret. Specify these in the Grafana configuration file.
For example: @@ -593,6 +593,9 @@ Address used when sending out emails, defaults to `admin@grafana.localhost` ### from_name Name to be used when sending out emails, defaults to `Grafana` +### ehlo_identity +Name to be used as client identity for EHLO in SMTP dialog, defaults to instance_name. + ## [log] ### mode @@ -645,7 +648,7 @@ Time to live for snapshots. These options control how images should be made public so they can be shared on services like slack. ### provider -You can choose between (s3, webdav). If left empty Grafana will ignore the upload action. +You can choose between (s3, webdav, gcs). If left empty Grafana will ignore the upload action. ## [external_image_storage.s3] @@ -677,6 +680,17 @@ basic auth username ### password basic auth password +## [external_image_storage.gcs] + +### key_file +Path to JSON key file associated with a Google service account to authenticate and authorize. +Service Account keys can be created and downloaded from https://console.developers.google.com/permissions/serviceaccounts. + +The service account should have the "Storage Object Writer" role. + +### bucket +Bucket name on Google Cloud Storage. + ## [alerting] ### enabled diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md index a65e2efe339..330e3171e86 100644 --- a/docs/sources/installation/debian.md +++ b/docs/sources/installation/debian.md @@ -15,27 +15,31 @@ weight = 1 Description | Download ------------ | ------------- -Stable for Debian-based Linux | [grafana_4.4.3_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.4.3_amd64.deb) -Beta for Debian-based Linux | [grafana_4.5.0-beta1_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.5.0-beta1_amd64.deb) +Stable for Debian-based Linux | [grafana_4.5.2_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.5.2_amd64.deb) + + Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. ## Install Stable + ```bash -wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.4.3_amd64.deb +wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.5.2_amd64.deb sudo apt-get install -y adduser libfontconfig -sudo dpkg -i grafana_4.4.3_amd64.deb +sudo dpkg -i grafana_4.5.2_amd64.deb ``` + ## APT Repository diff --git a/docs/sources/installation/mac.md b/docs/sources/installation/mac.md index 912a16c66e8..a65c663f398 100644 --- a/docs/sources/installation/mac.md +++ b/docs/sources/installation/mac.md @@ -43,3 +43,35 @@ To upgrade grafana if you've installed from HEAD: ``` brew reinstall --HEAD grafana/grafana/grafana ``` + +### Starting Grafana + +To start Grafana using Homebrew services, first make sure homebrew/services is installed. + +``` +brew tap homebrew/services +``` + +Then start Grafana using: + +``` +brew services start grafana +``` + + +### Configuration + +The configuration file should be located at `/usr/local/etc/grafana/grafana.ini`. + +### Logs + +The log file should be located at `/usr/local/var/log/grafana/grafana.log`. + +### Plugins + +If you want to manually install a plugin, place it here: `/usr/local/var/lib/grafana/plugins`.
+ +### Database + +The default SQLite database is located at `/usr/local/var/lib/grafana`. + diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md index 76c380e6750..30b017314c6 100644 --- a/docs/sources/installation/rpm.md +++ b/docs/sources/installation/rpm.md @@ -15,8 +15,9 @@ weight = 2 Description | Download ------------ | ------------- -Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [4.4.3 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.4.3-1.x86_64.rpm) -Latest Beta for CentOS / Fedora / OpenSuse / Redhat Linux | [4.5.0-beta1 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.5.0-beta1.x86_64.rpm) +Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [4.5.2 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.5.2-1.x86_64.rpm) + + Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. @@ -25,19 +26,19 @@ installation. You can install Grafana using Yum directly. - $ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.4.3-1.x86_64.rpm + $ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.5.2-1.x86_64.rpm Or install manually using `rpm`. #### On CentOS / Fedora / Redhat: - $ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.4.3-1.x86_64.rpm + $ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.5.2-1.x86_64.rpm $ sudo yum install initscripts fontconfig - $ sudo rpm -Uvh grafana-4.4.3-1.x86_64.rpm + $ sudo rpm -Uvh grafana-4.5.2-1.x86_64.rpm #### On OpenSuse: - $ sudo rpm -i --nodeps grafana-4.4.3-1.x86_64.rpm + $ sudo rpm -i --nodeps grafana-4.5.2-1.x86_64.rpm ## Install via YUM Repository @@ -53,8 +54,7 @@ Add the following to a new file at `/etc/yum.repos.d/grafana.repo` sslverify=1 sslcacert=/etc/pki/tls/certs/ca-bundle.crt -There is also a testing repository if you want beta or release -candidates. +There is also a testing repository if you want beta or release candidates. baseurl=https://packagecloud.io/grafana/testing/el/6/$basearch diff --git a/docs/sources/installation/upgrading.md b/docs/sources/installation/upgrading.md index 846a42b454c..4cd8471e441 100644 --- a/docs/sources/installation/upgrading.md +++ b/docs/sources/installation/upgrading.md @@ -94,10 +94,10 @@ to the same location (and overwrite the existing files). This might overwrite yo recommend you place your config changes in a file named `/conf/custom.ini` as this will make upgrades easier without risking losing your config changes. -## Upgrading form 1.x +## Upgrading from 1.x [Migrating from 1.x to 2.x]({{< relref "installation/migrating_to2.md" >}}) -## Upgrading form 2.x +## Upgrading from 2.x -We are not aware of any issues upgrading directly from 2.x to 4.x but to on the safe side go via 3.x. +We are not aware of any issues upgrading directly from 2.x to 4.x but to be on the safe side, go via 3.x => 4.x.
diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md index 97d17a2ca8e..ea9614c01be 100644 --- a/docs/sources/installation/windows.md +++ b/docs/sources/installation/windows.md @@ -13,7 +13,7 @@ weight = 3 Description | Download ------------ | ------------- -Latest stable package for Windows | [grafana.4.4.3.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.4.3.windows-x64.zip) +Latest stable package for Windows | [grafana.4.5.2.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.5.2.windows-x64.zip) Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. @@ -27,7 +27,7 @@ this folder to anywhere you want Grafana to run from. Go into the The default Grafana port is `3000`. This port requires extra permissions on Windows. Edit `custom.ini` and uncomment the `http_port` -configuration option and change it to something like `8080` or similar. +configuration option (`;` is the comment character in ini files) and change it to something like `8080` or similar. That port should not require extra Windows privileges. Start Grafana by executing `grafana-server.exe`, preferably from the diff --git a/docs/sources/plugins/installation.md b/docs/sources/plugins/installation.md index 977abb055f1..27f6f583d9a 100644 --- a/docs/sources/plugins/installation.md +++ b/docs/sources/plugins/installation.md @@ -72,6 +72,11 @@ The Download URL from Grafana.com API is in this form: `https://grafana.com/api/plugins//versions//download` +You can specify a local URL by using the `--pluginUrl` option. +``` +grafana-cli --pluginUrl https://nexus.company.com/grafana/plugins/-.zip plugins install +``` + To manually install a Plugin via the Grafana.com API: 1. Find the plugin you want to download, the plugin id can be found on the Installation Tab on the plugin's page on Grafana.com. In this example, the plugin id is `jdbranham-diagram-panel`: diff --git a/docs/sources/reference/keyboard_shortcuts.md b/docs/sources/reference/keyboard_shortcuts.md deleted file mode 100644 index edd2e85f43b..00000000000 --- a/docs/sources/reference/keyboard_shortcuts.md +++ /dev/null @@ -1,34 +0,0 @@ -+++ -title = "Keyboard shortcuts" -keywords = ["grafana", "dashboard", "documentation", "shortcuts"] -type = "docs" -[menu.docs] -parent = "dashboard_features" -weight = 8 -+++ - -# Keyboard Shortcuts - -No mouse? No problem. Grafana has extensive keyboard shortcuts to allow you to navigate throughout the interface. This comes in especially handy when dealing with dealing with single-purpose machines powering on-wall displays that may not have a mouse available. - -## Dashboard Keyboard Shortcuts - -Press `Shift`+`?` to open the keyboard shortcut dialog from anywhere within the dashboard views. - - - - -|Shortcut|Action| -|---|---| -|`Esc`|Exit fullscreen edit/view mode, close search or any editor view| -|`F`|Open dashboard search view (also contains import/playlist controls)| -|`R`|Refresh (Fetches new data and rerenders panels)| -|`CTRL`+`S`|Save dashboard| -|`CTRL`+`H`|Hide row controls| -|`CTRL`+`Z`|Zoom out| -|`CTRL`+`O`|Enable/Disable shared graph crosshair| - - -**Note**: Grafana keyboard shortcuts are the same across operating system. - -Have a suggestion for a new keyboard shortcut? Let us know.
diff --git a/docs/sources/reference/playlist.md b/docs/sources/reference/playlist.md index e2d59bc322d..f509ae4dc0d 100644 --- a/docs/sources/reference/playlist.md +++ b/docs/sources/reference/playlist.md @@ -16,7 +16,7 @@ Since Grafana automatically scales Dashboards to any resolution they're perfect ## Creating a Playlist -{{< docs-imagebox img="/img/docs/v3/playlist.png" max-width="25rem" >}} +{{< docs-imagebox img="/img/docs/v3/playlist.png" max-width="25rem" class="docs-image--right">}} The Playlist feature can be accessed from Grafana's sidemenu, in the Dashboard submenu. @@ -43,3 +43,25 @@ Playlists can also be manually controlled utilizing the Playlist controls at the Click the stop button to stop the Playlist, and exit to the current Dashboard. Click the next button to advance to the next Dashboard in the Playlist. Click the back button to rewind to the previous Dashboard in the Playlist. + +## TV or Kiosk Mode + +In TV mode the top navbar, row & panel controls will all fade to transparent. + +This happens automatically after one minute of user inactivity but can also be toggled manually +with the `d v` sequence shortcut. Any mouse movement or keyboard action will +restore navbar & controls. + +Another feature is the kiosk mode - in kiosk mode the navbar is completely hidden/removed from view. This can be enabled with the `d k` +shortcut. + +To put a playlist into kiosk mode, use the `d k` shortcut after the playlist has started. The same shortcut will toggle the playlist out of kiosk mode. + +### Linking to the Playlist in Kiosk Mode + +If you want to create a link to the playlist with kiosk mode enabled: + +1. Copy the Start Url (by right clicking on the Play button and choosing Copy link address). +2. Add the `?kiosk` parameter to the url. + +For example, to open the first playlist on the Grafana Play site in kiosk mode: [http://play.grafana.org/playlists/play/1?kiosk](http://play.grafana.org/playlists/play/1?kiosk) diff --git a/docs/sources/tutorials/hubot_howto.md b/docs/sources/tutorials/hubot_howto.md index 2c886b94f29..58c902951ee 100644 --- a/docs/sources/tutorials/hubot_howto.md +++ b/docs/sources/tutorials/hubot_howto.md @@ -74,7 +74,8 @@ If you do not get an image when opening this link verify that the required font ### Grafana API Key - +{{< docs-imagebox img="/img/docs/v2/orgdropdown_api_keys.png" max-width="150px" class="docs-image--right">}} + You need to set the environment variable `HUBOT_GRAFANA_API_KEY` to a Grafana API Key. You can add these from the API Keys page which you find in the Organization dropdown. 
diff --git a/latest.json b/latest.json index db7677517e0..fa72bc4fadf 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "4.4.1", - "testing": "4.4.1" + "stable": "4.5.2", + "testing": "4.5.2" } diff --git a/package.json b/package.json index 69877c3fa07..1c03aac7778 100644 --- a/package.json +++ b/package.json @@ -4,12 +4,14 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "4.5.0-beta1", + "version": "4.6.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" }, "devDependencies": { + "@types/react": "^16.0.5", + "@types/react-dom": "^15.5.4", "autoprefixer": "^6.4.0", "es6-promise": "^3.0.2", "es6-shim": "^0.35.1", @@ -48,7 +50,7 @@ "mocha": "3.2.0", "phantomjs-prebuilt": "^2.1.14", "reflect-metadata": "0.1.8", - "rxjs": "^5.0.0-rc.5", + "rxjs": "^5.4.3", "sass-lint": "^1.10.2", "systemjs": "0.19.41", "zone.js": "^0.7.2" @@ -60,22 +62,35 @@ }, "license": "Apache-2.0", "dependencies": { + "@types/enzyme": "^2.8.8", "ace-builds": "^1.2.8", + "angular": "^1.6.6", + "angular-bindonce": "^0.3.1", + "angular-mocks": "^1.6.6", + "angular-native-dragdrop": "^1.2.2", + "angular-route": "^1.6.6", + "angular-sanitize": "^1.6.6", + "clipboard": "^1.7.1", "eventemitter3": "^2.0.2", "gaze": "^1.1.2", "grunt-jscs": "3.0.1", "grunt-sass-lint": "^0.2.2", "grunt-sync": "^0.6.2", + "jquery": "^3.2.1", "karma-sinon": "^1.0.5", - "lodash": "^4.17.2", + "lodash": "^4.17.4", "mousetrap": "^1.6.0", + "ngreact": "^0.4.1", + "react": "^15.6.1", + "react-dom": "^15.6.1", + "react-test-renderer": "^15.6.1", "remarkable": "^1.7.1", "sinon": "1.17.6", "systemjs-builder": "^0.15.34", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop", - "tslint": "^5.1.0", - "typescript": "^2.2.2", + "tslint": "^5.7.0", + "typescript": "^2.5.2", "virtual-scroll": "^1.1.1" } } diff --git a/packaging/publish/publish_both.sh b/packaging/publish/publish_both.sh index 7f38eb5a155..0a76851f6fa 100755 --- a/packaging/publish/publish_both.sh +++ b/packaging/publish/publish_both.sh @@ -1,5 +1,5 @@ #! 
/usr/bin/env bash -version=4.4.2 +version=4.5.2 wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_${version}_amd64.deb @@ -8,15 +8,15 @@ package_cloud push grafana/stable/debian/wheezy grafana_${version}_amd64.deb package_cloud push grafana/stable/debian/stretch grafana_${version}_amd64.deb package_cloud push grafana/testing/debian/jessie grafana_${version}_amd64.deb -package_cloud push grafana/testing/debian/wheezy grafana_${version}_amd64.deb -package_cloud push grafana/testing/debian/stretch grafana_${version}_amd64.deb +package_cloud push grafana/testing/debian/wheezy grafana_${version}_amd64.deb --verbose +package_cloud push grafana/testing/debian/stretch grafana_${version}_amd64.deb --verbose wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-${version}-1.x86_64.rpm -package_cloud push grafana/testing/el/6 grafana-${version}-1.x86_64.rpm -package_cloud push grafana/testing/el/7 grafana-${version}-1.x86_64.rpm +package_cloud push grafana/testing/el/6 grafana-${version}-1.x86_64.rpm --verbose +package_cloud push grafana/testing/el/7 grafana-${version}-1.x86_64.rpm --verbose -package_cloud push grafana/stable/el/7 grafana-${version}-1.x86_64.rpm -package_cloud push grafana/stable/el/6 grafana-${version}-1.x86_64.rpm +package_cloud push grafana/stable/el/7 grafana-${version}-1.x86_64.rpm --verbose +package_cloud push grafana/stable/el/6 grafana-${version}-1.x86_64.rpm --verbose rm grafana*.{deb,rpm} diff --git a/pkg/api/admin_users.go b/pkg/api/admin_users.go index 7690630aaa2..1868c589673 100644 --- a/pkg/api/admin_users.go +++ b/pkg/api/admin_users.go @@ -35,7 +35,7 @@ func AdminCreateUser(c *middleware.Context, form dtos.AdminCreateUserForm) { return } - metrics.M_Api_Admin_User_Create.Inc(1) + metrics.M_Api_Admin_User_Create.Inc() user := cmd.Result diff --git a/pkg/api/api.go b/pkg/api/api.go index 4024655775e..a979363d528 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -10,7 +10,7 @@ import ( // Register adds http routes func (hs *HttpServer) registerRoutes() { - r := hs.macaron + macaronR := hs.macaron reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true}) reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true}) reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN) @@ -19,7 +19,9 @@ func (hs *HttpServer) registerRoutes() { bind := binding.Bind // automatically set HEAD for every GET - r.SetAutoHead(true) + macaronR.SetAutoHead(true) + + r := newRouteRegister(middleware.RequestMetrics, middleware.RequestTracing) // not logged in views r.Get("/", reqSignedIn, Index) @@ -98,198 +100,195 @@ func (hs *HttpServer) registerRoutes() { r.Get("/api/login/ping", quota("session"), LoginApiPing) // authed api - r.Group("/api", func() { + r.Group("/api", func(apiRoute RouteRegister) { // user (signed in) - r.Group("/user", func() { - r.Get("/", wrap(GetSignedInUser)) - r.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser)) - r.Post("/using/:id", wrap(UserSetUsingOrg)) - r.Get("/orgs", wrap(GetSignedInUserOrgList)) + apiRoute.Group("/user", func(userRoute RouteRegister) { + userRoute.Get("/", wrap(GetSignedInUser)) + userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser)) + userRoute.Post("/using/:id", wrap(UserSetUsingOrg)) + userRoute.Get("/orgs", wrap(GetSignedInUserOrgList)) - r.Post("/stars/dashboard/:id", wrap(StarDashboard)) - r.Delete("/stars/dashboard/:id", wrap(UnstarDashboard)) + userRoute.Post("/stars/dashboard/:id", 
wrap(StarDashboard)) + userRoute.Delete("/stars/dashboard/:id", wrap(UnstarDashboard)) - r.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword)) - r.Get("/quotas", wrap(GetUserQuotas)) - r.Put("/helpflags/:id", wrap(SetHelpFlag)) + userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword)) + userRoute.Get("/quotas", wrap(GetUserQuotas)) + userRoute.Put("/helpflags/:id", wrap(SetHelpFlag)) // For dev purpose - r.Get("/helpflags/clear", wrap(ClearHelpFlags)) + userRoute.Get("/helpflags/clear", wrap(ClearHelpFlags)) - r.Get("/preferences", wrap(GetUserPreferences)) - r.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences)) + userRoute.Get("/preferences", wrap(GetUserPreferences)) + userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences)) }) // users (admin permission required) - r.Group("/users", func() { - r.Get("/", wrap(SearchUsers)) - r.Get("/search", wrap(SearchUsersWithPaging)) - r.Get("/:id", wrap(GetUserById)) - r.Get("/:id/orgs", wrap(GetUserOrgList)) + apiRoute.Group("/users", func(usersRoute RouteRegister) { + usersRoute.Get("/", wrap(SearchUsers)) + usersRoute.Get("/search", wrap(SearchUsersWithPaging)) + usersRoute.Get("/:id", wrap(GetUserById)) + usersRoute.Get("/:id/orgs", wrap(GetUserOrgList)) // query parameters /users/lookup?loginOrEmail=admin@example.com - r.Get("/lookup", wrap(GetUserByLoginOrEmail)) - r.Put("/:id", bind(m.UpdateUserCommand{}), wrap(UpdateUser)) - r.Post("/:id/using/:orgId", wrap(UpdateUserActiveOrg)) + usersRoute.Get("/lookup", wrap(GetUserByLoginOrEmail)) + usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), wrap(UpdateUser)) + usersRoute.Post("/:id/using/:orgId", wrap(UpdateUserActiveOrg)) }, reqGrafanaAdmin) // org information available to all users. 
- r.Group("/org", func() { - r.Get("/", wrap(GetOrgCurrent)) - r.Get("/quotas", wrap(GetOrgQuotas)) + apiRoute.Group("/org", func(orgRoute RouteRegister) { + orgRoute.Get("/", wrap(GetOrgCurrent)) + orgRoute.Get("/quotas", wrap(GetOrgQuotas)) }) // current org - r.Group("/org", func() { - r.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent)) - r.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent)) - r.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg)) - r.Get("/users", wrap(GetOrgUsersForCurrentOrg)) - r.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg)) - r.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg)) + apiRoute.Group("/org", func(orgRoute RouteRegister) { + orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent)) + orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent)) + orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg)) + orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg)) + orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg)) + orgRoute.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg)) // invites - r.Get("/invites", wrap(GetPendingOrgInvites)) - r.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), wrap(AddOrgInvite)) - r.Patch("/invites/:code/revoke", wrap(RevokeInvite)) + orgRoute.Get("/invites", wrap(GetPendingOrgInvites)) + orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), wrap(AddOrgInvite)) + orgRoute.Patch("/invites/:code/revoke", wrap(RevokeInvite)) // prefs - r.Get("/preferences", wrap(GetOrgPreferences)) - r.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences)) + orgRoute.Get("/preferences", wrap(GetOrgPreferences)) + orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences)) }, reqOrgAdmin) // create new org - r.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg)) + apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg)) // search all orgs - r.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs)) + apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs)) // orgs (admin routes) - r.Group("/orgs/:orgId", func() { - r.Get("/", wrap(GetOrgById)) - r.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg)) - r.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress)) - r.Delete("/", wrap(DeleteOrgById)) - r.Get("/users", wrap(GetOrgUsers)) - r.Post("/users", bind(m.AddOrgUserCommand{}), wrap(AddOrgUser)) - r.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUser)) - r.Delete("/users/:userId", wrap(RemoveOrgUser)) - r.Get("/quotas", wrap(GetOrgQuotas)) - r.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), wrap(UpdateOrgQuota)) + apiRoute.Group("/orgs/:orgId", func(orgsRoute RouteRegister) { + orgsRoute.Get("/", wrap(GetOrgById)) + orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg)) + orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress)) + orgsRoute.Delete("/", wrap(DeleteOrgById)) + orgsRoute.Get("/users", wrap(GetOrgUsers)) + orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), wrap(AddOrgUser)) + orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUser)) + orgsRoute.Delete("/users/:userId", wrap(RemoveOrgUser)) + orgsRoute.Get("/quotas", wrap(GetOrgQuotas)) + 
orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), wrap(UpdateOrgQuota)) }, reqGrafanaAdmin) // orgs (admin routes) - r.Group("/orgs/name/:name", func() { - r.Get("/", wrap(GetOrgByName)) + apiRoute.Group("/orgs/name/:name", func(orgsRoute RouteRegister) { + orgsRoute.Get("/", wrap(GetOrgByName)) }, reqGrafanaAdmin) // auth api keys - r.Group("/auth/keys", func() { - r.Get("/", wrap(GetApiKeys)) - r.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddApiKey)) - r.Delete("/:id", wrap(DeleteApiKey)) + apiRoute.Group("/auth/keys", func(keysRoute RouteRegister) { + keysRoute.Get("/", wrap(GetApiKeys)) + keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddApiKey)) + keysRoute.Delete("/:id", wrap(DeleteApiKey)) }, reqOrgAdmin) // Preferences - r.Group("/preferences", func() { - r.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard)) + apiRoute.Group("/preferences", func(prefRoute RouteRegister) { + prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard)) }) // Data sources - r.Group("/datasources", func() { - r.Get("/", wrap(GetDataSources)) - r.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), AddDataSource) - r.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource)) - r.Delete("/:id", DeleteDataSourceById) - r.Delete("/name/:name", DeleteDataSourceByName) - r.Get("/:id", wrap(GetDataSourceById)) - r.Get("/name/:name", wrap(GetDataSourceByName)) + apiRoute.Group("/datasources", func(datasourceRoute RouteRegister) { + datasourceRoute.Get("/", wrap(GetDataSources)) + datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), AddDataSource) + datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource)) + datasourceRoute.Delete("/:id", DeleteDataSourceById) + datasourceRoute.Delete("/name/:name", DeleteDataSourceByName) + datasourceRoute.Get("/:id", wrap(GetDataSourceById)) + datasourceRoute.Get("/name/:name", wrap(GetDataSourceByName)) }, reqOrgAdmin) - r.Get("/datasources/id/:name", wrap(GetDataSourceIdByName), reqSignedIn) + apiRoute.Get("/datasources/id/:name", wrap(GetDataSourceIdByName), reqSignedIn) - r.Get("/plugins", wrap(GetPluginList)) - r.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingById)) - r.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown)) + apiRoute.Get("/plugins", wrap(GetPluginList)) + apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingById)) + apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown)) - r.Group("/plugins", func() { - r.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards)) - r.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting)) + apiRoute.Group("/plugins", func(pluginRoute RouteRegister) { + pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards)) + pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting)) }, reqOrgAdmin) - r.Get("/frontend/settings/", GetFrontendSettings) - r.Any("/datasources/proxy/:id/*", reqSignedIn, hs.ProxyDataSourceRequest) - r.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest) + apiRoute.Get("/frontend/settings/", GetFrontendSettings) + apiRoute.Any("/datasources/proxy/:id/*", reqSignedIn, hs.ProxyDataSourceRequest) + apiRoute.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest) // Dashboard - r.Group("/dashboards", func() { - r.Get("/db/:slug", GetDashboard) - 
r.Delete("/db/:slug", reqEditorRole, DeleteDashboard) + apiRoute.Group("/dashboards", func(dashboardRoute RouteRegister) { + dashboardRoute.Get("/db/:slug", GetDashboard) + dashboardRoute.Delete("/db/:slug", reqEditorRole, DeleteDashboard) - r.Get("/id/:dashboardId/versions", wrap(GetDashboardVersions)) - r.Get("/id/:dashboardId/versions/:id", wrap(GetDashboardVersion)) - r.Post("/id/:dashboardId/restore", reqEditorRole, bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion)) + dashboardRoute.Get("/id/:dashboardId/versions", wrap(GetDashboardVersions)) + dashboardRoute.Get("/id/:dashboardId/versions/:id", wrap(GetDashboardVersion)) + dashboardRoute.Post("/id/:dashboardId/restore", reqEditorRole, bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion)) - r.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff)) + dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff)) - r.Post("/db", reqEditorRole, bind(m.SaveDashboardCommand{}), wrap(PostDashboard)) - r.Get("/file/:file", GetDashboardFromJsonFile) - r.Get("/home", wrap(GetHomeDashboard)) - r.Get("/tags", GetDashboardTags) - r.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard)) + dashboardRoute.Post("/db", reqEditorRole, bind(m.SaveDashboardCommand{}), wrap(PostDashboard)) + dashboardRoute.Get("/file/:file", GetDashboardFromJsonFile) + dashboardRoute.Get("/home", wrap(GetHomeDashboard)) + dashboardRoute.Get("/tags", GetDashboardTags) + dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard)) }) // Dashboard snapshots - r.Group("/dashboard/snapshots", func() { - r.Get("/", wrap(SearchDashboardSnapshots)) + apiRoute.Group("/dashboard/snapshots", func(dashboardRoute RouteRegister) { + dashboardRoute.Get("/", wrap(SearchDashboardSnapshots)) }) // Playlist - r.Group("/playlists", func() { - r.Get("/", wrap(SearchPlaylists)) - r.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist)) - r.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems)) - r.Get("/:id/dashboards", ValidateOrgPlaylist, wrap(GetPlaylistDashboards)) - r.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, wrap(DeletePlaylist)) - r.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, wrap(UpdatePlaylist)) - r.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), wrap(CreatePlaylist)) + apiRoute.Group("/playlists", func(playlistRoute RouteRegister) { + playlistRoute.Get("/", wrap(SearchPlaylists)) + playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist)) + playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems)) + playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, wrap(GetPlaylistDashboards)) + playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, wrap(DeletePlaylist)) + playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, wrap(UpdatePlaylist)) + playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), wrap(CreatePlaylist)) }) // Search - r.Get("/search/", Search) + apiRoute.Get("/search/", Search) // metrics - r.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics)) - r.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios)) - r.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSqlTestData)) - r.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk)) + apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics)) + 
apiRoute.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios)) + apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSqlTestData)) + apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk)) - // metrics - r.Get("/metrics", wrap(GetInternalMetrics)) - - r.Group("/alerts", func() { - r.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest)) - r.Post("/:alertId/pause", bind(dtos.PauseAlertCommand{}), wrap(PauseAlert), reqEditorRole) - r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert)) - r.Get("/", wrap(GetAlerts)) - r.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard)) + apiRoute.Group("/alerts", func(alertsRoute RouteRegister) { + alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest)) + alertsRoute.Post("/:alertId/pause", bind(dtos.PauseAlertCommand{}), wrap(PauseAlert), reqEditorRole) + alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert)) + alertsRoute.Get("/", wrap(GetAlerts)) + alertsRoute.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard)) }) - r.Get("/alert-notifications", wrap(GetAlertNotifications)) - r.Get("/alert-notifiers", wrap(GetAlertNotifiers)) + apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications)) + apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers)) - r.Group("/alert-notifications", func() { - r.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest)) - r.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification)) - r.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification)) - r.Get("/:notificationId", wrap(GetAlertNotificationById)) - r.Delete("/:notificationId", wrap(DeleteAlertNotification)) + apiRoute.Group("/alert-notifications", func(alertNotifications RouteRegister) { + alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest)) + alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification)) + alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification)) + alertNotifications.Get("/:notificationId", wrap(GetAlertNotificationById)) + alertNotifications.Delete("/:notificationId", wrap(DeleteAlertNotification)) }, reqEditorRole) - r.Get("/annotations", wrap(GetAnnotations)) - r.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations)) + apiRoute.Get("/annotations", wrap(GetAnnotations)) + apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations)) - r.Group("/annotations", func() { - r.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation)) + apiRoute.Group("/annotations", func(annotationsRoute RouteRegister) { + annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation)) }, reqEditorRole) // error test @@ -298,16 +297,16 @@ func (hs *HttpServer) registerRoutes() { }, reqSignedIn) // admin api - r.Group("/api/admin", func() { - r.Get("/settings", AdminGetSettings) - r.Post("/users", bind(dtos.AdminCreateUserForm{}), AdminCreateUser) - r.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword) - r.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions) - r.Delete("/users/:id", AdminDeleteUser) - r.Get("/users/:id/quotas", wrap(GetUserQuotas)) - r.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), wrap(UpdateUserQuota)) - 
r.Get("/stats", AdminGetStats) - r.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), wrap(PauseAllAlerts)) + r.Group("/api/admin", func(adminRoute RouteRegister) { + adminRoute.Get("/settings", AdminGetSettings) + adminRoute.Post("/users", bind(dtos.AdminCreateUserForm{}), AdminCreateUser) + adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword) + adminRoute.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions) + adminRoute.Delete("/users/:id", AdminDeleteUser) + adminRoute.Get("/users/:id/quotas", wrap(GetUserQuotas)) + adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), wrap(UpdateUserQuota)) + adminRoute.Get("/stats", AdminGetStats) + adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), wrap(PauseAllAlerts)) }, reqGrafanaAdmin) // rendering @@ -326,7 +325,9 @@ func (hs *HttpServer) registerRoutes() { // streams //r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream) - InitAppPluginRoutes(r) + r.Register(macaronR) - r.NotFound(NotFoundHandler) + InitAppPluginRoutes(macaronR) + + macaronR.NotFound(NotFoundHandler) } diff --git a/pkg/api/app_routes.go b/pkg/api/app_routes.go index 812ea667eee..a7638879a93 100644 --- a/pkg/api/app_routes.go +++ b/pkg/api/app_routes.go @@ -17,8 +17,11 @@ import ( ) var pluginProxyTransport = &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - Proxy: http.ProxyFromEnvironment, + TLSClientConfig: &tls.Config{ + InsecureSkipVerify: true, + Renegotiation: tls.RenegotiateFreelyAsClient, + }, + Proxy: http.ProxyFromEnvironment, Dial: (&net.Dialer{ Timeout: 30 * time.Second, KeepAlive: 30 * time.Second, diff --git a/pkg/api/cloudwatch/cloudwatch.go b/pkg/api/cloudwatch/cloudwatch.go index bfafc8b912b..e0076db40c5 100644 --- a/pkg/api/cloudwatch/cloudwatch.go +++ b/pkg/api/cloudwatch/cloudwatch.go @@ -266,7 +266,7 @@ func handleGetMetricStatistics(req *cwRequest, c *middleware.Context) { c.JsonApiErr(500, "Unable to call AWS API", err) return } - metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc(1) + metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc() c.JSON(200, resp) } @@ -302,7 +302,7 @@ func handleListMetrics(req *cwRequest, c *middleware.Context) { var resp cloudwatch.ListMetricsOutput err = svc.ListMetricsPages(params, func(page *cloudwatch.ListMetricsOutput, lastPage bool) bool { - metrics.M_Aws_CloudWatch_ListMetrics.Inc(1) + metrics.M_Aws_CloudWatch_ListMetrics.Inc() metrics, _ := awsutil.ValuesAtPath(page, "Metrics") for _, metric := range metrics { resp.Metrics = append(resp.Metrics, metric.(*cloudwatch.Metric)) diff --git a/pkg/api/cloudwatch/metrics.go b/pkg/api/cloudwatch/metrics.go index 4f3841d4ee4..16b496d6be6 100644 --- a/pkg/api/cloudwatch/metrics.go +++ b/pkg/api/cloudwatch/metrics.go @@ -126,7 +126,7 @@ func init() { "AWS/NATGateway": {"NatGatewayId"}, "AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"}, "AWS/Redshift": {"NodeID", "ClusterIdentifier"}, - "AWS/RDS": {"DBInstanceIdentifier", "DBClusterIdentifier", "DatabaseClass", "EngineName", "Role"}, + "AWS/RDS": {"DBInstanceIdentifier", "DBClusterIdentifier", "DbClusterIdentifier", "DatabaseClass", "EngineName", "Role"}, "AWS/Route53": {"HealthCheckId", "Region"}, "AWS/S3": {"BucketName", "StorageType", "FilterId"}, "AWS/SES": {}, @@ -275,7 +275,7 @@ func getAllMetrics(cwData *datasourceInfo) (cloudwatch.ListMetricsOutput, error) var resp cloudwatch.ListMetricsOutput err = 
svc.ListMetricsPages(params, func(page *cloudwatch.ListMetricsOutput, lastPage bool) bool { - metrics.M_Aws_CloudWatch_ListMetrics.Inc(1) + metrics.M_Aws_CloudWatch_ListMetrics.Inc() metrics, _ := awsutil.ValuesAtPath(page, "Metrics") for _, metric := range metrics { resp.Metrics = append(resp.Metrics, metric.(*cloudwatch.Metric)) diff --git a/pkg/api/dashboard_snapshot.go b/pkg/api/dashboard_snapshot.go index cdcb871314d..a834bd4717d 100644 --- a/pkg/api/dashboard_snapshot.go +++ b/pkg/api/dashboard_snapshot.go @@ -34,13 +34,13 @@ func CreateDashboardSnapshot(c *middleware.Context, cmd m.CreateDashboardSnapsho cmd.OrgId = -1 cmd.UserId = -1 - metrics.M_Api_Dashboard_Snapshot_External.Inc(1) + metrics.M_Api_Dashboard_Snapshot_External.Inc() } else { cmd.Key = util.GetRandomString(32) cmd.DeleteKey = util.GetRandomString(32) cmd.OrgId = c.OrgId cmd.UserId = c.UserId - metrics.M_Api_Dashboard_Snapshot_Create.Inc(1) + metrics.M_Api_Dashboard_Snapshot_Create.Inc() } if err := bus.Dispatch(&cmd); err != nil { @@ -84,7 +84,7 @@ func GetDashboardSnapshot(c *middleware.Context) { }, } - metrics.M_Api_Dashboard_Snapshot_Get.Inc(1) + metrics.M_Api_Dashboard_Snapshot_Get.Inc() c.Resp.Header().Set("Cache-Control", "public, max-age=3600") c.JSON(200, dto) diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 2693d5f9683..563f940904e 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -62,6 +62,8 @@ func getFrontendSettingsMap(c *middleware.Context) (map[string]interface{}, erro if ds.JsonData != nil { dsMap["jsonData"] = ds.JsonData + } else { + dsMap["jsonData"] = make(map[string]string) } if ds.Access == m.DS_ACCESS_DIRECT { diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 54f08197bae..b43d55b2a8f 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -11,6 +11,8 @@ import ( "path" "time" + "github.com/prometheus/client_golang/prometheus/promhttp" + gocache "github.com/patrickmn/go-cache" macaron "gopkg.in/macaron.v1" @@ -165,9 +167,9 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron { })) m.Use(hs.healthHandler) + m.Use(hs.metricsEndpoint) m.Use(middleware.GetContextHandler()) m.Use(middleware.Sessioner(&setting.SessionOptions)) - m.Use(middleware.RequestMetrics()) m.Use(middleware.OrgRedirect()) // needs to be after context handler @@ -180,6 +182,14 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron { return m } +func (hs *HttpServer) metricsEndpoint(ctx *macaron.Context) { + if ctx.Req.Method != "GET" || ctx.Req.URL.Path != "/metrics" { + return + } + + promhttp.Handler().ServeHTTP(ctx.Resp, ctx.Req.Request) +} + func (hs *HttpServer) healthHandler(ctx *macaron.Context) { if ctx.Req.Method != "GET" || ctx.Req.URL.Path != "/api/health" { return diff --git a/pkg/api/login.go b/pkg/api/login.go index f31ea01b108..ebfe672f825 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -127,7 +127,7 @@ func LoginPost(c *middleware.Context, cmd dtos.LoginCommand) Response { c.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/") } - metrics.M_Api_Login_Post.Inc(1) + metrics.M_Api_Login_Post.Inc() return Json(200, result) } diff --git a/pkg/api/login_oauth.go b/pkg/api/login_oauth.go index 0d9ab83282c..4be49915fd9 100644 --- a/pkg/api/login_oauth.go +++ b/pkg/api/login_oauth.go @@ -186,7 +186,7 @@ func OAuthLogin(ctx *middleware.Context) { // login loginUserWithUser(userQuery.Result, ctx) - metrics.M_Api_Login_OAuth.Inc(1) + metrics.M_Api_Login_OAuth.Inc() if redirectTo, _ := 
url.QueryUnescape(ctx.GetCookie("redirect_to")); len(redirectTo) > 0 { ctx.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/") diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index 48c8d884329..717565cd4a1 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -2,13 +2,10 @@ package api import ( "context" - "encoding/json" - "net/http" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" @@ -34,7 +31,7 @@ func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response { return ApiError(500, "failed to fetch data source", err) } - request := &tsdb.Request{TimeRange: timeRange} + request := &tsdb.TsdbQuery{TimeRange: timeRange} for _, query := range reqDto.Queries { request.Queries = append(request.Queries, &tsdb.Query{ @@ -46,7 +43,7 @@ func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response { }) } - resp, err := tsdb.HandleRequest(context.Background(), request) + resp, err := tsdb.HandleRequest(context.Background(), dsQuery.Result, request) if err != nil { return ApiError(500, "Metric request error", err) } @@ -79,58 +76,6 @@ func GetTestDataScenarios(c *middleware.Context) Response { return Json(200, &result) } -func GetInternalMetrics(c *middleware.Context) Response { - if metrics.UseNilMetrics { - return Json(200, util.DynMap{"message": "Metrics disabled"}) - } - - snapshots := metrics.MetricStats.GetSnapshots() - - resp := make(map[string]interface{}) - - for _, m := range snapshots { - metricName := m.Name() + m.StringifyTags() - - switch metric := m.(type) { - case metrics.Gauge: - resp[metricName] = map[string]interface{}{ - "value": metric.Value(), - } - case metrics.Counter: - resp[metricName] = map[string]interface{}{ - "count": metric.Count(), - } - case metrics.Timer: - percentiles := metric.Percentiles([]float64{0.25, 0.75, 0.90, 0.99}) - resp[metricName] = map[string]interface{}{ - "count": metric.Count(), - "min": metric.Min(), - "max": metric.Max(), - "mean": metric.Mean(), - "std": metric.StdDev(), - "p25": percentiles[0], - "p75": percentiles[1], - "p90": percentiles[2], - "p99": percentiles[3], - } - } - } - - var b []byte - var err error - if b, err = json.MarshalIndent(resp, "", " "); err != nil { - return ApiError(500, "body json marshal", err) - } - - return &NormalResponse{ - body: b, - status: 200, - header: http.Header{ - "Content-Type": []string{"application/json"}, - }, - } -} - // Genereates a index out of range error func GenerateError(c *middleware.Context) Response { var array []string @@ -153,18 +98,19 @@ func GetTestDataRandomWalk(c *middleware.Context) Response { intervalMs := c.QueryInt64("intervalMs") timeRange := tsdb.NewTimeRange(from, to) - request := &tsdb.Request{TimeRange: timeRange} + request := &tsdb.TsdbQuery{TimeRange: timeRange} + dsInfo := &models.DataSource{Type: "grafana-testdata-datasource"} request.Queries = append(request.Queries, &tsdb.Query{ RefId: "A", IntervalMs: intervalMs, Model: simplejson.NewFromAny(&util.DynMap{ "scenario": "random_walk", }), - DataSource: &models.DataSource{Type: "grafana-testdata-datasource"}, + DataSource: dsInfo, }) - resp, err := tsdb.HandleRequest(context.Background(), request) + resp, err := tsdb.HandleRequest(context.Background(), dsInfo, request) if err != nil { return ApiError(500, "Metric request error", err) } 
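Aside on the pkg/api/metrics.go change above: tsdb.Request is renamed to tsdb.TsdbQuery, and tsdb.HandleRequest now takes the target datasource as an explicit argument instead of reading it only from each query. A minimal sketch of the new call shape, using the testdata query fields shown above and assuming the string-based tsdb.NewTimeRange constructor used elsewhere in this package; not a verbatim excerpt:

package api // sketch only

import (
	"context"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana/pkg/util"
)

func exampleTsdbCall() error {
	// The datasource now travels with the call itself, not only inside each query.
	dsInfo := &models.DataSource{Type: "grafana-testdata-datasource"}

	request := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now")}
	request.Queries = append(request.Queries, &tsdb.Query{
		RefId:      "A",
		IntervalMs: 1000,
		Model:      simplejson.NewFromAny(&util.DynMap{"scenario": "random_walk"}),
		DataSource: dsInfo,
	})

	resp, err := tsdb.HandleRequest(context.Background(), dsInfo, request)
	if err != nil {
		return err
	}
	_ = resp // e.g. serialize the response for the API caller
	return nil
}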
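For orientation: the registerRoutes rewrite above is built on the RouteRegister abstraction introduced in pkg/api/route_register.go later in this diff. A rough usage sketch inside package api; listUsers and createUser are hypothetical macaron.Handler values, while newRouteRegister, the named middleware, and Register(hs.macaron) appear verbatim in the diff:

// Each recorded route gets the named middleware (RequestMetrics, RequestTracing)
// instantiated with its own pattern, so metrics and spans are labeled per endpoint.
func (hs *HttpServer) exampleRegisterRoutes(listUsers, createUser macaron.Handler) {
	rr := newRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)
	reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true})

	rr.Group("/api", func(apiRoute RouteRegister) {
		apiRoute.Get("/users", listUsers)   // GETs are registered via macaron's Get() so HEAD also works
		apiRoute.Post("/users", createUser) // other verbs go through Handle()
	}, reqSignedIn) // group-level handlers run before each route's own handlers

	// Nothing is handed to macaron until Register is called.
	rr.Register(hs.macaron)
}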
diff --git a/pkg/api/org.go b/pkg/api/org.go index 26b2cf92fd8..bddfebf80ce 100644 --- a/pkg/api/org.go +++ b/pkg/api/org.go @@ -89,7 +89,7 @@ func CreateOrg(c *middleware.Context, cmd m.CreateOrgCommand) Response { return ApiError(500, "Failed to create organization", err) } - metrics.M_Api_Org_Create.Inc(1) + metrics.M_Api_Org_Create.Inc() return Json(200, &util.DynMap{ "orgId": cmd.Result.Id, diff --git a/pkg/api/org_invite.go b/pkg/api/org_invite.go index b776186aec7..864e464133d 100644 --- a/pkg/api/org_invite.go +++ b/pkg/api/org_invite.go @@ -187,8 +187,8 @@ func CompleteInvite(c *middleware.Context, completeInvite dtos.CompleteInviteFor loginUserWithUser(user, c) - metrics.M_Api_User_SignUpCompleted.Inc(1) - metrics.M_Api_User_SignUpInvite.Inc(1) + metrics.M_Api_User_SignUpCompleted.Inc() + metrics.M_Api_User_SignUpInvite.Inc() return ApiSuccess("User created and logged in") } diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index fff8d099d4b..10fafe21fef 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -15,6 +15,8 @@ import ( "text/template" "time" + "github.com/opentracing/opentracing-go" + "github.com/grafana/grafana/pkg/api/cloudwatch" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/middleware" @@ -85,6 +87,20 @@ func (proxy *DataSourceProxy) HandleRequest() { proxy.logRequest() + span, ctx := opentracing.StartSpanFromContext(proxy.ctx.Req.Context(), "datasource reverse proxy") + proxy.ctx.Req.Request = proxy.ctx.Req.WithContext(ctx) + + defer span.Finish() + span.SetTag("datasource_id", proxy.ds.Id) + span.SetTag("datasource_type", proxy.ds.Type) + span.SetTag("user_id", proxy.ctx.SignedInUser.UserId) + span.SetTag("org_id", proxy.ctx.SignedInUser.OrgId) + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(proxy.ctx.Req.Request.Header)) + reverseProxy.ServeHTTP(proxy.ctx.Resp, proxy.ctx.Req.Request) proxy.ctx.Resp.Header().Del("Set-Cookie") } diff --git a/pkg/api/route_register.go b/pkg/api/route_register.go new file mode 100644 index 00000000000..daa6f35e52c --- /dev/null +++ b/pkg/api/route_register.go @@ -0,0 +1,123 @@ +package api + +import ( + "net/http" + + macaron "gopkg.in/macaron.v1" +) + +type Router interface { + Handle(method, pattern string, handlers []macaron.Handler) *macaron.Route + Get(pattern string, handlers ...macaron.Handler) *macaron.Route +} + +type RouteRegister interface { + Get(string, ...macaron.Handler) + Post(string, ...macaron.Handler) + Delete(string, ...macaron.Handler) + Put(string, ...macaron.Handler) + Patch(string, ...macaron.Handler) + Any(string, ...macaron.Handler) + + Group(string, func(RouteRegister), ...macaron.Handler) + + Register(Router) *macaron.Router +} + +type RegisterNamedMiddleware func(name string) macaron.Handler + +func newRouteRegister(namedMiddleware ...RegisterNamedMiddleware) RouteRegister { + return &routeRegister{ + prefix: "", + routes: []route{}, + subfixHandlers: []macaron.Handler{}, + namedMiddleware: namedMiddleware, + } +} + +type route struct { + method string + pattern string + handlers []macaron.Handler +} + +type routeRegister struct { + prefix string + subfixHandlers []macaron.Handler + namedMiddleware []RegisterNamedMiddleware + routes []route + groups []*routeRegister +} + +func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handlers ...macaron.Handler) { + group := &routeRegister{ + prefix: rr.prefix + pattern, + subfixHandlers: 
append(rr.subfixHandlers, handlers...), + routes: []route{}, + namedMiddleware: rr.namedMiddleware, + } + + fn(group) + rr.groups = append(rr.groups, group) +} + +func (rr *routeRegister) Register(router Router) *macaron.Router { + for _, r := range rr.routes { + // GET requests have to be added to macaron routing using Get() + // Otherwise HEAD requests will not be allowed. + // https://github.com/go-macaron/macaron/blob/a325110f8b392bce3e5cdeb8c44bf98078ada3be/router.go#L198 + if r.method == http.MethodGet { + router.Get(r.pattern, r.handlers...) + } else { + router.Handle(r.method, r.pattern, r.handlers) + } + } + + for _, g := range rr.groups { + g.Register(router) + } + + return &macaron.Router{} +} + +func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handler) { + //inject tracing + + h := make([]macaron.Handler, 0) + for _, fn := range rr.namedMiddleware { + h = append(h, fn(pattern)) + } + + h = append(h, rr.subfixHandlers...) + h = append(h, handlers...) + + rr.routes = append(rr.routes, route{ + method: method, + pattern: rr.prefix + pattern, + handlers: h, + }) +} + +func (rr *routeRegister) Get(pattern string, handlers ...macaron.Handler) { + rr.route(pattern, http.MethodGet, handlers...) +} + +func (rr *routeRegister) Post(pattern string, handlers ...macaron.Handler) { + rr.route(pattern, http.MethodPost, handlers...) +} + +func (rr *routeRegister) Delete(pattern string, handlers ...macaron.Handler) { + rr.route(pattern, http.MethodDelete, handlers...) +} + +func (rr *routeRegister) Put(pattern string, handlers ...macaron.Handler) { + rr.route(pattern, http.MethodPut, handlers...) +} + +func (rr *routeRegister) Patch(pattern string, handlers ...macaron.Handler) { + rr.route(pattern, http.MethodPatch, handlers...) +} + +func (rr *routeRegister) Any(pattern string, handlers ...macaron.Handler) { + rr.route(pattern, "*", handlers...) 
+} diff --git a/pkg/api/route_register_test.go b/pkg/api/route_register_test.go new file mode 100644 index 00000000000..f8a043c48df --- /dev/null +++ b/pkg/api/route_register_test.go @@ -0,0 +1,196 @@ +package api + +import ( + "net/http" + "strconv" + "testing" + + macaron "gopkg.in/macaron.v1" +) + +type fakeRouter struct { + route []route +} + +func (fr *fakeRouter) Handle(method, pattern string, handlers []macaron.Handler) *macaron.Route { + fr.route = append(fr.route, route{ + pattern: pattern, + method: method, + handlers: handlers, + }) + + return &macaron.Route{} +} + +func (fr *fakeRouter) Get(pattern string, handlers ...macaron.Handler) *macaron.Route { + fr.route = append(fr.route, route{ + pattern: pattern, + method: http.MethodGet, + handlers: handlers, + }) + + return &macaron.Route{} +} + +func emptyHandlers(n int) []macaron.Handler { + res := []macaron.Handler{} + for i := 1; n >= i; i++ { + res = append(res, emptyHandler(strconv.Itoa(i))) + } + return res +} + +func emptyHandler(name string) macaron.Handler { + return struct{ name string }{name: name} +} + +func TestRouteSimpleRegister(t *testing.T) { + testTable := []route{ + {method: "DELETE", pattern: "/admin", handlers: emptyHandlers(2)}, + {method: "GET", pattern: "/down", handlers: emptyHandlers(3)}, + } + + // Setup + rr := newRouteRegister(func(name string) macaron.Handler { + return emptyHandler(name) + }) + + rr.Delete("/admin", emptyHandler("1")) + rr.Get("/down", emptyHandler("1"), emptyHandler("2")) + + fr := &fakeRouter{} + rr.Register(fr) + + // Validation + if len(fr.route) != len(testTable) { + t.Errorf("want %v routes, got %v", len(testTable), len(fr.route)) + } + + for i := range testTable { + if testTable[i].method != fr.route[i].method { + t.Errorf("want %s got %v", testTable[i].method, fr.route[i].method) + } + + if testTable[i].pattern != fr.route[i].pattern { + t.Errorf("want %s got %v", testTable[i].pattern, fr.route[i].pattern) + } + + if len(testTable[i].handlers) != len(fr.route[i].handlers) { + t.Errorf("want %d handlers got %d handlers \ntestcase: %v\nroute: %v\n", + len(testTable[i].handlers), + len(fr.route[i].handlers), + testTable[i], + fr.route[i]) + } + } +} + +func TestRouteGroupedRegister(t *testing.T) { + testTable := []route{ + {method: "DELETE", pattern: "/admin", handlers: emptyHandlers(1)}, + {method: "GET", pattern: "/down", handlers: emptyHandlers(2)}, + {method: "POST", pattern: "/user", handlers: emptyHandlers(1)}, + {method: "PUT", pattern: "/user/friends", handlers: emptyHandlers(1)}, + {method: "DELETE", pattern: "/user/admin", handlers: emptyHandlers(2)}, + {method: "GET", pattern: "/user/admin/all", handlers: emptyHandlers(4)}, + } + + // Setup + rr := newRouteRegister() + + rr.Delete("/admin", emptyHandler("1")) + rr.Get("/down", emptyHandler("1"), emptyHandler("2")) + + rr.Group("/user", func(user RouteRegister) { + user.Post("", emptyHandler("1")) + user.Put("/friends", emptyHandler("2")) + + user.Group("/admin", func(admin RouteRegister) { + admin.Delete("", emptyHandler("3")) + admin.Get("/all", emptyHandler("3"), emptyHandler("4"), emptyHandler("5")) + + }, emptyHandler("3")) + }) + + fr := &fakeRouter{} + rr.Register(fr) + + // Validation + if len(fr.route) != len(testTable) { + t.Errorf("want %v routes, got %v", len(testTable), len(fr.route)) + } + + for i := range testTable { + if testTable[i].method != fr.route[i].method { + t.Errorf("want %s got %v", testTable[i].method, fr.route[i].method) + } + + if testTable[i].pattern != fr.route[i].pattern { + 
t.Errorf("want %s got %v", testTable[i].pattern, fr.route[i].pattern) + } + + if len(testTable[i].handlers) != len(fr.route[i].handlers) { + t.Errorf("want %d handlers got %d handlers \ntestcase: %v\nroute: %v\n", + len(testTable[i].handlers), + len(fr.route[i].handlers), + testTable[i], + fr.route[i]) + } + } +} + +func TestNamedMiddlewareRouteRegister(t *testing.T) { + testTable := []route{ + {method: "DELETE", pattern: "/admin", handlers: emptyHandlers(2)}, + {method: "GET", pattern: "/down", handlers: emptyHandlers(3)}, + {method: "POST", pattern: "/user", handlers: emptyHandlers(2)}, + {method: "PUT", pattern: "/user/friends", handlers: emptyHandlers(2)}, + {method: "DELETE", pattern: "/user/admin", handlers: emptyHandlers(3)}, + {method: "GET", pattern: "/user/admin/all", handlers: emptyHandlers(5)}, + } + + // Setup + rr := newRouteRegister(func(name string) macaron.Handler { + return emptyHandler(name) + }) + + rr.Delete("/admin", emptyHandler("1")) + rr.Get("/down", emptyHandler("1"), emptyHandler("2")) + + rr.Group("/user", func(user RouteRegister) { + user.Post("", emptyHandler("1")) + user.Put("/friends", emptyHandler("2")) + + user.Group("/admin", func(admin RouteRegister) { + admin.Delete("", emptyHandler("3")) + admin.Get("/all", emptyHandler("3"), emptyHandler("4"), emptyHandler("5")) + + }, emptyHandler("3")) + }) + + fr := &fakeRouter{} + rr.Register(fr) + + // Validation + if len(fr.route) != len(testTable) { + t.Errorf("want %v routes, got %v", len(testTable), len(fr.route)) + } + + for i := range testTable { + if testTable[i].method != fr.route[i].method { + t.Errorf("want %s got %v", testTable[i].method, fr.route[i].method) + } + + if testTable[i].pattern != fr.route[i].pattern { + t.Errorf("want %s got %v", testTable[i].pattern, fr.route[i].pattern) + } + + if len(testTable[i].handlers) != len(fr.route[i].handlers) { + t.Errorf("want %d handlers got %d handlers \ntestcase: %v\nroute: %v\n", + len(testTable[i].handlers), + len(fr.route[i].handlers), + testTable[i], + fr.route[i]) + } + } +} diff --git a/pkg/api/signup.go b/pkg/api/signup.go index 9a6ade52f82..36ece023087 100644 --- a/pkg/api/signup.go +++ b/pkg/api/signup.go @@ -47,7 +47,7 @@ func SignUp(c *middleware.Context, form dtos.SignUpForm) Response { Code: cmd.Code, }) - metrics.M_Api_User_SignUpStarted.Inc(1) + metrics.M_Api_User_SignUpStarted.Inc() return Json(200, util.DynMap{"status": "SignUpCreated"}) } @@ -111,7 +111,7 @@ func SignUpStep2(c *middleware.Context, form dtos.SignUpStep2Form) Response { } loginUserWithUser(user, c) - metrics.M_Api_User_SignUpCompleted.Inc(1) + metrics.M_Api_User_SignUpCompleted.Inc() return Json(200, apiResponse) } diff --git a/pkg/cmd/grafana-cli/commands/command_line.go b/pkg/cmd/grafana-cli/commands/command_line.go index ce5d04c1bb5..d487aff8aaa 100644 --- a/pkg/cmd/grafana-cli/commands/command_line.go +++ b/pkg/cmd/grafana-cli/commands/command_line.go @@ -19,6 +19,7 @@ type CommandLine interface { PluginDirectory() string RepoDirectory() string + PluginURL() string } type contextCommandLine struct { @@ -44,3 +45,7 @@ func (c *contextCommandLine) PluginDirectory() string { func (c *contextCommandLine) RepoDirectory() string { return c.GlobalString("repo") } + +func (c *contextCommandLine) PluginURL() string { + return c.GlobalString("pluginUrl") +} diff --git a/pkg/cmd/grafana-cli/commands/commandstest/fake_commandLine.go b/pkg/cmd/grafana-cli/commands/commandstest/fake_commandLine.go index 8366e0feb15..f7afc57fda2 100644 --- 
a/pkg/cmd/grafana-cli/commands/commandstest/fake_commandLine.go +++ b/pkg/cmd/grafana-cli/commands/commandstest/fake_commandLine.go @@ -101,3 +101,7 @@ func (fcli *FakeCommandLine) RepoDirectory() string { func (fcli *FakeCommandLine) PluginDirectory() string { return fcli.GlobalString("pluginsDir") } + +func (fcli *FakeCommandLine) PluginURL() string { + return fcli.GlobalString("pluginUrl") +} diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index 1f391ea10b3..a1b249d9c81 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -58,37 +58,39 @@ func installCommand(c CommandLine) error { } func InstallPlugin(pluginName, version string, c CommandLine) error { - plugin, err := s.GetPlugin(pluginName, c.RepoDirectory()) pluginFolder := c.PluginDirectory() - if err != nil { - return err + downloadURL := c.PluginURL() + if downloadURL == "" { + plugin, err := s.GetPlugin(pluginName, c.RepoDirectory()) + if err != nil { + return err + } + + v, err := SelectVersion(plugin, version) + if err != nil { + return err + } + + if version == "" { + version = v.Version + } + downloadURL = fmt.Sprintf("%s/%s/versions/%s/download", + c.GlobalString("repo"), + pluginName, + version) } - v, err := SelectVersion(plugin, version) - if err != nil { - return err - } - - if version == "" { - version = v.Version - } - - downloadURL := fmt.Sprintf("%s/%s/versions/%s/download", - c.GlobalString("repo"), - pluginName, - version) - - logger.Infof("installing %v @ %v\n", plugin.Id, version) + logger.Infof("installing %v @ %v\n", pluginName, version) logger.Infof("from url: %v\n", downloadURL) logger.Infof("into: %v\n", pluginFolder) logger.Info("\n") - err = downloadFile(plugin.Id, pluginFolder, downloadURL) + err := downloadFile(pluginName, pluginFolder, downloadURL) if err != nil { return err } - logger.Infof("%s Installed %s successfully \n", color.GreenString("✔"), plugin.Id) + logger.Infof("%s Installed %s successfully \n", color.GreenString("✔"), pluginName) res, _ := s.ReadPlugin(pluginFolder, pluginName) for _, v := range res.Dependencies.Plugins { diff --git a/pkg/cmd/grafana-cli/main.go b/pkg/cmd/grafana-cli/main.go index e586e606fb5..73548c3b159 100644 --- a/pkg/cmd/grafana-cli/main.go +++ b/pkg/cmd/grafana-cli/main.go @@ -38,6 +38,12 @@ func main() { Value: "https://grafana.com/api/plugins", EnvVar: "GF_PLUGIN_REPO", }, + cli.StringFlag{ + Name: "pluginUrl", + Usage: "Full url to the plugin zip file instead of downloading the plugin from grafana.com/api", + Value: "", + EnvVar: "GF_PLUGIN_URL", + }, cli.BoolFlag{ Name: "debug, d", Usage: "enable debug logging", diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index 4551860a680..8f90da93177 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -14,6 +14,7 @@ import ( "net/http" _ "net/http/pprof" + "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" @@ -22,14 +23,14 @@ import ( _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" _ "github.com/grafana/grafana/pkg/tsdb/graphite" _ "github.com/grafana/grafana/pkg/tsdb/influxdb" - _ "github.com/grafana/grafana/pkg/tsdb/mqe" _ "github.com/grafana/grafana/pkg/tsdb/mysql" _ "github.com/grafana/grafana/pkg/tsdb/opentsdb" + _ "github.com/grafana/grafana/pkg/tsdb/prometheus" _ 
"github.com/grafana/grafana/pkg/tsdb/testdata" ) -var version = "4.1.0" +var version = "4.6.0" var commit = "NA" var buildstamp string var build_date string @@ -80,6 +81,8 @@ func main() { setting.BuildCommit = commit setting.BuildStamp = buildstampInt64 + metrics.M_Grafana_Version.WithLabelValues(version).Set(1) + server := NewGrafanaServer() server.Start() } diff --git a/pkg/cmd/grafana-server/server.go b/pkg/cmd/grafana-server/server.go index eab55ec3d9d..f6a94ac6ef0 100644 --- a/pkg/cmd/grafana-server/server.go +++ b/pkg/cmd/grafana-server/server.go @@ -24,6 +24,7 @@ import ( "github.com/grafana/grafana/pkg/services/search" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/social" + "github.com/grafana/grafana/pkg/tracing" ) func NewGrafanaServer() models.GrafanaServer { @@ -54,13 +55,21 @@ func (g *GrafanaServerImpl) Start() { g.writePIDFile() initSql() - metrics.Init() + metrics.Init(setting.Cfg) search.Init() login.Init() social.NewOAuthService() eventpublisher.Init() plugins.Init() + closer, err := tracing.Init(setting.Cfg) + if err != nil { + g.log.Error("Tracing settings is not valid", "error", err) + g.Shutdown(1, "Startup failed") + return + } + defer closer.Close() + // init alerting if setting.AlertingEnabled && setting.ExecuteAlerts { engine := alerting.NewEngine() @@ -71,8 +80,8 @@ func (g *GrafanaServerImpl) Start() { cleanUpService := cleanup.NewCleanUpService() g.childRoutines.Go(func() error { return cleanUpService.Run(g.context) }) - if err := notifications.Init(); err != nil { - g.log.Error("Notification service failed to initialize", "erro", err) + if err = notifications.Init(); err != nil { + g.log.Error("Notification service failed to initialize", "error", err) g.Shutdown(1, "Startup failed") return } diff --git a/pkg/components/imguploader/gcsuploader.go b/pkg/components/imguploader/gcsuploader.go new file mode 100644 index 00000000000..2271cec7db0 --- /dev/null +++ b/pkg/components/imguploader/gcsuploader.go @@ -0,0 +1,88 @@ +package imguploader + +import ( + "context" + "fmt" + "io/ioutil" + "net/http" + "os" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/util" + "golang.org/x/oauth2/google" +) + +const ( + tokenUrl string = "https://www.googleapis.com/auth/devstorage.read_write" + uploadUrl string = "https://www.googleapis.com/upload/storage/v1/b/%s/o?uploadType=media&name=%s&predefinedAcl=publicRead" +) + +type GCSUploader struct { + keyFile string + bucket string + log log.Logger +} + +func NewGCSUploader(keyFile, bucket string) *GCSUploader { + return &GCSUploader{ + keyFile: keyFile, + bucket: bucket, + log: log.New("gcsuploader"), + } +} + +func (u *GCSUploader) Upload(ctx context.Context, imageDiskPath string) (string, error) { + key := util.GetRandomString(20) + ".png" + + u.log.Debug("Opening key file ", u.keyFile) + data, err := ioutil.ReadFile(u.keyFile) + if err != nil { + return "", err + } + + u.log.Debug("Creating JWT conf") + conf, err := google.JWTConfigFromJSON(data, tokenUrl) + if err != nil { + return "", err + } + + u.log.Debug("Creating HTTP client") + client := conf.Client(ctx) + err = u.uploadFile(client, imageDiskPath, key) + if err != nil { + return "", err + } + + return fmt.Sprintf("https://storage.googleapis.com/%s/%s", u.bucket, key), nil +} + +func (u *GCSUploader) uploadFile(client *http.Client, imageDiskPath, key string) error { + u.log.Debug("Opening image file ", imageDiskPath) + + fileReader, err := os.Open(imageDiskPath) + if err != nil { + return err + } + + reqUrl := 
fmt.Sprintf(uploadUrl, u.bucket, key) + u.log.Debug("Request URL: ", reqUrl) + + req, err := http.NewRequest("POST", reqUrl, fileReader) + if err != nil { + return err + } + + req.Header.Add("Content-Type", "image/png") + u.log.Debug("Sending POST request to GCS") + + resp, err := client.Do(req) + if err != nil { + return err + } + + if resp.StatusCode != 200 { + return fmt.Errorf("GCS response status code %d", resp.StatusCode) + } + + return nil +} diff --git a/pkg/components/imguploader/gcsuploader_test.go b/pkg/components/imguploader/gcsuploader_test.go new file mode 100644 index 00000000000..bdc21084dbf --- /dev/null +++ b/pkg/components/imguploader/gcsuploader_test.go @@ -0,0 +1,24 @@ +package imguploader + +import ( + "context" + "testing" + + "github.com/grafana/grafana/pkg/setting" + . "github.com/smartystreets/goconvey/convey" +) + +func TestUploadToGCS(t *testing.T) { + SkipConvey("[Integration test] for external_image_store.gcs", t, func() { + setting.NewConfigContext(&setting.CommandLineArgs{ + HomePath: "../../../", + }) + + gcsUploader, _ := NewImageUploader() + + path, err := gcsUploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png") + + So(err, ShouldBeNil) + So(path, ShouldNotEqual, "") + }) +} diff --git a/pkg/components/imguploader/imguploader.go b/pkg/components/imguploader/imguploader.go index 883ef8eefda..32058e6cc2d 100644 --- a/pkg/components/imguploader/imguploader.go +++ b/pkg/components/imguploader/imguploader.go @@ -1,6 +1,7 @@ package imguploader import ( + "context" "fmt" "regexp" @@ -8,13 +9,13 @@ import ( ) type ImageUploader interface { - Upload(path string) (string, error) + Upload(ctx context.Context, path string) (string, error) } type NopImageUploader struct { } -func (NopImageUploader) Upload(path string) (string, error) { +func (NopImageUploader) Upload(ctx context.Context, path string) (string, error) { return "", nil } @@ -52,6 +53,16 @@ func NewImageUploader() (ImageUploader, error) { password := webdavSec.Key("password").String() return NewWebdavImageUploader(url, username, password, public_url) + case "gcs": + gcssec, err := setting.Cfg.GetSection("external_image_storage.gcs") + if err != nil { + return nil, err + } + + keyFile := gcssec.Key("key_file").MustString("") + bucketName := gcssec.Key("bucket").MustString("") + + return NewGCSUploader(keyFile, bucketName), nil } return NopImageUploader{}, nil diff --git a/pkg/components/imguploader/imguploader_test.go b/pkg/components/imguploader/imguploader_test.go index 4faaa6415f0..44b4c090008 100644 --- a/pkg/components/imguploader/imguploader_test.go +++ b/pkg/components/imguploader/imguploader_test.go @@ -96,5 +96,28 @@ func TestImageUploaderFactory(t *testing.T) { So(original.username, ShouldEqual, "username") So(original.password, ShouldEqual, "password") }) + + Convey("GCS uploader", func() { + var err error + + setting.NewConfigContext(&setting.CommandLineArgs{ + HomePath: "../../../", + }) + + setting.ImageUploadProvider = "gcs" + + gcpSec, err := setting.Cfg.GetSection("external_image_storage.gcs") + gcpSec.NewKey("key_file", "/etc/secrets/project-79a52befa3f6.json") + gcpSec.NewKey("bucket", "project-grafana-east") + + uploader, err := NewImageUploader() + + So(err, ShouldBeNil) + original, ok := uploader.(*GCSUploader) + + So(ok, ShouldBeTrue) + So(original.keyFile, ShouldEqual, "/etc/secrets/project-79a52befa3f6.json") + So(original.bucket, ShouldEqual, "project-grafana-east") + }) }) } diff --git a/pkg/components/imguploader/s3uploader.go 
b/pkg/components/imguploader/s3uploader.go index 302420a27f4..a8694c0389c 100644 --- a/pkg/components/imguploader/s3uploader.go +++ b/pkg/components/imguploader/s3uploader.go @@ -1,6 +1,7 @@ package imguploader import ( + "context" "os" "time" @@ -34,7 +35,7 @@ func NewS3Uploader(region, bucket, acl, accessKey, secretKey string) *S3Uploader } } -func (u *S3Uploader) Upload(imageDiskPath string) (string, error) { +func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string, error) { sess, err := session.NewSession() if err != nil { return "", err diff --git a/pkg/components/imguploader/s3uploader_test.go b/pkg/components/imguploader/s3uploader_test.go index f75ad05af64..b02d4676b5e 100644 --- a/pkg/components/imguploader/s3uploader_test.go +++ b/pkg/components/imguploader/s3uploader_test.go @@ -1,6 +1,7 @@ package imguploader import ( + "context" "testing" "github.com/grafana/grafana/pkg/setting" @@ -15,7 +16,7 @@ func TestUploadToS3(t *testing.T) { s3Uploader, _ := NewImageUploader() - path, err := s3Uploader.Upload("../../../public/img/logo_transparent_400x.png") + path, err := s3Uploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png") So(err, ShouldBeNil) So(path, ShouldNotEqual, "") diff --git a/pkg/components/imguploader/webdavuploader.go b/pkg/components/imguploader/webdavuploader.go index 4a056e3a48a..01de7119756 100644 --- a/pkg/components/imguploader/webdavuploader.go +++ b/pkg/components/imguploader/webdavuploader.go @@ -2,6 +2,7 @@ package imguploader import ( "bytes" + "context" "fmt" "io/ioutil" "net" @@ -33,7 +34,7 @@ var netClient = &http.Client{ Transport: netTransport, } -func (u *WebdavUploader) Upload(pa string) (string, error) { +func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) { url, _ := url.Parse(u.url) filename := util.GetRandomString(20) + ".png" url.Path = path.Join(url.Path, filename) diff --git a/pkg/components/imguploader/webdavuploader_test.go b/pkg/components/imguploader/webdavuploader_test.go index e88e28bd712..5a8abd0542d 100644 --- a/pkg/components/imguploader/webdavuploader_test.go +++ b/pkg/components/imguploader/webdavuploader_test.go @@ -1,6 +1,7 @@ package imguploader import ( + "context" "testing" . 
"github.com/smartystreets/goconvey/convey" @@ -11,7 +12,7 @@ func TestUploadToWebdav(t *testing.T) { // Can be tested with this docker container: https://hub.docker.com/r/morrisjobke/webdav/ SkipConvey("[Integration test] for external_image_store.webdav", t, func() { webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "") - path, err := webdavUploader.Upload("../../../public/img/logo_transparent_400x.png") + path, err := webdavUploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png") So(err, ShouldBeNil) So(path, ShouldStartWith, "http://localhost:8888/webdav/") @@ -19,7 +20,7 @@ func TestUploadToWebdav(t *testing.T) { SkipConvey("[Integration test] for external_image_store.webdav with public url", t, func() { webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://publicurl:8888/webdav") - path, err := webdavUploader.Upload("../../../public/img/logo_transparent_400x.png") + path, err := webdavUploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png") So(err, ShouldBeNil) So(path, ShouldStartWith, "http://publicurl:8888/webdav/") diff --git a/pkg/metrics/EMWA.go b/pkg/metrics/EMWA.go deleted file mode 100644 index d99dc77b016..00000000000 --- a/pkg/metrics/EMWA.go +++ /dev/null @@ -1,122 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import ( - "math" - "sync" - "sync/atomic" -) - -// EWMAs continuously calculate an exponentially-weighted moving average -// based on an outside source of clock ticks. -type EWMA interface { - Rate() float64 - Snapshot() EWMA - Tick() - Update(int64) -} - -// NewEWMA constructs a new EWMA with the given alpha. -func NewEWMA(alpha float64) EWMA { - if UseNilMetrics { - return NilEWMA{} - } - return &StandardEWMA{alpha: alpha} -} - -// NewEWMA1 constructs a new EWMA for a one-minute moving average. -func NewEWMA1() EWMA { - return NewEWMA(1 - math.Exp(-5.0/60.0/1)) -} - -// NewEWMA5 constructs a new EWMA for a five-minute moving average. -func NewEWMA5() EWMA { - return NewEWMA(1 - math.Exp(-5.0/60.0/5)) -} - -// NewEWMA15 constructs a new EWMA for a fifteen-minute moving average. -func NewEWMA15() EWMA { - return NewEWMA(1 - math.Exp(-5.0/60.0/15)) -} - -// EWMASnapshot is a read-only copy of another EWMA. -type EWMASnapshot float64 - -// Rate returns the rate of events per second at the time the snapshot was -// taken. -func (a EWMASnapshot) Rate() float64 { return float64(a) } - -// Snapshot returns the snapshot. -func (a EWMASnapshot) Snapshot() EWMA { return a } - -// Tick panics. -func (EWMASnapshot) Tick() { - panic("Tick called on an EWMASnapshot") -} - -// Update panics. -func (EWMASnapshot) Update(int64) { - panic("Update called on an EWMASnapshot") -} - -// NilEWMA is a no-op EWMA. -type NilEWMA struct{} - -// Rate is a no-op. -func (NilEWMA) Rate() float64 { return 0.0 } - -// Snapshot is a no-op. -func (NilEWMA) Snapshot() EWMA { return NilEWMA{} } - -// Tick is a no-op. -func (NilEWMA) Tick() {} - -// Update is a no-op. -func (NilEWMA) Update(n int64) {} - -// StandardEWMA is the standard implementation of an EWMA and tracks the number -// of uncounted events and processes them on each tick. It uses the -// sync/atomic package to manage uncounted events. 
-type StandardEWMA struct { - uncounted int64 // /!\ this should be the first member to ensure 64-bit alignment - alpha float64 - rate float64 - init bool - mutex sync.Mutex -} - -// Rate returns the moving average rate of events per second. -func (a *StandardEWMA) Rate() float64 { - a.mutex.Lock() - defer a.mutex.Unlock() - return a.rate * float64(1e9) -} - -// Snapshot returns a read-only copy of the EWMA. -func (a *StandardEWMA) Snapshot() EWMA { - return EWMASnapshot(a.Rate()) -} - -// Tick ticks the clock to update the moving average. It assumes it is called -// every five seconds. -func (a *StandardEWMA) Tick() { - count := atomic.LoadInt64(&a.uncounted) - atomic.AddInt64(&a.uncounted, -count) - instantRate := float64(count) / float64(5e9) - a.mutex.Lock() - defer a.mutex.Unlock() - if a.init { - a.rate += a.alpha * (instantRate - a.rate) - } else { - a.init = true - a.rate = instantRate - } -} - -// Update adds n uncounted events. -func (a *StandardEWMA) Update(n int64) { - atomic.AddInt64(&a.uncounted, n) -} diff --git a/pkg/metrics/combos.go b/pkg/metrics/combos.go deleted file mode 100644 index b4da59c5b32..00000000000 --- a/pkg/metrics/combos.go +++ /dev/null @@ -1,46 +0,0 @@ -package metrics - -// type comboCounterRef struct { -// *MetricMeta -// usageCounter Counter -// metricCounter Counter -// } -// -// func RegComboCounter(name string, tagStrings ...string) Counter { -// meta := NewMetricMeta(name, tagStrings) -// cr := &comboCounterRef{ -// MetricMeta: meta, -// usageCounter: NewCounter(meta), -// metricCounter: NewCounter(meta), -// } -// -// UsageStats.Register(cr.usageCounter) -// MetricStats.Register(cr.metricCounter) -// -// return cr -// } -// -// func (c comboCounterRef) Clear() { -// c.usageCounter.Clear() -// c.metricCounter.Clear() -// } -// -// func (c comboCounterRef) Count() int64 { -// panic("Count called on a combocounter ref") -// } -// -// // Dec panics. -// func (c comboCounterRef) Dec(i int64) { -// c.usageCounter.Dec(i) -// c.metricCounter.Dec(i) -// } -// -// // Inc panics. -// func (c comboCounterRef) Inc(i int64) { -// c.usageCounter.Inc(i) -// c.metricCounter.Inc(i) -// } -// -// func (c comboCounterRef) Snapshot() Metric { -// return c.metricCounter.Snapshot() -// } diff --git a/pkg/metrics/common.go b/pkg/metrics/common.go deleted file mode 100644 index 2043d3a67cf..00000000000 --- a/pkg/metrics/common.go +++ /dev/null @@ -1,61 +0,0 @@ -package metrics - -import "github.com/grafana/grafana/pkg/log" - -type MetricMeta struct { - tags map[string]string - name string -} - -func NewMetricMeta(name string, tagStrings []string) *MetricMeta { - if len(tagStrings)%2 != 0 { - log.Fatal(3, "Metrics: tags array is missing value for key, %v", tagStrings) - } - - tags := make(map[string]string) - for i := 0; i < len(tagStrings); i += 2 { - tags[tagStrings[i]] = tagStrings[i+1] - } - - return &MetricMeta{ - tags: tags, - name: name, - } -} - -func (m *MetricMeta) Name() string { - return m.name -} - -func (m *MetricMeta) GetTagsCopy() map[string]string { - if len(m.tags) == 0 { - return make(map[string]string) - } - - copy := make(map[string]string) - for k2, v2 := range m.tags { - copy[k2] = v2 - } - - return copy -} - -func (m *MetricMeta) StringifyTags() string { - if len(m.tags) == 0 { - return "" - } - - str := "" - for key, value := range m.tags { - str += "." 
+ key + "_" + value - } - - return str -} - -type Metric interface { - Name() string - GetTagsCopy() map[string]string - StringifyTags() string - Snapshot() Metric -} diff --git a/pkg/metrics/counter.go b/pkg/metrics/counter.go deleted file mode 100644 index 8322d370a36..00000000000 --- a/pkg/metrics/counter.go +++ /dev/null @@ -1,61 +0,0 @@ -package metrics - -import "sync/atomic" - -// Counters hold an int64 value that can be incremented and decremented. -type Counter interface { - Metric - - Clear() - Count() int64 - Dec(int64) - Inc(int64) -} - -// NewCounter constructs a new StandardCounter. -func NewCounter(meta *MetricMeta) Counter { - return &StandardCounter{ - MetricMeta: meta, - count: 0, - } -} - -func RegCounter(name string, tagStrings ...string) Counter { - cr := NewCounter(NewMetricMeta(name, tagStrings)) - MetricStats.Register(cr) - return cr -} - -// StandardCounter is the standard implementation of a Counter and uses the -// sync/atomic package to manage a single int64 value. -type StandardCounter struct { - count int64 //Due to a bug in golang the 64bit variable need to come first to be 64bit aligned. https://golang.org/pkg/sync/atomic/#pkg-note-BUG - *MetricMeta -} - -// Clear sets the counter to zero. -func (c *StandardCounter) Clear() { - atomic.StoreInt64(&c.count, 0) -} - -// Count returns the current count. -func (c *StandardCounter) Count() int64 { - return atomic.LoadInt64(&c.count) -} - -// Dec decrements the counter by the given amount. -func (c *StandardCounter) Dec(i int64) { - atomic.AddInt64(&c.count, -i) -} - -// Inc increments the counter by the given amount. -func (c *StandardCounter) Inc(i int64) { - atomic.AddInt64(&c.count, i) -} - -func (c *StandardCounter) Snapshot() Metric { - return &StandardCounter{ - MetricMeta: c.MetricMeta, - count: c.count, - } -} diff --git a/pkg/metrics/delta.go b/pkg/metrics/delta.go deleted file mode 100644 index 71354178209..00000000000 --- a/pkg/metrics/delta.go +++ /dev/null @@ -1,11 +0,0 @@ -package metrics - -import "math" - -func calculateDelta(oldValue, newValue int64) int64 { - if oldValue < newValue { - return newValue - oldValue - } else { - return (math.MaxInt64 - oldValue) + (newValue - math.MinInt64) + 1 - } -} diff --git a/pkg/metrics/gauge.go b/pkg/metrics/gauge.go deleted file mode 100644 index cca32da5622..00000000000 --- a/pkg/metrics/gauge.go +++ /dev/null @@ -1,83 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import "sync/atomic" - -// Gauges hold an int64 value that can be set arbitrarily. -type Gauge interface { - Metric - - Update(int64) - Value() int64 -} - -func NewGauge(meta *MetricMeta) Gauge { - if UseNilMetrics { - return NilGauge{} - } - return &StandardGauge{ - MetricMeta: meta, - value: 0, - } -} - -func RegGauge(name string, tagStrings ...string) Gauge { - tr := NewGauge(NewMetricMeta(name, tagStrings)) - MetricStats.Register(tr) - return tr -} - -// GaugeSnapshot is a read-only copy of another Gauge. -type GaugeSnapshot struct { - value int64 - *MetricMeta -} - -// Snapshot returns the snapshot. -func (g GaugeSnapshot) Snapshot() Metric { return g } - -// Update panics. -func (GaugeSnapshot) Update(int64) { - panic("Update called on a GaugeSnapshot") -} - -// Value returns the value at the time the snapshot was taken. -func (g GaugeSnapshot) Value() int64 { return g.value } - -// NilGauge is a no-op Gauge. 
-type NilGauge struct{ *MetricMeta } - -// Snapshot is a no-op. -func (NilGauge) Snapshot() Metric { return NilGauge{} } - -// Update is a no-op. -func (NilGauge) Update(v int64) {} - -// Value is a no-op. -func (NilGauge) Value() int64 { return 0 } - -// StandardGauge is the standard implementation of a Gauge and uses the -// sync/atomic package to manage a single int64 value. -// atomic needs 64-bit aligned memory which is ensure for first word -type StandardGauge struct { - value int64 - *MetricMeta -} - -// Snapshot returns a read-only copy of the gauge. -func (g *StandardGauge) Snapshot() Metric { - return GaugeSnapshot{MetricMeta: g.MetricMeta, value: g.value} -} - -// Update updates the gauge's value. -func (g *StandardGauge) Update(v int64) { - atomic.StoreInt64(&g.value, v) -} - -// Value returns the gauge's current value. -func (g *StandardGauge) Value() int64 { - return atomic.LoadInt64(&g.value) -} diff --git a/pkg/metrics/graphite.go b/pkg/metrics/graphite.go deleted file mode 100644 index 59c992776de..00000000000 --- a/pkg/metrics/graphite.go +++ /dev/null @@ -1,107 +0,0 @@ -package metrics - -import ( - "bytes" - "fmt" - "net" - "strings" - "time" - - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/setting" -) - -type GraphitePublisher struct { - address string - protocol string - prefix string - prevCounts map[string]int64 -} - -func CreateGraphitePublisher() (*GraphitePublisher, error) { - graphiteSection, err := setting.Cfg.GetSection("metrics.graphite") - if err != nil { - return nil, nil - } - - address := graphiteSection.Key("address").String() - if address == "" { - return nil, nil - } - - publisher := &GraphitePublisher{} - publisher.prevCounts = make(map[string]int64) - publisher.protocol = "tcp" - publisher.prefix = graphiteSection.Key("prefix").MustString("prod.grafana.%(instance_name)s") - publisher.address = address - - safeInstanceName := strings.Replace(setting.InstanceName, ".", "_", -1) - prefix := graphiteSection.Key("prefix").Value() - - if prefix == "" { - prefix = "prod.grafana.%(instance_name)s." 
- } - - publisher.prefix = strings.Replace(prefix, "%(instance_name)s", safeInstanceName, -1) - return publisher, nil -} - -func (this *GraphitePublisher) Publish(metrics []Metric) { - conn, err := net.DialTimeout(this.protocol, this.address, time.Second*5) - - if err != nil { - log.Error(3, "Metrics: GraphitePublisher: Failed to connect to %s!", err) - return - } - - buf := bytes.NewBufferString("") - now := time.Now().Unix() - - for _, m := range metrics { - metricName := this.prefix + m.Name() + m.StringifyTags() - - switch metric := m.(type) { - case Counter: - this.addCount(buf, metricName+".count", metric.Count(), now) - case Gauge: - this.addCount(buf, metricName, metric.Value(), now) - case Timer: - percentiles := metric.Percentiles([]float64{0.25, 0.75, 0.90, 0.99}) - this.addCount(buf, metricName+".count", metric.Count(), now) - this.addInt(buf, metricName+".max", metric.Max(), now) - this.addInt(buf, metricName+".min", metric.Min(), now) - this.addFloat(buf, metricName+".mean", metric.Mean(), now) - this.addFloat(buf, metricName+".std", metric.StdDev(), now) - this.addFloat(buf, metricName+".p25", percentiles[0], now) - this.addFloat(buf, metricName+".p75", percentiles[1], now) - this.addFloat(buf, metricName+".p90", percentiles[2], now) - this.addFloat(buf, metricName+".p99", percentiles[3], now) - } - } - - log.Trace("Metrics: GraphitePublisher.Publish() \n%s", buf) - _, err = conn.Write(buf.Bytes()) - - if err != nil { - log.Error(3, "Metrics: GraphitePublisher: Failed to send metrics! %s", err) - } -} - -func (this *GraphitePublisher) addInt(buf *bytes.Buffer, metric string, value int64, now int64) { - buf.WriteString(fmt.Sprintf("%s %d %d\n", metric, value, now)) -} - -func (this *GraphitePublisher) addFloat(buf *bytes.Buffer, metric string, value float64, now int64) { - buf.WriteString(fmt.Sprintf("%s %f %d\n", metric, value, now)) -} - -func (this *GraphitePublisher) addCount(buf *bytes.Buffer, metric string, value int64, now int64) { - delta := value - - if last, ok := this.prevCounts[metric]; ok { - delta = calculateDelta(last, value) - } - - this.prevCounts[metric] = value - buf.WriteString(fmt.Sprintf("%s %d %d\n", metric, delta, now)) -} diff --git a/pkg/metrics/graphite_test.go b/pkg/metrics/graphite_test.go deleted file mode 100644 index ff2bf530d5e..00000000000 --- a/pkg/metrics/graphite_test.go +++ /dev/null @@ -1,77 +0,0 @@ -package metrics - -import ( - "testing" - - "github.com/grafana/grafana/pkg/setting" - - . 
"github.com/smartystreets/goconvey/convey" -) - -func TestGraphitePublisher(t *testing.T) { - - setting.CustomInitPath = "conf/does_not_exist.ini" - - Convey("Test graphite prefix replacement", t, func() { - var err error - err = setting.NewConfigContext(&setting.CommandLineArgs{ - HomePath: "../../", - }) - - So(err, ShouldBeNil) - - sec, err := setting.Cfg.NewSection("metrics.graphite") - sec.NewKey("prefix", "prod.grafana.%(instance_name)s.") - sec.NewKey("address", "localhost:2001") - - So(err, ShouldBeNil) - - setting.InstanceName = "hostname.with.dots.com" - publisher, err := CreateGraphitePublisher() - - So(err, ShouldBeNil) - So(publisher, ShouldNotBeNil) - - So(publisher.prefix, ShouldEqual, "prod.grafana.hostname_with_dots_com.") - So(publisher.address, ShouldEqual, "localhost:2001") - }) - - Convey("Test graphite publisher default prefix", t, func() { - var err error - err = setting.NewConfigContext(&setting.CommandLineArgs{ - HomePath: "../../", - }) - - So(err, ShouldBeNil) - - sec, err := setting.Cfg.NewSection("metrics.graphite") - sec.NewKey("address", "localhost:2001") - - So(err, ShouldBeNil) - - setting.InstanceName = "hostname.with.dots.com" - publisher, err := CreateGraphitePublisher() - - So(err, ShouldBeNil) - So(publisher, ShouldNotBeNil) - - So(publisher.prefix, ShouldEqual, "prod.grafana.hostname_with_dots_com.") - So(publisher.address, ShouldEqual, "localhost:2001") - }) - - Convey("Test graphite publisher default values", t, func() { - var err error - err = setting.NewConfigContext(&setting.CommandLineArgs{ - HomePath: "../../", - }) - - So(err, ShouldBeNil) - - _, err = setting.Cfg.NewSection("metrics.graphite") - - publisher, err := CreateGraphitePublisher() - - So(err, ShouldBeNil) - So(publisher, ShouldBeNil) - }) -} diff --git a/pkg/metrics/graphitebridge/graphite.go b/pkg/metrics/graphitebridge/graphite.go new file mode 100644 index 00000000000..8504a908d9a --- /dev/null +++ b/pkg/metrics/graphitebridge/graphite.go @@ -0,0 +1,412 @@ +// Copyright 2016 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package graphite provides a bridge to push Prometheus metrics to a Graphite +// server. +package graphitebridge + +import ( + "bufio" + "errors" + "fmt" + "io" + "math" + "net" + "sort" + "strings" + "time" + + "github.com/prometheus/common/expfmt" + "github.com/prometheus/common/model" + "golang.org/x/net/context" + + dto "github.com/prometheus/client_model/go" + + "github.com/prometheus/client_golang/prometheus" +) + +const ( + defaultInterval = 15 * time.Second + millisecondsPerSecond = 1000 +) + +// HandlerErrorHandling defines how a Handler serving metrics will handle +// errors. +type HandlerErrorHandling int + +// These constants cause handlers serving metrics to behave as described if +// errors are encountered. +const ( + // Ignore errors and try to push as many metrics to Graphite as possible. + ContinueOnError HandlerErrorHandling = iota + + // Abort the push to Graphite upon the first error encountered. 
+	AbortOnError
+)
+
+var metricCategoryPrefix = []string{
+	"proxy_",
+	"api_",
+	"page_",
+	"alerting_",
+	"aws_",
+	"db_",
+	"stat_",
+	"go_",
+	"process_"}
+
+var trimMetricPrefix = []string{"grafana_"}
+
+// Config defines the Graphite bridge config.
+type Config struct {
+	// The url to push data to. Required.
+	URL string
+
+	// The prefix for the pushed Graphite metrics. Defaults to empty string.
+	Prefix string
+
+	// The interval to use for pushing data to Graphite. Defaults to 15 seconds.
+	Interval time.Duration
+
+	// The timeout for pushing metrics to Graphite. Defaults to 15 seconds.
+	Timeout time.Duration
+
+	// The Gatherer to use for metrics. Defaults to prometheus.DefaultGatherer.
+	Gatherer prometheus.Gatherer
+
+	// The logger that messages are written to. Defaults to no logging.
+	Logger Logger
+
+	// ErrorHandling defines how errors are handled. Note that errors are
+	// logged regardless of the configured ErrorHandling, provided Logger
+	// is not nil.
+	ErrorHandling HandlerErrorHandling
+
+	// Graphite does not support ever-increasing counters the same way
+	// Prometheus does; rollups and ingestion may not be able to handle
+	// them. This option lets the bridge send deltas instead, by saving
+	// the last sent counter value in memory and subtracting it from the
+	// collected value before sending.
+	CountersAsDelta bool
+}
+
+// Bridge pushes metrics to the configured Graphite server.
+type Bridge struct {
+	url              string
+	prefix           string
+	countersAsDetlas bool
+	interval         time.Duration
+	timeout          time.Duration
+
+	errorHandling HandlerErrorHandling
+	logger        Logger
+
+	g prometheus.Gatherer
+
+	lastValue map[model.Fingerprint]float64
+}
+
+// Logger is the minimal interface Bridge needs for logging. Note that
+// log.Logger from the standard library implements this interface, and it
+// is easy for custom loggers to implement, if they don't do so already
+// anyway.
+type Logger interface {
+	Println(v ...interface{})
+}
+
+// NewBridge returns a pointer to a new Bridge struct.
+func NewBridge(c *Config) (*Bridge, error) {
+	b := &Bridge{}
+
+	if c.URL == "" {
+		return nil, errors.New("missing URL")
+	}
+	b.url = c.URL
+
+	if c.Gatherer == nil {
+		b.g = prometheus.DefaultGatherer
+	} else {
+		b.g = c.Gatherer
+	}
+
+	if c.Logger != nil {
+		b.logger = c.Logger
+	}
+
+	if c.Prefix != "" {
+		b.prefix = c.Prefix
+	}
+
+	var z time.Duration
+	if c.Interval == z {
+		b.interval = defaultInterval
+	} else {
+		b.interval = c.Interval
+	}
+
+	if c.Timeout == z {
+		b.timeout = defaultInterval
+	} else {
+		b.timeout = c.Timeout
+	}
+
+	b.errorHandling = c.ErrorHandling
+	b.lastValue = map[model.Fingerprint]float64{}
+	b.countersAsDetlas = c.CountersAsDelta
+
+	return b, nil
+}
+
+// Run starts the event loop that pushes Prometheus metrics to Graphite at the
+// configured interval.
+func (b *Bridge) Run(ctx context.Context) {
+	ticker := time.NewTicker(b.interval)
+	defer ticker.Stop()
+	for {
+		select {
+		case <-ticker.C:
+			if err := b.Push(); err != nil && b.logger != nil {
+				b.logger.Println("error pushing to Graphite:", err)
+			}
+		case <-ctx.Done():
+			return
+		}
+	}
+}
+
+// Push pushes Prometheus metrics to the configured Graphite server.
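+// It gathers once from the configured Gatherer, dials the Graphite host
+// over TCP using the configured timeout, and writes one Graphite line per
+// sample. A minimal wiring sketch (the address below is an assumption for
+// illustration only, not part of this change):
+//
+//	b, err := NewBridge(&Config{URL: "localhost:2003", Prefix: "prefix."})
+//	if err != nil {
+//		// handle the configuration error
+//	}
+//	go b.Run(context.Background()) // push every Config.Interval until ctx is done
+//	err = b.Push()                 // or trigger a single push by hand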
+func (b *Bridge) Push() error { + mfs, err := b.g.Gather() + if err != nil || len(mfs) == 0 { + switch b.errorHandling { + case AbortOnError: + return err + case ContinueOnError: + if b.logger != nil { + b.logger.Println("continue on error:", err) + } + default: + panic("unrecognized error handling value") + } + } + + conn, err := net.DialTimeout("tcp", b.url, b.timeout) + if err != nil { + return err + } + defer conn.Close() + + return b.writeMetrics(conn, mfs, b.prefix, model.Now()) +} + +func (b *Bridge) writeMetrics(w io.Writer, mfs []*dto.MetricFamily, prefix string, now model.Time) error { + for _, mf := range mfs { + vec, err := expfmt.ExtractSamples(&expfmt.DecodeOptions{ + Timestamp: now, + }, mf) + if err != nil { + return err + } + + buf := bufio.NewWriter(w) + for _, s := range vec { + if math.IsNaN(float64(s.Value)) { + continue + } + + if err := writePrefix(buf, prefix); err != nil { + return err + } + + if err := writeMetric(buf, s.Metric, mf); err != nil { + return err + } + + value := b.replaceCounterWithDelta(mf, s.Metric, s.Value) + if _, err := fmt.Fprintf(buf, " %g %d\n", value, int64(s.Timestamp)/millisecondsPerSecond); err != nil { + return err + } + if err := buf.Flush(); err != nil { + return err + } + } + } + + return nil +} + +func writeMetric(buf *bufio.Writer, m model.Metric, mf *dto.MetricFamily) error { + metricName, hasName := m[model.MetricNameLabel] + numLabels := len(m) - 1 + if !hasName { + numLabels = len(m) + } + + for _, v := range trimMetricPrefix { + if strings.HasPrefix(string(metricName), v) { + metricName = model.LabelValue(strings.Replace(string(metricName), v, "", 1)) + } + } + + for _, v := range metricCategoryPrefix { + if strings.HasPrefix(string(metricName), v) { + group := strings.Replace(v, "_", " ", 1) + metricName = model.LabelValue(strings.Replace(string(metricName), v, group, 1)) + } + } + + labelStrings := make([]string, 0, numLabels) + for label, value := range m { + if label != model.MetricNameLabel { + labelStrings = append(labelStrings, fmt.Sprintf("%s %s", string(label), string(value))) + } + } + + var err error + switch numLabels { + case 0: + if hasName { + if err := writeSanitized(buf, string(metricName)); err != nil { + return err + } + } + default: + sort.Strings(labelStrings) + if err = writeSanitized(buf, string(metricName)); err != nil { + return err + } + for _, s := range labelStrings { + if err = buf.WriteByte('.'); err != nil { + return err + } + if err = writeSanitized(buf, s); err != nil { + return err + } + } + } + + if err = addExtentionConventionForRollups(buf, mf, m); err != nil { + return err + } + + return nil +} + +func addExtentionConventionForRollups(buf *bufio.Writer, mf *dto.MetricFamily, m model.Metric) error { + // Adding `.count` `.sum` suffix makes it possible to configure + // different rollup strategies based on metric type + + mfType := mf.GetType() + var err error + if mfType == dto.MetricType_COUNTER { + if _, err = fmt.Fprint(buf, ".count"); err != nil { + return err + } + } + + if mfType == dto.MetricType_SUMMARY || mfType == dto.MetricType_HISTOGRAM { + if strings.HasSuffix(string(m[model.MetricNameLabel]), "_count") { + if _, err = fmt.Fprint(buf, ".count"); err != nil { + return err + } + } + } + if mfType == dto.MetricType_HISTOGRAM { + if strings.HasSuffix(string(m[model.MetricNameLabel]), "_sum") { + if _, err = fmt.Fprint(buf, ".sum"); err != nil { + return err + } + } + } + + return nil +} + +func writePrefix(buf *bufio.Writer, s string) error { + for _, c := range s { + if _, err := 
buf.WriteRune(replaceInvalid(c)); err != nil { + return err + } + } + + return nil +} + +func writeSanitized(buf *bufio.Writer, s string) error { + prevUnderscore := false + + for _, c := range s { + c = replaceInvalidRune(c) + if c == '_' { + if prevUnderscore { + continue + } + prevUnderscore = true + } else { + prevUnderscore = false + } + if _, err := buf.WriteRune(c); err != nil { + return err + } + } + + return nil +} + +func replaceInvalid(c rune) rune { + if c == ' ' || c == '.' { + return '.' + } + return replaceInvalidRune(c) +} + +func replaceInvalidRune(c rune) rune { + if c == ' ' { + return '.' + } + if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '-' || c == '_' || c == ':' || (c >= '0' && c <= '9')) { + return '_' + } + return c +} + +func (b *Bridge) replaceCounterWithDelta(mf *dto.MetricFamily, metric model.Metric, value model.SampleValue) float64 { + if !b.countersAsDetlas { + return float64(value) + } + + mfType := mf.GetType() + if mfType == dto.MetricType_COUNTER { + return b.returnDelta(metric, value) + } + + if mfType == dto.MetricType_SUMMARY { + if strings.HasSuffix(string(metric[model.MetricNameLabel]), "_count") { + return b.returnDelta(metric, value) + } + } + + return float64(value) +} + +func (b *Bridge) returnDelta(metric model.Metric, value model.SampleValue) float64 { + key := metric.Fingerprint() + _, exists := b.lastValue[key] + if !exists { + b.lastValue[key] = 0 + } + + delta := float64(value) - b.lastValue[key] + b.lastValue[key] = float64(value) + + return delta +} diff --git a/pkg/metrics/graphitebridge/graphite_test.go b/pkg/metrics/graphitebridge/graphite_test.go new file mode 100644 index 00000000000..4776a5ee5d9 --- /dev/null +++ b/pkg/metrics/graphitebridge/graphite_test.go @@ -0,0 +1,568 @@ +package graphitebridge + +import ( + "bufio" + "bytes" + "io" + "net" + "regexp" + "testing" + "time" + + "github.com/prometheus/client_golang/prometheus" + dto "github.com/prometheus/client_model/go" + "github.com/prometheus/common/model" +) + +func TestCountersAsDelta(t *testing.T) { + b, _ := NewBridge(&Config{ + URL: "localhost:12345", + CountersAsDelta: true, + }) + ty := dto.MetricType(0) + mf := &dto.MetricFamily{ + Type: &ty, + Metric: []*dto.Metric{}, + } + m := model.Metric{} + + var want float64 + var got float64 + want = float64(1) + got = b.replaceCounterWithDelta(mf, m, model.SampleValue(1)) + if got != want { + t.Fatalf("want %v got %v", want, got) + } + + got = b.replaceCounterWithDelta(mf, m, model.SampleValue(2)) + if got != want { + t.Fatalf("want %v got %v", want, got) + } +} + +func TestCountersAsDeltaDisabled(t *testing.T) { + b, _ := NewBridge(&Config{ + URL: "localhost:12345", + CountersAsDelta: false, + }) + ty := dto.MetricType(0) + mf := &dto.MetricFamily{ + Type: &ty, + Metric: []*dto.Metric{}, + } + m := model.Metric{} + + var want float64 + var got float64 + want = float64(1) + got = b.replaceCounterWithDelta(mf, m, model.SampleValue(1)) + if got != want { + t.Fatalf("want %v got %v", want, got) + } + + want = float64(2) + got = b.replaceCounterWithDelta(mf, m, model.SampleValue(2)) + if got != want { + t.Fatalf("want %v got %v", want, got) + } +} + +func TestSanitize(t *testing.T) { + testCases := []struct { + in, out string + }{ + {in: "hello", out: "hello"}, + {in: "hE/l1o", out: "hE_l1o"}, + {in: "he,*ll(.o", out: "he_ll_o"}, + {in: "hello_there%^&", out: "hello_there_"}, + } + + var buf bytes.Buffer + w := bufio.NewWriter(&buf) + + for i, tc := range testCases { + if err := writeSanitized(w, tc.in); err != 
nil { + t.Fatalf("write failed: %v", err) + } + if err := w.Flush(); err != nil { + t.Fatalf("flush failed: %v", err) + } + + if want, got := tc.out, buf.String(); want != got { + t.Fatalf("test case index %d: got sanitized string %s, want %s", i, got, want) + } + + buf.Reset() + } +} + +func TestSanitizePrefix(t *testing.T) { + testCases := []struct { + in, out string + }{ + {in: "service.prod.", out: "service.prod."}, + {in: "service.prod", out: "service.prod"}, + } + + var buf bytes.Buffer + w := bufio.NewWriter(&buf) + + for i, tc := range testCases { + if err := writePrefix(w, tc.in); err != nil { + t.Fatalf("write failed: %v", err) + } + if err := w.Flush(); err != nil { + t.Fatalf("flush failed: %v", err) + } + + if want, got := tc.out, buf.String(); want != got { + t.Fatalf("test case index %d: got sanitized string %s, want %s", i, got, want) + } + + buf.Reset() + } +} + +func TestWriteSummary(t *testing.T) { + sumVec := prometheus.NewSummaryVec( + prometheus.SummaryOpts{ + Name: "name", + Help: "docstring", + Namespace: "grafana", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, + []string{"labelname"}, + ) + + reg := prometheus.NewRegistry() + reg.MustRegister(sumVec) + + b, err := NewBridge(&Config{ + URL: "localhost:8080", + Gatherer: reg, + CountersAsDelta: true, + }) + if err != nil { + t.Fatalf("cannot create bridge. err: %v", err) + } + + sumVec.WithLabelValues("val1").Observe(float64(10)) + sumVec.WithLabelValues("val1").Observe(float64(20)) + sumVec.WithLabelValues("val1").Observe(float64(30)) + sumVec.WithLabelValues("val2").Observe(float64(20)) + sumVec.WithLabelValues("val2").Observe(float64(30)) + sumVec.WithLabelValues("val2").Observe(float64(40)) + + mfs, err := reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + now := model.Time(1477043083) + var buf bytes.Buffer + err = b.writeMetrics(&buf, mfs, "prefix.", now) + if err != nil { + t.Fatalf("error: %v", err) + } + + want := `prefix.name.constname.constvalue.labelname.val1.quantile.0_5 20 1477043 +prefix.name.constname.constvalue.labelname.val1.quantile.0_9 30 1477043 +prefix.name.constname.constvalue.labelname.val1.quantile.0_99 30 1477043 +prefix.name_sum.constname.constvalue.labelname.val1 60 1477043 +prefix.name_count.constname.constvalue.labelname.val1.count 3 1477043 +prefix.name.constname.constvalue.labelname.val2.quantile.0_5 30 1477043 +prefix.name.constname.constvalue.labelname.val2.quantile.0_9 40 1477043 +prefix.name.constname.constvalue.labelname.val2.quantile.0_99 40 1477043 +prefix.name_sum.constname.constvalue.labelname.val2 90 1477043 +prefix.name_count.constname.constvalue.labelname.val2.count 3 1477043 +` + + if got := buf.String(); want != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want, got) + } +} + +func TestWriteHistogram(t *testing.T) { + histVec := prometheus.NewHistogramVec( + prometheus.HistogramOpts{ + Name: "name", + Help: "docstring", + Namespace: "grafana", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + Buckets: []float64{0.01, 0.02, 0.05, 0.1}, + }, + []string{"labelname"}, + ) + + reg := prometheus.NewRegistry() + reg.MustRegister(histVec) + + b, err := NewBridge(&Config{ + URL: "localhost:8080", + Gatherer: reg, + CountersAsDelta: true, + }) + if err != nil { + t.Fatalf("error creating bridge: %v", err) + } + + histVec.WithLabelValues("val1").Observe(float64(10)) + histVec.WithLabelValues("val1").Observe(float64(20)) + 
histVec.WithLabelValues("val1").Observe(float64(30)) + histVec.WithLabelValues("val2").Observe(float64(20)) + histVec.WithLabelValues("val2").Observe(float64(30)) + histVec.WithLabelValues("val2").Observe(float64(40)) + + mfs, err := reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + now := model.Time(1477043083) + var buf bytes.Buffer + err = b.writeMetrics(&buf, mfs, "prefix.", now) + if err != nil { + t.Fatalf("error: %v", err) + } + + want := `prefix.name_bucket.constname.constvalue.labelname.val1.le.0_01 0 1477043 +prefix.name_bucket.constname.constvalue.labelname.val1.le.0_02 0 1477043 +prefix.name_bucket.constname.constvalue.labelname.val1.le.0_05 0 1477043 +prefix.name_bucket.constname.constvalue.labelname.val1.le.0_1 0 1477043 +prefix.name_sum.constname.constvalue.labelname.val1.sum 60 1477043 +prefix.name_count.constname.constvalue.labelname.val1.count 3 1477043 +prefix.name_bucket.constname.constvalue.labelname.val1.le._Inf 3 1477043 +prefix.name_bucket.constname.constvalue.labelname.val2.le.0_01 0 1477043 +prefix.name_bucket.constname.constvalue.labelname.val2.le.0_02 0 1477043 +prefix.name_bucket.constname.constvalue.labelname.val2.le.0_05 0 1477043 +prefix.name_bucket.constname.constvalue.labelname.val2.le.0_1 0 1477043 +prefix.name_sum.constname.constvalue.labelname.val2.sum 90 1477043 +prefix.name_count.constname.constvalue.labelname.val2.count 3 1477043 +prefix.name_bucket.constname.constvalue.labelname.val2.le._Inf 3 1477043 +` + if got := buf.String(); want != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want, got) + } +} + +func TestCounterVec(t *testing.T) { + cntVec := prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "page_response", + Namespace: "grafana", + Help: "docstring", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + }, + []string{"labelname"}, + ) + + apicntVec := prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "api_response", + Namespace: "grafana", + Help: "docstring", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + }, + []string{"labelname"}, + ) + + reg := prometheus.NewRegistry() + reg.MustRegister(cntVec) + reg.MustRegister(apicntVec) + + cntVec.WithLabelValues("val1").Inc() + cntVec.WithLabelValues("val2").Inc() + apicntVec.WithLabelValues("val1").Inc() + apicntVec.WithLabelValues("val2").Inc() + + b, err := NewBridge(&Config{ + URL: "localhost:8080", + Gatherer: reg, + CountersAsDelta: true, + }) + if err != nil { + t.Fatalf("error creating bridge: %v", err) + } + + // first collect + mfs, err := reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + var buf bytes.Buffer + err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477043083)) + if err != nil { + t.Fatalf("error: %v", err) + } + + want := `prefix.api.response.constname.constvalue.labelname.val1.count 1 1477043 +prefix.api.response.constname.constvalue.labelname.val2.count 1 1477043 +prefix.page.response.constname.constvalue.labelname.val1.count 1 1477043 +prefix.page.response.constname.constvalue.labelname.val2.count 1 1477043 +` + if got := buf.String(); want != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want, got) + } + + //next collect + cntVec.WithLabelValues("val1").Inc() + cntVec.WithLabelValues("val2").Inc() + apicntVec.WithLabelValues("val1").Inc() + apicntVec.WithLabelValues("val2").Inc() + + mfs, err = reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + buf = bytes.Buffer{} + err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477053083)) + if err != nil { + t.Fatalf("error: 
%v", err) + } + + want2 := `prefix.api.response.constname.constvalue.labelname.val1.count 1 1477053 +prefix.api.response.constname.constvalue.labelname.val2.count 1 1477053 +prefix.page.response.constname.constvalue.labelname.val1.count 1 1477053 +prefix.page.response.constname.constvalue.labelname.val2.count 1 1477053 +` + if got := buf.String(); want2 != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want2, got) + } +} + +func TestCounter(t *testing.T) { + cntVec := prometheus.NewCounter( + prometheus.CounterOpts{ + Name: "page_response", + Help: "docstring", + Namespace: "grafana", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + }) + + reg := prometheus.NewRegistry() + reg.MustRegister(cntVec) + + cntVec.Inc() + + b, err := NewBridge(&Config{ + URL: "localhost:8080", + Gatherer: reg, + CountersAsDelta: true, + }) + if err != nil { + t.Fatalf("error creating bridge: %v", err) + } + + // first collect + mfs, err := reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + var buf bytes.Buffer + err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477043083)) + if err != nil { + t.Fatalf("error: %v", err) + } + + want := "prefix.page.response.constname.constvalue.count 1 1477043\n" + if got := buf.String(); want != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want, got) + } + + //next collect + cntVec.Inc() + + mfs, err = reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + buf = bytes.Buffer{} + err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477053083)) + if err != nil { + t.Fatalf("error: %v", err) + } + + want2 := "prefix.page.response.constname.constvalue.count 1 1477053\n" + if got := buf.String(); want2 != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want2, got) + } +} + +func TestTrimGrafanaNamespace(t *testing.T) { + cntVec := prometheus.NewCounter( + prometheus.CounterOpts{ + Name: "http_request_total", + Help: "docstring", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + }) + + reg := prometheus.NewRegistry() + reg.MustRegister(cntVec) + + cntVec.Inc() + + b, err := NewBridge(&Config{ + URL: "localhost:8080", + Gatherer: reg, + CountersAsDelta: true, + }) + if err != nil { + t.Fatalf("error creating bridge: %v", err) + } + + // first collect + mfs, err := reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + var buf bytes.Buffer + err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477043083)) + if err != nil { + t.Fatalf("error: %v", err) + } + + want := "prefix.http_request_total.constname.constvalue.count 1 1477043\n" + if got := buf.String(); want != got { + t.Fatalf("wanted \n%s\n, got \n%s\n", want, got) + } +} + +func TestSkipNanValues(t *testing.T) { + cntVec := prometheus.NewSummary( + prometheus.SummaryOpts{ + Name: "http_request_total", + Help: "docstring", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + }) + + reg := prometheus.NewRegistry() + reg.MustRegister(cntVec) + + b, err := NewBridge(&Config{ + URL: "localhost:8080", + Gatherer: reg, + CountersAsDelta: true, + }) + if err != nil { + t.Fatalf("error creating bridge: %v", err) + } + + // first collect + mfs, err := reg.Gather() + if err != nil { + t.Fatalf("error: %v", err) + } + + var buf bytes.Buffer + err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477043083)) + if err != nil { + t.Fatalf("error: %v", err) + } + + want := `prefix.http_request_total_sum.constname.constvalue 0 1477043 +prefix.http_request_total_count.constname.constvalue.count 0 1477043 +` + + if got := buf.String(); want != got { + 
t.Fatalf("wanted \n%s\n, got \n%s\n", want, got) + } +} + +func TestPush(t *testing.T) { + reg := prometheus.NewRegistry() + cntVec := prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "name", + Help: "docstring", + Namespace: "grafana", + ConstLabels: prometheus.Labels{"constname": "constvalue"}, + }, + []string{"labelname"}, + ) + cntVec.WithLabelValues("val1").Inc() + cntVec.WithLabelValues("val2").Inc() + reg.MustRegister(cntVec) + + host := "localhost" + port := ":56789" + b, err := NewBridge(&Config{ + URL: host + port, + Gatherer: reg, + Prefix: "prefix.", + }) + if err != nil { + t.Fatalf("error creating bridge: %v", err) + } + + nmg, err := newMockGraphite(port) + if err != nil { + t.Fatalf("error creating mock graphite: %v", err) + } + defer nmg.Close() + + err = b.Push() + if err != nil { + t.Fatalf("error pushing: %v", err) + } + + wants := []string{ + "prefix.name.constname.constvalue.labelname.val1.count 1", + "prefix.name.constname.constvalue.labelname.val2.count 1", + } + + select { + case got := <-nmg.readc: + for _, want := range wants { + matched, err := regexp.MatchString(want, got) + if err != nil { + t.Fatalf("error pushing: %v", err) + } + if !matched { + t.Fatalf("missing metric:\nno match for %s received by server:\n%s", want, got) + } + } + return + case err := <-nmg.errc: + t.Fatalf("error reading push: %v", err) + case <-time.After(50 * time.Millisecond): + t.Fatalf("no result from graphite server") + } +} + +func newMockGraphite(port string) (*mockGraphite, error) { + readc := make(chan string) + errc := make(chan error) + ln, err := net.Listen("tcp", port) + if err != nil { + return nil, err + } + + go func() { + conn, err := ln.Accept() + if err != nil { + errc <- err + } + var b bytes.Buffer + io.Copy(&b, conn) + readc <- b.String() + }() + + return &mockGraphite{ + readc: readc, + errc: errc, + Listener: ln, + }, nil +} + +type mockGraphite struct { + readc chan string + errc chan error + + net.Listener +} diff --git a/pkg/metrics/histogram.go b/pkg/metrics/histogram.go deleted file mode 100644 index 32338da4b69..00000000000 --- a/pkg/metrics/histogram.go +++ /dev/null @@ -1,189 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -// Histograms calculate distribution statistics from a series of int64 values. -type Histogram interface { - Metric - - Clear() - Count() int64 - Max() int64 - Mean() float64 - Min() int64 - Percentile(float64) float64 - Percentiles([]float64) []float64 - StdDev() float64 - Sum() int64 - Update(int64) - Variance() float64 -} - -func NewHistogram(meta *MetricMeta, s Sample) Histogram { - return &StandardHistogram{ - MetricMeta: meta, - sample: s, - } -} - -// HistogramSnapshot is a read-only copy of another Histogram. -type HistogramSnapshot struct { - *MetricMeta - sample *SampleSnapshot -} - -// Clear panics. -func (*HistogramSnapshot) Clear() { - panic("Clear called on a HistogramSnapshot") -} - -// Count returns the number of samples recorded at the time the snapshot was -// taken. -func (h *HistogramSnapshot) Count() int64 { return h.sample.Count() } - -// Max returns the maximum value in the sample at the time the snapshot was -// taken. -func (h *HistogramSnapshot) Max() int64 { return h.sample.Max() } - -// Mean returns the mean of the values in the sample at the time the snapshot -// was taken. 
-func (h *HistogramSnapshot) Mean() float64 { return h.sample.Mean() } - -// Min returns the minimum value in the sample at the time the snapshot was -// taken. -func (h *HistogramSnapshot) Min() int64 { return h.sample.Min() } - -// Percentile returns an arbitrary percentile of values in the sample at the -// time the snapshot was taken. -func (h *HistogramSnapshot) Percentile(p float64) float64 { - return h.sample.Percentile(p) -} - -// Percentiles returns a slice of arbitrary percentiles of values in the sample -// at the time the snapshot was taken. -func (h *HistogramSnapshot) Percentiles(ps []float64) []float64 { - return h.sample.Percentiles(ps) -} - -// Sample returns the Sample underlying the histogram. -func (h *HistogramSnapshot) Sample() Sample { return h.sample } - -// Snapshot returns the snapshot. -func (h *HistogramSnapshot) Snapshot() Metric { return h } - -// StdDev returns the standard deviation of the values in the sample at the -// time the snapshot was taken. -func (h *HistogramSnapshot) StdDev() float64 { return h.sample.StdDev() } - -// Sum returns the sum in the sample at the time the snapshot was taken. -func (h *HistogramSnapshot) Sum() int64 { return h.sample.Sum() } - -// Update panics. -func (*HistogramSnapshot) Update(int64) { - panic("Update called on a HistogramSnapshot") -} - -// Variance returns the variance of inputs at the time the snapshot was taken. -func (h *HistogramSnapshot) Variance() float64 { return h.sample.Variance() } - -// NilHistogram is a no-op Histogram. -type NilHistogram struct { - *MetricMeta -} - -// Clear is a no-op. -func (NilHistogram) Clear() {} - -// Count is a no-op. -func (NilHistogram) Count() int64 { return 0 } - -// Max is a no-op. -func (NilHistogram) Max() int64 { return 0 } - -// Mean is a no-op. -func (NilHistogram) Mean() float64 { return 0.0 } - -// Min is a no-op. -func (NilHistogram) Min() int64 { return 0 } - -// Percentile is a no-op. -func (NilHistogram) Percentile(p float64) float64 { return 0.0 } - -// Percentiles is a no-op. -func (NilHistogram) Percentiles(ps []float64) []float64 { - return make([]float64, len(ps)) -} - -// Sample is a no-op. -func (NilHistogram) Sample() Sample { return NilSample{} } - -// Snapshot is a no-op. -func (n NilHistogram) Snapshot() Metric { return n } - -// StdDev is a no-op. -func (NilHistogram) StdDev() float64 { return 0.0 } - -// Sum is a no-op. -func (NilHistogram) Sum() int64 { return 0 } - -// Update is a no-op. -func (NilHistogram) Update(v int64) {} - -// Variance is a no-op. -func (NilHistogram) Variance() float64 { return 0.0 } - -// StandardHistogram is the standard implementation of a Histogram and uses a -// Sample to bound its memory use. -type StandardHistogram struct { - *MetricMeta - sample Sample -} - -// Clear clears the histogram and its sample. -func (h *StandardHistogram) Clear() { h.sample.Clear() } - -// Count returns the number of samples recorded since the histogram was last -// cleared. -func (h *StandardHistogram) Count() int64 { return h.sample.Count() } - -// Max returns the maximum value in the sample. -func (h *StandardHistogram) Max() int64 { return h.sample.Max() } - -// Mean returns the mean of the values in the sample. -func (h *StandardHistogram) Mean() float64 { return h.sample.Mean() } - -// Min returns the minimum value in the sample. -func (h *StandardHistogram) Min() int64 { return h.sample.Min() } - -// Percentile returns an arbitrary percentile of the values in the sample. 
-func (h *StandardHistogram) Percentile(p float64) float64 { - return h.sample.Percentile(p) -} - -// Percentiles returns a slice of arbitrary percentiles of the values in the -// sample. -func (h *StandardHistogram) Percentiles(ps []float64) []float64 { - return h.sample.Percentiles(ps) -} - -// Sample returns the Sample underlying the histogram. -func (h *StandardHistogram) Sample() Sample { return h.sample } - -// Snapshot returns a read-only copy of the histogram. -func (h *StandardHistogram) Snapshot() Metric { - return &HistogramSnapshot{sample: h.sample.Snapshot().(*SampleSnapshot)} -} - -// StdDev returns the standard deviation of the values in the sample. -func (h *StandardHistogram) StdDev() float64 { return h.sample.StdDev() } - -// Sum returns the sum in the sample. -func (h *StandardHistogram) Sum() int64 { return h.sample.Sum() } - -// Update samples a new value. -func (h *StandardHistogram) Update(v int64) { h.sample.Update(v) } - -// Variance returns the variance of the values in the sample. -func (h *StandardHistogram) Variance() float64 { return h.sample.Variance() } diff --git a/pkg/metrics/histogram_test.go b/pkg/metrics/histogram_test.go deleted file mode 100644 index 010402123c2..00000000000 --- a/pkg/metrics/histogram_test.go +++ /dev/null @@ -1,90 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import "testing" - -func BenchmarkHistogram(b *testing.B) { - h := NewHistogram(nil, NewUniformSample(100)) - b.ResetTimer() - for i := 0; i < b.N; i++ { - h.Update(int64(i)) - } -} - -func TestHistogram10000(t *testing.T) { - h := NewHistogram(nil, NewUniformSample(100000)) - for i := 1; i <= 10000; i++ { - h.Update(int64(i)) - } - testHistogram10000(t, h) -} - -func TestHistogramEmpty(t *testing.T) { - h := NewHistogram(nil, NewUniformSample(100)) - if count := h.Count(); 0 != count { - t.Errorf("h.Count(): 0 != %v\n", count) - } - if min := h.Min(); 0 != min { - t.Errorf("h.Min(): 0 != %v\n", min) - } - if max := h.Max(); 0 != max { - t.Errorf("h.Max(): 0 != %v\n", max) - } - if mean := h.Mean(); 0.0 != mean { - t.Errorf("h.Mean(): 0.0 != %v\n", mean) - } - if stdDev := h.StdDev(); 0.0 != stdDev { - t.Errorf("h.StdDev(): 0.0 != %v\n", stdDev) - } - ps := h.Percentiles([]float64{0.5, 0.75, 0.99}) - if 0.0 != ps[0] { - t.Errorf("median: 0.0 != %v\n", ps[0]) - } - if 0.0 != ps[1] { - t.Errorf("75th percentile: 0.0 != %v\n", ps[1]) - } - if 0.0 != ps[2] { - t.Errorf("99th percentile: 0.0 != %v\n", ps[2]) - } -} - -func TestHistogramSnapshot(t *testing.T) { - h := NewHistogram(nil, NewUniformSample(100000)) - for i := 1; i <= 10000; i++ { - h.Update(int64(i)) - } - snapshot := h.Snapshot().(Histogram) - h.Update(0) - testHistogram10000(t, snapshot) -} - -func testHistogram10000(t *testing.T, h Histogram) { - if count := h.Count(); 10000 != count { - t.Errorf("h.Count(): 10000 != %v\n", count) - } - if min := h.Min(); 1 != min { - t.Errorf("h.Min(): 1 != %v\n", min) - } - if max := h.Max(); 10000 != max { - t.Errorf("h.Max(): 10000 != %v\n", max) - } - if mean := h.Mean(); 5000.5 != mean { - t.Errorf("h.Mean(): 5000.5 != %v\n", mean) - } - if stdDev := h.StdDev(); 2886.751331514372 != stdDev { - t.Errorf("h.StdDev(): 2886.751331514372 != %v\n", stdDev) - } - ps := h.Percentiles([]float64{0.5, 0.75, 0.99}) - if 5000.5 != ps[0] { - t.Errorf("median: 5000.5 != %v\n", ps[0]) - } - if 7500.75 != ps[1] { - t.Errorf("75th percentile: 7500.75 != %v\n", 
ps[1]) - } - if 9900.99 != ps[2] { - t.Errorf("99th percentile: 9900.99 != %v\n", ps[2]) - } -} diff --git a/pkg/metrics/init.go b/pkg/metrics/init.go new file mode 100644 index 00000000000..833b148d319 --- /dev/null +++ b/pkg/metrics/init.go @@ -0,0 +1,38 @@ +package metrics + +import ( + "context" + + ini "gopkg.in/ini.v1" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/metrics/graphitebridge" +) + +var metricsLogger log.Logger = log.New("metrics") + +type logWrapper struct { + logger log.Logger +} + +func (lw *logWrapper) Println(v ...interface{}) { + lw.logger.Info("graphite metric bridge", v...) +} + +func Init(file *ini.File) { + cfg := ReadSettings(file) + internalInit(cfg) +} + +func internalInit(settings *MetricSettings) { + initMetricVars(settings) + + if settings.GraphiteBridgeConfig != nil { + bridge, err := graphitebridge.NewBridge(settings.GraphiteBridgeConfig) + if err != nil { + metricsLogger.Error("failed to create graphite bridge", "error", err) + } else { + go bridge.Run(context.Background()) + } + } +} diff --git a/pkg/metrics/meter.go b/pkg/metrics/meter.go deleted file mode 100644 index 265bff99cb6..00000000000 --- a/pkg/metrics/meter.go +++ /dev/null @@ -1,221 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import ( - "sync" - "time" -) - -// Meters count events to produce exponentially-weighted moving average rates -// at one-, five-, and fifteen-minutes and a mean rate. -type Meter interface { - Metric - - Count() int64 - Mark(int64) - Rate1() float64 - Rate5() float64 - Rate15() float64 - RateMean() float64 -} - -// NewMeter constructs a new StandardMeter and launches a goroutine. -func NewMeter(meta *MetricMeta) Meter { - if UseNilMetrics { - return NilMeter{} - } - - m := newStandardMeter(meta) - arbiter.Lock() - defer arbiter.Unlock() - arbiter.meters = append(arbiter.meters, m) - if !arbiter.started { - arbiter.started = true - go arbiter.tick() - } - return m -} - -type MeterSnapshot struct { - *MetricMeta - count int64 - rate1, rate5, rate15, rateMean float64 -} - -// Count returns the count of events at the time the snapshot was taken. -func (m *MeterSnapshot) Count() int64 { return m.count } - -// Mark panics. -func (*MeterSnapshot) Mark(n int64) { - panic("Mark called on a MeterSnapshot") -} - -// Rate1 returns the one-minute moving average rate of events per second at the -// time the snapshot was taken. -func (m *MeterSnapshot) Rate1() float64 { return m.rate1 } - -// Rate5 returns the five-minute moving average rate of events per second at -// the time the snapshot was taken. -func (m *MeterSnapshot) Rate5() float64 { return m.rate5 } - -// Rate15 returns the fifteen-minute moving average rate of events per second -// at the time the snapshot was taken. -func (m *MeterSnapshot) Rate15() float64 { return m.rate15 } - -// RateMean returns the meter's mean rate of events per second at the time the -// snapshot was taken. -func (m *MeterSnapshot) RateMean() float64 { return m.rateMean } - -// Snapshot returns the snapshot. -func (m *MeterSnapshot) Snapshot() Metric { return m } - -// NilMeter is a no-op Meter. -type NilMeter struct{ *MetricMeta } - -// Count is a no-op. -func (NilMeter) Count() int64 { return 0 } - -// Mark is a no-op. -func (NilMeter) Mark(n int64) {} - -// Rate1 is a no-op. -func (NilMeter) Rate1() float64 { return 0.0 } - -// Rate5 is a no-op. 
-func (NilMeter) Rate5() float64 { return 0.0 } - -// Rate15is a no-op. -func (NilMeter) Rate15() float64 { return 0.0 } - -// RateMean is a no-op. -func (NilMeter) RateMean() float64 { return 0.0 } - -// Snapshot is a no-op. -func (NilMeter) Snapshot() Metric { return NilMeter{} } - -// StandardMeter is the standard implementation of a Meter. -type StandardMeter struct { - *MetricMeta - lock sync.RWMutex - snapshot *MeterSnapshot - a1, a5, a15 EWMA - startTime time.Time -} - -func newStandardMeter(meta *MetricMeta) *StandardMeter { - return &StandardMeter{ - MetricMeta: meta, - snapshot: &MeterSnapshot{MetricMeta: meta}, - a1: NewEWMA1(), - a5: NewEWMA5(), - a15: NewEWMA15(), - startTime: time.Now(), - } -} - -// Count returns the number of events recorded. -func (m *StandardMeter) Count() int64 { - m.lock.RLock() - count := m.snapshot.count - m.lock.RUnlock() - return count -} - -// Mark records the occurrence of n events. -func (m *StandardMeter) Mark(n int64) { - m.lock.Lock() - defer m.lock.Unlock() - m.snapshot.count += n - m.a1.Update(n) - m.a5.Update(n) - m.a15.Update(n) - m.updateSnapshot() -} - -// Rate1 returns the one-minute moving average rate of events per second. -func (m *StandardMeter) Rate1() float64 { - m.lock.RLock() - rate1 := m.snapshot.rate1 - m.lock.RUnlock() - return rate1 -} - -// Rate5 returns the five-minute moving average rate of events per second. -func (m *StandardMeter) Rate5() float64 { - m.lock.RLock() - rate5 := m.snapshot.rate5 - m.lock.RUnlock() - return rate5 -} - -// Rate15 returns the fifteen-minute moving average rate of events per second. -func (m *StandardMeter) Rate15() float64 { - m.lock.RLock() - rate15 := m.snapshot.rate15 - m.lock.RUnlock() - return rate15 -} - -// RateMean returns the meter's mean rate of events per second. -func (m *StandardMeter) RateMean() float64 { - m.lock.RLock() - rateMean := m.snapshot.rateMean - m.lock.RUnlock() - return rateMean -} - -// Snapshot returns a read-only copy of the meter. 
-func (m *StandardMeter) Snapshot() Metric { - m.lock.RLock() - snapshot := *m.snapshot - m.lock.RUnlock() - return &snapshot -} - -func (m *StandardMeter) updateSnapshot() { - // should run with write lock held on m.lock - snapshot := m.snapshot - snapshot.rate1 = m.a1.Rate() - snapshot.rate5 = m.a5.Rate() - snapshot.rate15 = m.a15.Rate() - snapshot.rateMean = float64(snapshot.count) / time.Since(m.startTime).Seconds() -} - -func (m *StandardMeter) tick() { - m.lock.Lock() - defer m.lock.Unlock() - m.a1.Tick() - m.a5.Tick() - m.a15.Tick() - m.updateSnapshot() -} - -type meterArbiter struct { - sync.RWMutex - started bool - meters []*StandardMeter - ticker *time.Ticker -} - -var arbiter = meterArbiter{ticker: time.NewTicker(5e9)} - -// Ticks meters on the scheduled interval -func (ma *meterArbiter) tick() { - for { - select { - case <-ma.ticker.C: - ma.tickMeters() - } - } -} - -func (ma *meterArbiter) tickMeters() { - ma.RLock() - defer ma.RUnlock() - for _, meter := range ma.meters { - meter.tick() - } -} diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index dc0f7e9cabe..4b155ae3208 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -1,146 +1,407 @@ package metrics -var MetricStats Registry -var UseNilMetrics bool +import ( + "bytes" + "encoding/json" + "net/http" + "runtime" + "strings" + "time" -func init() { - // init with nil metrics - initMetricVars(&MetricSettings{}) -} - -var ( - M_Instance_Start Counter - M_Page_Status_200 Counter - M_Page_Status_500 Counter - M_Page_Status_404 Counter - M_Page_Status_Unknown Counter - M_Api_Status_200 Counter - M_Api_Status_404 Counter - M_Api_Status_500 Counter - M_Api_Status_Unknown Counter - M_Proxy_Status_200 Counter - M_Proxy_Status_404 Counter - M_Proxy_Status_500 Counter - M_Proxy_Status_Unknown Counter - M_Api_User_SignUpStarted Counter - M_Api_User_SignUpCompleted Counter - M_Api_User_SignUpInvite Counter - M_Api_Dashboard_Save Timer - M_Api_Dashboard_Get Timer - M_Api_Dashboard_Search Timer - M_Api_Admin_User_Create Counter - M_Api_Login_Post Counter - M_Api_Login_OAuth Counter - M_Api_Org_Create Counter - M_Api_Dashboard_Snapshot_Create Counter - M_Api_Dashboard_Snapshot_External Counter - M_Api_Dashboard_Snapshot_Get Counter - M_Models_Dashboard_Insert Counter - M_Alerting_Result_State_Alerting Counter - M_Alerting_Result_State_Ok Counter - M_Alerting_Result_State_Paused Counter - M_Alerting_Result_State_NoData Counter - M_Alerting_Result_State_Pending Counter - M_Alerting_Notification_Sent_Slack Counter - M_Alerting_Notification_Sent_Email Counter - M_Alerting_Notification_Sent_Webhook Counter - M_Alerting_Notification_Sent_DingDing Counter - M_Alerting_Notification_Sent_PagerDuty Counter - M_Alerting_Notification_Sent_LINE Counter - M_Alerting_Notification_Sent_Victorops Counter - M_Alerting_Notification_Sent_OpsGenie Counter - M_Alerting_Notification_Sent_Telegram Counter - M_Alerting_Notification_Sent_Threema Counter - M_Alerting_Notification_Sent_Sensu Counter - M_Alerting_Notification_Sent_Pushover Counter - M_Aws_CloudWatch_GetMetricStatistics Counter - M_Aws_CloudWatch_ListMetrics Counter - M_DB_DataSource_QueryById Counter - - // Timers - M_DataSource_ProxyReq_Timer Timer - M_Alerting_Execution_Time Timer - - // StatTotals - M_Alerting_Active_Alerts Gauge - M_StatTotal_Dashboards Gauge - M_StatTotal_Users Gauge - M_StatTotal_Orgs Gauge - M_StatTotal_Playlists Gauge + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + 
"github.com/grafana/grafana/pkg/setting" + "github.com/prometheus/client_golang/prometheus" ) -func initMetricVars(settings *MetricSettings) { - UseNilMetrics = settings.Enabled == false - MetricStats = NewRegistry() +const exporterName = "grafana" - M_Instance_Start = RegCounter("instance_start") +var ( + M_Instance_Start prometheus.Counter + M_Page_Status *prometheus.CounterVec + M_Api_Status *prometheus.CounterVec + M_Proxy_Status *prometheus.CounterVec + M_Http_Request_Total *prometheus.CounterVec + M_Http_Request_Summary *prometheus.SummaryVec - M_Page_Status_200 = RegCounter("page.resp_status", "code", "200") - M_Page_Status_500 = RegCounter("page.resp_status", "code", "500") - M_Page_Status_404 = RegCounter("page.resp_status", "code", "404") - M_Page_Status_Unknown = RegCounter("page.resp_status", "code", "unknown") + M_Api_User_SignUpStarted prometheus.Counter + M_Api_User_SignUpCompleted prometheus.Counter + M_Api_User_SignUpInvite prometheus.Counter + M_Api_Dashboard_Save prometheus.Summary + M_Api_Dashboard_Get prometheus.Summary + M_Api_Dashboard_Search prometheus.Summary + M_Api_Admin_User_Create prometheus.Counter + M_Api_Login_Post prometheus.Counter + M_Api_Login_OAuth prometheus.Counter + M_Api_Org_Create prometheus.Counter - M_Api_Status_200 = RegCounter("api.resp_status", "code", "200") - M_Api_Status_404 = RegCounter("api.resp_status", "code", "404") - M_Api_Status_500 = RegCounter("api.resp_status", "code", "500") - M_Api_Status_Unknown = RegCounter("api.resp_status", "code", "unknown") - - M_Proxy_Status_200 = RegCounter("proxy.resp_status", "code", "200") - M_Proxy_Status_404 = RegCounter("proxy.resp_status", "code", "404") - M_Proxy_Status_500 = RegCounter("proxy.resp_status", "code", "500") - M_Proxy_Status_Unknown = RegCounter("proxy.resp_status", "code", "unknown") - - M_Api_User_SignUpStarted = RegCounter("api.user.signup_started") - M_Api_User_SignUpCompleted = RegCounter("api.user.signup_completed") - M_Api_User_SignUpInvite = RegCounter("api.user.signup_invite") - - M_Api_Dashboard_Save = RegTimer("api.dashboard.save") - M_Api_Dashboard_Get = RegTimer("api.dashboard.get") - M_Api_Dashboard_Search = RegTimer("api.dashboard.search") - - M_Api_Admin_User_Create = RegCounter("api.admin.user_create") - M_Api_Login_Post = RegCounter("api.login.post") - M_Api_Login_OAuth = RegCounter("api.login.oauth") - M_Api_Org_Create = RegCounter("api.org.create") - - M_Api_Dashboard_Snapshot_Create = RegCounter("api.dashboard_snapshot.create") - M_Api_Dashboard_Snapshot_External = RegCounter("api.dashboard_snapshot.external") - M_Api_Dashboard_Snapshot_Get = RegCounter("api.dashboard_snapshot.get") - - M_Models_Dashboard_Insert = RegCounter("models.dashboard.insert") - - M_Alerting_Result_State_Alerting = RegCounter("alerting.result", "state", "alerting") - M_Alerting_Result_State_Ok = RegCounter("alerting.result", "state", "ok") - M_Alerting_Result_State_Paused = RegCounter("alerting.result", "state", "paused") - M_Alerting_Result_State_NoData = RegCounter("alerting.result", "state", "no_data") - M_Alerting_Result_State_Pending = RegCounter("alerting.result", "state", "pending") - - M_Alerting_Notification_Sent_Slack = RegCounter("alerting.notifications_sent", "type", "slack") - M_Alerting_Notification_Sent_Email = RegCounter("alerting.notifications_sent", "type", "email") - M_Alerting_Notification_Sent_Webhook = RegCounter("alerting.notifications_sent", "type", "webhook") - M_Alerting_Notification_Sent_DingDing = RegCounter("alerting.notifications_sent", "type", "dingding") - 
M_Alerting_Notification_Sent_PagerDuty = RegCounter("alerting.notifications_sent", "type", "pagerduty") - M_Alerting_Notification_Sent_Victorops = RegCounter("alerting.notifications_sent", "type", "victorops") - M_Alerting_Notification_Sent_OpsGenie = RegCounter("alerting.notifications_sent", "type", "opsgenie") - M_Alerting_Notification_Sent_Telegram = RegCounter("alerting.notifications_sent", "type", "telegram") - M_Alerting_Notification_Sent_Threema = RegCounter("alerting.notifications_sent", "type", "threema") - M_Alerting_Notification_Sent_Sensu = RegCounter("alerting.notifications_sent", "type", "sensu") - M_Alerting_Notification_Sent_LINE = RegCounter("alerting.notifications_sent", "type", "LINE") - M_Alerting_Notification_Sent_Pushover = RegCounter("alerting.notifications_sent", "type", "pushover") - - M_Aws_CloudWatch_GetMetricStatistics = RegCounter("aws.cloudwatch.get_metric_statistics") - M_Aws_CloudWatch_ListMetrics = RegCounter("aws.cloudwatch.list_metrics") - - M_DB_DataSource_QueryById = RegCounter("db.datasource.query_by_id") + M_Api_Dashboard_Snapshot_Create prometheus.Counter + M_Api_Dashboard_Snapshot_External prometheus.Counter + M_Api_Dashboard_Snapshot_Get prometheus.Counter + M_Api_Dashboard_Insert prometheus.Counter + M_Alerting_Result_State *prometheus.CounterVec + M_Alerting_Notification_Sent *prometheus.CounterVec + M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter + M_Aws_CloudWatch_ListMetrics prometheus.Counter + M_DB_DataSource_QueryById prometheus.Counter // Timers - M_DataSource_ProxyReq_Timer = RegTimer("api.dataproxy.request.all") - M_Alerting_Execution_Time = RegTimer("alerting.execution_time") + M_DataSource_ProxyReq_Timer prometheus.Summary + M_Alerting_Execution_Time prometheus.Summary // StatTotals - M_Alerting_Active_Alerts = RegGauge("alerting.active_alerts") - M_StatTotal_Dashboards = RegGauge("stat_totals", "stat", "dashboards") - M_StatTotal_Users = RegGauge("stat_totals", "stat", "users") - M_StatTotal_Orgs = RegGauge("stat_totals", "stat", "orgs") - M_StatTotal_Playlists = RegGauge("stat_totals", "stat", "playlists") + M_Alerting_Active_Alerts prometheus.Gauge + M_StatTotal_Dashboards prometheus.Gauge + M_StatTotal_Users prometheus.Gauge + M_StatTotal_Orgs prometheus.Gauge + M_StatTotal_Playlists prometheus.Gauge + M_Grafana_Version *prometheus.GaugeVec +) + +func init() { + M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "instance_start_total", + Help: "counter for started instances", + Namespace: exporterName, + }) + + M_Page_Status = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "page_response_status_total", + Help: "page http response status", + Namespace: exporterName, + }, + []string{"code"}, + ) + + M_Api_Status = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "api_response_status_total", + Help: "api http response status", + Namespace: exporterName, + }, + []string{"code"}, + ) + + M_Proxy_Status = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "proxy_response_status_total", + Help: "proxy http response status", + Namespace: exporterName, + }, + []string{"code"}, + ) + + M_Http_Request_Total = prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "http_request_total", + Help: "http request counter", + }, + []string{"handler", "statuscode", "method"}, + ) + + M_Http_Request_Summary = prometheus.NewSummaryVec( + prometheus.SummaryOpts{ + Name: "http_request_duration_milliseconds", + Help: "http request summary", + }, + []string{"handler", "statuscode", 
"method"}, + ) + + M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_user_signup_started_total", + Help: "amount of users who started the signup flow", + Namespace: exporterName, + }) + + M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_user_signup_completed_total", + Help: "amount of users who completed the signup flow", + Namespace: exporterName, + }) + + M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_user_signup_invite_total", + Help: "amount of users who have been invited", + Namespace: exporterName, + }) + + M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "api_dashboard_save_milliseconds", + Help: "summary for dashboard save duration", + Namespace: exporterName, + }) + + M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "api_dashboard_get_milliseconds", + Help: "summary for dashboard get duration", + Namespace: exporterName, + }) + + M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "api_dashboard_search_milliseconds", + Help: "summary for dashboard search duration", + Namespace: exporterName, + }) + + M_Api_Admin_User_Create = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_admin_user_created_total", + Help: "api admin user created counter", + Namespace: exporterName, + }) + + M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_login_post_total", + Help: "api login post counter", + Namespace: exporterName, + }) + + M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_login_oauth_total", + Help: "api login oauth counter", + Namespace: exporterName, + }) + + M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_org_create_total", + Help: "api org created counter", + Namespace: exporterName, + }) + + M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_dashboard_snapshot_create_total", + Help: "dashboard snapshots created", + Namespace: exporterName, + }) + + M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_dashboard_snapshot_external_total", + Help: "external dashboard snapshots created", + Namespace: exporterName, + }) + + M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_dashboard_snapshot_get_total", + Help: "loaded dashboards", + Namespace: exporterName, + }) + + M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "api_models_dashboard_insert_total", + Help: "dashboards inserted ", + Namespace: exporterName, + }) + + M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ + Name: "alerting_result_total", + Help: "alert execution result counter", + Namespace: exporterName, + }, []string{"state"}) + + M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ + Name: "alerting_notification_sent_total", + Help: "counter for how many alert notifications been sent", + Namespace: exporterName, + }, []string{"type"}) + + M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_get_metric_statistics_total", + Help: "counter for getting metric statistics from aws", + Namespace: exporterName, + }) + + M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_list_metrics_total", + Help: "counter for getting list of metrics from 
aws", + Namespace: exporterName, + }) + + M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "db_datasource_query_by_id_total", + Help: "counter for getting datasource by id", + Namespace: exporterName, + }) + + M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "api_dataproxy_request_all_milliseconds", + Help: "summary for dashboard search duration", + Namespace: exporterName, + }) + + M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "alerting_execution_time_milliseconds", + Help: "summary of alert exeuction duration", + Namespace: exporterName, + }) + + M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "alerting_active_alerts", + Help: "amount of active alerts", + Namespace: exporterName, + }) + + M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "stat_totals_dashboard", + Help: "total amount of dashboards", + Namespace: exporterName, + }) + + M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "stat_total_users", + Help: "total amount of users", + Namespace: exporterName, + }) + + M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "stat_total_orgs", + Help: "total amount of orgs", + Namespace: exporterName, + }) + + M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "stat_total_playlists", + Help: "total amount of playlists", + Namespace: exporterName, + }) + + M_Grafana_Version = prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Name: "info", + Help: "Information about the Grafana", + Namespace: exporterName, + }, []string{"version"}) + +} + +func initMetricVars(settings *MetricSettings) { + prometheus.MustRegister( + M_Instance_Start, + M_Page_Status, + M_Api_Status, + M_Proxy_Status, + M_Http_Request_Total, + M_Http_Request_Summary, + M_Api_User_SignUpStarted, + M_Api_User_SignUpCompleted, + M_Api_User_SignUpInvite, + M_Api_Dashboard_Save, + M_Api_Dashboard_Get, + M_Api_Dashboard_Search, + M_DataSource_ProxyReq_Timer, + M_Alerting_Execution_Time, + M_Api_Admin_User_Create, + M_Api_Login_Post, + M_Api_Login_OAuth, + M_Api_Org_Create, + M_Api_Dashboard_Snapshot_Create, + M_Api_Dashboard_Snapshot_External, + M_Api_Dashboard_Snapshot_Get, + M_Api_Dashboard_Insert, + M_Alerting_Result_State, + M_Alerting_Notification_Sent, + M_Aws_CloudWatch_GetMetricStatistics, + M_Aws_CloudWatch_ListMetrics, + M_DB_DataSource_QueryById, + M_Alerting_Active_Alerts, + M_StatTotal_Dashboards, + M_StatTotal_Users, + M_StatTotal_Orgs, + M_StatTotal_Playlists, + M_Grafana_Version) + + go instrumentationLoop(settings) +} + +func instrumentationLoop(settings *MetricSettings) chan struct{} { + M_Instance_Start.Inc() + + onceEveryDayTick := time.NewTicker(time.Hour * 24) + secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) + + for { + select { + case <-onceEveryDayTick.C: + sendUsageStats() + case <-secondTicker.C: + updateTotalStats() + } + } +} + +var metricPublishCounter int64 = 0 + +func updateTotalStats() { + metricPublishCounter++ + if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { + statsQuery := models.GetSystemStatsQuery{} + if err := bus.Dispatch(&statsQuery); err != nil { + metricsLogger.Error("Failed to get system stats", "error", err) + return + } + + M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) + M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) + M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) + 
M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) + } +} + +func sendUsageStats() { + if !setting.ReportingEnabled { + return + } + + metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") + + version := strings.Replace(setting.BuildVersion, ".", "_", -1) + + metrics := map[string]interface{}{} + report := map[string]interface{}{ + "version": version, + "metrics": metrics, + "os": runtime.GOOS, + "arch": runtime.GOARCH, + } + + statsQuery := models.GetSystemStatsQuery{} + if err := bus.Dispatch(&statsQuery); err != nil { + metricsLogger.Error("Failed to get system stats", "error", err) + return + } + + metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards + metrics["stats.users.count"] = statsQuery.Result.Users + metrics["stats.orgs.count"] = statsQuery.Result.Orgs + metrics["stats.playlist.count"] = statsQuery.Result.Playlists + metrics["stats.plugins.apps.count"] = len(plugins.Apps) + metrics["stats.plugins.panels.count"] = len(plugins.Panels) + metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) + metrics["stats.alerts.count"] = statsQuery.Result.Alerts + metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers + metrics["stats.datasources.count"] = statsQuery.Result.Datasources + + dsStats := models.GetDataSourceStatsQuery{} + if err := bus.Dispatch(&dsStats); err != nil { + metricsLogger.Error("Failed to get datasource stats", "error", err) + return + } + + // send counters for each data source + // but ignore any custom data sources + // as sending that name could be sensitive information + dsOtherCount := 0 + for _, dsStat := range dsStats.Result { + if models.IsKnownDataSourcePlugin(dsStat.Type) { + metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count + } else { + dsOtherCount += dsStat.Count + } + } + metrics["stats.ds.other.count"] = dsOtherCount + + out, _ := json.MarshalIndent(report, "", " ") + data := bytes.NewBuffer(out) + + client := http.Client{Timeout: time.Duration(5 * time.Second)} + go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) } diff --git a/pkg/metrics/publish.go b/pkg/metrics/publish.go deleted file mode 100644 index d7eb86b8c56..00000000000 --- a/pkg/metrics/publish.go +++ /dev/null @@ -1,135 +0,0 @@ -package metrics - -import ( - "bytes" - "encoding/json" - "net/http" - "runtime" - "strings" - "time" - - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/log" - m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins" - "github.com/grafana/grafana/pkg/setting" -) - -var metricsLogger log.Logger = log.New("metrics") -var metricPublishCounter int64 = 0 - -func Init() { - settings := readSettings() - initMetricVars(settings) - go instrumentationLoop(settings) -} - -func instrumentationLoop(settings *MetricSettings) chan struct{} { - M_Instance_Start.Inc(1) - - onceEveryDayTick := time.NewTicker(time.Hour * 24) - secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) - - for { - select { - case <-onceEveryDayTick.C: - sendUsageStats() - case <-secondTicker.C: - if settings.Enabled { - sendMetrics(settings) - } - } - } -} - -func sendMetrics(settings *MetricSettings) { - if len(settings.Publishers) == 0 { - return - } - - updateTotalStats() - - metrics := MetricStats.GetSnapshots() - for _, publisher := range settings.Publishers { - publisher.Publish(metrics) - } -} - -func updateTotalStats() { - - // every interval also publish totals - metricPublishCounter++ - if 
metricPublishCounter%10 == 0 { - // get stats - statsQuery := m.GetSystemStatsQuery{} - if err := bus.Dispatch(&statsQuery); err != nil { - metricsLogger.Error("Failed to get system stats", "error", err) - return - } - - M_StatTotal_Dashboards.Update(statsQuery.Result.Dashboards) - M_StatTotal_Users.Update(statsQuery.Result.Users) - M_StatTotal_Playlists.Update(statsQuery.Result.Playlists) - M_StatTotal_Orgs.Update(statsQuery.Result.Orgs) - } -} - -func sendUsageStats() { - if !setting.ReportingEnabled { - return - } - - metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") - - version := strings.Replace(setting.BuildVersion, ".", "_", -1) - - metrics := map[string]interface{}{} - report := map[string]interface{}{ - "version": version, - "metrics": metrics, - "os": runtime.GOOS, - "arch": runtime.GOARCH, - } - - statsQuery := m.GetSystemStatsQuery{} - if err := bus.Dispatch(&statsQuery); err != nil { - metricsLogger.Error("Failed to get system stats", "error", err) - return - } - - metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards - metrics["stats.users.count"] = statsQuery.Result.Users - metrics["stats.orgs.count"] = statsQuery.Result.Orgs - metrics["stats.playlist.count"] = statsQuery.Result.Playlists - metrics["stats.plugins.apps.count"] = len(plugins.Apps) - metrics["stats.plugins.panels.count"] = len(plugins.Panels) - metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) - metrics["stats.alerts.count"] = statsQuery.Result.Alerts - metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers - metrics["stats.datasources.count"] = statsQuery.Result.Datasources - - dsStats := m.GetDataSourceStatsQuery{} - if err := bus.Dispatch(&dsStats); err != nil { - metricsLogger.Error("Failed to get datasource stats", "error", err) - return - } - - // send counters for each data source - // but ignore any custom data sources - // as sending that name could be sensitive information - dsOtherCount := 0 - for _, dsStat := range dsStats.Result { - if m.IsKnownDataSourcePlugin(dsStat.Type) { - metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count - } else { - dsOtherCount += dsStat.Count - } - } - metrics["stats.ds.other.count"] = dsOtherCount - - out, _ := json.MarshalIndent(report, "", " ") - data := bytes.NewBuffer(out) - - client := http.Client{Timeout: time.Duration(5 * time.Second)} - go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) -} diff --git a/pkg/metrics/registry.go b/pkg/metrics/registry.go deleted file mode 100644 index 6c40d4fde9f..00000000000 --- a/pkg/metrics/registry.go +++ /dev/null @@ -1,37 +0,0 @@ -package metrics - -import "sync" - -type Registry interface { - GetSnapshots() []Metric - Register(metric Metric) -} - -// The standard implementation of a Registry is a mutex-protected map -// of names to metrics. -type StandardRegistry struct { - metrics []Metric - mutex sync.Mutex -} - -// Create a new registry. -func NewRegistry() Registry { - return &StandardRegistry{ - metrics: make([]Metric, 0), - } -} - -func (r *StandardRegistry) Register(metric Metric) { - r.mutex.Lock() - defer r.mutex.Unlock() - r.metrics = append(r.metrics, metric) -} - -// Call the given function for each registered metric. 
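The push-style Registry/publisher pair being deleted here gives way to Prometheus's pull model: everything registered via prometheus.MustRegister is scraped over HTTP (the changelog above mentions the new /metrics endpoint). A minimal standalone sketch of that exposure pattern; the handler path and port are illustrative, not the wiring this patch adds inside Grafana's HTTP server:

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// Anything registered on the default registry (as initMetricVars does)
	// shows up on the scrape endpoint automatically.
	started := prometheus.NewCounter(prometheus.CounterOpts{
		Namespace: "grafana",
		Name:      "instance_start_total",
		Help:      "counter for started instances",
	})
	prometheus.MustRegister(started)
	started.Inc()

	http.Handle("/metrics", promhttp.Handler())
	http.ListenAndServe(":2004", nil) // port chosen for illustration only
}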
-func (r *StandardRegistry) GetSnapshots() []Metric { - metrics := make([]Metric, len(r.metrics)) - for i, metric := range r.metrics { - metrics[i] = metric.Snapshot() - } - return metrics -} diff --git a/pkg/metrics/sample.go b/pkg/metrics/sample.go deleted file mode 100644 index 4288f29cce6..00000000000 --- a/pkg/metrics/sample.go +++ /dev/null @@ -1,607 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import ( - "math" - "math/rand" - "sort" - "sync" - "time" -) - -const rescaleThreshold = time.Hour - -// Samples maintain a statistically-significant selection of values from -// a stream. -type Sample interface { - Clear() - Count() int64 - Max() int64 - Mean() float64 - Min() int64 - Percentile(float64) float64 - Percentiles([]float64) []float64 - Size() int - Snapshot() Sample - StdDev() float64 - Sum() int64 - Update(int64) - Values() []int64 - Variance() float64 -} - -// ExpDecaySample is an exponentially-decaying sample using a forward-decaying -// priority reservoir. See Cormode et al's "Forward Decay: A Practical Time -// Decay Model for Streaming Systems". -// -// -type ExpDecaySample struct { - alpha float64 - count int64 - mutex sync.Mutex - reservoirSize int - t0, t1 time.Time - values *expDecaySampleHeap -} - -// NewExpDecaySample constructs a new exponentially-decaying sample with the -// given reservoir size and alpha. -func NewExpDecaySample(reservoirSize int, alpha float64) Sample { - s := &ExpDecaySample{ - alpha: alpha, - reservoirSize: reservoirSize, - t0: time.Now(), - values: newExpDecaySampleHeap(reservoirSize), - } - s.t1 = s.t0.Add(rescaleThreshold) - return s -} - -// Clear clears all samples. -func (s *ExpDecaySample) Clear() { - s.mutex.Lock() - defer s.mutex.Unlock() - s.count = 0 - s.t0 = time.Now() - s.t1 = s.t0.Add(rescaleThreshold) - s.values.Clear() -} - -// Count returns the number of samples recorded, which may exceed the -// reservoir size. -func (s *ExpDecaySample) Count() int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return s.count -} - -// Max returns the maximum value in the sample, which may not be the maximum -// value ever to be part of the sample. -func (s *ExpDecaySample) Max() int64 { - return SampleMax(s.Values()) -} - -// Mean returns the mean of the values in the sample. -func (s *ExpDecaySample) Mean() float64 { - return SampleMean(s.Values()) -} - -// Min returns the minimum value in the sample, which may not be the minimum -// value ever to be part of the sample. -func (s *ExpDecaySample) Min() int64 { - return SampleMin(s.Values()) -} - -// Percentile returns an arbitrary percentile of values in the sample. -func (s *ExpDecaySample) Percentile(p float64) float64 { - return SamplePercentile(s.Values(), p) -} - -// Percentiles returns a slice of arbitrary percentiles of values in the -// sample. -func (s *ExpDecaySample) Percentiles(ps []float64) []float64 { - return SamplePercentiles(s.Values(), ps) -} - -// Size returns the size of the sample, which is at most the reservoir size. -func (s *ExpDecaySample) Size() int { - s.mutex.Lock() - defer s.mutex.Unlock() - return s.values.Size() -} - -// Snapshot returns a read-only copy of the sample. 
-func (s *ExpDecaySample) Snapshot() Sample { - s.mutex.Lock() - defer s.mutex.Unlock() - vals := s.values.Values() - values := make([]int64, len(vals)) - for i, v := range vals { - values[i] = v.v - } - return &SampleSnapshot{ - count: s.count, - values: values, - } -} - -// StdDev returns the standard deviation of the values in the sample. -func (s *ExpDecaySample) StdDev() float64 { - return SampleStdDev(s.Values()) -} - -// Sum returns the sum of the values in the sample. -func (s *ExpDecaySample) Sum() int64 { - return SampleSum(s.Values()) -} - -// Update samples a new value. -func (s *ExpDecaySample) Update(v int64) { - s.update(time.Now(), v) -} - -// Values returns a copy of the values in the sample. -func (s *ExpDecaySample) Values() []int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - vals := s.values.Values() - values := make([]int64, len(vals)) - for i, v := range vals { - values[i] = v.v - } - return values -} - -// Variance returns the variance of the values in the sample. -func (s *ExpDecaySample) Variance() float64 { - return SampleVariance(s.Values()) -} - -// update samples a new value at a particular timestamp. This is a method all -// its own to facilitate testing. -func (s *ExpDecaySample) update(t time.Time, v int64) { - s.mutex.Lock() - defer s.mutex.Unlock() - s.count++ - if s.values.Size() == s.reservoirSize { - s.values.Pop() - } - s.values.Push(expDecaySample{ - k: math.Exp(t.Sub(s.t0).Seconds()*s.alpha) / rand.Float64(), - v: v, - }) - if t.After(s.t1) { - values := s.values.Values() - t0 := s.t0 - s.values.Clear() - s.t0 = t - s.t1 = s.t0.Add(rescaleThreshold) - for _, v := range values { - v.k = v.k * math.Exp(-s.alpha*s.t0.Sub(t0).Seconds()) - s.values.Push(v) - } - } -} - -// NilSample is a no-op Sample. -type NilSample struct{} - -// Clear is a no-op. -func (NilSample) Clear() {} - -// Count is a no-op. -func (NilSample) Count() int64 { return 0 } - -// Max is a no-op. -func (NilSample) Max() int64 { return 0 } - -// Mean is a no-op. -func (NilSample) Mean() float64 { return 0.0 } - -// Min is a no-op. -func (NilSample) Min() int64 { return 0 } - -// Percentile is a no-op. -func (NilSample) Percentile(p float64) float64 { return 0.0 } - -// Percentiles is a no-op. -func (NilSample) Percentiles(ps []float64) []float64 { - return make([]float64, len(ps)) -} - -// Size is a no-op. -func (NilSample) Size() int { return 0 } - -// Sample is a no-op. -func (NilSample) Snapshot() Sample { return NilSample{} } - -// StdDev is a no-op. -func (NilSample) StdDev() float64 { return 0.0 } - -// Sum is a no-op. -func (NilSample) Sum() int64 { return 0 } - -// Update is a no-op. -func (NilSample) Update(v int64) {} - -// Values is a no-op. -func (NilSample) Values() []int64 { return []int64{} } - -// Variance is a no-op. -func (NilSample) Variance() float64 { return 0.0 } - -// SampleMax returns the maximum value of the slice of int64. -func SampleMax(values []int64) int64 { - if 0 == len(values) { - return 0 - } - var max int64 = math.MinInt64 - for _, v := range values { - if max < v { - max = v - } - } - return max -} - -// SampleMean returns the mean value of the slice of int64. -func SampleMean(values []int64) float64 { - if 0 == len(values) { - return 0.0 - } - return float64(SampleSum(values)) / float64(len(values)) -} - -// SampleMin returns the minimum value of the slice of int64. 
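The priority key computed in update above rewards recency exponentially, which is what lets the min-heap reservoir evict old samples first. A tiny standalone illustration of the same computation (the alpha value mirrors the 0.015 used by NewTimer's NewExpDecaySample(1028, 0.015); the printed keys are random by design):

package main

import (
	"fmt"
	"math"
	"math/rand"
	"time"
)

func main() {
	alpha := 0.015
	for _, age := range []time.Duration{time.Second, time.Minute, time.Hour} {
		// Same key formula as ExpDecaySample.update: older samples get
		// exponentially smaller keys and sink to the bottom of the heap.
		k := math.Exp(age.Seconds()*alpha) / rand.Float64()
		fmt.Printf("age=%-8v key=%g\n", age, k)
	}
}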
-func SampleMin(values []int64) int64 { - if 0 == len(values) { - return 0 - } - var min int64 = math.MaxInt64 - for _, v := range values { - if min > v { - min = v - } - } - return min -} - -// SamplePercentiles returns an arbitrary percentile of the slice of int64. -func SamplePercentile(values int64Slice, p float64) float64 { - return SamplePercentiles(values, []float64{p})[0] -} - -// SamplePercentiles returns a slice of arbitrary percentiles of the slice of -// int64. -func SamplePercentiles(values int64Slice, ps []float64) []float64 { - scores := make([]float64, len(ps)) - size := len(values) - if size > 0 { - sort.Sort(values) - for i, p := range ps { - pos := p * float64(size+1) - if pos < 1.0 { - scores[i] = float64(values[0]) - } else if pos >= float64(size) { - scores[i] = float64(values[size-1]) - } else { - lower := float64(values[int(pos)-1]) - upper := float64(values[int(pos)]) - scores[i] = lower + (pos-math.Floor(pos))*(upper-lower) - } - } - } - return scores -} - -// SampleSnapshot is a read-only copy of another Sample. -type SampleSnapshot struct { - count int64 - values []int64 -} - -// Clear panics. -func (*SampleSnapshot) Clear() { - panic("Clear called on a SampleSnapshot") -} - -// Count returns the count of inputs at the time the snapshot was taken. -func (s *SampleSnapshot) Count() int64 { return s.count } - -// Max returns the maximal value at the time the snapshot was taken. -func (s *SampleSnapshot) Max() int64 { return SampleMax(s.values) } - -// Mean returns the mean value at the time the snapshot was taken. -func (s *SampleSnapshot) Mean() float64 { return SampleMean(s.values) } - -// Min returns the minimal value at the time the snapshot was taken. -func (s *SampleSnapshot) Min() int64 { return SampleMin(s.values) } - -// Percentile returns an arbitrary percentile of values at the time the -// snapshot was taken. -func (s *SampleSnapshot) Percentile(p float64) float64 { - return SamplePercentile(s.values, p) -} - -// Percentiles returns a slice of arbitrary percentiles of values at the time -// the snapshot was taken. -func (s *SampleSnapshot) Percentiles(ps []float64) []float64 { - return SamplePercentiles(s.values, ps) -} - -// Size returns the size of the sample at the time the snapshot was taken. -func (s *SampleSnapshot) Size() int { return len(s.values) } - -// Snapshot returns the snapshot. -func (s *SampleSnapshot) Snapshot() Sample { return s } - -// StdDev returns the standard deviation of values at the time the snapshot was -// taken. -func (s *SampleSnapshot) StdDev() float64 { return SampleStdDev(s.values) } - -// Sum returns the sum of values at the time the snapshot was taken. -func (s *SampleSnapshot) Sum() int64 { return SampleSum(s.values) } - -// Update panics. -func (*SampleSnapshot) Update(int64) { - panic("Update called on a SampleSnapshot") -} - -// Values returns a copy of the values in the sample. -func (s *SampleSnapshot) Values() []int64 { - values := make([]int64, len(s.values)) - copy(values, s.values) - return values -} - -// Variance returns the variance of values at the time the snapshot was taken. -func (s *SampleSnapshot) Variance() float64 { return SampleVariance(s.values) } - -// SampleStdDev returns the standard deviation of the slice of int64. -func SampleStdDev(values []int64) float64 { - return math.Sqrt(SampleVariance(values)) -} - -// SampleSum returns the sum of the slice of int64. 
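The linear interpolation in SamplePercentiles above is easy to verify by hand: for p=0.5 over {1,2,3,4}, pos = 0.5*(4+1) = 2.5, so the result falls halfway between the 2nd and 3rd sorted values. A self-contained re-implementation of the same logic with that worked case:

package main

import (
	"fmt"
	"math"
	"sort"
)

// percentile mirrors the deleted SamplePercentiles interpolation.
func percentile(values []int64, p float64) float64 {
	sort.Slice(values, func(i, j int) bool { return values[i] < values[j] })
	pos := p * float64(len(values)+1)
	switch {
	case pos < 1.0:
		return float64(values[0])
	case pos >= float64(len(values)):
		return float64(values[len(values)-1])
	default:
		lower := float64(values[int(pos)-1])
		upper := float64(values[int(pos)])
		return lower + (pos-math.Floor(pos))*(upper-lower)
	}
}

func main() {
	fmt.Println(percentile([]int64{4, 1, 3, 2}, 0.5)) // 2.5
}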
-func SampleSum(values []int64) int64 { - var sum int64 - for _, v := range values { - sum += v - } - return sum -} - -// SampleVariance returns the variance of the slice of int64. -func SampleVariance(values []int64) float64 { - if 0 == len(values) { - return 0.0 - } - m := SampleMean(values) - var sum float64 - for _, v := range values { - d := float64(v) - m - sum += d * d - } - return sum / float64(len(values)) -} - -// A uniform sample using Vitter's Algorithm R. -// -// -type UniformSample struct { - count int64 - mutex sync.Mutex - reservoirSize int - values []int64 -} - -// NewUniformSample constructs a new uniform sample with the given reservoir -// size. -func NewUniformSample(reservoirSize int) Sample { - return &UniformSample{ - reservoirSize: reservoirSize, - values: make([]int64, 0, reservoirSize), - } -} - -// Clear clears all samples. -func (s *UniformSample) Clear() { - s.mutex.Lock() - defer s.mutex.Unlock() - s.count = 0 - s.values = make([]int64, 0, s.reservoirSize) -} - -// Count returns the number of samples recorded, which may exceed the -// reservoir size. -func (s *UniformSample) Count() int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return s.count -} - -// Max returns the maximum value in the sample, which may not be the maximum -// value ever to be part of the sample. -func (s *UniformSample) Max() int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SampleMax(s.values) -} - -// Mean returns the mean of the values in the sample. -func (s *UniformSample) Mean() float64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SampleMean(s.values) -} - -// Min returns the minimum value in the sample, which may not be the minimum -// value ever to be part of the sample. -func (s *UniformSample) Min() int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SampleMin(s.values) -} - -// Percentile returns an arbitrary percentile of values in the sample. -func (s *UniformSample) Percentile(p float64) float64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SamplePercentile(s.values, p) -} - -// Percentiles returns a slice of arbitrary percentiles of values in the -// sample. -func (s *UniformSample) Percentiles(ps []float64) []float64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SamplePercentiles(s.values, ps) -} - -// Size returns the size of the sample, which is at most the reservoir size. -func (s *UniformSample) Size() int { - s.mutex.Lock() - defer s.mutex.Unlock() - return len(s.values) -} - -// Snapshot returns a read-only copy of the sample. -func (s *UniformSample) Snapshot() Sample { - s.mutex.Lock() - defer s.mutex.Unlock() - values := make([]int64, len(s.values)) - copy(values, s.values) - return &SampleSnapshot{ - count: s.count, - values: values, - } -} - -// StdDev returns the standard deviation of the values in the sample. -func (s *UniformSample) StdDev() float64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SampleStdDev(s.values) -} - -// Sum returns the sum of the values in the sample. -func (s *UniformSample) Sum() int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SampleSum(s.values) -} - -// Update samples a new value. -func (s *UniformSample) Update(v int64) { - s.mutex.Lock() - defer s.mutex.Unlock() - s.count++ - if len(s.values) < s.reservoirSize { - s.values = append(s.values, v) - } else { - r := rand.Int63n(s.count) - if r < int64(len(s.values)) { - s.values[int(r)] = v - } - } -} - -// Values returns a copy of the values in the sample. 
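UniformSample.Update above is Vitter's Algorithm R: after the reservoir fills, each new value replaces a random slot with decreasing probability, so every one of the n values seen ends up retained with probability size/n. Stripped of the mutex and the Sample interface it fits in a few lines (reservoir size and input range here are arbitrary):

package main

import (
	"fmt"
	"math/rand"
)

func main() {
	const size = 10
	reservoir := make([]int64, 0, size)
	var count int64
	for v := int64(0); v < 1000; v++ {
		count++
		if len(reservoir) < size {
			reservoir = append(reservoir, v)
		} else if r := rand.Int63n(count); r < size {
			reservoir[r] = v // replace a uniformly chosen slot
		}
	}
	fmt.Println(reservoir)
}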
-func (s *UniformSample) Values() []int64 { - s.mutex.Lock() - defer s.mutex.Unlock() - values := make([]int64, len(s.values)) - copy(values, s.values) - return values -} - -// Variance returns the variance of the values in the sample. -func (s *UniformSample) Variance() float64 { - s.mutex.Lock() - defer s.mutex.Unlock() - return SampleVariance(s.values) -} - -// expDecaySample represents an individual sample in a heap. -type expDecaySample struct { - k float64 - v int64 -} - -func newExpDecaySampleHeap(reservoirSize int) *expDecaySampleHeap { - return &expDecaySampleHeap{make([]expDecaySample, 0, reservoirSize)} -} - -// expDecaySampleHeap is a min-heap of expDecaySamples. -// The internal implementation is copied from the standard library's container/heap -type expDecaySampleHeap struct { - s []expDecaySample -} - -func (h *expDecaySampleHeap) Clear() { - h.s = h.s[:0] -} - -func (h *expDecaySampleHeap) Push(s expDecaySample) { - n := len(h.s) - h.s = h.s[0 : n+1] - h.s[n] = s - h.up(n) -} - -func (h *expDecaySampleHeap) Pop() expDecaySample { - n := len(h.s) - 1 - h.s[0], h.s[n] = h.s[n], h.s[0] - h.down(0, n) - - n = len(h.s) - s := h.s[n-1] - h.s = h.s[0 : n-1] - return s -} - -func (h *expDecaySampleHeap) Size() int { - return len(h.s) -} - -func (h *expDecaySampleHeap) Values() []expDecaySample { - return h.s -} - -func (h *expDecaySampleHeap) up(j int) { - for { - i := (j - 1) / 2 // parent - if i == j || !(h.s[j].k < h.s[i].k) { - break - } - h.s[i], h.s[j] = h.s[j], h.s[i] - j = i - } -} - -func (h *expDecaySampleHeap) down(i, n int) { - for { - j1 := 2*i + 1 - if j1 >= n || j1 < 0 { // j1 < 0 after int overflow - break - } - j := j1 // left child - if j2 := j1 + 1; j2 < n && !(h.s[j1].k < h.s[j2].k) { - j = j2 // = 2*i + 2 // right child - } - if !(h.s[j].k < h.s[i].k) { - break - } - h.s[i], h.s[j] = h.s[j], h.s[i] - i = j - } -} - -type int64Slice []int64 - -func (p int64Slice) Len() int { return len(p) } -func (p int64Slice) Less(i, j int) bool { return p[i] < p[j] } -func (p int64Slice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } diff --git a/pkg/metrics/sample_test.go b/pkg/metrics/sample_test.go deleted file mode 100644 index 755a8cf0173..00000000000 --- a/pkg/metrics/sample_test.go +++ /dev/null @@ -1,367 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import ( - "math/rand" - "runtime" - "testing" - "time" -) - -// Benchmark{Compute,Copy}{1000,1000000} demonstrate that, even for relatively -// expensive computations like Variance, the cost of copying the Sample, as -// approximated by a make and copy, is much greater than the cost of the -// computation for small samples and only slightly less for large samples. 
-func BenchmarkCompute1000(b *testing.B) { - s := make([]int64, 1000) - for i := 0; i < len(s); i++ { - s[i] = int64(i) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - SampleVariance(s) - } -} -func BenchmarkCompute1000000(b *testing.B) { - s := make([]int64, 1000000) - for i := 0; i < len(s); i++ { - s[i] = int64(i) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - SampleVariance(s) - } -} -func BenchmarkCopy1000(b *testing.B) { - s := make([]int64, 1000) - for i := 0; i < len(s); i++ { - s[i] = int64(i) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - sCopy := make([]int64, len(s)) - copy(sCopy, s) - } -} -func BenchmarkCopy1000000(b *testing.B) { - s := make([]int64, 1000000) - for i := 0; i < len(s); i++ { - s[i] = int64(i) - } - b.ResetTimer() - for i := 0; i < b.N; i++ { - sCopy := make([]int64, len(s)) - copy(sCopy, s) - } -} - -func BenchmarkExpDecaySample257(b *testing.B) { - benchmarkSample(b, NewExpDecaySample(257, 0.015)) -} - -func BenchmarkExpDecaySample514(b *testing.B) { - benchmarkSample(b, NewExpDecaySample(514, 0.015)) -} - -func BenchmarkExpDecaySample1028(b *testing.B) { - benchmarkSample(b, NewExpDecaySample(1028, 0.015)) -} - -func BenchmarkUniformSample257(b *testing.B) { - benchmarkSample(b, NewUniformSample(257)) -} - -func BenchmarkUniformSample514(b *testing.B) { - benchmarkSample(b, NewUniformSample(514)) -} - -func BenchmarkUniformSample1028(b *testing.B) { - benchmarkSample(b, NewUniformSample(1028)) -} - -func TestExpDecaySample10(t *testing.T) { - rand.Seed(1) - s := NewExpDecaySample(100, 0.99) - for i := 0; i < 10; i++ { - s.Update(int64(i)) - } - if size := s.Count(); 10 != size { - t.Errorf("s.Count(): 10 != %v\n", size) - } - if size := s.Size(); 10 != size { - t.Errorf("s.Size(): 10 != %v\n", size) - } - if l := len(s.Values()); 10 != l { - t.Errorf("len(s.Values()): 10 != %v\n", l) - } - for _, v := range s.Values() { - if v > 10 || v < 0 { - t.Errorf("out of range [0, 10): %v\n", v) - } - } -} - -func TestExpDecaySample100(t *testing.T) { - rand.Seed(1) - s := NewExpDecaySample(1000, 0.01) - for i := 0; i < 100; i++ { - s.Update(int64(i)) - } - if size := s.Count(); 100 != size { - t.Errorf("s.Count(): 100 != %v\n", size) - } - if size := s.Size(); 100 != size { - t.Errorf("s.Size(): 100 != %v\n", size) - } - if l := len(s.Values()); 100 != l { - t.Errorf("len(s.Values()): 100 != %v\n", l) - } - for _, v := range s.Values() { - if v > 100 || v < 0 { - t.Errorf("out of range [0, 100): %v\n", v) - } - } -} - -func TestExpDecaySample1000(t *testing.T) { - rand.Seed(1) - s := NewExpDecaySample(100, 0.99) - for i := 0; i < 1000; i++ { - s.Update(int64(i)) - } - if size := s.Count(); 1000 != size { - t.Errorf("s.Count(): 1000 != %v\n", size) - } - if size := s.Size(); 100 != size { - t.Errorf("s.Size(): 100 != %v\n", size) - } - if l := len(s.Values()); 100 != l { - t.Errorf("len(s.Values()): 100 != %v\n", l) - } - for _, v := range s.Values() { - if v > 1000 || v < 0 { - t.Errorf("out of range [0, 1000): %v\n", v) - } - } -} - -// This test makes sure that the sample's priority is not amplified by using -// nanosecond duration since start rather than second duration since start. -// The priority becomes +Inf quickly after starting if this is done, -// effectively freezing the set of samples until a rescale step happens. 
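The comment above describes why the decay exponent must be computed from seconds: raw nanosecond counts overflow math.Exp to +Inf almost immediately, freezing the reservoir. A two-line demonstration of the failure mode the regression test below guards against:

package main

import (
	"fmt"
	"math"
	"time"
)

func main() {
	alpha := 0.015
	age := 10 * time.Millisecond
	fmt.Println(math.Exp(age.Seconds() * alpha))              // ~1.00015, well-behaved
	fmt.Println(math.Exp(float64(age.Nanoseconds()) * alpha)) // +Inf: priority is ruined
}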
-func TestExpDecaySampleNanosecondRegression(t *testing.T) { - rand.Seed(1) - s := NewExpDecaySample(100, 0.99) - for i := 0; i < 100; i++ { - s.Update(10) - } - time.Sleep(1 * time.Millisecond) - for i := 0; i < 100; i++ { - s.Update(20) - } - v := s.Values() - avg := float64(0) - for i := 0; i < len(v); i++ { - avg += float64(v[i]) - } - avg /= float64(len(v)) - if avg > 16 || avg < 14 { - t.Errorf("out of range [14, 16]: %v\n", avg) - } -} - -func TestExpDecaySampleRescale(t *testing.T) { - s := NewExpDecaySample(2, 0.001).(*ExpDecaySample) - s.update(time.Now(), 1) - s.update(time.Now().Add(time.Hour+time.Microsecond), 1) - for _, v := range s.values.Values() { - if v.k == 0.0 { - t.Fatal("v.k == 0.0") - } - } -} - -func TestExpDecaySampleSnapshot(t *testing.T) { - now := time.Now() - rand.Seed(1) - s := NewExpDecaySample(100, 0.99) - for i := 1; i <= 10000; i++ { - s.(*ExpDecaySample).update(now.Add(time.Duration(i)), int64(i)) - } - snapshot := s.Snapshot() - s.Update(1) - testExpDecaySampleStatistics(t, snapshot) -} - -func TestExpDecaySampleStatistics(t *testing.T) { - now := time.Now() - rand.Seed(1) - s := NewExpDecaySample(100, 0.99) - for i := 1; i <= 10000; i++ { - s.(*ExpDecaySample).update(now.Add(time.Duration(i)), int64(i)) - } - testExpDecaySampleStatistics(t, s) -} - -func TestUniformSample(t *testing.T) { - rand.Seed(1) - s := NewUniformSample(100) - for i := 0; i < 1000; i++ { - s.Update(int64(i)) - } - if size := s.Count(); 1000 != size { - t.Errorf("s.Count(): 1000 != %v\n", size) - } - if size := s.Size(); 100 != size { - t.Errorf("s.Size(): 100 != %v\n", size) - } - if l := len(s.Values()); 100 != l { - t.Errorf("len(s.Values()): 100 != %v\n", l) - } - for _, v := range s.Values() { - if v > 1000 || v < 0 { - t.Errorf("out of range [0, 100): %v\n", v) - } - } -} - -func TestUniformSampleIncludesTail(t *testing.T) { - rand.Seed(1) - s := NewUniformSample(100) - max := 100 - for i := 0; i < max; i++ { - s.Update(int64(i)) - } - v := s.Values() - sum := 0 - exp := (max - 1) * max / 2 - for i := 0; i < len(v); i++ { - sum += int(v[i]) - } - if exp != sum { - t.Errorf("sum: %v != %v\n", exp, sum) - } -} - -func TestUniformSampleSnapshot(t *testing.T) { - s := NewUniformSample(100) - for i := 1; i <= 10000; i++ { - s.Update(int64(i)) - } - snapshot := s.Snapshot() - s.Update(1) - testUniformSampleStatistics(t, snapshot) -} - -func TestUniformSampleStatistics(t *testing.T) { - rand.Seed(1) - s := NewUniformSample(100) - for i := 1; i <= 10000; i++ { - s.Update(int64(i)) - } - testUniformSampleStatistics(t, s) -} - -func benchmarkSample(b *testing.B, s Sample) { - var memStats runtime.MemStats - runtime.ReadMemStats(&memStats) - pauseTotalNs := memStats.PauseTotalNs - b.ResetTimer() - for i := 0; i < b.N; i++ { - s.Update(1) - } - b.StopTimer() - runtime.GC() - runtime.ReadMemStats(&memStats) - b.Logf("GC cost: %d ns/op", int(memStats.PauseTotalNs-pauseTotalNs)/b.N) -} - -func testExpDecaySampleStatistics(t *testing.T, s Sample) { - if count := s.Count(); 10000 != count { - t.Errorf("s.Count(): 10000 != %v\n", count) - } - if min := s.Min(); 107 != min { - t.Errorf("s.Min(): 107 != %v\n", min) - } - if max := s.Max(); 10000 != max { - t.Errorf("s.Max(): 10000 != %v\n", max) - } - if mean := s.Mean(); 4965.98 != mean { - t.Errorf("s.Mean(): 4965.98 != %v\n", mean) - } - if stdDev := s.StdDev(); 2959.825156930727 != stdDev { - t.Errorf("s.StdDev(): 2959.825156930727 != %v\n", stdDev) - } - ps := s.Percentiles([]float64{0.5, 0.75, 0.99}) - if 4615 != ps[0] { - 
t.Errorf("median: 4615 != %v\n", ps[0]) - } - if 7672 != ps[1] { - t.Errorf("75th percentile: 7672 != %v\n", ps[1]) - } - if 9998.99 != ps[2] { - t.Errorf("99th percentile: 9998.99 != %v\n", ps[2]) - } -} - -func testUniformSampleStatistics(t *testing.T, s Sample) { - if count := s.Count(); 10000 != count { - t.Errorf("s.Count(): 10000 != %v\n", count) - } - if min := s.Min(); 37 != min { - t.Errorf("s.Min(): 37 != %v\n", min) - } - if max := s.Max(); 9989 != max { - t.Errorf("s.Max(): 9989 != %v\n", max) - } - if mean := s.Mean(); 4748.14 != mean { - t.Errorf("s.Mean(): 4748.14 != %v\n", mean) - } - if stdDev := s.StdDev(); 2826.684117548333 != stdDev { - t.Errorf("s.StdDev(): 2826.684117548333 != %v\n", stdDev) - } - ps := s.Percentiles([]float64{0.5, 0.75, 0.99}) - if 4599 != ps[0] { - t.Errorf("median: 4599 != %v\n", ps[0]) - } - if 7380.5 != ps[1] { - t.Errorf("75th percentile: 7380.5 != %v\n", ps[1]) - } - if 9986.429999999998 != ps[2] { - t.Errorf("99th percentile: 9986.429999999998 != %v\n", ps[2]) - } -} - -// TestUniformSampleConcurrentUpdateCount would expose data race problems with -// concurrent Update and Count calls on Sample when test is called with -race -// argument -func TestUniformSampleConcurrentUpdateCount(t *testing.T) { - if testing.Short() { - t.Skip("skipping in short mode") - } - s := NewUniformSample(100) - for i := 0; i < 100; i++ { - s.Update(int64(i)) - } - quit := make(chan struct{}) - go func() { - t := time.NewTicker(10 * time.Millisecond) - for { - select { - case <-t.C: - s.Update(rand.Int63()) - case <-quit: - t.Stop() - return - } - } - }() - for i := 0; i < 1000; i++ { - s.Count() - time.Sleep(5 * time.Millisecond) - } - quit <- struct{}{} -} diff --git a/pkg/metrics/settings.go b/pkg/metrics/settings.go index 691bf6b6e73..5e51f85768a 100644 --- a/pkg/metrics/settings.go +++ b/pkg/metrics/settings.go @@ -1,25 +1,27 @@ package metrics -import "github.com/grafana/grafana/pkg/setting" +import ( + "strings" + "time" -type MetricPublisher interface { - Publish(metrics []Metric) -} + "github.com/grafana/grafana/pkg/metrics/graphitebridge" + "github.com/grafana/grafana/pkg/setting" + "github.com/prometheus/client_golang/prometheus" + ini "gopkg.in/ini.v1" +) type MetricSettings struct { - Enabled bool - IntervalSeconds int64 - - Publishers []MetricPublisher + Enabled bool + IntervalSeconds int64 + GraphiteBridgeConfig *graphitebridge.Config } -func readSettings() *MetricSettings { +func ReadSettings(file *ini.File) *MetricSettings { var settings = &MetricSettings{ - Enabled: false, - Publishers: make([]MetricPublisher, 0), + Enabled: false, } - var section, err = setting.Cfg.GetSection("metrics") + var section, err = file.GetSection("metrics") if err != nil { metricsLogger.Crit("Unable to find metrics config section", "error", err) return nil @@ -32,12 +34,46 @@ func readSettings() *MetricSettings { return settings } - if graphitePublisher, err := CreateGraphitePublisher(); err != nil { - metricsLogger.Error("Failed to init Graphite metric publisher", "error", err) - } else if graphitePublisher != nil { - metricsLogger.Info("Metrics publisher initialized", "type", "graphite") - settings.Publishers = append(settings.Publishers, graphitePublisher) + cfg, err := parseGraphiteSettings(settings, file) + if err != nil { + metricsLogger.Crit("Unable to parse metrics graphite section", "error", err) + return nil } + settings.GraphiteBridgeConfig = cfg + return settings } + +func parseGraphiteSettings(settings *MetricSettings, file *ini.File) (*graphitebridge.Config, 
error) { + graphiteSection, err := setting.Cfg.GetSection("metrics.graphite") + if err != nil { + return nil, nil + } + + address := graphiteSection.Key("address").String() + if address == "" { + return nil, nil + } + + cfg := &graphitebridge.Config{ + URL: address, + Prefix: graphiteSection.Key("prefix").MustString("prod.grafana.%(instance_name)s"), + CountersAsDelta: true, + Gatherer: prometheus.DefaultGatherer, + Interval: time.Duration(settings.IntervalSeconds) * time.Second, + Timeout: 10 * time.Second, + Logger: &logWrapper{logger: metricsLogger}, + ErrorHandling: graphitebridge.ContinueOnError, + } + + safeInstanceName := strings.Replace(setting.InstanceName, ".", "_", -1) + prefix := graphiteSection.Key("prefix").Value() + + if prefix == "" { + prefix = "prod.grafana.%(instance_name)s." + } + + cfg.Prefix = strings.Replace(prefix, "%(instance_name)s", safeInstanceName, -1) + return cfg, nil +} diff --git a/pkg/metrics/timer.go b/pkg/metrics/timer.go deleted file mode 100644 index 61c3bf9533d..00000000000 --- a/pkg/metrics/timer.go +++ /dev/null @@ -1,310 +0,0 @@ -// includes code from -// https://raw.githubusercontent.com/rcrowley/go-metrics/master/sample.go -// Copyright 2012 Richard Crowley. All rights reserved. - -package metrics - -import ( - "sync" - "time" -) - -// Timers capture the duration and rate of events. -type Timer interface { - Metric - - Count() int64 - Max() int64 - Mean() float64 - Min() int64 - Percentile(float64) float64 - Percentiles([]float64) []float64 - Rate1() float64 - Rate5() float64 - Rate15() float64 - RateMean() float64 - StdDev() float64 - Sum() int64 - Time(func()) - Update(time.Duration) - UpdateSince(time.Time) - Variance() float64 -} - -// NewCustomTimer constructs a new StandardTimer from a Histogram and a Meter. -func NewCustomTimer(meta *MetricMeta, h Histogram, m Meter) Timer { - if UseNilMetrics { - return NilTimer{} - } - return &StandardTimer{ - MetricMeta: meta, - histogram: h, - meter: m, - } -} - -// NewTimer constructs a new StandardTimer using an exponentially-decaying -// sample with the same reservoir size and alpha as UNIX load averages. -func NewTimer(meta *MetricMeta) Timer { - if UseNilMetrics { - return NilTimer{} - } - return &StandardTimer{ - MetricMeta: meta, - histogram: NewHistogram(meta, NewExpDecaySample(1028, 0.015)), - meter: NewMeter(meta), - } -} - -func RegTimer(name string, tagStrings ...string) Timer { - tr := NewTimer(NewMetricMeta(name, tagStrings)) - MetricStats.Register(tr) - return tr -} - -// NilTimer is a no-op Timer. -type NilTimer struct { - *MetricMeta - h Histogram - m Meter -} - -// Count is a no-op. -func (NilTimer) Count() int64 { return 0 } - -// Max is a no-op. -func (NilTimer) Max() int64 { return 0 } - -// Mean is a no-op. -func (NilTimer) Mean() float64 { return 0.0 } - -// Min is a no-op. -func (NilTimer) Min() int64 { return 0 } - -// Percentile is a no-op. -func (NilTimer) Percentile(p float64) float64 { return 0.0 } - -// Percentiles is a no-op. -func (NilTimer) Percentiles(ps []float64) []float64 { - return make([]float64, len(ps)) -} - -// Rate1 is a no-op. -func (NilTimer) Rate1() float64 { return 0.0 } - -// Rate5 is a no-op. -func (NilTimer) Rate5() float64 { return 0.0 } - -// Rate15 is a no-op. -func (NilTimer) Rate15() float64 { return 0.0 } - -// RateMean is a no-op. -func (NilTimer) RateMean() float64 { return 0.0 } - -// Snapshot is a no-op. -func (n NilTimer) Snapshot() Metric { return n } - -// StdDev is a no-op. 
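Back in parseGraphiteSettings above, the bridge prefix supports an %(instance_name)s placeholder, with dots in the instance name escaped so they do not create extra Graphite path segments. A standalone sketch of that substitution (the instance name is a made-up example):

package main

import (
	"fmt"
	"strings"
)

func main() {
	instanceName := "grafana.example.com" // stand-in for setting.InstanceName
	prefix := "prod.grafana.%(instance_name)s."

	// Same two-step replacement as parseGraphiteSettings.
	safe := strings.Replace(instanceName, ".", "_", -1)
	fmt.Println(strings.Replace(prefix, "%(instance_name)s", safe, -1))
	// Output: prod.grafana.grafana_example_com.
}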
-func (NilTimer) StdDev() float64 { return 0.0 } - -// Sum is a no-op. -func (NilTimer) Sum() int64 { return 0 } - -// Time is a no-op. -func (NilTimer) Time(func()) {} - -// Update is a no-op. -func (NilTimer) Update(time.Duration) {} - -// UpdateSince is a no-op. -func (NilTimer) UpdateSince(time.Time) {} - -// Variance is a no-op. -func (NilTimer) Variance() float64 { return 0.0 } - -// StandardTimer is the standard implementation of a Timer and uses a Histogram -// and Meter. -type StandardTimer struct { - *MetricMeta - histogram Histogram - meter Meter - mutex sync.Mutex -} - -// Count returns the number of events recorded. -func (t *StandardTimer) Count() int64 { - return t.histogram.Count() -} - -// Max returns the maximum value in the sample. -func (t *StandardTimer) Max() int64 { - return t.histogram.Max() -} - -// Mean returns the mean of the values in the sample. -func (t *StandardTimer) Mean() float64 { - return t.histogram.Mean() -} - -// Min returns the minimum value in the sample. -func (t *StandardTimer) Min() int64 { - return t.histogram.Min() -} - -// Percentile returns an arbitrary percentile of the values in the sample. -func (t *StandardTimer) Percentile(p float64) float64 { - return t.histogram.Percentile(p) -} - -// Percentiles returns a slice of arbitrary percentiles of the values in the -// sample. -func (t *StandardTimer) Percentiles(ps []float64) []float64 { - return t.histogram.Percentiles(ps) -} - -// Rate1 returns the one-minute moving average rate of events per second. -func (t *StandardTimer) Rate1() float64 { - return t.meter.Rate1() -} - -// Rate5 returns the five-minute moving average rate of events per second. -func (t *StandardTimer) Rate5() float64 { - return t.meter.Rate5() -} - -// Rate15 returns the fifteen-minute moving average rate of events per second. -func (t *StandardTimer) Rate15() float64 { - return t.meter.Rate15() -} - -// RateMean returns the meter's mean rate of events per second. -func (t *StandardTimer) RateMean() float64 { - return t.meter.RateMean() -} - -// Snapshot returns a read-only copy of the timer. -func (t *StandardTimer) Snapshot() Metric { - t.mutex.Lock() - defer t.mutex.Unlock() - return &TimerSnapshot{ - MetricMeta: t.MetricMeta, - histogram: t.histogram.Snapshot().(*HistogramSnapshot), - meter: t.meter.Snapshot().(*MeterSnapshot), - } -} - -// StdDev returns the standard deviation of the values in the sample. -func (t *StandardTimer) StdDev() float64 { - return t.histogram.StdDev() -} - -// Sum returns the sum in the sample. -func (t *StandardTimer) Sum() int64 { - return t.histogram.Sum() -} - -// Record the duration of the execution of the given function. -func (t *StandardTimer) Time(f func()) { - ts := time.Now() - f() - t.Update(time.Since(ts)) -} - -// Record the duration of an event. -func (t *StandardTimer) Update(d time.Duration) { - t.mutex.Lock() - defer t.mutex.Unlock() - t.histogram.Update(int64(d)) - t.meter.Mark(1) -} - -// Record the duration of an event that started at a time and ends now. -func (t *StandardTimer) UpdateSince(ts time.Time) { - t.mutex.Lock() - defer t.mutex.Unlock() - sinceMs := time.Since(ts) / time.Millisecond - t.histogram.Update(int64(sinceMs)) - t.meter.Mark(1) -} - -// Variance returns the variance of the values in the sample. -func (t *StandardTimer) Variance() float64 { - return t.histogram.Variance() -} - -// TimerSnapshot is a read-only copy of another Timer. 
-type TimerSnapshot struct { - *MetricMeta - histogram *HistogramSnapshot - meter *MeterSnapshot -} - -// Count returns the number of events recorded at the time the snapshot was -// taken. -func (t *TimerSnapshot) Count() int64 { return t.histogram.Count() } - -// Max returns the maximum value at the time the snapshot was taken. -func (t *TimerSnapshot) Max() int64 { return t.histogram.Max() } - -// Mean returns the mean value at the time the snapshot was taken. -func (t *TimerSnapshot) Mean() float64 { return t.histogram.Mean() } - -// Min returns the minimum value at the time the snapshot was taken. -func (t *TimerSnapshot) Min() int64 { return t.histogram.Min() } - -// Percentile returns an arbitrary percentile of sampled values at the time the -// snapshot was taken. -func (t *TimerSnapshot) Percentile(p float64) float64 { - return t.histogram.Percentile(p) -} - -// Percentiles returns a slice of arbitrary percentiles of sampled values at -// the time the snapshot was taken. -func (t *TimerSnapshot) Percentiles(ps []float64) []float64 { - return t.histogram.Percentiles(ps) -} - -// Rate1 returns the one-minute moving average rate of events per second at the -// time the snapshot was taken. -func (t *TimerSnapshot) Rate1() float64 { return t.meter.Rate1() } - -// Rate5 returns the five-minute moving average rate of events per second at -// the time the snapshot was taken. -func (t *TimerSnapshot) Rate5() float64 { return t.meter.Rate5() } - -// Rate15 returns the fifteen-minute moving average rate of events per second -// at the time the snapshot was taken. -func (t *TimerSnapshot) Rate15() float64 { return t.meter.Rate15() } - -// RateMean returns the meter's mean rate of events per second at the time the -// snapshot was taken. -func (t *TimerSnapshot) RateMean() float64 { return t.meter.RateMean() } - -// Snapshot returns the snapshot. -func (t *TimerSnapshot) Snapshot() Metric { return t } - -// StdDev returns the standard deviation of the values at the time the snapshot -// was taken. -func (t *TimerSnapshot) StdDev() float64 { return t.histogram.StdDev() } - -// Sum returns the sum at the time the snapshot was taken. -func (t *TimerSnapshot) Sum() int64 { return t.histogram.Sum() } - -// Time panics. -func (*TimerSnapshot) Time(func()) { - panic("Time called on a TimerSnapshot") -} - -// Update panics. -func (*TimerSnapshot) Update(time.Duration) { - panic("Update called on a TimerSnapshot") -} - -// UpdateSince panics. -func (*TimerSnapshot) UpdateSince(time.Time) { - panic("UpdateSince called on a TimerSnapshot") -} - -// Variance returns the variance of the values at the time the snapshot was -// taken. 
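With the Timer interface gone, callers such as the logger middleware below observe millisecond durations on a prometheus.Summary instead of calling Timer.Update. A minimal sketch of the replacement pattern; the metric mirrors M_Api_Dashboard_Save from this patch, and the surrounding main is scaffolding:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	saveTimer := prometheus.NewSummary(prometheus.SummaryOpts{
		Namespace: "grafana",
		Name:      "api_dashboard_save_milliseconds",
		Help:      "summary for dashboard save duration",
	})
	prometheus.MustRegister(saveTimer)

	start := time.Now()
	time.Sleep(5 * time.Millisecond) // simulated work
	// Timer.Update(d) becomes Summary.Observe(milliseconds):
	saveTimer.Observe(float64(time.Since(start).Nanoseconds() / int64(time.Millisecond)))
	fmt.Println("observed one sample")
}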
-func (t *TimerSnapshot) Variance() float64 { return t.histogram.Variance() } diff --git a/pkg/middleware/logger.go b/pkg/middleware/logger.go index 4db0aac069f..94f707800be 100644 --- a/pkg/middleware/logger.go +++ b/pkg/middleware/logger.go @@ -19,8 +19,8 @@ import ( "net/http" "time" - "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/setting" + "github.com/prometheus/client_golang/prometheus" "gopkg.in/macaron.v1" ) @@ -35,8 +35,8 @@ func Logger() macaron.Handler { timeTakenMs := time.Since(start) / time.Millisecond if timer, ok := c.Data["perfmon.timer"]; ok { - timerTyped := timer.(metrics.Timer) - timerTyped.Update(timeTakenMs) + timerTyped := timer.(prometheus.Summary) + timerTyped.Observe(float64(timeTakenMs)) } status := rw.Status() diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 949a9c16766..946ebae6b87 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -10,10 +10,10 @@ import ( "github.com/grafana/grafana/pkg/components/apikeygen" "github.com/grafana/grafana/pkg/log" l "github.com/grafana/grafana/pkg/login" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" + "github.com/prometheus/client_golang/prometheus" ) type Context struct { @@ -251,7 +251,7 @@ func (ctx *Context) HasHelpFlag(flag m.HelpFlags1) bool { return ctx.HelpFlags1.HasFlag(flag) } -func (ctx *Context) TimeRequest(timer metrics.Timer) { +func (ctx *Context) TimeRequest(timer prometheus.Summary) { ctx.Data["perfmon.timer"] = timer } diff --git a/pkg/middleware/request_metrics.go b/pkg/middleware/request_metrics.go index 417a1817d15..f2d71c0a0fe 100644 --- a/pkg/middleware/request_metrics.go +++ b/pkg/middleware/request_metrics.go @@ -2,19 +2,28 @@ package middleware import ( "net/http" + "strconv" "strings" + "time" "github.com/grafana/grafana/pkg/metrics" "gopkg.in/macaron.v1" ) -func RequestMetrics() macaron.Handler { +func RequestMetrics(handler string) macaron.Handler { return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) { rw := res.(macaron.ResponseWriter) + now := time.Now() c.Next() status := rw.Status() + code := sanitizeCode(status) + method := sanitizeMethod(req.Method) + metrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc() + duration := time.Since(now).Nanoseconds() / int64(time.Millisecond) + metrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration)) + if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") { countProxyRequests(status) } else if strings.HasPrefix(req.RequestURI, "/api/") { @@ -28,38 +37,165 @@ func RequestMetrics() macaron.Handler { func countApiRequests(status int) { switch status { case 200: - metrics.M_Api_Status_200.Inc(1) + metrics.M_Api_Status.WithLabelValues("200").Inc() case 404: - metrics.M_Api_Status_404.Inc(1) + metrics.M_Api_Status.WithLabelValues("404").Inc() case 500: - metrics.M_Api_Status_500.Inc(1) + metrics.M_Api_Status.WithLabelValues("500").Inc() default: - metrics.M_Api_Status_Unknown.Inc(1) + metrics.M_Api_Status.WithLabelValues("unknown").Inc() } } func countPageRequests(status int) { switch status { case 200: - metrics.M_Page_Status_200.Inc(1) + metrics.M_Page_Status.WithLabelValues("200").Inc() case 404: - metrics.M_Page_Status_404.Inc(1) + metrics.M_Page_Status.WithLabelValues("404").Inc() case 500: - metrics.M_Page_Status_500.Inc(1) + 
metrics.M_Page_Status.WithLabelValues("500").Inc()
 	default:
-		metrics.M_Page_Status_Unknown.Inc(1)
+		metrics.M_Page_Status.WithLabelValues("unknown").Inc()
 	}
 }
 
 func countProxyRequests(status int) {
 	switch status {
 	case 200:
-		metrics.M_Proxy_Status_200.Inc(1)
+		metrics.M_Proxy_Status.WithLabelValues("200").Inc()
 	case 404:
-		metrics.M_Proxy_Status_404.Inc(1)
+		metrics.M_Proxy_Status.WithLabelValues("404").Inc()
 	case 500:
-		metrics.M_Proxy_Status_500.Inc(1)
+		metrics.M_Proxy_Status.WithLabelValues("500").Inc()
 	default:
-		metrics.M_Proxy_Status_Unknown.Inc(1)
+		metrics.M_Proxy_Status.WithLabelValues("unknown").Inc()
+	}
+}
+
+func sanitizeMethod(m string) string {
+	switch m {
+	case "GET", "get":
+		return "get"
+	case "PUT", "put":
+		return "put"
+	case "HEAD", "head":
+		return "head"
+	case "POST", "post":
+		return "post"
+	case "DELETE", "delete":
+		return "delete"
+	case "CONNECT", "connect":
+		return "connect"
+	case "OPTIONS", "options":
+		return "options"
+	case "NOTIFY", "notify":
+		return "notify"
+	default:
+		return strings.ToLower(m)
+	}
+}
+
+// If the wrapped http.Handler has not set a status code, i.e. the value is
+// currently 0, sanitizeCode will return 200, for consistency with behavior in
+// the stdlib.
+func sanitizeCode(s int) string {
+	switch s {
+	case 100:
+		return "100"
+	case 101:
+		return "101"
+
+	case 200, 0:
+		return "200"
+	case 201:
+		return "201"
+	case 202:
+		return "202"
+	case 203:
+		return "203"
+	case 204:
+		return "204"
+	case 205:
+		return "205"
+	case 206:
+		return "206"
+
+	case 300:
+		return "300"
+	case 301:
+		return "301"
+	case 302:
+		return "302"
+	case 304:
+		return "304"
+	case 305:
+		return "305"
+	case 307:
+		return "307"
+
+	case 400:
+		return "400"
+	case 401:
+		return "401"
+	case 402:
+		return "402"
+	case 403:
+		return "403"
+	case 404:
+		return "404"
+	case 405:
+		return "405"
+	case 406:
+		return "406"
+	case 407:
+		return "407"
+	case 408:
+		return "408"
+	case 409:
+		return "409"
+	case 410:
+		return "410"
+	case 411:
+		return "411"
+	case 412:
+		return "412"
+	case 413:
+		return "413"
+	case 414:
+		return "414"
+	case 415:
+		return "415"
+	case 416:
+		return "416"
+	case 417:
+		return "417"
+	case 418:
+		return "418"
+
+	case 500:
+		return "500"
+	case 501:
+		return "501"
+	case 502:
+		return "502"
+	case 503:
+		return "503"
+	case 504:
+		return "504"
+	case 505:
+		return "505"
+
+	case 428:
+		return "428"
+	case 429:
+		return "429"
+	case 431:
+		return "431"
+	case 511:
+		return "511"
+
+	default:
+		return strconv.Itoa(s)
+	}
+}
diff --git a/pkg/middleware/request_tracing.go b/pkg/middleware/request_tracing.go
new file mode 100644
index 00000000000..c35c2a00734
--- /dev/null
+++ b/pkg/middleware/request_tracing.go
@@ -0,0 +1,36 @@
+package middleware
+
+import (
+	"fmt"
+	"net/http"
+
+	opentracing "github.com/opentracing/opentracing-go"
+	"github.com/opentracing/opentracing-go/ext"
+
+	"gopkg.in/macaron.v1"
+)
+
+func RequestTracing(handler string) macaron.Handler {
+	return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) {
+		rw := res.(macaron.ResponseWriter)
+
+		tracer := opentracing.GlobalTracer()
+		wireContext, _ := tracer.Extract(opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(req.Header))
+		span := tracer.StartSpan(fmt.Sprintf("HTTP %s", handler), ext.RPCServerOption(wireContext))
+		defer span.Finish()
+
+		ctx := opentracing.ContextWithSpan(req.Context(), span)
+		c.Req.Request = req.WithContext(ctx)
+
+		c.Next()
+
+		status := rw.Status()
+
+		ext.HTTPStatusCode.Set(span, uint16(status))
+
ext.HTTPUrl.Set(span, req.RequestURI) + ext.HTTPMethod.Set(span, req.Method) + if status >= 400 { + ext.Error.Set(span, true) + } + } +} diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index 3fdfd9c47da..4f10033425f 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -54,19 +54,31 @@ type DataSource struct { } var knownDatasourcePlugins map[string]bool = map[string]bool{ - DS_ES: true, - DS_GRAPHITE: true, - DS_INFLUXDB: true, - DS_INFLUXDB_08: true, - DS_KAIROSDB: true, - DS_CLOUDWATCH: true, - DS_PROMETHEUS: true, - DS_OPENTSDB: true, - "opennms": true, - "druid": true, - "dalmatinerdb": true, - "gnocci": true, - "zabbix": true, + DS_ES: true, + DS_GRAPHITE: true, + DS_INFLUXDB: true, + DS_INFLUXDB_08: true, + DS_KAIROSDB: true, + DS_CLOUDWATCH: true, + DS_PROMETHEUS: true, + DS_OPENTSDB: true, + "opennms": true, + "druid": true, + "dalmatinerdb": true, + "gnocci": true, + "zabbix": true, + "newrelic-app": true, + "grafana-datadog-datasource": true, + "grafana-simple-json": true, + "grafana-splunk-datasource": true, + "udoprog-heroic-datasource": true, + "grafana-openfalcon-datasource": true, + "opennms-datasource": true, + "rackerlabs-blueflood-datasource": true, + "crate-datasource": true, + "ayoungprogrammer-finance-datasource": true, + "monasca-datasource": true, + "vertamedia-clickhouse-datasource": true, } func IsKnownDataSourcePlugin(dsType string) bool { diff --git a/pkg/models/datasource_cache.go b/pkg/models/datasource_cache.go index e32c0ac9e7c..4a459c2783b 100644 --- a/pkg/models/datasource_cache.go +++ b/pkg/models/datasource_cache.go @@ -48,6 +48,7 @@ func (ds *DataSource) GetHttpTransport() (*http.Transport, error) { transport := &http.Transport{ TLSClientConfig: &tls.Config{ InsecureSkipVerify: true, + Renegotiation: tls.RenegotiateFreelyAsClient, }, Proxy: http.ProxyFromEnvironment, Dial: (&net.Dialer{ diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go index 433eb1b597f..d499c5e8532 100644 --- a/pkg/services/alerting/conditions/query.go +++ b/pkg/services/alerting/conditions/query.go @@ -112,7 +112,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange * req := c.getRequestForAlertRule(getDsInfo.Result, timeRange) result := make(tsdb.TimeSeriesSlice, 0) - resp, err := c.HandleRequest(context.Ctx, req) + resp, err := c.HandleRequest(context.Ctx, getDsInfo.Result, req) if err != nil { if err == gocontext.DeadlineExceeded { return nil, fmt.Errorf("Alert execution exceeded the timeout") @@ -139,8 +139,8 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange * return result, nil } -func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.Request { - req := &tsdb.Request{ +func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.TsdbQuery { + req := &tsdb.TsdbQuery{ TimeRange: timeRange, Queries: []*tsdb.Query{ { diff --git a/pkg/services/alerting/conditions/query_test.go b/pkg/services/alerting/conditions/query_test.go index 17b47b2832b..0ea6470bc2d 100644 --- a/pkg/services/alerting/conditions/query_test.go +++ b/pkg/services/alerting/conditions/query_test.go @@ -168,7 +168,7 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error) ctx.condition = condition - condition.HandleRequest = func(context context.Context, req *tsdb.Request) (*tsdb.Response, error) { + condition.HandleRequest = func(context 
context.Context, dsInfo *m.DataSource, req *tsdb.TsdbQuery) (*tsdb.Response, error) { return &tsdb.Response{ Results: map[string]*tsdb.QueryResult{ "A": {Series: ctx.series}, diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index 51d48287c14..4448a5cb978 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -2,8 +2,13 @@ package alerting import ( "context" + "fmt" "time" + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + tlog "github.com/opentracing/opentracing-go/log" + "github.com/benbjohnson/clock" "github.com/grafana/grafana/pkg/log" "golang.org/x/sync/errgroup" @@ -99,22 +104,44 @@ func (e *Engine) processJob(grafanaCtx context.Context, job *Job) error { }() alertCtx, cancelFn := context.WithTimeout(context.Background(), alertTimeout) + span := opentracing.StartSpan("alert execution") + alertCtx = opentracing.ContextWithSpan(alertCtx, span) job.Running = true evalContext := NewEvalContext(alertCtx, job.Rule) + evalContext.Ctx = alertCtx done := make(chan struct{}) - go func() { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) + ext.Error.Set(span, true) + span.LogFields( + tlog.Error(fmt.Errorf("%v", err)), + tlog.String("message", "failed to execute alert rule. panic was recovered."), + ) + span.Finish() close(done) } }() e.evalHandler.Eval(evalContext) e.resultHandler.Handle(evalContext) + + span.SetTag("alertId", evalContext.Rule.Id) + span.SetTag("dashboardId", evalContext.Rule.DashboardId) + span.SetTag("firing", evalContext.Firing) + span.SetTag("nodatapoints", evalContext.NoDataFound) + if evalContext.Error != nil { + ext.Error.Set(span, true) + span.LogFields( + tlog.Error(evalContext.Error), + tlog.String("message", "alerting execution failed"), + ) + } + + span.Finish() close(done) }() diff --git a/pkg/services/alerting/eval_handler.go b/pkg/services/alerting/eval_handler.go index 4958cab097b..5c2861b9154 100644 --- a/pkg/services/alerting/eval_handler.go +++ b/pkg/services/alerting/eval_handler.go @@ -63,8 +63,8 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) { context.EndTime = time.Now() context.Rule.State = e.getNewState(context) - elapsedTime := context.EndTime.Sub(context.StartTime) / time.Millisecond - metrics.M_Alerting_Execution_Time.Update(elapsedTime) + elapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond) + metrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime)) } // This should be move into evalContext once its been refactored. diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go index 7f14e195799..b8579ffdc6a 100644 --- a/pkg/services/alerting/extractor.go +++ b/pkg/services/alerting/extractor.go @@ -89,6 +89,11 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { continue } + panelId, err := panel.Get("id").Int64() + if err != nil { + return nil, fmt.Errorf("panel id is required. 
err %v", err) + } + // backward compatibility check, can be removed later enabled, hasEnabled := jsonAlert.CheckGet("enabled") if hasEnabled && enabled.MustBool() == false { @@ -103,7 +108,7 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { alert := &m.Alert{ DashboardId: e.Dash.Id, OrgId: e.OrgId, - PanelId: panel.Get("id").MustInt64(), + PanelId: panelId, Id: jsonAlert.Get("id").MustInt64(), Name: jsonAlert.Get("name").MustString(), Handler: jsonAlert.Get("handler").MustInt64(), diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index cb174783478..b7f83404452 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -200,6 +200,83 @@ func TestAlertRuleExtraction(t *testing.T) { }) }) + Convey("Panels missing id should return error", func() { + panelWithoutId := ` + { + "id": 57, + "title": "Graphite 4", + "originalTitle": "Graphite 4", + "tags": ["graphite"], + "rows": [ + { + "panels": [ + { + "title": "Active desktop users", + "editable": true, + "type": "graph", + "targets": [ + { + "refId": "A", + "target": "aliasByNode(statsd.fakesite.counters.session_start.desktop.count, 4)" + } + ], + "datasource": null, + "alert": { + "name": "name1", + "message": "desc1", + "handler": 1, + "frequency": "60s", + "conditions": [ + { + "type": "query", + "query": {"params": ["A", "5m", "now"]}, + "reducer": {"type": "avg", "params": []}, + "evaluator": {"type": ">", "params": [100]} + } + ] + } + }, + { + "title": "Active mobile users", + "id": 4, + "targets": [ + {"refId": "A", "target": ""}, + {"refId": "B", "target": "aliasByNode(statsd.fakesite.counters.session_start.mobile.count, 4)"} + ], + "datasource": "graphite2", + "alert": { + "name": "name2", + "message": "desc2", + "handler": 0, + "frequency": "60s", + "severity": "warning", + "conditions": [ + { + "type": "query", + "query": {"params": ["B", "5m", "now"]}, + "reducer": {"type": "avg", "params": []}, + "evaluator": {"type": ">", "params": [100]} + } + ] + } + } + ] + } + ] + }` + + dashJson, err := simplejson.NewJson([]byte(panelWithoutId)) + So(err, ShouldBeNil) + dash := m.NewDashboardFromJson(dashJson) + extractor := NewDashAlertExtractor(dash, 1) + + _, err = extractor.GetAlerts() + + Convey("panels without Id should return error", func() { + So(err, ShouldNotBeNil) + }) + }) + Convey("Parse and validate dashboard containing influxdb alert", func() { json2 := `{ diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index e831b566b22..be74d41cc6c 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -10,6 +10,8 @@ import ( "github.com/grafana/grafana/pkg/components/imguploader" "github.com/grafana/grafana/pkg/components/renderer" "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/metrics" + m "github.com/grafana/grafana/pkg/models" ) @@ -66,6 +68,7 @@ func (n *notificationService) sendNotifications(context *EvalContext, notifiers for _, notifier := range notifiers { not := notifier //avoid updating scope variable in go routine n.log.Info("Sending notification", "type", not.GetType(), "id", not.GetNotifierId(), "isDefault", not.GetIsDefault()) + metrics.M_Alerting_Notification_Sent.WithLabelValues(not.GetType()).Inc() g.Go(func() error { return not.Notify(context) }) } @@ -97,7 +100,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { context.ImageOnDiskPath = imagePath } - context.ImagePublicUrl, err = 
uploader.Upload(context.ImageOnDiskPath) + context.ImagePublicUrl, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) if err != nil { return err } diff --git a/pkg/services/alerting/notifiers/dingding.go b/pkg/services/alerting/notifiers/dingding.go index db8735bd795..e32b9d34f91 100644 --- a/pkg/services/alerting/notifiers/dingding.go +++ b/pkg/services/alerting/notifiers/dingding.go @@ -4,7 +4,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -47,7 +46,6 @@ type DingDingNotifier struct { func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Sending dingding") - metrics.M_Alerting_Notification_Sent_DingDing.Inc(1) messageUrl, err := evalContext.GetRuleUrl() if err != nil { diff --git a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go index cdde05ba62f..7e8c4b33c0c 100644 --- a/pkg/services/alerting/notifiers/email.go +++ b/pkg/services/alerting/notifiers/email.go @@ -6,7 +6,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/setting" @@ -61,7 +60,6 @@ func NewEmailNotifier(model *m.AlertNotification) (alerting.Notifier, error) { func (this *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Sending alert notification to", "addresses", this.Addresses) - metrics.M_Alerting_Notification_Sent_Email.Inc(1) ruleUrl, err := evalContext.GetRuleUrl() if err != nil { diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go index 69ad0d65896..4fbaa2d543e 100644 --- a/pkg/services/alerting/notifiers/line.go +++ b/pkg/services/alerting/notifiers/line.go @@ -2,12 +2,12 @@ package notifiers import ( "fmt" + "net/url" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" - "net/url" ) func init() { @@ -53,7 +53,6 @@ type LineNotifier struct { func (this *LineNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Executing line notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) - metrics.M_Alerting_Notification_Sent_LINE.Inc(1) var err error switch evalContext.Rule.State { diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 6792702b539..e67ccfb10e5 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -7,7 +7,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -65,7 +64,6 @@ type OpsGenieNotifier struct { } func (this *OpsGenieNotifier) Notify(evalContext *alerting.EvalContext) error { - metrics.M_Alerting_Notification_Sent_OpsGenie.Inc(1) var err error switch evalContext.Rule.State { diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go 
index 31c2cdeb679..1cf8b3a1ba8 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -6,7 +6,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -63,7 +62,6 @@ type PagerdutyNotifier struct { } func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { - metrics.M_Alerting_Notification_Sent_PagerDuty.Inc(1) if evalContext.Rule.State == m.AlertStateOK && !this.AutoResolve { this.log.Info("Not sending a trigger to Pagerduty", "state", evalContext.Rule.State, "auto resolve", this.AutoResolve) diff --git a/pkg/services/alerting/notifiers/pushover.go b/pkg/services/alerting/notifiers/pushover.go index 7a2297f9e89..ecb4ed42e3e 100644 --- a/pkg/services/alerting/notifiers/pushover.go +++ b/pkg/services/alerting/notifiers/pushover.go @@ -7,7 +7,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -125,7 +124,6 @@ type PushoverNotifier struct { } func (this *PushoverNotifier) Notify(evalContext *alerting.EvalContext) error { - metrics.M_Alerting_Notification_Sent_Pushover.Inc(1) ruleUrl, err := evalContext.GetRuleUrl() if err != nil { this.log.Error("Failed get rule link", "error", err) diff --git a/pkg/services/alerting/notifiers/sensu.go b/pkg/services/alerting/notifiers/sensu.go index 00661f864b3..a4c41e14bae 100644 --- a/pkg/services/alerting/notifiers/sensu.go +++ b/pkg/services/alerting/notifiers/sensu.go @@ -7,7 +7,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -74,7 +73,6 @@ type SensuNotifier struct { func (this *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Sending sensu result") - metrics.M_Alerting_Notification_Sent_Sensu.Inc(1) bodyJSON := simplejson.New() bodyJSON.Set("ruleId", evalContext.Rule.Id) diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go index a0d08ae2ba7..d917daa3620 100644 --- a/pkg/services/alerting/notifiers/slack.go +++ b/pkg/services/alerting/notifiers/slack.go @@ -6,7 +6,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/setting" @@ -79,7 +78,6 @@ type SlackNotifier struct { func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Executing slack notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) - metrics.M_Alerting_Notification_Sent_Slack.Inc(1) ruleUrl, err := evalContext.GetRuleUrl() if err != nil { diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go index 71169c15599..7fb029e57c8 100644 --- a/pkg/services/alerting/notifiers/telegram.go +++ b/pkg/services/alerting/notifiers/telegram.go @@ -6,7 +6,6 @@ import ( "github.com/grafana/grafana/pkg/bus" 
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -80,7 +79,6 @@ func NewTelegramNotifier(model *m.AlertNotification) (alerting.Notifier, error) func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Sending alert notification to", "bot_token", this.BotToken) this.log.Info("Sending alert notification to", "chat_id", this.ChatID) - metrics.M_Alerting_Notification_Sent_Telegram.Inc(1) bodyJSON := simplejson.New() diff --git a/pkg/services/alerting/notifiers/threema.go b/pkg/services/alerting/notifiers/threema.go index 80c84c26a24..e4ffffc9108 100644 --- a/pkg/services/alerting/notifiers/threema.go +++ b/pkg/services/alerting/notifiers/threema.go @@ -7,7 +7,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -118,7 +117,6 @@ func NewThreemaNotifier(model *m.AlertNotification) (alerting.Notifier, error) { func (notifier *ThreemaNotifier) Notify(evalContext *alerting.EvalContext) error { notifier.log.Info("Sending alert notification from", "threema_id", notifier.GatewayID) notifier.log.Info("Sending alert notification to", "threema_id", notifier.RecipientID) - metrics.M_Alerting_Notification_Sent_Threema.Inc(1) // Set up basic API request data data := url.Values{} diff --git a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go index 2fbb2f64d1b..4b4db553cde 100644 --- a/pkg/services/alerting/notifiers/victorops.go +++ b/pkg/services/alerting/notifiers/victorops.go @@ -6,7 +6,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/setting" @@ -72,7 +71,6 @@ type VictoropsNotifier struct { // Notify sends notification to Victorops via POST to URL endpoint func (this *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.Id, "notification", this.Name) - metrics.M_Alerting_Notification_Sent_Victorops.Inc(1) ruleUrl, err := evalContext.GetRuleUrl() if err != nil { diff --git a/pkg/services/alerting/notifiers/webhook.go b/pkg/services/alerting/notifiers/webhook.go index 87868d331b2..4c97ed2b75e 100644 --- a/pkg/services/alerting/notifiers/webhook.go +++ b/pkg/services/alerting/notifiers/webhook.go @@ -4,7 +4,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" ) @@ -68,7 +67,6 @@ type WebhookNotifier struct { func (this *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Info("Sending webhook") - metrics.M_Alerting_Notification_Sent_Webhook.Inc(1) bodyJSON := simplejson.New() bodyJSON.Set("title", evalContext.GetNotificationTitle()) diff --git a/pkg/services/alerting/reader.go b/pkg/services/alerting/reader.go index 1ecef79c434..45f0c65d4fb 100644 --- 
a/pkg/services/alerting/reader.go +++ b/pkg/services/alerting/reader.go @@ -59,7 +59,7 @@ func (arr *DefaultRuleReader) Fetch() []*Rule { } } - metrics.M_Alerting_Active_Alerts.Update(int64(len(res))) + metrics.M_Alerting_Active_Alerts.Set(float64(len(res))) return res } diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 972fbd3a461..d34dbf5a632 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -42,7 +42,7 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { annotationData.Set("noData", true) } - countStateResult(evalContext.Rule.State) + metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc() if evalContext.ShouldUpdateAlertState() { handler.log.Info("New state change", "alertId", evalContext.Rule.Id, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) @@ -95,18 +95,3 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { return nil } - -func countStateResult(state m.AlertStateType) { - switch state { - case m.AlertStatePending: - metrics.M_Alerting_Result_State_Pending.Inc(1) - case m.AlertStateAlerting: - metrics.M_Alerting_Result_State_Alerting.Inc(1) - case m.AlertStateOK: - metrics.M_Alerting_Result_State_Ok.Inc(1) - case m.AlertStatePaused: - metrics.M_Alerting_Result_State_Paused.Inc(1) - case m.AlertStateNoData: - metrics.M_Alerting_Result_State_NoData.Inc(1) - } -} diff --git a/pkg/services/notifications/mailer.go b/pkg/services/notifications/mailer.go index df9f1138b15..7fbf39ee41d 100644 --- a/pkg/services/notifications/mailer.go +++ b/pkg/services/notifications/mailer.go @@ -101,7 +101,11 @@ func createDialer() (*gomail.Dialer, error) { d := gomail.NewDialer(host, iPort, setting.Smtp.User, setting.Smtp.Password) d.TLSConfig = tlsconfig - d.LocalName = setting.InstanceName + if setting.Smtp.EhloIdentity != "" { + d.LocalName = setting.Smtp.EhloIdentity + } else { + d.LocalName = setting.InstanceName + } return d, nil } diff --git a/pkg/services/sqlstore/dashboard.go b/pkg/services/sqlstore/dashboard.go index 50b02bf0970..27812eef32e 100644 --- a/pkg/services/sqlstore/dashboard.go +++ b/pkg/services/sqlstore/dashboard.go @@ -75,7 +75,7 @@ func SaveDashboard(cmd *m.SaveDashboardCommand) error { if dash.Id == 0 { dash.Version = 1 - metrics.M_Models_Dashboard_Insert.Inc(1) + metrics.M_Api_Dashboard_Insert.Inc() dash.Data.Set("version", dash.Version) affectedRows, err = sess.Insert(dash) } else { diff --git a/pkg/services/sqlstore/datasource.go b/pkg/services/sqlstore/datasource.go index 5d8e0e049ae..88dc34fd196 100644 --- a/pkg/services/sqlstore/datasource.go +++ b/pkg/services/sqlstore/datasource.go @@ -20,7 +20,7 @@ func init() { } func GetDataSourceById(query *m.GetDataSourceByIdQuery) error { - metrics.M_DB_DataSource_QueryById.Inc(1) + metrics.M_DB_DataSource_QueryById.Inc() datasource := m.DataSource{OrgId: query.OrgId, Id: query.Id} has, err := x.Get(&datasource) diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 0ff60b5f2e3..37c3bc9d1d8 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -28,7 +28,6 @@ func init() { bus.AddHandler("sql", SearchUsers) bus.AddHandler("sql", GetUserOrgList) bus.AddHandler("sql", DeleteUser) - bus.AddHandler("sql", SetUsingOrg) bus.AddHandler("sql", UpdateUserPermissions) bus.AddHandler("sql", SetUserHelpFlag) } diff --git a/pkg/setting/setting_smtp.go 
b/pkg/setting/setting_smtp.go index db065070620..9d8b8a529a5 100644 --- a/pkg/setting/setting_smtp.go +++ b/pkg/setting/setting_smtp.go @@ -1,15 +1,16 @@ package setting type SmtpSettings struct { - Enabled bool - Host string - User string - Password string - CertFile string - KeyFile string - FromAddress string - FromName string - SkipVerify bool + Enabled bool + Host string + User string + Password string + CertFile string + KeyFile string + FromAddress string + FromName string + EhloIdentity string + SkipVerify bool SendWelcomeEmailOnSignUp bool TemplatesPattern string @@ -25,6 +26,7 @@ func readSmtpSettings() { Smtp.KeyFile = sec.Key("key_file").String() Smtp.FromAddress = sec.Key("from_address").String() Smtp.FromName = sec.Key("from_name").String() + Smtp.EhloIdentity = sec.Key("ehlo_identity").String() Smtp.SkipVerify = sec.Key("skip_verify").MustBool(false) emails := Cfg.Section("emails") diff --git a/pkg/tracing/tracing.go b/pkg/tracing/tracing.go new file mode 100644 index 00000000000..be81cd7e794 --- /dev/null +++ b/pkg/tracing/tracing.go @@ -0,0 +1,116 @@ +package tracing + +import ( + "io" + "strings" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/setting" + + opentracing "github.com/opentracing/opentracing-go" + jaegercfg "github.com/uber/jaeger-client-go/config" + ini "gopkg.in/ini.v1" +) + +var ( + logger log.Logger = log.New("tracing") +) + +type TracingSettings struct { + Enabled bool + Address string + CustomTags map[string]string + SamplerType string + SamplerParam float64 +} + +func Init(file *ini.File) (io.Closer, error) { + settings := parseSettings(file) + return internalInit(settings) +} + +func parseSettings(file *ini.File) *TracingSettings { + settings := &TracingSettings{} + + var section, err = setting.Cfg.GetSection("tracing.jaeger") + if err != nil { + return settings + } + + settings.Address = section.Key("address").MustString("") + if settings.Address != "" { + settings.Enabled = true + } + + settings.CustomTags = splitTagSettings(section.Key("always_included_tag").MustString("")) + settings.SamplerType = section.Key("sampler_type").MustString("") + settings.SamplerParam = section.Key("sampler_param").MustFloat64(1) + + return settings +} + +func internalInit(settings *TracingSettings) (io.Closer, error) { + if !settings.Enabled { + return &nullCloser{}, nil + } + + cfg := jaegercfg.Configuration{ + Disabled: !settings.Enabled, + Sampler: &jaegercfg.SamplerConfig{ + Type: settings.SamplerType, + Param: settings.SamplerParam, + }, + Reporter: &jaegercfg.ReporterConfig{ + LogSpans: false, + LocalAgentHostPort: settings.Address, + }, + } + + jLogger := &jaegerLogWrapper{logger: log.New("jaeger")} + + options := []jaegercfg.Option{} + options = append(options, jaegercfg.Logger(jLogger)) + + for tag, value := range settings.CustomTags { + options = append(options, jaegercfg.Tag(tag, value)) + } + + tracer, closer, err := cfg.New("grafana", options...) 
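+ // cfg.New returns the tracer together with an io.Closer; Init hands the
+ // closer back to the caller so buffered spans can be flushed at shutdown.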
+ if err != nil { + return nil, err + } + + logger.Info("Initialized jaeger tracer", "address", settings.Address) + opentracing.InitGlobalTracer(tracer) + return closer, nil +} + +func splitTagSettings(input string) map[string]string { + res := map[string]string{} + + tags := strings.Split(input, ",") + for _, v := range tags { + kv := strings.Split(v, ":") + if len(kv) > 1 { + res[kv[0]] = kv[1] + } + } + + return res +} + +type jaegerLogWrapper struct { + logger log.Logger +} + +func (jlw *jaegerLogWrapper) Error(msg string) { + jlw.logger.Error(msg) +} + +func (jlw *jaegerLogWrapper) Infof(msg string, args ...interface{}) { + jlw.logger.Info(msg, args) +} + +type nullCloser struct{} + +func (*nullCloser) Close() error { return nil } diff --git a/pkg/tracing/tracing_test.go b/pkg/tracing/tracing_test.go new file mode 100644 index 00000000000..27e4de777a3 --- /dev/null +++ b/pkg/tracing/tracing_test.go @@ -0,0 +1,36 @@ +package tracing + +import "testing" + +func TestGroupSplit(t *testing.T) { + tests := []struct { + input string + expected map[string]string + }{ + { + input: "tag1:value1,tag2:value2", + expected: map[string]string{ + "tag1": "value1", + "tag2": "value2", + }, + }, + { + input: "", + expected: map[string]string{}, + }, + { + input: "tag1", + expected: map[string]string{}, + }, + } + + for _, test := range tests { + tags := splitTagSettings(test.input) + for k, v := range test.expected { + value, exists := tags[k] + if !exists || value != v { + t.Errorf("tags does not match %v ", test) + } + } + } +} diff --git a/pkg/tsdb/batch.go b/pkg/tsdb/batch.go deleted file mode 100644 index 8130ac94b0d..00000000000 --- a/pkg/tsdb/batch.go +++ /dev/null @@ -1,90 +0,0 @@ -package tsdb - -import "context" - -type Batch struct { - DataSourceId int64 - Queries QuerySlice - Depends map[string]bool - Done bool - Started bool -} - -type BatchSlice []*Batch - -func newBatch(dsId int64, queries QuerySlice) *Batch { - return &Batch{ - DataSourceId: dsId, - Queries: queries, - Depends: make(map[string]bool), - } -} - -func (bg *Batch) process(ctx context.Context, queryContext *QueryContext) { - executor, err := getExecutorFor(bg.Queries[0].DataSource) - - if err != nil { - bg.Done = true - result := &BatchResult{ - Error: err, - QueryResults: make(map[string]*QueryResult), - } - for _, query := range bg.Queries { - result.QueryResults[query.RefId] = &QueryResult{Error: result.Error} - } - queryContext.ResultsChan <- result - return - } - - res := executor.Execute(ctx, bg.Queries, queryContext) - bg.Done = true - queryContext.ResultsChan <- res -} - -func (bg *Batch) addQuery(query *Query) { - bg.Queries = append(bg.Queries, query) -} - -func (bg *Batch) allDependenciesAreIn(context *QueryContext) bool { - for key := range bg.Depends { - if _, exists := context.Results[key]; !exists { - return false - } - } - - return true -} - -func getBatches(req *Request) (BatchSlice, error) { - batches := make(BatchSlice, 0) - - for _, query := range req.Queries { - if foundBatch := findMatchingBatchGroup(query, batches); foundBatch != nil { - foundBatch.addQuery(query) - } else { - newBatch := newBatch(query.DataSource.Id, QuerySlice{query}) - batches = append(batches, newBatch) - - for _, refId := range query.Depends { - for _, batch := range batches { - for _, batchQuery := range batch.Queries { - if batchQuery.RefId == refId { - newBatch.Depends[refId] = true - } - } - } - } - } - } - - return batches, nil -} - -func findMatchingBatchGroup(query *Query, batches BatchSlice) *Batch { - for _, batch := 
range batches { - if batch.DataSourceId == query.DataSource.Id { - return batch - } - } - return nil -} diff --git a/pkg/tsdb/executor.go b/pkg/tsdb/executor.go deleted file mode 100644 index 251b3dc947a..00000000000 --- a/pkg/tsdb/executor.go +++ /dev/null @@ -1,36 +0,0 @@ -package tsdb - -import ( - "context" - "fmt" - - "github.com/grafana/grafana/pkg/models" -) - -type Executor interface { - Execute(ctx context.Context, queries QuerySlice, query *QueryContext) *BatchResult -} - -var registry map[string]GetExecutorFn - -type GetExecutorFn func(dsInfo *models.DataSource) (Executor, error) - -func init() { - registry = make(map[string]GetExecutorFn) -} - -func getExecutorFor(dsInfo *models.DataSource) (Executor, error) { - if fn, exists := registry[dsInfo.Type]; exists { - executor, err := fn(dsInfo) - if err != nil { - return nil, err - } - - return executor, nil - } - return nil, fmt.Errorf("Could not find executor for data source type: %s", dsInfo.Type) -} - -func RegisterExecutor(pluginId string, fn GetExecutorFn) { - registry[pluginId] = fn -} diff --git a/pkg/tsdb/fake_test.go b/pkg/tsdb/fake_test.go index 3c773971240..572b3c77ff3 100644 --- a/pkg/tsdb/fake_test.go +++ b/pkg/tsdb/fake_test.go @@ -11,7 +11,7 @@ type FakeExecutor struct { resultsFn map[string]ResultsFn } -type ResultsFn func(context *QueryContext) *QueryResult +type ResultsFn func(context *TsdbQuery) *QueryResult func NewFakeExecutor(dsInfo *models.DataSource) (*FakeExecutor, error) { return &FakeExecutor{ @@ -20,18 +20,18 @@ func NewFakeExecutor(dsInfo *models.DataSource) (*FakeExecutor, error) { }, nil } -func (e *FakeExecutor) Execute(ctx context.Context, queries QuerySlice, context *QueryContext) *BatchResult { - result := &BatchResult{QueryResults: make(map[string]*QueryResult)} - for _, query := range queries { +func (e *FakeExecutor) Query(ctx context.Context, dsInfo *models.DataSource, context *TsdbQuery) (*Response, error) { + result := &Response{Results: make(map[string]*QueryResult)} + for _, query := range context.Queries { if results, has := e.results[query.RefId]; has { - result.QueryResults[query.RefId] = results + result.Results[query.RefId] = results } if testFunc, has := e.resultsFn[query.RefId]; has { - result.QueryResults[query.RefId] = testFunc(context) + result.Results[query.RefId] = testFunc(context) } } - return result + return result, nil } func (e *FakeExecutor) Return(refId string, series TimeSeriesSlice) { diff --git a/pkg/tsdb/graphite/graphite.go b/pkg/tsdb/graphite/graphite.go index a467e839b82..7cadf055ff6 100644 --- a/pkg/tsdb/graphite/graphite.go +++ b/pkg/tsdb/graphite/graphite.go @@ -17,24 +17,15 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" + opentracing "github.com/opentracing/opentracing-go" ) type GraphiteExecutor struct { - *models.DataSource HttpClient *http.Client } -func NewGraphiteExecutor(datasource *models.DataSource) (tsdb.Executor, error) { - httpClient, err := datasource.GetHttpClient() - - if err != nil { - return nil, err - } - - return &GraphiteExecutor{ - DataSource: datasource, - HttpClient: httpClient, - }, nil +func NewGraphiteExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + return &GraphiteExecutor{}, nil } var ( @@ -43,50 +34,69 @@ var ( func init() { glog = log.New("tsdb.graphite") - tsdb.RegisterExecutor("graphite", NewGraphiteExecutor) + tsdb.RegisterTsdbQueryEndpoint("graphite", NewGraphiteExecutor) } -func (e *GraphiteExecutor) Execute(ctx 
context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{} +func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{} + + from := "-" + formatTimeRange(tsdbQuery.TimeRange.From) + until := formatTimeRange(tsdbQuery.TimeRange.To) + var target string formData := url.Values{ - "from": []string{"-" + formatTimeRange(context.TimeRange.From)}, - "until": []string{formatTimeRange(context.TimeRange.To)}, + "from": []string{from}, + "until": []string{until}, "format": []string{"json"}, "maxDataPoints": []string{"500"}, } - for _, query := range queries { + for _, query := range tsdbQuery.Queries { if fullTarget, err := query.Model.Get("targetFull").String(); err == nil { - formData["target"] = []string{fixIntervalFormat(fullTarget)} + target = fixIntervalFormat(fullTarget) } else { - formData["target"] = []string{fixIntervalFormat(query.Model.Get("target").MustString())} + target = fixIntervalFormat(query.Model.Get("target").MustString()) } } + formData["target"] = []string{target} + if setting.Env == setting.DEV { glog.Debug("Graphite request", "params", formData) } - req, err := e.createRequest(formData) + req, err := e.createRequest(dsInfo, formData) if err != nil { - result.Error = err - return result + return nil, err } - res, err := ctxhttp.Do(ctx, e.HttpClient, req) + httpClient, err := dsInfo.GetHttpClient() if err != nil { - result.Error = err - return result + return nil, err + } + + span, ctx := opentracing.StartSpanFromContext(ctx, "graphite query") + span.SetTag("target", target) + span.SetTag("from", from) + span.SetTag("until", until) + defer span.Finish() + + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + + res, err := ctxhttp.Do(ctx, httpClient, req) + if err != nil { + return nil, err } data, err := e.parseResponse(res) if err != nil { - result.Error = err - return result + return nil, err } - result.QueryResults = make(map[string]*tsdb.QueryResult) + result.Results = make(map[string]*tsdb.QueryResult) queryRes := tsdb.NewQueryResult() for _, series := range data { @@ -100,8 +110,8 @@ func (e *GraphiteExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, } } - result.QueryResults["A"] = queryRes - return result + result.Results["A"] = queryRes + return result, nil } func (e *GraphiteExecutor) parseResponse(res *http.Response) ([]TargetResponseDTO, error) { @@ -126,8 +136,8 @@ func (e *GraphiteExecutor) parseResponse(res *http.Response) ([]TargetResponseDT return data, nil } -func (e *GraphiteExecutor) createRequest(data url.Values) (*http.Request, error) { - u, _ := url.Parse(e.Url) +func (e *GraphiteExecutor) createRequest(dsInfo *models.DataSource, data url.Values) (*http.Request, error) { + u, _ := url.Parse(dsInfo.Url) u.Path = path.Join(u.Path, "render") req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(data.Encode())) @@ -137,8 +147,8 @@ func (e *GraphiteExecutor) createRequest(data url.Values) (*http.Request, error) } req.Header.Set("Content-Type", "application/x-www-form-urlencoded") - if e.BasicAuth { - req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) + if dsInfo.BasicAuth { + req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword) } return req, err diff --git a/pkg/tsdb/influxdb/influxdb.go b/pkg/tsdb/influxdb/influxdb.go index 21d359d24bf..6100d3b401e 100644 --- 
a/pkg/tsdb/influxdb/influxdb.go +++ b/pkg/tsdb/influxdb/influxdb.go @@ -17,24 +17,16 @@ import ( ) type InfluxDBExecutor struct { - *models.DataSource + //*models.DataSource QueryParser *InfluxdbQueryParser ResponseParser *ResponseParser - HttpClient *http.Client + //HttpClient *http.Client } -func NewInfluxDBExecutor(datasource *models.DataSource) (tsdb.Executor, error) { - httpClient, err := datasource.GetHttpClient() - - if err != nil { - return nil, err - } - +func NewInfluxDBExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { return &InfluxDBExecutor{ - DataSource: datasource, QueryParser: &InfluxdbQueryParser{}, ResponseParser: &ResponseParser{}, - HttpClient: httpClient, }, nil } @@ -44,38 +36,43 @@ var ( ) func init() { glog = log.New("tsdb.influxdb") - tsdb.RegisterExecutor("influxdb", NewInfluxDBExecutor) + tsdb.RegisterTsdbQueryEndpoint("influxdb", NewInfluxDBExecutor) } -func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{} +func (e *InfluxDBExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{} - query, err := e.getQuery(queries, context) + query, err := e.getQuery(dsInfo, tsdbQuery.Queries, tsdbQuery) if err != nil { - return result.WithError(err) + return nil, err } - rawQuery, err := query.Build(context) + rawQuery, err := query.Build(tsdbQuery) if err != nil { - return result.WithError(err) + return nil, err } if setting.Env == setting.DEV { glog.Debug("Influxdb query", "raw query", rawQuery) } - req, err := e.createRequest(rawQuery) + req, err := e.createRequest(dsInfo, rawQuery) if err != nil { - return result.WithError(err) + return nil, err } - resp, err := ctxhttp.Do(ctx, e.HttpClient, req) + httpClient, err := dsInfo.GetHttpClient() if err != nil { - return result.WithError(err) + return nil, err + } + + resp, err := ctxhttp.Do(ctx, httpClient, req) + if err != nil { + return nil, err } if resp.StatusCode/100 != 2 { - return result.WithError(fmt.Errorf("Influxdb returned statuscode invalid status code: %v", resp.Status)) + return nil, fmt.Errorf("Influxdb returned invalid status code: %v", resp.Status) } var response Response @@ -85,23 +82,23 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, err = dec.Decode(&response) if err != nil { - return result.WithError(err) + return nil, err } if response.Err != nil { - return result.WithError(response.Err) + return nil, response.Err } - result.QueryResults = make(map[string]*tsdb.QueryResult) - result.QueryResults["A"] = e.ResponseParser.Parse(&response, query) + result.Results = make(map[string]*tsdb.QueryResult) + result.Results["A"] = e.ResponseParser.Parse(&response, query) - return result + return result, nil } -func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (*Query, error) { +func (e *InfluxDBExecutor) getQuery(dsInfo *models.DataSource, queries []*tsdb.Query, context *tsdb.TsdbQuery) (*Query, error) { for _, v := range queries { - query, err := e.QueryParser.Parse(v.Model, e.DataSource) + query, err := e.QueryParser.Parse(v.Model, dsInfo) if err != nil { return nil, err } @@ -112,8 +109,8 @@ func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.Query return nil, fmt.Errorf("query request contains no queries") } -func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) { - u, _ 
:= url.Parse(e.Url) +func (e *InfluxDBExecutor) createRequest(dsInfo *models.DataSource, query string) (*http.Request, error) { + u, _ := url.Parse(dsInfo.Url) u.Path = path.Join(u.Path, "query") req, err := http.NewRequest(http.MethodGet, u.String(), nil) @@ -123,18 +120,18 @@ func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) { params := req.URL.Query() params.Set("q", query) - params.Set("db", e.Database) + params.Set("db", dsInfo.Database) params.Set("epoch", "s") req.URL.RawQuery = params.Encode() req.Header.Set("User-Agent", "Grafana") - if e.BasicAuth { - req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) + if dsInfo.BasicAuth { + req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword) } - if !e.BasicAuth && e.User != "" { - req.SetBasicAuth(e.User, e.Password) + if !dsInfo.BasicAuth && dsInfo.User != "" { + req.SetBasicAuth(dsInfo.User, dsInfo.Password) } glog.Debug("Influxdb request", "url", req.URL.String()) diff --git a/pkg/tsdb/influxdb/query.go b/pkg/tsdb/influxdb/query.go index 0bcdf1d5291..3a796e6db63 100644 --- a/pkg/tsdb/influxdb/query.go +++ b/pkg/tsdb/influxdb/query.go @@ -16,7 +16,7 @@ var ( regexpMeasurementPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`) ) -func (query *Query) Build(queryContext *tsdb.QueryContext) (string, error) { +func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) { var res string if query.UseRawQuery && query.RawQuery != "" { @@ -41,7 +41,7 @@ func (query *Query) Build(queryContext *tsdb.QueryContext) (string, error) { return res, nil } -func getDefinedInterval(query *Query, queryContext *tsdb.QueryContext) (*tsdb.Interval, error) { +func getDefinedInterval(query *Query, queryContext *tsdb.TsdbQuery) (*tsdb.Interval, error) { defaultInterval := tsdb.CalculateInterval(queryContext.TimeRange) if query.Interval == "" { @@ -104,7 +104,7 @@ func (query *Query) renderTags() []string { return res } -func (query *Query) renderTimeFilter(queryContext *tsdb.QueryContext) string { +func (query *Query) renderTimeFilter(queryContext *tsdb.TsdbQuery) string { from := "now() - " + queryContext.TimeRange.From to := "" @@ -115,7 +115,7 @@ func (query *Query) renderTimeFilter(queryContext *tsdb.QueryContext) string { return fmt.Sprintf("time > %s%s", from, to) } -func (query *Query) renderSelectors(queryContext *tsdb.QueryContext) string { +func (query *Query) renderSelectors(queryContext *tsdb.TsdbQuery) string { res := "SELECT " var selectors []string @@ -163,7 +163,7 @@ func (query *Query) renderWhereClause() string { return res } -func (query *Query) renderGroupBy(queryContext *tsdb.QueryContext) string { +func (query *Query) renderGroupBy(queryContext *tsdb.TsdbQuery) string { groupBy := "" for i, group := range query.GroupBy { if i == 0 { diff --git a/pkg/tsdb/influxdb/query_part.go b/pkg/tsdb/influxdb/query_part.go index 9c41fbbedba..981aea40526 100644 --- a/pkg/tsdb/influxdb/query_part.go +++ b/pkg/tsdb/influxdb/query_part.go @@ -15,7 +15,7 @@ type DefinitionParameters struct { } type QueryDefinition struct { - Renderer func(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string + Renderer func(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string Params []DefinitionParameters } @@ -94,14 +94,14 @@ func init() { renders["alias"] = QueryDefinition{Renderer: aliasRenderer} } -func fieldRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { +func fieldRenderer(query *Query, 
queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string { if part.Params[0] == "*" { return "*" } return fmt.Sprintf(`"%s"`, part.Params[0]) } -func functionRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { +func functionRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string { for i, param := range part.Params { if part.Type == "time" && param == "auto" { part.Params[i] = "$__interval" @@ -117,15 +117,15 @@ func functionRenderer(query *Query, queryContext *tsdb.QueryContext, part *Query return fmt.Sprintf("%s(%s)", part.Type, params) } -func suffixRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { +func suffixRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string { return fmt.Sprintf("%s %s", innerExpr, part.Params[0]) } -func aliasRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { +func aliasRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string { return fmt.Sprintf(`%s AS "%s"`, innerExpr, part.Params[0]) } -func (r QueryDefinition) Render(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { +func (r QueryDefinition) Render(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string { return r.Renderer(query, queryContext, part, innerExpr) } @@ -149,6 +149,6 @@ type QueryPart struct { Params []string } -func (qp *QueryPart) Render(query *Query, queryContext *tsdb.QueryContext, expr string) string { +func (qp *QueryPart) Render(query *Query, queryContext *tsdb.TsdbQuery, expr string) string { return qp.Def.Renderer(query, queryContext, qp, expr) } diff --git a/pkg/tsdb/influxdb/query_part_test.go b/pkg/tsdb/influxdb/query_part_test.go index b5bae2fdf37..d23865174c8 100644 --- a/pkg/tsdb/influxdb/query_part_test.go +++ b/pkg/tsdb/influxdb/query_part_test.go @@ -10,7 +10,7 @@ import ( func TestInfluxdbQueryPart(t *testing.T) { Convey("Influxdb query parts", t, func() { - queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("5m", "now")} + queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")} query := &Query{} Convey("render field ", func() { diff --git a/pkg/tsdb/influxdb/query_test.go b/pkg/tsdb/influxdb/query_test.go index 0977ef8c1ce..5c8dc1eaf69 100644 --- a/pkg/tsdb/influxdb/query_test.go +++ b/pkg/tsdb/influxdb/query_test.go @@ -28,7 +28,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) { tag1 := &Tag{Key: "hostname", Value: "server1", Operator: "="} tag2 := &Tag{Key: "hostname", Value: "server2", Operator: "=", Condition: "OR"} - queryContext := &tsdb.QueryContext{ + queryContext := &tsdb.TsdbQuery{ TimeRange: tsdb.NewTimeRange("5m", "now"), } @@ -101,12 +101,12 @@ func TestInfluxdbQueryBuilder(t *testing.T) { query := Query{} Convey("render from: 2h to now-1h", func() { query := Query{} - queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("2h", "now-1h")} + queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("2h", "now-1h")} So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h") }) Convey("render from: 10m", func() { - queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("10m", "now")} + queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("10m", "now")} So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 10m") 
}) }) diff --git a/pkg/tsdb/models.go b/pkg/tsdb/models.go index 5ae27867c44..cbda7c97515 100644 --- a/pkg/tsdb/models.go +++ b/pkg/tsdb/models.go @@ -6,43 +6,24 @@ import ( "github.com/grafana/grafana/pkg/models" ) +type TsdbQuery struct { + TimeRange *TimeRange + Queries []*Query +} + type Query struct { RefId string Model *simplejson.Json Depends []string DataSource *models.DataSource Results []*TimeSeries - Exclude bool MaxDataPoints int64 IntervalMs int64 } -type QuerySlice []*Query - -type Request struct { - TimeRange *TimeRange - Queries QuerySlice -} - type Response struct { - BatchTimings []*BatchTiming `json:"timings"` - Results map[string]*QueryResult `json:"results"` - Message string `json:"message,omitempty"` -} - -type BatchTiming struct { - TimeElapsed int64 -} - -type BatchResult struct { - Error error - QueryResults map[string]*QueryResult - Timings *BatchTiming -} - -func (br *BatchResult) WithError(err error) *BatchResult { - br.Error = err - return br + Results map[string]*QueryResult `json:"results"` + Message string `json:"message,omitempty"` } type QueryResult struct { diff --git a/pkg/tsdb/mqe/httpClient.go b/pkg/tsdb/mqe/httpClient.go deleted file mode 100644 index d8bf0888a35..00000000000 --- a/pkg/tsdb/mqe/httpClient.go +++ /dev/null @@ -1,129 +0,0 @@ -package mqe - -import ( - "context" - "net/http" - "net/url" - "path" - "strings" - - "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" - - "golang.org/x/net/context/ctxhttp" -) - -var ( - MaxWorker int = 4 -) - -type apiClient struct { - *models.DataSource - log log.Logger - httpClient *http.Client - responseParser *ResponseParser -} - -func NewApiClient(httpClient *http.Client, datasource *models.DataSource) *apiClient { - return &apiClient{ - DataSource: datasource, - log: log.New("tsdb.mqe"), - httpClient: httpClient, - responseParser: NewResponseParser(), - } -} - -func (e *apiClient) PerformRequests(ctx context.Context, queries []QueryToSend) (*tsdb.QueryResult, error) { - queryResult := &tsdb.QueryResult{} - - queryCount := len(queries) - jobsChan := make(chan QueryToSend, queryCount) - resultChan := make(chan []*tsdb.TimeSeries, queryCount) - errorsChan := make(chan error, 1) - for w := 1; w <= MaxWorker; w++ { - go e.spawnWorker(ctx, w, jobsChan, resultChan, errorsChan) - } - - for _, v := range queries { - jobsChan <- v - } - close(jobsChan) - - resultCounter := 0 - for { - select { - case timeseries := <-resultChan: - queryResult.Series = append(queryResult.Series, timeseries...) 
- resultCounter++ - - if resultCounter == queryCount { - close(resultChan) - return queryResult, nil - } - case err := <-errorsChan: - return nil, err - case <-ctx.Done(): - return nil, ctx.Err() - } - } -} - -func (e *apiClient) spawnWorker(ctx context.Context, id int, jobs chan QueryToSend, results chan []*tsdb.TimeSeries, errors chan error) { - e.log.Debug("Spawning worker", "id", id) - for query := range jobs { - if setting.Env == setting.DEV { - e.log.Debug("Sending request", "query", query.RawQuery) - } - - req, err := e.createRequest(query.RawQuery) - - resp, err := ctxhttp.Do(ctx, e.httpClient, req) - if err != nil { - errors <- err - return - } - - series, err := e.responseParser.Parse(resp, query) - if err != nil { - errors <- err - return - } - - results <- series - } - e.log.Debug("Worker is complete", "id", id) -} - -func (e *apiClient) createRequest(query string) (*http.Request, error) { - u, err := url.Parse(e.Url) - if err != nil { - return nil, err - } - - u.Path = path.Join(u.Path, "query") - - payload := simplejson.New() - payload.Set("query", query) - - jsonPayload, err := payload.MarshalJSON() - if err != nil { - return nil, err - } - - req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(string(jsonPayload))) - if err != nil { - return nil, err - } - - req.Header.Set("User-Agent", "Grafana") - req.Header.Set("Content-Type", "application/json") - - if e.BasicAuth { - req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) - } - - return req, nil -} diff --git a/pkg/tsdb/mqe/model_parser.go b/pkg/tsdb/mqe/model_parser.go deleted file mode 100644 index d139bea9c15..00000000000 --- a/pkg/tsdb/mqe/model_parser.go +++ /dev/null @@ -1,60 +0,0 @@ -package mqe - -import ( - "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" -) - -func NewQueryParser() *QueryParser { - return &QueryParser{} -} - -type QueryParser struct{} - -func (qp *QueryParser) Parse(model *simplejson.Json, dsInfo *models.DataSource, queryContext *tsdb.QueryContext) (*Query, error) { - query := &Query{TimeRange: queryContext.TimeRange} - query.AddClusterToAlias = model.Get("addClusterToAlias").MustBool(false) - query.AddHostToAlias = model.Get("addHostToAlias").MustBool(false) - query.UseRawQuery = model.Get("rawQuery").MustBool(false) - query.RawQuery = model.Get("query").MustString("") - - query.Cluster = model.Get("cluster").MustStringArray([]string{}) - query.Hosts = model.Get("hosts").MustStringArray([]string{}) - - var metrics []Metric - var err error - for _, metricsObj := range model.Get("metrics").MustArray() { - metricJson := simplejson.NewFromAny(metricsObj) - var m Metric - - m.Alias = metricJson.Get("alias").MustString("") - m.Metric, err = metricJson.Get("metric").String() - if err != nil { - return nil, err - } - - metrics = append(metrics, m) - } - - query.Metrics = metrics - - var functions []Function - for _, functionListObj := range model.Get("functionList").MustArray() { - functionListJson := simplejson.NewFromAny(functionListObj) - var f Function - - f.Func = functionListJson.Get("func").MustString("") - if err != nil { - return nil, err - } - - if f.Func != "" { - functions = append(functions, f) - } - } - - query.FunctionList = functions - - return query, nil -} diff --git a/pkg/tsdb/mqe/model_parser_test.go b/pkg/tsdb/mqe/model_parser_test.go deleted file mode 100644 index 58e7b7eb28d..00000000000 --- a/pkg/tsdb/mqe/model_parser_test.go +++ /dev/null @@ -1,127 +0,0 @@ -package 
mqe - -import ( - "testing" - - "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" - . "github.com/smartystreets/goconvey/convey" -) - -func TestMQEQueryParser(t *testing.T) { - Convey("MQE query parser", t, func() { - parser := &QueryParser{} - - dsInfo := &models.DataSource{JsonData: simplejson.New()} - queryContext := &tsdb.QueryContext{} - - Convey("can parse simple mqe model", func() { - json := ` - { - "cluster": [], - "hosts": [ - "staples-lab-1" - ], - "metrics": [ - { - "metric": "os.cpu.all*" - } - ], - "rawQuery": "", - "refId": "A" - } - ` - modelJson, err := simplejson.NewJson([]byte(json)) - So(err, ShouldBeNil) - - query, err := parser.Parse(modelJson, dsInfo, queryContext) - So(err, ShouldBeNil) - So(query.UseRawQuery, ShouldBeFalse) - - So(len(query.Cluster), ShouldEqual, 0) - So(query.Hosts[0], ShouldEqual, "staples-lab-1") - So(query.Metrics[0].Metric, ShouldEqual, "os.cpu.all*") - }) - - Convey("can parse multi serie mqe model", func() { - json := ` - { - "cluster": [ - "demoapp" - ], - "hosts": [ - "staples-lab-1" - ], - "metrics": [ - { - "metric": "os.cpu.all.active_percentage" - }, - { - "metric": "os.disk.sda.io_time" - } - ], - "functionList": [ - { - "func": "aggregate.min" - }, - { - "func": "aggregate.max" - } - ], - "rawQuery": "", - "refId": "A", - "addClusterToAlias": true, - "addHostToAlias": true - } - ` - modelJson, err := simplejson.NewJson([]byte(json)) - So(err, ShouldBeNil) - - query, err := parser.Parse(modelJson, dsInfo, queryContext) - So(err, ShouldBeNil) - So(query.UseRawQuery, ShouldBeFalse) - So(query.Cluster[0], ShouldEqual, "demoapp") - So(query.Metrics[0].Metric, ShouldEqual, "os.cpu.all.active_percentage") - So(query.Metrics[1].Metric, ShouldEqual, "os.disk.sda.io_time") - So(query.FunctionList[0].Func, ShouldEqual, "aggregate.min") - So(query.FunctionList[1].Func, ShouldEqual, "aggregate.max") - }) - - Convey("can parse raw query", func() { - json := ` - { - "addClusterToAlias": true, - "addHostToAlias": true, - "cluster": [], - "hosts": [ - "staples-lab-1" - ], - "metrics": [ - { - "alias": "cpu active", - "metric": "os.cpu.all.active_percentage" - }, - { - "alias": "disk sda time", - "metric": "os.disk.sda.io_time" - } - ], - "rawQuery": true, - "query": "raw-query", - "refId": "A" - } - ` - modelJson, err := simplejson.NewJson([]byte(json)) - So(err, ShouldBeNil) - - query, err := parser.Parse(modelJson, dsInfo, queryContext) - So(err, ShouldBeNil) - - So(query.UseRawQuery, ShouldBeTrue) - So(query.RawQuery, ShouldEqual, "raw-query") - So(query.AddClusterToAlias, ShouldBeTrue) - So(query.AddHostToAlias, ShouldBeTrue) - }) - }) -} diff --git a/pkg/tsdb/mqe/mqe.go b/pkg/tsdb/mqe/mqe.go deleted file mode 100644 index ae828e95c20..00000000000 --- a/pkg/tsdb/mqe/mqe.go +++ /dev/null @@ -1,85 +0,0 @@ -package mqe - -import ( - "context" - "net/http" - - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" -) - -type MQEExecutor struct { - *models.DataSource - queryParser *QueryParser - apiClient *apiClient - httpClient *http.Client - log log.Logger - tokenClient *TokenClient -} - -func NewMQEExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { - httpclient, err := dsInfo.GetHttpClient() - if err != nil { - return nil, err - } - - return &MQEExecutor{ - DataSource: dsInfo, - httpClient: httpclient, - log: log.New("tsdb.mqe"), - queryParser: NewQueryParser(), - apiClient: 
NewApiClient(httpclient, dsInfo), - tokenClient: NewTokenClient(dsInfo), - }, nil -} - -func init() { - tsdb.RegisterExecutor("mqe-datasource", NewMQEExecutor) -} - -type QueryToSend struct { - RawQuery string - Metric Metric - QueryRef *Query -} - -func (e *MQEExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{} - - availableSeries, err := e.tokenClient.GetTokenData(ctx) - if err != nil { - return result.WithError(err) - } - - var mqeQueries []*Query - for _, v := range queries { - q, err := e.queryParser.Parse(v.Model, e.DataSource, queryContext) - if err != nil { - return result.WithError(err) - } - mqeQueries = append(mqeQueries, q) - } - - var rawQueries []QueryToSend - for _, v := range mqeQueries { - queries, err := v.Build(availableSeries.Metrics) - if err != nil { - return result.WithError(err) - } - - rawQueries = append(rawQueries, queries...) - } - - e.log.Debug("Sending request", "url", e.DataSource.Url) - - queryResult, err := e.apiClient.PerformRequests(ctx, rawQueries) - if err != nil { - return result.WithError(err) - } - - result.QueryResults = make(map[string]*tsdb.QueryResult) - result.QueryResults["A"] = queryResult - - return result -} diff --git a/pkg/tsdb/mqe/response_parser.go b/pkg/tsdb/mqe/response_parser.go deleted file mode 100644 index f3fdb00f0aa..00000000000 --- a/pkg/tsdb/mqe/response_parser.go +++ /dev/null @@ -1,177 +0,0 @@ -package mqe - -import ( - "encoding/json" - "io/ioutil" - "net/http" - "strconv" - "strings" - - "fmt" - - "regexp" - - "github.com/grafana/grafana/pkg/components/null" - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/tsdb" -) - -func NewResponseParser() *ResponseParser { - return &ResponseParser{ - log: log.New("tsdb.mqe"), - } -} - -var ( - indexAliasPattern *regexp.Regexp - wildcardAliasPattern *regexp.Regexp -) - -func init() { - indexAliasPattern = regexp.MustCompile(`\$(\d)`) - wildcardAliasPattern = regexp.MustCompile(`[*!]`) -} - -type MQEResponse struct { - Success bool `json:"success"` - Name string `json:"name"` - Body []MQEResponseSerie `json:"body"` -} - -type ResponseTimeRange struct { - Start int64 `json:"start"` - End int64 `json:"end"` - Resolution int64 `json:"Resolution"` -} - -type MQEResponseSerie struct { - Query string `json:"query"` - Name string `json:"name"` - Type string `json:"type"` - Series []MQESerie `json:"series"` - TimeRange ResponseTimeRange `json:"timerange"` -} - -type MQESerie struct { - Values []null.Float `json:"values"` - Tagset map[string]string `json:"tagset"` -} - -type ResponseParser struct { - log log.Logger -} - -func (parser *ResponseParser) Parse(res *http.Response, queryRef QueryToSend) ([]*tsdb.TimeSeries, error) { - body, err := ioutil.ReadAll(res.Body) - defer res.Body.Close() - if err != nil { - return nil, err - } - - if res.StatusCode/100 != 2 { - parser.log.Error("Request failed", "status code", res.StatusCode, "body", string(body)) - return nil, fmt.Errorf("Returned invalid statuscode") - } - - var data *MQEResponse = &MQEResponse{} - err = json.Unmarshal(body, data) - if err != nil { - parser.log.Info("Failed to unmarshal response", "error", err, "status", res.Status, "body", string(body)) - return nil, err - } - - if !data.Success { - return nil, fmt.Errorf("Request failed.") - } - - var series []*tsdb.TimeSeries - for _, body := range data.Body { - for _, mqeSerie := range body.Series { - serie := &tsdb.TimeSeries{ - Tags: map[string]string{}, - Name: 
parser.formatLegend(body, mqeSerie, queryRef), - } - for key, value := range mqeSerie.Tagset { - serie.Tags[key] = value - } - - for i, value := range mqeSerie.Values { - timestamp := body.TimeRange.Start + int64(i)*body.TimeRange.Resolution - serie.Points = append(serie.Points, tsdb.NewTimePoint(value, float64(timestamp))) - } - - series = append(series, serie) - } - } - - return series, nil -} - -func (parser *ResponseParser) formatLegend(body MQEResponseSerie, mqeSerie MQESerie, queryToSend QueryToSend) string { - namePrefix := "" - - //append predefined tags to seriename - for key, value := range mqeSerie.Tagset { - if key == "cluster" && queryToSend.QueryRef.AddClusterToAlias { - namePrefix += value + " " - } - } - for key, value := range mqeSerie.Tagset { - if key == "host" && queryToSend.QueryRef.AddHostToAlias { - namePrefix += value + " " - } - } - - return namePrefix + parser.formatName(body, queryToSend) -} - -func (parser *ResponseParser) formatName(body MQEResponseSerie, queryToSend QueryToSend) string { - if indexAliasPattern.MatchString(queryToSend.Metric.Alias) { - return parser.indexAlias(body, queryToSend) - } - - if wildcardAliasPattern.MatchString(queryToSend.Metric.Metric) && wildcardAliasPattern.MatchString(queryToSend.Metric.Alias) { - return parser.wildcardAlias(body, queryToSend) - } - - return body.Name -} - -func (parser *ResponseParser) wildcardAlias(body MQEResponseSerie, queryToSend QueryToSend) string { - regString := strings.Replace(queryToSend.Metric.Metric, `*`, `(.*)`, 1) - reg, err := regexp.Compile(regString) - if err != nil { - return queryToSend.Metric.Alias - } - - matches := reg.FindAllStringSubmatch(queryToSend.RawQuery, -1) - - if len(matches) == 0 || len(matches[0]) < 2 { - return queryToSend.Metric.Alias - } - - return matches[0][1] -} - -func (parser *ResponseParser) indexAlias(body MQEResponseSerie, queryToSend QueryToSend) string { - queryNameParts := strings.Split(queryToSend.Metric.Metric, `.`) - - name := indexAliasPattern.ReplaceAllStringFunc(queryToSend.Metric.Alias, func(in string) string { - positionName := strings.TrimSpace(strings.Replace(in, "$", "", 1)) - - pos, err := strconv.Atoi(positionName) - if err != nil { - return "" - } - - for i, part := range queryNameParts { - if i == pos-1 { - return strings.TrimSpace(part) - } - } - - return "" - }) - - return strings.Replace(name, " ", ".", -1) -} diff --git a/pkg/tsdb/mqe/response_parser_test.go b/pkg/tsdb/mqe/response_parser_test.go deleted file mode 100644 index 34259aaea48..00000000000 --- a/pkg/tsdb/mqe/response_parser_test.go +++ /dev/null @@ -1,187 +0,0 @@ -package mqe - -import ( - "testing" - - "net/http" - "strings" - - "io/ioutil" - - "github.com/grafana/grafana/pkg/components/null" - . 
"github.com/smartystreets/goconvey/convey" -) - -var ( - testJson string -) - -func TestMQEResponseParser(t *testing.T) { - Convey("MQE response parser", t, func() { - parser := NewResponseParser() - - Convey("Can parse response", func() { - queryRef := QueryToSend{ - QueryRef: &Query{ - AddClusterToAlias: true, - AddHostToAlias: true, - }, - Metric: Metric{Alias: ""}, - } - - response := &http.Response{ - StatusCode: 200, - Body: ioutil.NopCloser(strings.NewReader(testJson)), - } - res, err := parser.Parse(response, queryRef) - So(err, ShouldBeNil) - So(len(res), ShouldEqual, 2) - So(len(res[0].Points), ShouldEqual, 14) - So(res[0].Name, ShouldEqual, "demoapp staples-lab-1 os.disk.sda3.weighted_io_time") - startTime := 1479287280000 - for i := 0; i < 11; i++ { - So(res[0].Points[i][0].Float64, ShouldEqual, i+1) - So(res[0].Points[i][1].Float64, ShouldEqual, startTime+(i*30000)) - } - - }) - - Convey("Can format legend", func() { - mqeSerie := MQESerie{ - Tagset: map[string]string{ - "cluster": "demoapp", - "host": "staples-lab-1", - }, - Values: []null.Float{null.NewFloat(3, true)}, - } - - Convey("with empty alias", func() { - serie := MQEResponseSerie{Name: "os.disk.sda3.weighted_io_time"} - queryRef := QueryToSend{ - QueryRef: &Query{ - AddClusterToAlias: true, - AddHostToAlias: true, - }, - Metric: Metric{Alias: ""}, - } - legend := parser.formatLegend(serie, mqeSerie, queryRef) - So(legend, ShouldEqual, "demoapp staples-lab-1 os.disk.sda3.weighted_io_time") - }) - - Convey("with index alias (ex $2 $3)", func() { - serie := MQEResponseSerie{Name: "os.disk.sda3.weighted_io_time"} - queryRef := QueryToSend{ - QueryRef: &Query{ - AddClusterToAlias: true, - AddHostToAlias: true, - }, - Metric: Metric{Alias: "$2 $3", Metric: "os.disk.sda3.weighted_io_time"}, - } - legend := parser.formatLegend(serie, mqeSerie, queryRef) - So(legend, ShouldEqual, "demoapp staples-lab-1 disk.sda3") - }) - - Convey("with wildcard alias", func() { - serie := MQEResponseSerie{Name: "os.disk.sda3.weighted_io_time", Query: "os.disk.*"} - - queryRef := QueryToSend{ - QueryRef: &Query{ - AddClusterToAlias: true, - AddHostToAlias: true, - }, - RawQuery: "os.disk.sda3.weighted_io_time", - Metric: Metric{Alias: "*", Metric: "os.disk.*.weighted_io_time"}, - } - legend := parser.formatLegend(serie, mqeSerie, queryRef) - So(legend, ShouldEqual, "demoapp staples-lab-1 sda3") - }) - }) - }) -} - -func init() { - testJson = `{ - "success": true, - "name": "select", - "body": [ - { - "query": "os.disk.sda3.weighted_io_time", - "name": "os.disk.sda3.weighted_io_time", - "type": "series", - "series": [ - { - "tagset": { - "cluster": "demoapp", - "host": "staples-lab-1" - }, - "values": [1,2,3,4,5,6,7,8,9,10,11, null, null, null] - }, - { - "tagset": { - "cluster": "demoapp", - "host": "staples-lab-2" - }, - "values": [11,10,9,8,7,6,5,4,3,2,1] - } - ], - "timerange": { - "start": 1479287280000, - "end": 1479287580000, - "resolution": 30000 - } - } - ], - "metadata": { - "description": { - "cluster": [ - "demoapp" - ], - "host": [ - "staples-lab-1", - "staples-lab-2" - ] - }, - "notes": null, - "profile": [ - { - "name": "Parsing Query", - "start": "2016-11-16T04:16:21.874354721-05:00", - "finish": "2016-11-16T04:16:21.874762291-05:00" - }, - { - "name": "Cassandra GetAllTags", - "start": "2016-11-16T04:16:21.874907171-05:00", - "finish": "2016-11-16T04:16:21.876401922-05:00" - }, - { - "name": "CachedMetricMetadataAPI_GetAllTags_Expired", - "start": "2016-11-16T04:16:21.874904751-05:00", - "finish": 
"2016-11-16T04:16:21.876407852-05:00" - }, - { - "name": "CachedMetricMetadataAPI_GetAllTags", - "start": "2016-11-16T04:16:21.874899491-05:00", - "finish": "2016-11-16T04:16:21.876410382-05:00" - }, - { - "name": "Blueflood FetchSingleTimeseries Resolution", - "description": "os.disk.sda3.weighted_io_time [app=demoapp,host=staples-lab-1]\n at 30s", - "start": "2016-11-16T04:16:21.876623312-05:00", - "finish": "2016-11-16T04:16:21.881763444-05:00" - }, - { - "name": "Blueflood FetchSingleTimeseries Resolution", - "description": "os.disk.sda3.weighted_io_time [app=demoapp,host=staples-lab-2]\n at 30s", - "start": "2016-11-16T04:16:21.876642682-05:00", - "finish": "2016-11-16T04:16:21.881895914-05:00" - }, - { - "name": "Blueflood FetchMultipleTimeseries", - "start": "2016-11-16T04:16:21.876418022-05:00", - "finish": "2016-11-16T04:16:21.881921474-05:00" - } - ] - } - } - ` -} diff --git a/pkg/tsdb/mqe/token_client.go b/pkg/tsdb/mqe/token_client.go deleted file mode 100644 index df136738ab6..00000000000 --- a/pkg/tsdb/mqe/token_client.go +++ /dev/null @@ -1,101 +0,0 @@ -package mqe - -import ( - "context" - "encoding/json" - "fmt" - "io/ioutil" - "net/http" - "net/url" - "path" - "time" - - "golang.org/x/net/context/ctxhttp" - - "strconv" - - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/models" - "github.com/patrickmn/go-cache" -) - -var tokenCache *cache.Cache - -func init() { - tokenCache = cache.New(5*time.Minute, 30*time.Second) -} - -type TokenClient struct { - log log.Logger - Datasource *models.DataSource - HttpClient *http.Client -} - -func NewTokenClient(datasource *models.DataSource) *TokenClient { - httpClient, _ := datasource.GetHttpClient() - - return &TokenClient{ - log: log.New("tsdb.mqe.tokenclient"), - Datasource: datasource, - HttpClient: httpClient, - } -} - -func (client *TokenClient) GetTokenData(ctx context.Context) (*TokenBody, error) { - key := strconv.FormatInt(client.Datasource.Id, 10) - - item, found := tokenCache.Get(key) - if found { - if result, ok := item.(*TokenBody); ok { - return result, nil - } - } - - b, err := client.RequestTokenData(ctx) - if err != nil { - return nil, err - } - - tokenCache.Set(key, b, cache.DefaultExpiration) - - return b, nil -} - -func (client *TokenClient) RequestTokenData(ctx context.Context) (*TokenBody, error) { - u, _ := url.Parse(client.Datasource.Url) - u.Path = path.Join(u.Path, "token") - - req, err := http.NewRequest(http.MethodGet, u.String(), nil) - if err != nil { - client.log.Info("Failed to create request", "error", err) - } - - res, err := ctxhttp.Do(ctx, client.HttpClient, req) - if err != nil { - return nil, err - } - - body, err := ioutil.ReadAll(res.Body) - defer res.Body.Close() - if err != nil { - return nil, err - } - - if res.StatusCode/100 != 2 { - client.log.Info("Request failed", "status", res.Status, "body", string(body)) - return nil, fmt.Errorf("Request failed status: %v", res.Status) - } - - var result *TokenResponse - err = json.Unmarshal(body, &result) - if err != nil { - client.log.Info("Failed to unmarshal response", "error", err, "status", res.Status, "body", string(body)) - return nil, err - } - - if !result.Success { - return nil, fmt.Errorf("Request failed for unknown reason.") - } - - return &result.Body, nil -} diff --git a/pkg/tsdb/mqe/token_client_test.go b/pkg/tsdb/mqe/token_client_test.go deleted file mode 100644 index f940f798b36..00000000000 --- a/pkg/tsdb/mqe/token_client_test.go +++ /dev/null @@ -1,27 +0,0 @@ -package mqe - -import ( - "context" - "testing" - 
- "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/models" - . "github.com/smartystreets/goconvey/convey" -) - -func TestTokenClient(t *testing.T) { - SkipConvey("Token client", t, func() { - dsInfo := &models.DataSource{ - JsonData: simplejson.New(), - Url: "", - } - - client := NewTokenClient(dsInfo) - - body, err := client.RequestTokenData(context.TODO()) - - So(err, ShouldBeNil) - //So(len(body.Functions), ShouldBeGreaterThan, 1) - So(len(body.Metrics), ShouldBeGreaterThan, 1) - }) -} diff --git a/pkg/tsdb/mqe/types.go b/pkg/tsdb/mqe/types.go deleted file mode 100644 index 0bd436ee9bd..00000000000 --- a/pkg/tsdb/mqe/types.go +++ /dev/null @@ -1,137 +0,0 @@ -package mqe - -import ( - "fmt" - - "strings" - - "regexp" - - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/tsdb" -) - -type Metric struct { - Metric string - Alias string -} - -type Function struct { - Func string -} - -type Query struct { - Metrics []Metric - Hosts []string - Cluster []string - FunctionList []Function - AddClusterToAlias bool - AddHostToAlias bool - - TimeRange *tsdb.TimeRange - UseRawQuery bool - RawQuery string -} - -var ( - containsWildcardPattern *regexp.Regexp = regexp.MustCompile(`\*`) -) - -func (q *Query) Build(availableSeries []string) ([]QueryToSend, error) { - var queriesToSend []QueryToSend - where := q.buildWhereClause() - functions := q.buildFunctionList() - - for _, metric := range q.Metrics { - alias := "" - if metric.Alias != "" { - alias = fmt.Sprintf(" {%s}", metric.Alias) - } - - if !containsWildcardPattern.Match([]byte(metric.Metric)) { - rawQuery := q.renderQuerystring(metric.Metric, functions, alias, where, q.TimeRange) - queriesToSend = append(queriesToSend, QueryToSend{ - RawQuery: rawQuery, - QueryRef: q, - Metric: metric, - }) - } else { - m := strings.Replace(metric.Metric, "*", ".*", -1) - mp, err := regexp.Compile(m) - - if err != nil { - log.Error2("failed to compile regex for ", "metric", m) - continue - } - - //TODO: this lookup should be cached - for _, wildcardMatch := range availableSeries { - if mp.Match([]byte(wildcardMatch)) { - rawQuery := q.renderQuerystring(wildcardMatch, functions, alias, where, q.TimeRange) - queriesToSend = append(queriesToSend, QueryToSend{ - RawQuery: rawQuery, - QueryRef: q, - Metric: metric, - }) - } - } - } - } - - return queriesToSend, nil -} - -func (q *Query) renderQuerystring(path, functions, alias, where string, timerange *tsdb.TimeRange) string { - return fmt.Sprintf( - "`%s`%s%s %s from %v to %v", - path, - functions, - alias, - where, - q.TimeRange.GetFromAsMsEpoch(), - q.TimeRange.GetToAsMsEpoch()) -} - -func (q *Query) buildFunctionList() string { - functions := "" - for _, v := range q.FunctionList { - functions = fmt.Sprintf("%s|%s", functions, v.Func) - } - - return functions -} - -func (q *Query) buildWhereClause() string { - hasApps := len(q.Cluster) > 0 - hasHosts := len(q.Hosts) > 0 - - where := "" - if hasHosts || hasApps { - where += "where " - } - - if hasApps { - apps := strings.Join(q.Cluster, "', '") - where += fmt.Sprintf("cluster in ('%s')", apps) - } - - if hasHosts && hasApps { - where += " and " - } - - if hasHosts { - hosts := strings.Join(q.Hosts, "', '") - where += fmt.Sprintf("host in ('%s')", hosts) - } - - return where -} - -type TokenBody struct { - Metrics []string -} - -type TokenResponse struct { - Success bool - Body TokenBody -} diff --git a/pkg/tsdb/mqe/types_test.go b/pkg/tsdb/mqe/types_test.go deleted file mode 100644 index 
6f716937f1e..00000000000 --- a/pkg/tsdb/mqe/types_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package mqe - -import ( - "testing" - - "time" - - "fmt" - - "github.com/grafana/grafana/pkg/tsdb" - . "github.com/smartystreets/goconvey/convey" -) - -func TestWildcardExpansion(t *testing.T) { - availableMetrics := []string{ - "os.cpu.all.idle", - "os.cpu.1.idle", - "os.cpu.2.idle", - "os.cpu.3.idle", - } - - now := time.Now() - from := now.Add((time.Minute*5)*-1).UnixNano() / int64(time.Millisecond) - to := now.UnixNano() / int64(time.Millisecond) - - Convey("Can expanding query", t, func() { - Convey("Without wildcard series", func() { - query := &Query{ - Metrics: []Metric{ - {Metric: "os.cpu.3.idle", Alias: ""}, - {Metric: "os.cpu.2.idle", Alias: ""}, - {Metric: "os.cpu.1.idle", Alias: "cpu"}, - }, - Hosts: []string{"staples-lab-1", "staples-lab-2"}, - Cluster: []string{"demoapp-1", "demoapp-2"}, - AddClusterToAlias: false, - AddHostToAlias: false, - FunctionList: []Function{ - {Func: "aggregate.min"}, - }, - TimeRange: &tsdb.TimeRange{Now: now, From: "5m", To: "now"}, - } - - expandeQueries, err := query.Build(availableMetrics) - So(err, ShouldBeNil) - So(len(expandeQueries), ShouldEqual, 3) - So(expandeQueries[0].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.3.idle`|aggregate.min where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to)) - So(expandeQueries[1].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.2.idle`|aggregate.min where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to)) - So(expandeQueries[2].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.1.idle`|aggregate.min {cpu} where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to)) - }) - - Convey("With two aggregate functions", func() { - query := &Query{ - Metrics: []Metric{ - {Metric: "os.cpu.3.idle", Alias: ""}, - }, - Hosts: []string{"staples-lab-1", "staples-lab-2"}, - Cluster: []string{"demoapp-1", "demoapp-2"}, - AddClusterToAlias: false, - AddHostToAlias: false, - FunctionList: []Function{ - {Func: "aggregate.min"}, - {Func: "aggregate.max"}, - }, - TimeRange: &tsdb.TimeRange{Now: now, From: "5m", To: "now"}, - } - - expandeQueries, err := query.Build(availableMetrics) - So(err, ShouldBeNil) - So(len(expandeQueries), ShouldEqual, 1) - So(expandeQueries[0].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.3.idle`|aggregate.min|aggregate.max where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to)) - }) - - Convey("Containing wildcard series", func() { - query := &Query{ - Metrics: []Metric{ - {Metric: "os.cpu*", Alias: ""}, - }, - Hosts: []string{"staples-lab-1"}, - AddClusterToAlias: false, - AddHostToAlias: false, - TimeRange: &tsdb.TimeRange{Now: now, From: "5m", To: "now"}, - } - - expandeQueries, err := query.Build(availableMetrics) - So(err, ShouldBeNil) - So(len(expandeQueries), ShouldEqual, 4) - - So(expandeQueries[0].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.all.idle` where host in ('staples-lab-1') from %v to %v", from, to)) - So(expandeQueries[1].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.1.idle` where host in ('staples-lab-1') from %v to %v", from, to)) - So(expandeQueries[2].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.2.idle` where host in ('staples-lab-1') from %v to %v", from, to)) - So(expandeQueries[3].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.3.idle` where host in ('staples-lab-1') from 
%v to %v", from, to)) - }) - }) -} diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index e75dab8c04f..19aa50096b1 100644 --- a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -21,9 +21,8 @@ import ( ) type MysqlExecutor struct { - datasource *models.DataSource - engine *xorm.Engine - log log.Logger + engine *xorm.Engine + log log.Logger } type engineCacheType struct { @@ -38,16 +37,15 @@ var engineCache = engineCacheType{ } func init() { - tsdb.RegisterExecutor("mysql", NewMysqlExecutor) + tsdb.RegisterTsdbQueryEndpoint("mysql", NewMysqlExecutor) } -func NewMysqlExecutor(datasource *models.DataSource) (tsdb.Executor, error) { +func NewMysqlExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { executor := &MysqlExecutor{ - datasource: datasource, - log: log.New("tsdb.mysql"), + log: log.New("tsdb.mysql"), } - err := executor.initEngine() + err := executor.initEngine(datasource) if err != nil { return nil, err } @@ -55,18 +53,24 @@ func NewMysqlExecutor(datasource *models.DataSource) (tsdb.Executor, error) { return executor, nil } -func (e *MysqlExecutor) initEngine() error { +func (e *MysqlExecutor) initEngine(dsInfo *models.DataSource) error { engineCache.Lock() defer engineCache.Unlock() - if engine, present := engineCache.cache[e.datasource.Id]; present { - if version, _ := engineCache.versions[e.datasource.Id]; version == e.datasource.Version { + if engine, present := engineCache.cache[dsInfo.Id]; present { + if version, _ := engineCache.versions[dsInfo.Id]; version == dsInfo.Version { e.engine = engine return nil } } - cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC", e.datasource.User, e.datasource.Password, "tcp", e.datasource.Url, e.datasource.Database) + cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC", + dsInfo.User, + dsInfo.Password, + "tcp", + dsInfo.Url, + dsInfo.Database) + e.log.Debug("getEngine", "connection", cnnstr) engine, err := xorm.NewEngine("mysql", cnnstr) @@ -76,29 +80,29 @@ func (e *MysqlExecutor) initEngine() error { return err } - engineCache.cache[e.datasource.Id] = engine + engineCache.cache[dsInfo.Id] = engine e.engine = engine return nil } -func (e *MysqlExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{ - QueryResults: make(map[string]*tsdb.QueryResult), +func (e *MysqlExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{ + Results: make(map[string]*tsdb.QueryResult), } - macroEngine := NewMysqlMacroEngine(context.TimeRange) + macroEngine := NewMysqlMacroEngine(tsdbQuery.TimeRange) session := e.engine.NewSession() defer session.Close() db := session.DB() - for _, query := range queries { + for _, query := range tsdbQuery.Queries { rawSql := query.Model.Get("rawSql").MustString() if rawSql == "" { continue } queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefId} - result.QueryResults[query.RefId] = queryResult + result.Results[query.RefId] = queryResult rawSql, err := macroEngine.Interpolate(rawSql) if err != nil { @@ -134,7 +138,7 @@ func (e *MysqlExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, co } } - return result + return result, nil } func (e MysqlExecutor) TransformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { @@ -272,8 +276,6 @@ func (e MysqlExecutor) 
TransformToTimeSeries(query *tsdb.Query, rows *core.Rows, rowData.metric = "Unknown" } - //e.log.Debug("Rows", "metric", rowData.metric, "time", rowData.time, "value", rowData.value) - if !rowData.time.Valid { return fmt.Errorf("Found row with no time value") } diff --git a/pkg/tsdb/opentsdb/opentsdb.go b/pkg/tsdb/opentsdb/opentsdb.go index 987f341c7dc..29daa0c3bb4 100644 --- a/pkg/tsdb/opentsdb/opentsdb.go +++ b/pkg/tsdb/opentsdb/opentsdb.go @@ -22,20 +22,22 @@ import ( ) type OpenTsdbExecutor struct { - *models.DataSource - httpClient *http.Client + //*models.DataSource + //httpClient *http.Client } -func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.Executor, error) { - httpClient, err := datasource.GetHttpClient() +func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + /* + httpClient, err := datasource.GetHttpClient() - if err != nil { - return nil, err - } + if err != nil { + return nil, err + } + */ return &OpenTsdbExecutor{ - DataSource: datasource, - httpClient: httpClient, + //DataSource: datasource, + //httpClient: httpClient, }, nil } @@ -45,18 +47,18 @@ var ( func init() { plog = log.New("tsdb.opentsdb") - tsdb.RegisterExecutor("opentsdb", NewOpenTsdbExecutor) + tsdb.RegisterTsdbQueryEndpoint("opentsdb", NewOpenTsdbExecutor) } -func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{} +func (e *OpenTsdbExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{} var tsdbQuery OpenTsdbQuery tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch() tsdbQuery.End = queryContext.TimeRange.GetToAsMsEpoch() - for _, query := range queries { + for _, query := range queryContext.Queries { metric := e.buildMetric(query) tsdbQuery.Queries = append(tsdbQuery.Queries, metric) } @@ -65,29 +67,32 @@ func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, plog.Debug("OpenTsdb request", "params", tsdbQuery) } - req, err := e.createRequest(tsdbQuery) + req, err := e.createRequest(dsInfo, tsdbQuery) if err != nil { - result.Error = err - return result + return nil, err } - res, err := ctxhttp.Do(ctx, e.httpClient, req) + httpClient, err := dsInfo.GetHttpClient() if err != nil { - result.Error = err - return result + return nil, err + } + + res, err := ctxhttp.Do(ctx, httpClient, req) + if err != nil { + return nil, err } queryResult, err := e.parseResponse(tsdbQuery, res) if err != nil { - return result.WithError(err) + return nil, err } - result.QueryResults = queryResult - return result + result.Results = queryResult + return result, nil } -func (e *OpenTsdbExecutor) createRequest(data OpenTsdbQuery) (*http.Request, error) { - u, _ := url.Parse(e.Url) +func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsdbQuery) (*http.Request, error) { + u, _ := url.Parse(dsInfo.Url) u.Path = path.Join(u.Path, "api/query") postData, err := json.Marshal(data) @@ -99,8 +104,8 @@ func (e *OpenTsdbExecutor) createRequest(data OpenTsdbQuery) (*http.Request, err } req.Header.Set("Content-Type", "application/json") - if e.BasicAuth { - req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) + if dsInfo.BasicAuth { + req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword) } return req, err diff --git a/pkg/tsdb/prometheus/prometheus.go b/pkg/tsdb/prometheus/prometheus.go index fad3ca533ac..33219837281 100644 --- 
a/pkg/tsdb/prometheus/prometheus.go +++ b/pkg/tsdb/prometheus/prometheus.go @@ -7,18 +7,20 @@ import ( "strings" "time" + "github.com/opentracing/opentracing-go" + "net/http" "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" - "github.com/prometheus/client_golang/api/prometheus" - pmodel "github.com/prometheus/common/model" + api "github.com/prometheus/client_golang/api" + apiv1 "github.com/prometheus/client_golang/api/prometheus/v1" + "github.com/prometheus/common/model" ) type PrometheusExecutor struct { - *models.DataSource Transport *http.Transport } @@ -34,15 +36,14 @@ func (bat basicAuthTransport) RoundTrip(req *http.Request) (*http.Response, erro return bat.Transport.RoundTrip(req) } -func NewPrometheusExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { +func NewPrometheusExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { transport, err := dsInfo.GetHttpTransport() if err != nil { return nil, err } return &PrometheusExecutor{ - DataSource: dsInfo, - Transport: transport, + Transport: transport, }, nil } @@ -53,66 +54,72 @@ var ( func init() { plog = log.New("tsdb.prometheus") - tsdb.RegisterExecutor("prometheus", NewPrometheusExecutor) + tsdb.RegisterTsdbQueryEndpoint("prometheus", NewPrometheusExecutor) legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) } -func (e *PrometheusExecutor) getClient() (prometheus.QueryAPI, error) { - cfg := prometheus.Config{ - Address: e.DataSource.Url, - Transport: e.Transport, +func (e *PrometheusExecutor) getClient(dsInfo *models.DataSource) (apiv1.API, error) { + cfg := api.Config{ + Address: dsInfo.Url, + RoundTripper: e.Transport, } - if e.BasicAuth { - cfg.Transport = basicAuthTransport{ + if dsInfo.BasicAuth { + cfg.RoundTripper = basicAuthTransport{ Transport: e.Transport, - username: e.BasicAuthUser, - password: e.BasicAuthPassword, + username: dsInfo.BasicAuthUser, + password: dsInfo.BasicAuthPassword, } } - client, err := prometheus.New(cfg) + client, err := api.NewClient(cfg) if err != nil { return nil, err } - return prometheus.NewQueryAPI(client), nil + return apiv1.NewAPI(client), nil } -func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{} +func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{} - client, err := e.getClient() + client, err := e.getClient(dsInfo) if err != nil { - return result.WithError(err) + return nil, err } - query, err := parseQuery(queries, queryContext) + query, err := parseQuery(tsdbQuery.Queries, tsdbQuery) if err != nil { - return result.WithError(err) + return nil, err } - timeRange := prometheus.Range{ + timeRange := apiv1.Range{ Start: query.Start, End: query.End, Step: query.Step, } + span, ctx := opentracing.StartSpanFromContext(ctx, "alerting.prometheus") + span.SetTag("expr", query.Expr) + span.SetTag("start_unixnano", int64(query.Start.UnixNano())) + span.SetTag("stop_unixnano", int64(query.End.UnixNano())) + defer span.Finish() + value, err := client.QueryRange(ctx, query.Expr, timeRange) if err != nil { - return result.WithError(err) + return nil, err } queryResult, err := parseResponse(value, query) if err != nil { - return result.WithError(err) + return nil, err } - result.QueryResults = queryResult - return result + result.Results = 
queryResult + return result, nil } -func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string { +func formatLegend(metric model.Metric, query *PrometheusQuery) string { if query.LegendFormat == "" { return metric.String() } @@ -121,7 +128,7 @@ func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string { labelName := strings.Replace(string(in), "{{", "", 1) labelName = strings.Replace(labelName, "}}", "", 1) labelName = strings.TrimSpace(labelName) - if val, exists := metric[pmodel.LabelName(labelName)]; exists { + if val, exists := metric[model.LabelName(labelName)]; exists { return []byte(val) } @@ -131,7 +138,7 @@ func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string { return string(result) } -func parseQuery(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) (*PrometheusQuery, error) { +func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*PrometheusQuery, error) { queryModel := queries[0] expr, err := queryModel.Model.Get("expr").String() @@ -165,11 +172,11 @@ func parseQuery(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) (*Prom }, nil } -func parseResponse(value pmodel.Value, query *PrometheusQuery) (map[string]*tsdb.QueryResult, error) { +func parseResponse(value model.Value, query *PrometheusQuery) (map[string]*tsdb.QueryResult, error) { queryResults := make(map[string]*tsdb.QueryResult) queryRes := tsdb.NewQueryResult() - data, ok := value.(pmodel.Matrix) + data, ok := value.(model.Matrix) if !ok { return queryResults, fmt.Errorf("Unsupported result format: %s", value.Type().String()) } diff --git a/pkg/tsdb/query_context.go b/pkg/tsdb/query_context.go deleted file mode 100644 index db40ba6253c..00000000000 --- a/pkg/tsdb/query_context.go +++ /dev/null @@ -1,21 +0,0 @@ -package tsdb - -import "sync" - -type QueryContext struct { - TimeRange *TimeRange - Queries QuerySlice - Results map[string]*QueryResult - ResultsChan chan *BatchResult - Lock sync.RWMutex - BatchWaits sync.WaitGroup -} - -func NewQueryContext(queries QuerySlice, timeRange *TimeRange) *QueryContext { - return &QueryContext{ - TimeRange: timeRange, - Queries: queries, - ResultsChan: make(chan *BatchResult), - Results: make(map[string]*QueryResult), - } -} diff --git a/pkg/tsdb/query_endpoint.go b/pkg/tsdb/query_endpoint.go new file mode 100644 index 00000000000..4e4f1c9ec9e --- /dev/null +++ b/pkg/tsdb/query_endpoint.go @@ -0,0 +1,36 @@ +package tsdb + +import ( + "context" + "fmt" + + "github.com/grafana/grafana/pkg/models" +) + +type TsdbQueryEndpoint interface { + Query(ctx context.Context, ds *models.DataSource, query *TsdbQuery) (*Response, error) +} + +var registry map[string]GetTsdbQueryEndpointFn + +type GetTsdbQueryEndpointFn func(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) + +func init() { + registry = make(map[string]GetTsdbQueryEndpointFn) +} + +func getTsdbQueryEndpointFor(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) { + if fn, exists := registry[dsInfo.Type]; exists { + executor, err := fn(dsInfo) + if err != nil { + return nil, err + } + + return executor, nil + } + return nil, fmt.Errorf("Could not find executor for data source type: %s", dsInfo.Type) +} + +func RegisterTsdbQueryEndpoint(pluginId string, fn GetTsdbQueryEndpointFn) { + registry[pluginId] = fn +} diff --git a/pkg/tsdb/request.go b/pkg/tsdb/request.go index ed10a023e9c..162116fb2fa 100644 --- a/pkg/tsdb/request.go +++ b/pkg/tsdb/request.go @@ -2,62 +2,17 @@ package tsdb import ( "context" + + "github.com/grafana/grafana/pkg/models" ) -type 
HandleRequestFunc func(ctx context.Context, req *Request) (*Response, error) +type HandleRequestFunc func(ctx context.Context, dsInfo *models.DataSource, req *TsdbQuery) (*Response, error) -func HandleRequest(ctx context.Context, req *Request) (*Response, error) { - context := NewQueryContext(req.Queries, req.TimeRange) - - batches, err := getBatches(req) +func HandleRequest(ctx context.Context, dsInfo *models.DataSource, req *TsdbQuery) (*Response, error) { + endpoint, err := getTsdbQueryEndpointFor(dsInfo) if err != nil { return nil, err } - currentlyExecuting := 0 - - for _, batch := range batches { - if len(batch.Depends) == 0 { - currentlyExecuting += 1 - batch.Started = true - go batch.process(ctx, context) - } - } - - response := &Response{} - - for currentlyExecuting != 0 { - select { - case batchResult := <-context.ResultsChan: - currentlyExecuting -= 1 - - response.BatchTimings = append(response.BatchTimings, batchResult.Timings) - - if batchResult.Error != nil { - return nil, batchResult.Error - } - - for refId, result := range batchResult.QueryResults { - context.Results[refId] = result - } - - for _, batch := range batches { - // not interested in started batches - if batch.Started { - continue - } - - if batch.allDependenciesAreIn(context) { - currentlyExecuting += 1 - batch.Started = true - go batch.process(ctx, context) - } - } - case <-ctx.Done(): - return nil, ctx.Err() - } - } - - response.Results = context.Results - return response, nil + return endpoint.Query(ctx, dsInfo, req) } diff --git a/pkg/tsdb/testdata/scenarios.go b/pkg/tsdb/testdata/scenarios.go index 0c59973cd7b..0a7f1467933 100644 --- a/pkg/tsdb/testdata/scenarios.go +++ b/pkg/tsdb/testdata/scenarios.go @@ -11,7 +11,7 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -type ScenarioHandler func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult +type ScenarioHandler func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult type Scenario struct { Id string `json:"id"` @@ -29,13 +29,74 @@ func init() { logger.Debug("Initializing TestData Scenario") + registerScenario(&Scenario{ + Id: "exponential_heatmap_bucket_data", + Name: "Exponential heatmap bucket data", + + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + to := context.TimeRange.GetToAsMsEpoch() + + var series []*tsdb.TimeSeries + start := 1 + factor := 2 + for i := 0; i < 10; i++ { + timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() + serie := &tsdb.TimeSeries{Name: strconv.Itoa(start)} + start *= factor + + points := make(tsdb.TimeSeriesPoints, 0) + for j := int64(0); j < 100 && timeWalkerMs < to; j++ { + v := float64(rand.Int63n(100)) + points = append(points, tsdb.NewTimePoint(null.FloatFrom(v), float64(timeWalkerMs))) + timeWalkerMs += query.IntervalMs * 50 + } + + serie.Points = points + series = append(series, serie) + } + + queryRes := tsdb.NewQueryResult() + queryRes.Series = append(queryRes.Series, series...) 
+ return queryRes + }, + }) + + registerScenario(&Scenario{ + Id: "linear_heatmap_bucket_data", + Name: "Linear heatmap bucket data", + + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { + to := context.TimeRange.GetToAsMsEpoch() + + var series []*tsdb.TimeSeries + for i := 0; i < 10; i++ { + timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() + serie := &tsdb.TimeSeries{Name: strconv.Itoa(i * 10)} + + points := make(tsdb.TimeSeriesPoints, 0) + for j := int64(0); j < 100 && timeWalkerMs < to; j++ { + v := float64(rand.Int63n(100)) + points = append(points, tsdb.NewTimePoint(null.FloatFrom(v), float64(timeWalkerMs))) + timeWalkerMs += query.IntervalMs * 50 + } + + serie.Points = points + series = append(series, serie) + } + + queryRes := tsdb.NewQueryResult() + queryRes.Series = append(queryRes.Series, series...) + return queryRes + }, + }) + registerScenario(&Scenario{ Id: "random_walk", Name: "Random Walk", - Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { - timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() - to := context.TimeRange.GetToAsMsEpoch() + Handler: func(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult { + timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch() + to := tsdbQuery.TimeRange.GetToAsMsEpoch() series := newSeriesForQuery(query) @@ -60,7 +121,7 @@ func init() { registerScenario(&Scenario{ Id: "no_data_points", Name: "No Data Points", - Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { return tsdb.NewQueryResult() }, }) @@ -68,7 +129,7 @@ func init() { registerScenario(&Scenario{ Id: "datapoints_outside_range", Name: "Datapoints Outside Range", - Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { queryRes := tsdb.NewQueryResult() series := newSeriesForQuery(query) @@ -85,7 +146,7 @@ func init() { Id: "csv_metric_values", Name: "CSV Metric Values", StringInput: "1,20,90,30,5,0", - Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { + Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult { queryRes := tsdb.NewQueryResult() stringInput := query.Model.Get("stringInput").MustString() diff --git a/pkg/tsdb/testdata/testdata.go b/pkg/tsdb/testdata/testdata.go index 6aefd8686d8..a1ab250ad37 100644 --- a/pkg/tsdb/testdata/testdata.go +++ b/pkg/tsdb/testdata/testdata.go @@ -13,7 +13,7 @@ type TestDataExecutor struct { log log.Logger } -func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { +func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { return &TestDataExecutor{ DataSource: dsInfo, log: log.New("tsdb.testdata"), @@ -21,22 +21,22 @@ func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { } func init() { - tsdb.RegisterExecutor("grafana-testdata-datasource", NewTestDataExecutor) + tsdb.RegisterTsdbQueryEndpoint("grafana-testdata-datasource", NewTestDataExecutor) } -func (e *TestDataExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { - result := &tsdb.BatchResult{} - result.QueryResults = make(map[string]*tsdb.QueryResult) +func (e *TestDataExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { + result := &tsdb.Response{} + 
result.Results = make(map[string]*tsdb.QueryResult) - for _, query := range queries { + for _, query := range tsdbQuery.Queries { scenarioId := query.Model.Get("scenarioId").MustString("random_walk") if scenario, exist := ScenarioRegistry[scenarioId]; exist { - result.QueryResults[query.RefId] = scenario.Handler(query, context) - result.QueryResults[query.RefId].RefId = query.RefId + result.Results[query.RefId] = scenario.Handler(query, tsdbQuery) + result.Results[query.RefId].RefId = query.RefId } else { e.log.Error("Scenario not found", "scenarioId", scenarioId) } } - return result + return result, nil } diff --git a/pkg/tsdb/tsdb_test.go b/pkg/tsdb/tsdb_test.go index 2b1a2372cd6..3ce85cd8c7f 100644 --- a/pkg/tsdb/tsdb_test.go +++ b/pkg/tsdb/tsdb_test.go @@ -3,60 +3,15 @@ package tsdb import ( "context" "testing" - "time" "github.com/grafana/grafana/pkg/models" . "github.com/smartystreets/goconvey/convey" ) func TestMetricQuery(t *testing.T) { - - Convey("When batches groups for query", t, func() { - - Convey("Given 3 queries for 2 data sources", func() { - request := &Request{ - Queries: QuerySlice{ - {RefId: "A", DataSource: &models.DataSource{Id: 1}}, - {RefId: "B", DataSource: &models.DataSource{Id: 1}}, - {RefId: "C", DataSource: &models.DataSource{Id: 2}}, - }, - } - - batches, err := getBatches(request) - So(err, ShouldBeNil) - - Convey("Should group into two batches", func() { - So(len(batches), ShouldEqual, 2) - }) - }) - - Convey("Given query 2 depends on query 1", func() { - request := &Request{ - Queries: QuerySlice{ - {RefId: "A", DataSource: &models.DataSource{Id: 1}}, - {RefId: "B", DataSource: &models.DataSource{Id: 2}}, - {RefId: "C", DataSource: &models.DataSource{Id: 3}, Depends: []string{"A", "B"}}, - }, - } - - batches, err := getBatches(request) - So(err, ShouldBeNil) - - Convey("Should return three batch groups", func() { - So(len(batches), ShouldEqual, 3) - }) - - Convey("Group 3 should have group 1 and 2 as dependencies", func() { - So(batches[2].Depends["A"], ShouldEqual, true) - So(batches[2].Depends["B"], ShouldEqual, true) - }) - - }) - }) - Convey("When executing request with one query", t, func() { - req := &Request{ - Queries: QuerySlice{ + req := &TsdbQuery{ + Queries: []*Query{ {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, }, } @@ -64,7 +19,7 @@ func TestMetricQuery(t *testing.T) { fakeExecutor := registerFakeExecutor() fakeExecutor.Return("A", TimeSeriesSlice{&TimeSeries{Name: "argh"}}) - res, err := HandleRequest(context.TODO(), req) + res, err := HandleRequest(context.TODO(), &models.DataSource{Id: 1, Type: "test"}, req) So(err, ShouldBeNil) Convey("Should return query results", func() { @@ -74,8 +29,8 @@ func TestMetricQuery(t *testing.T) { }) Convey("When executing one request with two queries from same data source", t, func() { - req := &Request{ - Queries: QuerySlice{ + req := &TsdbQuery{ + Queries: []*Query{ {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, {RefId: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}}, }, @@ -85,91 +40,30 @@ func TestMetricQuery(t *testing.T) { fakeExecutor.Return("A", TimeSeriesSlice{&TimeSeries{Name: "argh"}}) fakeExecutor.Return("B", TimeSeriesSlice{&TimeSeries{Name: "barg"}}) - res, err := HandleRequest(context.TODO(), req) + res, err := HandleRequest(context.TODO(), &models.DataSource{Id: 1, Type: "test"}, req) So(err, ShouldBeNil) Convey("Should return query results", func() { So(len(res.Results), ShouldEqual, 2) So(res.Results["B"].Series[0].Name, ShouldEqual, 
"barg") }) - - Convey("Should have been batched in one request", func() { - So(len(res.BatchTimings), ShouldEqual, 1) - }) - - }) - - Convey("When executing one request with three queries from different datasources", t, func() { - req := &Request{ - Queries: QuerySlice{ - {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, - {RefId: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}}, - {RefId: "C", DataSource: &models.DataSource{Id: 2, Type: "test"}}, - }, - } - - res, err := HandleRequest(context.TODO(), req) - So(err, ShouldBeNil) - - Convey("Should have been batched in two requests", func() { - So(len(res.BatchTimings), ShouldEqual, 2) - }) }) Convey("When query uses data source of unknown type", t, func() { - req := &Request{ - Queries: QuerySlice{ + req := &TsdbQuery{ + Queries: []*Query{ {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "asdasdas"}}, }, } - _, err := HandleRequest(context.TODO(), req) + _, err := HandleRequest(context.TODO(), &models.DataSource{Id: 12, Type: "testjughjgjg"}, req) So(err, ShouldNotBeNil) }) - - Convey("When executing request that depend on other query", t, func() { - req := &Request{ - Queries: QuerySlice{ - { - RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}, - }, - { - RefId: "B", DataSource: &models.DataSource{Id: 2, Type: "test"}, Depends: []string{"A"}, - }, - }, - } - - fakeExecutor := registerFakeExecutor() - fakeExecutor.HandleQuery("A", func(c *QueryContext) *QueryResult { - time.Sleep(10 * time.Millisecond) - return &QueryResult{ - Series: TimeSeriesSlice{ - &TimeSeries{Name: "Ares"}, - }} - }) - fakeExecutor.HandleQuery("B", func(c *QueryContext) *QueryResult { - return &QueryResult{ - Series: TimeSeriesSlice{ - &TimeSeries{Name: "Bres+" + c.Results["A"].Series[0].Name}, - }} - }) - - res, err := HandleRequest(context.TODO(), req) - So(err, ShouldBeNil) - - Convey("Should have been batched in two requests", func() { - So(len(res.BatchTimings), ShouldEqual, 2) - }) - - Convey("Query B should have access to Query A results", func() { - So(res.Results["B"].Series[0].Name, ShouldEqual, "Bres+Ares") - }) - }) } func registerFakeExecutor() *FakeExecutor { executor, _ := NewFakeExecutor(nil) - RegisterExecutor("test", func(dsInfo *models.DataSource) (Executor, error) { + RegisterTsdbQueryEndpoint("test", func(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) { return executor, nil }) diff --git a/public/app/app.ts b/public/app/app.ts index 3603e793d5c..059c7a6b3f0 100644 --- a/public/app/app.ts +++ b/public/app/app.ts @@ -9,6 +9,9 @@ import 'angular-sanitize'; import 'angular-dragdrop'; import 'angular-bindonce'; import 'angular-ui'; +import 'react'; +import 'react-dom'; +import 'ngreact'; import $ from 'jquery'; import angular from 'angular'; @@ -84,6 +87,7 @@ export class GrafanaApp { 'pasvaz.bindonce', 'ui.bootstrap', 'ui.bootstrap.tpls', + 'react' ]; var module_types = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes']; diff --git a/public/app/core/components/PasswordStrength.tsx b/public/app/core/components/PasswordStrength.tsx new file mode 100644 index 00000000000..e5e30f5d721 --- /dev/null +++ b/public/app/core/components/PasswordStrength.tsx @@ -0,0 +1,38 @@ +import * as React from 'react'; +import coreModule from '../core_module'; + +export interface IProps { + password: string; +} + +export class PasswordStrength extends React.Component { + + constructor(props) { + super(props); + } + + render() { + let strengthText = "strength: strong like a bull."; + let 
strengthClass = "password-strength-good"; + + if (this.props.password.length < 4) { + strengthText = "strength: weak sauce."; + strengthClass = "password-strength-bad"; + } + + if (this.props.password.length <= 8) { + strengthText = "strength: you can do better."; + strengthClass = "password-strength-ok"; + } + + return ( +
+ {strengthText} +
+ ); + } +} + +coreModule.directive('passwordStrength', function(reactDirective) { + return reactDirective(PasswordStrength, ['password']); +}); diff --git a/public/app/core/components/code_editor/code_editor.ts b/public/app/core/components/code_editor/code_editor.ts index 7c41eb3b3a2..61ebb17d9b3 100644 --- a/public/app/core/components/code_editor/code_editor.ts +++ b/public/app/core/components/code_editor/code_editor.ts @@ -40,11 +40,11 @@ const DEFAULT_MAX_LINES = 10; const DEFAULT_TAB_SIZE = 2; const DEFAULT_BEHAVIOURS = true; -const GRAFANA_MODULES = ['mode-prometheus', 'snippets-prometheus', 'theme-grafana-dark']; +const GRAFANA_MODULES = ['theme-grafana-dark']; const GRAFANA_MODULE_BASE = "public/app/core/components/code_editor/"; // Trick for loading additional modules -function setModuleUrl(moduleType, name) { +function setModuleUrl(moduleType, name, pluginBaseUrl = null) { let baseUrl = ACE_SRC_BASE; let aceModeName = `ace/${moduleType}/${name}`; let moduleName = `${moduleType}-${name}`; @@ -54,6 +54,10 @@ function setModuleUrl(moduleType, name) { baseUrl = GRAFANA_MODULE_BASE; } + if (pluginBaseUrl) { + baseUrl = pluginBaseUrl + '/'; + } + if (moduleType === 'snippets') { componentName = `${moduleType}/${name}.js`; } @@ -159,8 +163,8 @@ function link(scope, elem, attrs) { function setLangMode(lang) { let aceModeName = `ace/mode/${lang}`; - setModuleUrl("mode", lang); - setModuleUrl("snippets", lang); + setModuleUrl("mode", lang, scope.datasource.meta.baseUrl || null); + setModuleUrl("snippets", lang, scope.datasource.meta.baseUrl || null); editorSession.setMode(aceModeName); ace.config.loadModule("ace/ext/language_tools", (language_tools) => { @@ -210,6 +214,7 @@ export function codeEditorDirective() { template: editorTemplate, scope: { content: "=", + datasource: "=", codeEditorFocus: "<", onChange: "&", getCompleter: "&" diff --git a/public/app/core/components/collapse_box.ts b/public/app/core/components/collapse_box.ts deleted file mode 100644 index 7fc234cb583..00000000000 --- a/public/app/core/components/collapse_box.ts +++ /dev/null @@ -1,58 +0,0 @@ -/// - -import coreModule from 'app/core/core_module'; - -const template = ` - -`; - -export class CollapseBoxCtrl { - isOpen: boolean; - stateChanged: () => void; - - /** @ngInject **/ - constructor(private $timeout) { - this.isOpen = false; - } - - toggle() { - this.isOpen = !this.isOpen; - this.$timeout(() => { - this.stateChanged(); - }); - } -} - -export function collapseBox() { - return { - restrict: 'E', - template: template, - controller: CollapseBoxCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - "title": "@", - "isOpen": "=?", - "stateChanged": "&" - }, - transclude: { - 'actions': '?collapseBoxActions', - 'body': 'collapseBoxBody', - }, - link: function(scope, elem, attrs) { - } - }; -} - -coreModule.directive('collapseBox', collapseBox); diff --git a/public/app/core/components/colorpicker.ts b/public/app/core/components/colorpicker.ts index 1834a1eb8a1..f64903ad0b6 100644 --- a/public/app/core/components/colorpicker.ts +++ b/public/app/core/components/colorpicker.ts @@ -1,8 +1,5 @@ /// -import config from 'app/core/config'; -import _ from 'lodash'; -import $ from 'jquery'; import coreModule from 'app/core/core_module'; var template = ` @@ -37,7 +34,7 @@ export class ColorPickerCtrl { showAxisControls: boolean; /** @ngInject */ - constructor(private $scope, private $rootScope) { + constructor(private $scope, $rootScope) { this.colors = $rootScope.colors; this.autoClose = $scope.autoClose; 
this.series = $scope.series; diff --git a/public/app/core/components/dashboard_selector.ts b/public/app/core/components/dashboard_selector.ts index 4209ff3f8dc..f68e70a17c0 100644 --- a/public/app/core/components/dashboard_selector.ts +++ b/public/app/core/components/dashboard_selector.ts @@ -1,8 +1,5 @@ /// -import config from 'app/core/config'; -import _ from 'lodash'; -import $ from 'jquery'; import coreModule from 'app/core/core_module'; var template = ` diff --git a/public/app/core/components/form_dropdown/form_dropdown.ts b/public/app/core/components/form_dropdown/form_dropdown.ts index 6e3156df1fc..b1cbd9b6ced 100644 --- a/public/app/core/components/form_dropdown/form_dropdown.ts +++ b/public/app/core/components/form_dropdown/form_dropdown.ts @@ -1,6 +1,5 @@ /// -import config from 'app/core/config'; import _ from 'lodash'; import $ from 'jquery'; import coreModule from '../../core_module'; diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index ff5751f33dc..0eee3ae43fd 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -1,9 +1,7 @@ /// import config from 'app/core/config'; -import store from 'app/core/store'; import _ from 'lodash'; -import angular from 'angular'; import $ from 'jquery'; import coreModule from 'app/core/core_module'; diff --git a/public/app/core/components/help/help.ts b/public/app/core/components/help/help.ts index 943a21f3c5f..bf84b43b5f9 100644 --- a/public/app/core/components/help/help.ts +++ b/public/app/core/components/help/help.ts @@ -8,7 +8,7 @@ export class HelpCtrl { shortcuts: any; /** @ngInject */ - constructor(private $scope, $sce) { + constructor() { this.tabIndex = 0; this.shortcuts = { 'Global': [ diff --git a/public/app/core/components/info_popover.ts b/public/app/core/components/info_popover.ts index df90728157d..a6ea853b7bb 100644 --- a/public/app/core/components/info_popover.ts +++ b/public/app/core/components/info_popover.ts @@ -1,7 +1,6 @@ /// import _ from 'lodash'; -import $ from 'jquery'; import coreModule from 'app/core/core_module'; import Drop from 'tether-drop'; diff --git a/public/app/core/components/json_explorer/helpers.ts b/public/app/core/components/json_explorer/helpers.ts index 2f2b80e73ea..7c4429d7c76 100644 --- a/public/app/core/components/json_explorer/helpers.ts +++ b/public/app/core/components/json_explorer/helpers.ts @@ -60,7 +60,7 @@ export function getValuePreview (object: Object, value: string): string { if (type === 'string') { value = '"' + escapeString(value) + '"'; } - if (type === 'function'){ + if (type === 'function') { // Remove content of the function return object.toString() diff --git a/public/app/core/components/json_explorer/json_explorer.ts b/public/app/core/components/json_explorer/json_explorer.ts index c2810f23b54..a7079477abc 100644 --- a/public/app/core/components/json_explorer/json_explorer.ts +++ b/public/app/core/components/json_explorer/json_explorer.ts @@ -6,7 +6,6 @@ import { getObjectName, getType, getValuePreview, - getPreview, cssClass, createElement } from './helpers'; @@ -191,7 +190,7 @@ export class JsonExplorer { if (this.element) { if (this.isOpen) { this.appendChildren(this.config.animateOpen); - } else{ + } else { this.removeChildren(this.config.animateClose); } this.element.classList.toggle(cssClass('open')); diff --git a/public/app/core/components/layout_selector/layout_selector.ts b/public/app/core/components/layout_selector/layout_selector.ts index e9d90aa1f48..b2071723677 100644 
--- a/public/app/core/components/layout_selector/layout_selector.ts +++ b/public/app/core/components/layout_selector/layout_selector.ts @@ -1,9 +1,6 @@ /// -import config from 'app/core/config'; import store from 'app/core/store'; -import _ from 'lodash'; -import $ from 'jquery'; import coreModule from 'app/core/core_module'; var template = ` diff --git a/public/app/core/components/navbar/navbar.html b/public/app/core/components/navbar/navbar.html index 085e2eef920..e160d3b3eed 100644 --- a/public/app/core/components/navbar/navbar.html +++ b/public/app/core/components/navbar/navbar.html @@ -1,5 +1,5 @@ diff --git a/public/app/features/dashboard/export/export_modal.ts b/public/app/features/dashboard/export/export_modal.ts index 6c76b7e824f..6b877ffd239 100644 --- a/public/app/features/dashboard/export/export_modal.ts +++ b/public/app/features/dashboard/export/export_modal.ts @@ -1,11 +1,7 @@ /// -import kbn from 'app/core/utils/kbn'; import angular from 'angular'; import coreModule from 'app/core/core_module'; -import appEvents from 'app/core/app_events'; -import config from 'app/core/config'; -import _ from 'lodash'; import {DashboardExporter} from './exporter'; @@ -15,7 +11,7 @@ export class DashExportCtrl { dismiss: () => void; /** @ngInject */ - constructor(private backendSrv, private dashboardSrv, datasourceSrv, private $scope) { + constructor(private dashboardSrv, datasourceSrv, private $scope) { this.exporter = new DashboardExporter(datasourceSrv); this.exporter.makeExportable(this.dashboardSrv.getCurrent()).then(dash => { diff --git a/public/app/features/dashboard/export/exporter.ts b/public/app/features/dashboard/export/exporter.ts index d76e782905b..9bbcd8ab3dc 100644 --- a/public/app/features/dashboard/export/exporter.ts +++ b/public/app/features/dashboard/export/exporter.ts @@ -1,9 +1,7 @@ /// import config from 'app/core/config'; -import angular from 'angular'; import _ from 'lodash'; - import {DynamicDashboardSrv} from '../dynamic_dashboard_srv'; export class DashboardExporter { @@ -40,7 +38,7 @@ export class DashboardExporter { var templateizeDatasourceUsage = obj => { // ignore data source properties that contain a variable if (obj.datasource && obj.datasource.indexOf('$') === 0) { - if (variableLookup[obj.datasource.substring(1)]){ + if (variableLookup[obj.datasource.substring(1)]) { return; } } diff --git a/public/app/features/dashboard/export_data/export_data_modal.ts b/public/app/features/dashboard/export_data/export_data_modal.ts index 559d03c5dca..f4bc5d3f2ea 100644 --- a/public/app/features/dashboard/export_data/export_data_modal.ts +++ b/public/app/features/dashboard/export_data/export_data_modal.ts @@ -10,8 +10,6 @@ export class ExportDataModalCtrl { asRows: Boolean = true; dateTimeFormat: String = 'YYYY-MM-DDTHH:mm:ssZ'; excel: false; - /** @ngInject */ - constructor(private $scope) { } export() { if (this.panel === 'table') { diff --git a/public/app/features/dashboard/history/history.ts b/public/app/features/dashboard/history/history.ts index f199d84ffb4..bddcf6bf8d6 100644 --- a/public/app/features/dashboard/history/history.ts +++ b/public/app/features/dashboard/history/history.ts @@ -26,14 +26,12 @@ export class HistoryListCtrl { isNewLatest: boolean; /** @ngInject */ - constructor(private $scope, - private $route, + constructor(private $route, private $rootScope, private $location, - private $window, - private $timeout, private $q, - private historySrv: HistorySrv) { + private historySrv: HistorySrv, + public $scope) { this.appending = false; this.diff = 
'basic'; diff --git a/public/app/features/dashboard/import/dash_import.ts b/public/app/features/dashboard/import/dash_import.ts index ff71df82dd6..8f3de6adc60 100644 --- a/public/app/features/dashboard/import/dash_import.ts +++ b/public/app/features/dashboard/import/dash_import.ts @@ -1,8 +1,6 @@ /// -import kbn from 'app/core/utils/kbn'; import coreModule from 'app/core/core_module'; -import appEvents from 'app/core/app_events'; import config from 'app/core/config'; import _ from 'lodash'; @@ -19,7 +17,7 @@ export class DashImportCtrl { gnetInfo: any; /** @ngInject */ - constructor(private backendSrv, private $location, private $scope, private $routeParams) { + constructor(private backendSrv, private $location, private $scope, $routeParams) { this.step = 1; this.nameExists = false; diff --git a/public/app/features/dashboard/model.ts b/public/app/features/dashboard/model.ts index 01c805694a6..3eb3d25f4fe 100644 --- a/public/app/features/dashboard/model.ts +++ b/public/app/features/dashboard/model.ts @@ -1,6 +1,5 @@ /// -import config from 'app/core/config'; import angular from 'angular'; import moment from 'moment'; import _ from 'lodash'; @@ -238,6 +237,9 @@ export class DashboardModel { delete newPanel.repeatIteration; delete newPanel.repeatPanelId; delete newPanel.scopedVars; + if (newPanel.alert) { + delete newPanel.thresholds; + } delete newPanel.alert; row.addPanel(newPanel); diff --git a/public/app/features/dashboard/row/add_panel.ts b/public/app/features/dashboard/row/add_panel.ts index 1ea0cc5159e..13269bdb8b4 100644 --- a/public/app/features/dashboard/row/add_panel.ts +++ b/public/app/features/dashboard/row/add_panel.ts @@ -3,7 +3,7 @@ import _ from 'lodash'; import config from 'app/core/config'; -import {coreModule, appEvents} from 'app/core/core'; +import {coreModule} from 'app/core/core'; export class AddPanelCtrl { row: any; @@ -15,7 +15,7 @@ export class AddPanelCtrl { panelSearch: any; /** @ngInject */ - constructor(private $scope, private $timeout, private $rootScope) { + constructor(private $timeout, private $rootScope) { this.row = this.rowCtrl.row; this.dashboard = this.rowCtrl.dashboard; this.activeIndex = 0; diff --git a/public/app/features/dashboard/row/options.ts b/public/app/features/dashboard/row/options.ts index 8f19b734d8a..5de0186ac95 100644 --- a/public/app/features/dashboard/row/options.ts +++ b/public/app/features/dashboard/row/options.ts @@ -1,8 +1,5 @@ /// -import _ from 'lodash'; - -import config from 'app/core/config'; import {coreModule} from 'app/core/core'; // import VirtualScroll from 'virtual-scroll'; // console.log(VirtualScroll); @@ -14,7 +11,7 @@ export class RowOptionsCtrl { fontSizes = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']; /** @ngInject */ - constructor(private $scope, private $timeout, private $rootScope) { + constructor() { this.row = this.rowCtrl.row; this.dashboard = this.rowCtrl.dashboard; this.row.titleSize = this.row.titleSize || 'h6'; diff --git a/public/app/features/dashboard/row/row_ctrl.ts b/public/app/features/dashboard/row/row_ctrl.ts index 34b03b3c3be..496335eaac5 100644 --- a/public/app/features/dashboard/row/row_ctrl.ts +++ b/public/app/features/dashboard/row/row_ctrl.ts @@ -3,7 +3,7 @@ import _ from 'lodash'; import config from 'app/core/config'; -import {coreModule, appEvents} from 'app/core/core'; +import {coreModule} from 'app/core/core'; import './options'; import './add_panel'; @@ -186,7 +186,6 @@ coreModule.directive('panelWidth', function($rootScope) { coreModule.directive('panelDropZone', function($timeout) { return 
function(scope, element) { var row = scope.ctrl.row; - var dashboard = scope.ctrl.dashboard; var indrag = false; var textEl = element.find('.panel-drop-zone-text'); diff --git a/public/app/features/dashboard/row/row_model.ts b/public/app/features/dashboard/row/row_model.ts index 037a7ad1a4a..ecdffb8c8a8 100644 --- a/public/app/features/dashboard/row/row_model.ts +++ b/public/app/features/dashboard/row/row_model.ts @@ -1,7 +1,7 @@ /// import _ from 'lodash'; -import {Emitter, contextSrv, appEvents, assignModelProperties} from 'app/core/core'; +import {Emitter, appEvents, assignModelProperties} from 'app/core/core'; export class DashboardRow { panels: any; diff --git a/public/app/features/dashboard/save_as_modal.ts b/public/app/features/dashboard/save_as_modal.ts index 3a0a877f1eb..7e0b754d559 100644 --- a/public/app/features/dashboard/save_as_modal.ts +++ b/public/app/features/dashboard/save_as_modal.ts @@ -36,7 +36,7 @@ export class SaveDashboardAsModalCtrl { dismiss: () => void; /** @ngInject */ - constructor(private $scope, private dashboardSrv) { + constructor(private dashboardSrv) { var dashboard = this.dashboardSrv.getCurrent(); this.clone = dashboard.getSaveModelClone(); this.clone.id = null; diff --git a/public/app/features/dashboard/save_modal.ts b/public/app/features/dashboard/save_modal.ts index 36bae222243..d9b5349fbf6 100644 --- a/public/app/features/dashboard/save_modal.ts +++ b/public/app/features/dashboard/save_modal.ts @@ -55,7 +55,7 @@ export class SaveDashboardModalCtrl { dismiss: () => void; /** @ngInject */ - constructor(private $scope, private dashboardSrv) { + constructor(private dashboardSrv) { this.message = ''; this.max = 64; } diff --git a/public/app/features/dashboard/specs/dashboard_model_specs.ts b/public/app/features/dashboard/specs/dashboard_model_specs.ts index 1c7415d342e..6ca84ba89f3 100644 --- a/public/app/features/dashboard/specs/dashboard_model_specs.ts +++ b/public/app/features/dashboard/specs/dashboard_model_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect} from 'test/lib/common'; import _ from 'lodash'; import {DashboardModel} from '../model'; diff --git a/public/app/features/dashboard/specs/dashboard_srv_specs.ts b/public/app/features/dashboard/specs/dashboard_srv_specs.ts index 666a3bc0bf2..634f0e82f38 100644 --- a/public/app/features/dashboard/specs/dashboard_srv_specs.ts +++ b/public/app/features/dashboard/specs/dashboard_srv_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach} from 'test/lib/common'; import {DashboardSrv} from '../dashboard_srv'; diff --git a/public/app/features/dashboard/specs/dynamic_dashboard_srv_specs.ts b/public/app/features/dashboard/specs/dynamic_dashboard_srv_specs.ts index 93feddc0654..cb7ea3bed46 100644 --- a/public/app/features/dashboard/specs/dynamic_dashboard_srv_specs.ts +++ b/public/app/features/dashboard/specs/dynamic_dashboard_srv_specs.ts @@ -1,6 +1,6 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; -import {DashboardSrv} from '../dashboard_srv'; +import '../dashboard_srv'; import {DynamicDashboardSrv} from '../dynamic_dashboard_srv'; function dynamicDashScenario(desc, func) { diff --git a/public/app/features/dashboard/specs/exporter_specs.ts b/public/app/features/dashboard/specs/exporter_specs.ts 
index 0aaadae2b63..9364cea8c47 100644 --- a/public/app/features/dashboard/specs/exporter_specs.ts +++ b/public/app/features/dashboard/specs/exporter_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; import _ from 'lodash'; import config from 'app/core/config'; diff --git a/public/app/features/dashboard/specs/history_ctrl_specs.ts b/public/app/features/dashboard/specs/history_ctrl_specs.ts index 0bb09546a34..d6365470887 100644 --- a/public/app/features/dashboard/specs/history_ctrl_specs.ts +++ b/public/app/features/dashboard/specs/history_ctrl_specs.ts @@ -3,14 +3,14 @@ import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/co import _ from 'lodash'; import {HistoryListCtrl} from 'app/features/dashboard/history/history'; import { versions, compare, restore } from 'test/mocks/history-mocks'; -import config from 'app/core/config'; describe('HistoryListCtrl', function() { var RESTORE_ID = 4; var ctx: any = {}; var versionsResponse: any = versions(); - var restoreResponse: any = restore(7, RESTORE_ID); + + restore(7, RESTORE_ID); beforeEach(angularMocks.module('grafana.core')); beforeEach(angularMocks.module('grafana.services')); @@ -65,7 +65,7 @@ describe('HistoryListCtrl', function() { expect(ctx.ctrl.mode).to.be('list'); expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' }); expect(ctx.ctrl.canCompare).to.be(false); - expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be.undefined; + expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be(undefined); }); it('should indicate loading has finished', function() { @@ -103,7 +103,7 @@ describe('HistoryListCtrl', function() { it('should reset the controller\'s state', function() { expect(ctx.ctrl.mode).to.be('list'); expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' }); - expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be.undefined; + expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be(undefined); }); it('should indicate loading has finished', function() { diff --git a/public/app/features/dashboard/specs/history_srv_specs.ts b/public/app/features/dashboard/specs/history_srv_specs.ts index 4678759c438..e72a626467e 100644 --- a/public/app/features/dashboard/specs/history_srv_specs.ts +++ b/public/app/features/dashboard/specs/history_srv_specs.ts @@ -1,8 +1,8 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import helpers from 'test/specs/helpers'; -import HistorySrv from '../history/history_srv'; -import { versions, compare, restore } from 'test/mocks/history-mocks'; +import '../history/history_srv'; +import {versions, restore} from 'test/mocks/history-mocks'; describe('historySrv', function() { var ctx = new helpers.ServiceTestContext(); diff --git a/public/app/features/dashboard/specs/row_model_specs.ts b/public/app/features/dashboard/specs/row_model_specs.ts deleted file mode 100644 index d7f573b485a..00000000000 --- a/public/app/features/dashboard/specs/row_model_specs.ts +++ /dev/null @@ -1,10 +0,0 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; - -import _ from 'lodash'; -import {DashboardRow} from '../row/row_model'; - -describe('DashboardRow', function() { - -}); - - diff --git a/public/app/features/dashboard/specs/time_srv_specs.ts b/public/app/features/dashboard/specs/time_srv_specs.ts 
index 3b008738430..3c0f2637f0e 100644 --- a/public/app/features/dashboard/specs/time_srv_specs.ts +++ b/public/app/features/dashboard/specs/time_srv_specs.ts @@ -1,8 +1,7 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import helpers from 'test/specs/helpers'; -import _ from 'lodash'; -import TimeSrv from '../time_srv'; +import '../time_srv'; import moment from 'moment'; describe('timeSrv', function() { diff --git a/public/app/features/dashboard/submenu/submenu.ts b/public/app/features/dashboard/submenu/submenu.ts index 69081f01cab..a18ca217f63 100644 --- a/public/app/features/dashboard/submenu/submenu.ts +++ b/public/app/features/dashboard/submenu/submenu.ts @@ -11,7 +11,6 @@ export class SubmenuCtrl { /** @ngInject */ constructor(private $rootScope, private variableSrv, - private templateSrv, private $location) { this.annotations = this.dashboard.templating.list; this.variables = this.variableSrv.variables; diff --git a/public/app/features/dashboard/time_srv.ts b/public/app/features/dashboard/time_srv.ts index abde4152b63..db6ba437e73 100644 --- a/public/app/features/dashboard/time_srv.ts +++ b/public/app/features/dashboard/time_srv.ts @@ -1,7 +1,5 @@ /// -import config from 'app/core/config'; -import angular from 'angular'; import moment from 'moment'; import _ from 'lodash'; import coreModule from 'app/core/core_module'; @@ -116,16 +114,14 @@ class TimeSrv { setAutoRefresh(interval) { this.dashboard.refresh = interval; + this.cancelNextRefresh(); if (interval) { var intervalMs = kbn.interval_to_ms(interval); - this.$timeout(() => { + this.refreshTimer = this.timer.register(this.$timeout(() => { this.startNextRefreshTimer(intervalMs); this.refreshDashboard(); - }, intervalMs); - - } else { - this.cancelNextRefresh(); + }, intervalMs)); } // update url diff --git a/public/app/features/dashboard/unsaved_changes_modal.ts b/public/app/features/dashboard/unsaved_changes_modal.ts index 27ff094ca47..cacfbe0f045 100644 --- a/public/app/features/dashboard/unsaved_changes_modal.ts +++ b/public/app/features/dashboard/unsaved_changes_modal.ts @@ -10,7 +10,7 @@ const template = ` Unsaved changes - + @@ -35,7 +35,7 @@ export class UnsavedChangesModalCtrl { dismiss: () => void; /** @ngInject */ - constructor(private $rootScope, private unsavedChangesSrv) { + constructor(private unsavedChangesSrv) { } discard() { diff --git a/public/app/features/dashboard/upload.ts b/public/app/features/dashboard/upload.ts index 57be7b8fd11..45a1f1b4a0c 100644 --- a/public/app/features/dashboard/upload.ts +++ b/public/app/features/dashboard/upload.ts @@ -1,6 +1,5 @@ /// -import kbn from 'app/core/utils/kbn'; import coreModule from 'app/core/core_module'; var template = ` diff --git a/public/app/features/org/org_users_ctrl.ts b/public/app/features/org/org_users_ctrl.ts index 1fa624e4a7e..5b77139ebe5 100644 --- a/public/app/features/org/org_users_ctrl.ts +++ b/public/app/features/org/org_users_ctrl.ts @@ -1,7 +1,6 @@ /// import config from 'app/core/config'; -import _ from 'lodash'; import coreModule from 'app/core/core_module'; import Remarkable from 'remarkable'; @@ -18,7 +17,7 @@ export class OrgUsersCtrl { addUsersBtnName: string; /** @ngInject */ - constructor(private $scope, private $http, private backendSrv, navModelSrv, $sce) { + constructor(private $scope, private backendSrv, navModelSrv, $sce) { this.user = { loginOrEmail: '', role: 'Viewer', diff --git 
a/public/app/features/org/prefs_control.ts b/public/app/features/org/prefs_control.ts index 3ea3ec95b5f..07b277680ad 100644 --- a/public/app/features/org/prefs_control.ts +++ b/public/app/features/org/prefs_control.ts @@ -1,7 +1,6 @@ /// import config from 'app/core/config'; -import _ from 'lodash'; import coreModule from 'app/core/core_module'; export class PrefsControlCtrl { diff --git a/public/app/features/org/profile_ctrl.ts b/public/app/features/org/profile_ctrl.ts index 02f20553e3f..84999abafad 100644 --- a/public/app/features/org/profile_ctrl.ts +++ b/public/app/features/org/profile_ctrl.ts @@ -2,7 +2,6 @@ import config from 'app/core/config'; import {coreModule} from 'app/core/core'; -import _ from 'lodash'; export class ProfileCtrl { user: any; diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 0e94df437a8..df73efb9cdb 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -1,6 +1,5 @@ /// -import angular from 'angular'; import config from 'app/core/config'; import $ from 'jquery'; import _ from 'lodash'; @@ -10,7 +9,6 @@ import {PanelCtrl} from './panel_ctrl'; import * as rangeUtil from 'app/core/utils/rangeutil'; import * as dateMath from 'app/core/utils/datemath'; -import {Subject} from 'vendor/npm/rxjs/Subject'; import {metricsTabDirective} from './metrics_tab'; class MetricsPanelCtrl extends PanelCtrl { diff --git a/public/app/features/panel/metrics_tab.ts b/public/app/features/panel/metrics_tab.ts index 4feaf516438..026d6fce650 100644 --- a/public/app/features/panel/metrics_tab.ts +++ b/public/app/features/panel/metrics_tab.ts @@ -1,6 +1,5 @@ /// -import _ from 'lodash'; import {DashboardModel} from '../dashboard/model'; import Remarkable from 'remarkable'; @@ -9,7 +8,7 @@ export class MetricsTabCtrl { panel: any; panelCtrl: any; datasources: any[]; - current: any; + datasourceInstance: any; nextRefId: string; dashboard: DashboardModel; panelDsValue: any; @@ -22,30 +21,33 @@ export class MetricsTabCtrl { queryOptions: any; /** @ngInject */ - constructor($scope, private $sce, private datasourceSrv, private backendSrv, private $timeout) { + constructor($scope, private $sce, datasourceSrv, private backendSrv) { this.panelCtrl = $scope.ctrl; $scope.ctrl = this; this.panel = this.panelCtrl.panel; this.dashboard = this.panelCtrl.dashboard; this.datasources = datasourceSrv.getMetricSources(); - this.panelDsValue = this.panelCtrl.panel.datasource || null; + this.panelDsValue = this.panelCtrl.panel.datasource; for (let ds of this.datasources) { if (ds.value === this.panelDsValue) { - this.current = ds; + this.datasourceInstance = ds; } } this.addQueryDropdown = {text: 'Add Query', value: null, fake: true}; + // update next ref id this.panelCtrl.nextRefId = this.dashboard.getNextQueryLetter(this.panel); this.updateDatasourceOptions(); } updateDatasourceOptions() { - this.hasQueryHelp = this.current.meta.hasQueryHelp; - this.queryOptions = this.current.meta.queryOptions; + if (this.datasourceInstance) { + this.hasQueryHelp = this.datasourceInstance.meta.hasQueryHelp; + this.queryOptions = this.datasourceInstance.meta.queryOptions; + } } getOptions(includeBuiltin) { @@ -61,7 +63,7 @@ export class MetricsTabCtrl { return; } - this.current = option.datasource; + this.datasourceInstance = option.datasource; this.panelCtrl.setDatasource(option.datasource); this.updateDatasourceOptions(); } @@ -71,7 +73,6 @@ export class MetricsTabCtrl { return; } - var target: any 
= {isNew: true}; this.panelCtrl.addQuery({isNew: true, datasource: option.datasource.name}); this.addQueryDropdown = {text: 'Add Query', value: null, fake: true}; } @@ -85,7 +86,7 @@ export class MetricsTabCtrl { this.queryTroubleshooterOpen = false; this.helpOpen = !this.helpOpen; - this.backendSrv.get(`/api/plugins/${this.current.meta.id}/markdown/query_help`).then(res => { + this.backendSrv.get(`/api/plugins/${this.datasourceInstance.meta.id}/markdown/query_help`).then(res => { var md = new Remarkable(); this.helpHtml = this.$sce.trustAsHtml(md.render(res)); }); diff --git a/public/app/features/panel/panel_ctrl.ts b/public/app/features/panel/panel_ctrl.ts index b61f722974d..767f61d971a 100644 --- a/public/app/features/panel/panel_ctrl.ts +++ b/public/app/features/panel/panel_ctrl.ts @@ -2,7 +2,6 @@ import config from 'app/core/config'; import _ from 'lodash'; -import angular from 'angular'; import $ from 'jquery'; import {profiler} from 'app/core/profiler'; import Remarkable from 'remarkable'; @@ -213,7 +212,6 @@ export class PanelCtrl { } replacePanel(newPanel, oldPanel) { - var row = this.row; var index = _.indexOf(this.row.panels, oldPanel); this.row.panels.splice(index, 1); diff --git a/public/app/features/panel/panel_directive.ts b/public/app/features/panel/panel_directive.ts index 1b6805179c0..13a4c2be368 100644 --- a/public/app/features/panel/panel_directive.ts +++ b/public/app/features/panel/panel_directive.ts @@ -2,9 +2,7 @@ import angular from 'angular'; import $ from 'jquery'; -import _ from 'lodash'; import Drop from 'tether-drop'; -import {appEvents} from 'app/core/core'; var module = angular.module('grafana.directives'); diff --git a/public/app/features/panel/panel_editor_tab.ts b/public/app/features/panel/panel_editor_tab.ts index 9bbb979bcc0..e9851f5fe15 100644 --- a/public/app/features/panel/panel_editor_tab.ts +++ b/public/app/features/panel/panel_editor_tab.ts @@ -1,7 +1,6 @@ /// import angular from 'angular'; -import config from 'app/core/config'; var directiveModule = angular.module('grafana.directives'); diff --git a/public/app/features/panel/partials/metrics_tab.html b/public/app/features/panel/partials/metrics_tab.html index c3643152716..03a2c374a14 100644 --- a/public/app/features/panel/partials/metrics_tab.html +++ b/public/app/features/panel/partials/metrics_tab.html @@ -73,7 +73,7 @@ -
+ [markup-only change: HTML content lost in extraction]
@@ -89,11 +89,11 @@ {{ctrl.panelCtrl.nextRefId}}
[markup-only hunk: HTML content lost in extraction]
diff --git a/public/app/partials/signup_step2.html b/public/app/partials/signup_step2.html
index afb6d0d4fc4..b0e6c6ad88a 100644
--- a/public/app/partials/signup_step2.html
+++ b/public/app/partials/signup_step2.html
@@ -55,7 +55,7 @@
[markup-only hunk: HTML content lost in extraction]
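A few hunks above, metrics_tab.ts renames `current` to `datasourceInstance` and makes `updateDatasourceOptions` tolerate a missing match for `panel.datasource`. A minimal standalone sketch of that lookup-plus-guard; the `DsInstance` shape and the sample values are illustrative assumptions, not code from this diff:

```ts
// Standalone sketch of the lookup + guard added in metrics_tab.ts.
interface DsInstance {
  value: string | null;
  meta: { hasQueryHelp?: boolean; queryOptions?: any };
}

function findDatasourceInstance(
  datasources: DsInstance[],
  panelDsValue: string | null,
): DsInstance | undefined {
  // panel.datasource can be null (default datasource) or name a source
  // that no longer exists, so this lookup may legitimately find nothing.
  return datasources.find(ds => ds.value === panelDsValue);
}

const datasourceInstance = findDatasourceInstance([], null);
// Mirrors updateDatasourceOptions(): only read meta when a match exists.
const hasQueryHelp = datasourceInstance && datasourceInstance.meta.hasQueryHelp;
```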
diff --git a/public/app/plugins/app/testdata/datasource/query_ctrl.ts b/public/app/plugins/app/testdata/datasource/query_ctrl.ts index 6b0ad93f26c..e783584eb5d 100644 --- a/public/app/plugins/app/testdata/datasource/query_ctrl.ts +++ b/public/app/plugins/app/testdata/datasource/query_ctrl.ts @@ -2,7 +2,6 @@ import _ from 'lodash'; -import {TestDataDatasource} from './datasource'; import {QueryCtrl} from 'app/plugins/sdk'; export class TestDataQueryCtrl extends QueryCtrl { diff --git a/public/app/plugins/datasource/cloudwatch/config_ctrl.ts b/public/app/plugins/datasource/cloudwatch/config_ctrl.ts index ee76b0d15fa..7f96b8060b6 100644 --- a/public/app/plugins/datasource/cloudwatch/config_ctrl.ts +++ b/public/app/plugins/datasource/cloudwatch/config_ctrl.ts @@ -1,8 +1,5 @@ /// -import angular from 'angular'; -import _ from 'lodash'; - export class CloudWatchConfigCtrl { static templateUrl = 'partials/config.html'; current: any; diff --git a/public/app/plugins/datasource/cloudwatch/datasource.js b/public/app/plugins/datasource/cloudwatch/datasource.js index 3807464e3d5..2ec89b854c6 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.js +++ b/public/app/plugins/datasource/cloudwatch/datasource.js @@ -386,8 +386,9 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot }) .each(function(dp) { var timestamp = new Date(dp.Timestamp).getTime(); - if (lastTimestamp && (timestamp - lastTimestamp) > periodMs) { + while (lastTimestamp && (timestamp - lastTimestamp) > periodMs) { dps.push([null, lastTimestamp + periodMs]); + lastTimestamp = lastTimestamp + periodMs; } lastTimestamp = timestamp; if (!extended) { diff --git a/public/app/plugins/datasource/cloudwatch/query_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_ctrl.ts index 1a64d199603..f7f785f5261 100644 --- a/public/app/plugins/datasource/cloudwatch/query_ctrl.ts +++ b/public/app/plugins/datasource/cloudwatch/query_ctrl.ts @@ -1,7 +1,6 @@ /// import './query_parameter_ctrl'; -import _ from 'lodash'; import {QueryCtrl} from 'app/plugins/sdk'; export class CloudWatchQueryCtrl extends QueryCtrl { diff --git a/public/app/plugins/datasource/cloudwatch/specs/annotation_query_specs.ts b/public/app/plugins/datasource/cloudwatch/specs/annotation_query_specs.ts index e3a8fc3f9cb..d469ca8685b 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/annotation_query_specs.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/annotation_query_specs.ts @@ -1,5 +1,5 @@ import "../datasource"; -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import moment from 'moment'; import helpers from 'test/specs/helpers'; import {CloudWatchDatasource} from "../datasource"; diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts index dbb2ffd6129..0130aee60d3 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts @@ -1,7 +1,6 @@ import "../datasource"; -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; -import moment from 'moment'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import helpers from 'test/specs/helpers'; import {CloudWatchDatasource} from "../datasource"; diff --git a/public/app/plugins/datasource/elasticsearch/config_ctrl.ts 
b/public/app/plugins/datasource/elasticsearch/config_ctrl.ts index 5c5704eb22c..fdf941e81f7 100644 --- a/public/app/plugins/datasource/elasticsearch/config_ctrl.ts +++ b/public/app/plugins/datasource/elasticsearch/config_ctrl.ts @@ -1,6 +1,5 @@ /// -import angular from 'angular'; import _ from 'lodash'; export class ElasticConfigCtrl { diff --git a/public/app/plugins/datasource/elasticsearch/query_ctrl.ts b/public/app/plugins/datasource/elasticsearch/query_ctrl.ts index ef6f1c5cdc7..025410c5205 100644 --- a/public/app/plugins/datasource/elasticsearch/query_ctrl.ts +++ b/public/app/plugins/datasource/elasticsearch/query_ctrl.ts @@ -15,7 +15,7 @@ export class ElasticQueryCtrl extends QueryCtrl { rawQueryOld: string; /** @ngInject **/ - constructor($scope, $injector, private $rootScope, private $timeout, private uiSegmentSrv) { + constructor($scope, $injector, private $rootScope, private uiSegmentSrv) { super($scope, $injector); this.esVersion = this.datasource.esVersion; diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts index 0838dc33654..4ea574064fe 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts @@ -1,5 +1,5 @@ import _ from 'lodash'; -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import moment from 'moment'; import angular from 'angular'; import helpers from 'test/specs/helpers'; @@ -7,7 +7,6 @@ import {ElasticDatasource} from "../datasource"; describe('ElasticDatasource', function() { var ctx = new helpers.ServiceTestContext(); - var instanceSettings: any = {jsonData: {}}; beforeEach(angularMocks.module('grafana.core')); beforeEach(angularMocks.module('grafana.services')); @@ -113,7 +112,7 @@ describe('ElasticDatasource', function() { }); describe('When getting fields', function() { - var requestOptions, parts, header; + var requestOptions; beforeEach(function() { createDatasource({url: 'http://es.com', index: 'metricbeat'}); diff --git a/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts index db000fb999d..3fee81083b7 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts @@ -1,6 +1,6 @@ /// -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import moment from 'moment'; import IndexPattern from '../index_pattern'; diff --git a/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts index 8b05236d823..3440309897b 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts @@ -1,5 +1,5 @@ -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, beforeEach, it, expect} from 'test/lib/common'; import ElasticQueryBuilder from '../query_builder'; describe('ElasticQueryBuilder', function() { diff --git a/public/app/plugins/datasource/elasticsearch/specs/query_def_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/query_def_specs.ts index 
ed09f17a7c5..e89c9774769 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/query_def_specs.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/query_def_specs.ts @@ -1,5 +1,5 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import queryDef from '../query_def'; diff --git a/public/app/plugins/datasource/grafana-live/datasource.ts b/public/app/plugins/datasource/grafana-live/datasource.ts index 36605e5b6bc..c2ee5ec8655 100644 --- a/public/app/plugins/datasource/grafana-live/datasource.ts +++ b/public/app/plugins/datasource/grafana-live/datasource.ts @@ -2,8 +2,6 @@ import {liveSrv} from 'app/core/core'; -import {Observable} from 'vendor/npm/rxjs/Observable'; - class DataObservable { target: any; diff --git a/public/app/plugins/datasource/grafana-live/module.ts b/public/app/plugins/datasource/grafana-live/module.ts index b17abd02feb..6835eaf622b 100644 --- a/public/app/plugins/datasource/grafana-live/module.ts +++ b/public/app/plugins/datasource/grafana-live/module.ts @@ -1,6 +1,5 @@ /// -import angular from 'angular'; import {GrafanaStreamDS} from './datasource'; import {QueryCtrl} from 'app/plugins/sdk'; diff --git a/public/app/plugins/datasource/grafana/module.ts b/public/app/plugins/datasource/grafana/module.ts index 615a0b2d175..d03914bda19 100644 --- a/public/app/plugins/datasource/grafana/module.ts +++ b/public/app/plugins/datasource/grafana/module.ts @@ -1,6 +1,5 @@ /// -import angular from 'angular'; import {GrafanaDatasource} from './datasource'; import {QueryCtrl} from 'app/plugins/sdk'; diff --git a/public/app/plugins/datasource/graphite/config_ctrl.ts b/public/app/plugins/datasource/graphite/config_ctrl.ts index 03b501975ff..4f6ebef77ea 100644 --- a/public/app/plugins/datasource/graphite/config_ctrl.ts +++ b/public/app/plugins/datasource/graphite/config_ctrl.ts @@ -1,8 +1,5 @@ /// -import angular from 'angular'; -import _ from 'lodash'; - export class GraphiteConfigCtrl { static templateUrl = 'public/app/plugins/datasource/graphite/partials/config.html'; current: any; @@ -16,6 +13,7 @@ export class GraphiteConfigCtrl { graphiteVersions = [ {name: '0.9.x', value: '0.9'}, {name: '1.0.x', value: '1.0'}, + {name: '1.1.x', value: '1.1'}, ]; } diff --git a/public/app/plugins/datasource/graphite/datasource.ts b/public/app/plugins/datasource/graphite/datasource.ts index fd13f66c7b5..2c846ee6be8 100644 --- a/public/app/plugins/datasource/graphite/datasource.ts +++ b/public/app/plugins/datasource/graphite/datasource.ts @@ -1,9 +1,6 @@ /// -import angular from 'angular'; import _ from 'lodash'; -import moment from 'moment'; - import * as dateMath from 'app/core/utils/datemath'; /** @ngInject */ diff --git a/public/app/plugins/datasource/graphite/gfunc.js b/public/app/plugins/datasource/graphite/gfunc.js index 5186550ae0a..3a6981e147e 100644 --- a/public/app/plugins/datasource/graphite/gfunc.js +++ b/public/app/plugins/datasource/graphite/gfunc.js @@ -822,6 +822,49 @@ function (_, $) { version: '1.0' }); + addFuncDef({ + name: 'seriesByTag', + category: categories.Special, + params: [ + { name: "tagExpression", type: "string" }, + { name: "tagExpression", type: "string", optional: true }, + { name: "tagExpression", type: "string", optional: true }, + { name: "tagExpression", type: "string", optional: true }, + ], + version: '1.1' + }); + + addFuncDef({ + name: "groupByTags", + category: categories.Special, + params: [ + { + name: "function", + type: "string", + options: 
['sum', 'avg', 'maxSeries'] + }, + { name: "tag", type: "string" }, + { name: "tag", type: "string", optional: true }, + { name: "tag", type: "string", optional: true }, + { name: "tag", type: "string", optional: true }, + ], + defaultParams: ["sum", "tag"], + version: '1.1' + }); + + addFuncDef({ + name: "aliasByTags", + category: categories.Special, + params: [ + { name: "tag", type: "string" }, + { name: "tag", type: "string", optional: true }, + { name: "tag", type: "string", optional: true }, + { name: "tag", type: "string", optional: true }, + ], + defaultParams: ["tag"], + version: '1.1' + }); + _.each(categories, function(funcList, catName) { categories[catName] = _.sortBy(funcList, 'name'); }); @@ -873,7 +916,7 @@ function (_, $) { // if string contains ',' and next param is optional, split and update both if (this._hasMultipleParamsInString(strValue, index)) { _.each(strValue.split(','), function(partVal, idx) { - this.updateParam(partVal.trim(), idx); + this.updateParam(partVal.trim(), index + idx); }.bind(this)); return; } diff --git a/public/app/plugins/datasource/graphite/query_ctrl.ts b/public/app/plugins/datasource/graphite/query_ctrl.ts index 09fe5d6cb26..7127636d47f 100644 --- a/public/app/plugins/datasource/graphite/query_ctrl.ts +++ b/public/app/plugins/datasource/graphite/query_ctrl.ts @@ -3,9 +3,7 @@ import './add_graphite_func'; import './func_editor'; -import angular from 'angular'; import _ from 'lodash'; -import moment from 'moment'; import gfunc from './gfunc'; import {Parser} from './parser'; import {QueryCtrl} from 'app/plugins/sdk'; @@ -96,7 +94,8 @@ export class GraphiteQueryCtrl extends QueryCtrl { if ((index-1) >= func.def.params.length) { throw { message: 'invalid number of parameters to method ' + func.def.name }; } - this.addFunctionParameter(func, astNode.value, index, true); + var shiftBack = this.isShiftParamsBack(func); + this.addFunctionParameter(func, astNode.value, index, shiftBack); break; case 'metric': if (this.segments.length > 0) { @@ -113,6 +112,10 @@ export class GraphiteQueryCtrl extends QueryCtrl { } } + isShiftParamsBack(func) { + return func.def.name !== 'seriesByTag'; + } + getSegmentPathUpTo(index) { var arr = this.segments.slice(0, index); @@ -184,7 +187,6 @@ export class GraphiteQueryCtrl extends QueryCtrl { altSegments.unshift(this.uiSegmentSrv.newSegment('*')); return altSegments; }).catch(err => { - appEvents.emit('alert-error', ['Error', err]); return []; }); } diff --git a/public/app/plugins/datasource/graphite/specs/datasource_specs.ts b/public/app/plugins/datasource/graphite/specs/datasource_specs.ts index 0fd6f7b118a..47c2fa070fe 100644 --- a/public/app/plugins/datasource/graphite/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/graphite/specs/datasource_specs.ts @@ -1,5 +1,5 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import helpers from 'test/specs/helpers'; import {GraphiteDatasource} from "../datasource"; diff --git a/public/app/plugins/datasource/influxdb/datasource.ts b/public/app/plugins/datasource/influxdb/datasource.ts index ddab664f570..a29e520c159 100644 --- a/public/app/plugins/datasource/influxdb/datasource.ts +++ b/public/app/plugins/datasource/influxdb/datasource.ts @@ -1,6 +1,5 @@ /// -import angular from 'angular'; import _ from 'lodash'; import * as dateMath from 'app/core/utils/datemath'; diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts 
b/public/app/plugins/datasource/influxdb/query_ctrl.ts index 2240183fa48..5c5bece75a2 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -352,12 +352,13 @@ export class InfluxQueryCtrl extends QueryCtrl { this.panelCtrl.refresh(); } - getTagValueOperator(tagValue, tagOperator) { + getTagValueOperator(tagValue, tagOperator): string { if (tagOperator !== '=~' && tagOperator !== '!~' && /^\/.*\/$/.test(tagValue)) { return '=~'; } else if ((tagOperator === '=~' || tagOperator === '!~') && /^(?!\/.*\/$)/.test(tagValue)) { return '='; } + return null; } getCollapsedText() { diff --git a/public/app/plugins/datasource/influxdb/query_part.ts b/public/app/plugins/datasource/influxdb/query_part.ts index 4c6609d0f74..3ce29b10d5b 100644 --- a/public/app/plugins/datasource/influxdb/query_part.ts +++ b/public/app/plugins/datasource/influxdb/query_part.ts @@ -6,8 +6,6 @@ import { QueryPart, functionRenderer, suffixRenderer, - identityRenderer, - quotedIdentityRenderer, } from 'app/core/components/query_part/query_part'; var index = []; diff --git a/public/app/plugins/datasource/influxdb/specs/influx_query_specs.ts b/public/app/plugins/datasource/influxdb/specs/influx_query_specs.ts index 88963f9c132..47ad3498ece 100644 --- a/public/app/plugins/datasource/influxdb/specs/influx_query_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/influx_query_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import InfluxQuery from '../influx_query'; diff --git a/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts b/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts index 2daa21d7e30..ef0a742528f 100644 --- a/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import InfluxSeries from '../influx_series'; describe('when generating timeseries from influxdb response', function() { diff --git a/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts index 0120e349b50..86b1d4f08dd 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import InfluxQueryBuilder from '../query_builder'; describe('InfluxQueryBuilder', function() { diff --git a/public/app/plugins/datasource/influxdb/specs/query_part_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_part_specs.ts index ba32c961909..131eea80806 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_part_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_part_specs.ts @@ -1,5 +1,5 @@ -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import queryPart from '../query_part'; diff --git a/public/app/plugins/datasource/influxdb/specs/response_parser_specs.ts b/public/app/plugins/datasource/influxdb/specs/response_parser_specs.ts index f545753d10e..b24baec8f89 100644 --- 
a/public/app/plugins/datasource/influxdb/specs/response_parser_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/response_parser_specs.ts @@ -1,5 +1,5 @@ import _ from 'lodash'; -import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; +import {describe, it, expect} from 'test/lib/common'; import ResponseParser from '../response_parser'; describe("influxdb response parser", () => { diff --git a/public/app/plugins/datasource/mixed/module.ts b/public/app/plugins/datasource/mixed/module.ts index cdc740cf11a..54d4eeb700e 100644 --- a/public/app/plugins/datasource/mixed/module.ts +++ b/public/app/plugins/datasource/mixed/module.ts @@ -1,7 +1,5 @@ /// -import angular from 'angular'; import {MixedDatasource} from './datasource'; - export {MixedDatasource, MixedDatasource as Datasource}; diff --git a/public/app/plugins/datasource/mysql/mode-sql.js b/public/app/plugins/datasource/mysql/mode-sql.js new file mode 100644 index 00000000000..110cfb24649 --- /dev/null +++ b/public/app/plugins/datasource/mysql/mode-sql.js @@ -0,0 +1,106 @@ +// jshint ignore: start +// jscs: disable + +ace.define("ace/mode/sql_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) { +"use strict"; + +var oop = require("../lib/oop"); +var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules; + +var SqlHighlightRules = function() { + + var keywords = ( + "select|insert|update|delete|from|where|and|or|group|by|order|limit|offset|having|as|case|" + + "when|else|end|type|left|right|join|on|outer|desc|asc|union|create|table|primary|key|if|" + + "foreign|not|references|default|null|inner|cross|natural|database|drop|grant" + ); + + var builtinConstants = ( + "true|false" + ); + + var builtinFunctions = ( + "avg|count|first|last|max|min|sum|ucase|lcase|mid|len|round|rank|now|format|" + + "coalesce|ifnull|isnull|nvl" + ); + + var dataTypes = ( + "int|numeric|decimal|date|varchar|char|bigint|float|double|bit|binary|text|set|timestamp|" + + "money|real|number|integer" + ); + + var keywordMapper = this.createKeywordMapper({ + "support.function": builtinFunctions, + "keyword": keywords, + "constant.language": builtinConstants, + "storage.type": dataTypes + }, "identifier", true); + + this.$rules = { + "start" : [ { + token : "comment", + regex : "--.*$" + }, { + token : "comment", + start : "/\\*", + end : "\\*/" + }, { + token : "string", // " string + regex : '".*?"' + }, { + token : "string", // ' string + regex : "'.*?'" + }, { + token : "string", // ` string (apache drill) + regex : "`.*?`" + }, { + token : "constant.numeric", // float + regex : "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b" + }, { + token : keywordMapper, + regex : "[a-zA-Z_$][a-zA-Z0-9_$]*\\b" + }, { + token : "keyword.operator", + regex : "\\+|\\-|\\/|\\/\\/|%|<@>|@>|<@|&|\\^|~|<|>|<=|=>|==|!=|<>|=" + }, { + token : "paren.lparen", + regex : "[\\(]" + }, { + token : "paren.rparen", + regex : "[\\)]" + }, { + token : "text", + regex : "\\s+" + } ] + }; + this.normalizeRules(); +}; + +oop.inherits(SqlHighlightRules, TextHighlightRules); + +exports.SqlHighlightRules = SqlHighlightRules; +}); + +ace.define("ace/mode/sql",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/sql_highlight_rules"], function(require, exports, module) { +"use strict"; + +var oop = require("../lib/oop"); +var TextMode = require("./text").Mode; +var SqlHighlightRules = require("./sql_highlight_rules").SqlHighlightRules; + +var Mode = function() { + 
this.HighlightRules = SqlHighlightRules; + this.$behaviour = this.$defaultBehaviour; +}; +oop.inherits(Mode, TextMode); + +(function() { + + this.lineCommentStart = "--"; + + this.$id = "ace/mode/sql"; +}).call(Mode.prototype); + +exports.Mode = Mode; + +}); diff --git a/public/app/plugins/datasource/mysql/module.ts b/public/app/plugins/datasource/mysql/module.ts index 156cff61b61..c32c832d6a0 100644 --- a/public/app/plugins/datasource/mysql/module.ts +++ b/public/app/plugins/datasource/mysql/module.ts @@ -1,7 +1,5 @@ /// -import angular from 'angular'; -import _ from 'lodash'; import {MysqlDatasource} from './datasource'; import {MysqlQueryCtrl} from './query_ctrl'; diff --git a/public/app/plugins/datasource/mysql/partials/query.editor.html b/public/app/plugins/datasource/mysql/partials/query.editor.html index 15b18cbb988..cd8b4eee7c4 100644 --- a/public/app/plugins/datasource/mysql/partials/query.editor.html +++ b/public/app/plugins/datasource/mysql/partials/query.editor.html @@ -1,7 +1,7 @@
[markup-only hunk: HTML content lost in extraction]
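The new mode-sql.js above registers an Ace syntax-highlighting mode under the id `ace/mode/sql` for the MySQL query editor. A minimal sketch of how an editor instance would switch to it, using the stock Ace API; `ace` is assumed to be a global, as mode-sql.js itself relies on (it calls `ace.define` at load time), and the element id `query-editor` is invented for illustration:

```ts
// Sketch only: stock Ace editor API; "query-editor" is an invented id.
declare var ace: any; // Ace is loaded as a global script

const editor = ace.edit('query-editor');      // attach Ace to a DOM node
editor.getSession().setMode('ace/mode/sql');  // activate the SQL rules defined above
editor.getSession().setUseWrapMode(true);     // typical tweak for a single query field
```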
diff --git a/public/app/plugins/datasource/mysql/query_ctrl.ts b/public/app/plugins/datasource/mysql/query_ctrl.ts index cf3fe9f2830..6774efe9b81 100644 --- a/public/app/plugins/datasource/mysql/query_ctrl.ts +++ b/public/app/plugins/datasource/mysql/query_ctrl.ts @@ -1,8 +1,6 @@ /// -import angular from 'angular'; import _ from 'lodash'; -import {MysqlDatasource} from './datasource'; import {QueryCtrl} from 'app/plugins/sdk'; export interface MysqlQuery { diff --git a/public/app/plugins/datasource/mysql/response_parser.ts b/public/app/plugins/datasource/mysql/response_parser.ts index 1b89cc78599..5501e4fc17a 100644 --- a/public/app/plugins/datasource/mysql/response_parser.ts +++ b/public/app/plugins/datasource/mysql/response_parser.ts @@ -3,7 +3,7 @@ import _ from 'lodash'; export default class ResponseParser { - constructor(private $q){} + constructor(private $q) {} processQueryResult(res) { var data = []; @@ -47,7 +47,7 @@ export default class ResponseParser { const textColIndex = this.findColIndex(columns, '__text'); const valueColIndex = this.findColIndex(columns, '__value'); - if (columns.length === 2 && textColIndex !== -1 && valueColIndex !== -1){ + if (columns.length === 2 && textColIndex !== -1 && valueColIndex !== -1) { return this.transformToKeyValueList(rows, textColIndex, valueColIndex); } diff --git a/public/app/plugins/datasource/mysql/specs/datasource_specs.ts b/public/app/plugins/datasource/mysql/specs/datasource_specs.ts index 11ddaa34086..08d2f8922a5 100644 --- a/public/app/plugins/datasource/mysql/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/mysql/specs/datasource_specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import moment from 'moment'; import helpers from 'test/specs/helpers'; import {MysqlDatasource} from '../datasource'; diff --git a/public/app/plugins/datasource/opentsdb/config_ctrl.ts b/public/app/plugins/datasource/opentsdb/config_ctrl.ts index a9259337926..6c450ffdd12 100644 --- a/public/app/plugins/datasource/opentsdb/config_ctrl.ts +++ b/public/app/plugins/datasource/opentsdb/config_ctrl.ts @@ -1,8 +1,5 @@ /// -import angular from 'angular'; -import _ from 'lodash'; - export class OpenTsConfigCtrl { static templateUrl = 'public/app/plugins/datasource/opentsdb/partials/config.html'; current: any; diff --git a/public/app/plugins/datasource/opentsdb/specs/datasource-specs.ts b/public/app/plugins/datasource/opentsdb/specs/datasource-specs.ts index 1f63dc88667..cddcd621c01 100644 --- a/public/app/plugins/datasource/opentsdb/specs/datasource-specs.ts +++ b/public/app/plugins/datasource/opentsdb/specs/datasource-specs.ts @@ -1,4 +1,4 @@ -import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common'; +import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common'; import helpers from 'test/specs/helpers'; import {OpenTsDatasource} from "../datasource"; diff --git a/public/app/plugins/datasource/prometheus/completer.ts b/public/app/plugins/datasource/prometheus/completer.ts index e1b0984399d..2651f75df98 100644 --- a/public/app/plugins/datasource/prometheus/completer.ts +++ b/public/app/plugins/datasource/prometheus/completer.ts @@ -39,7 +39,6 @@ export class PromCompleter { } var query = prefix; - var line = editor.session.getLine(pos.row); return this.datasource.performSuggestQuery(query, true).then(metricNames => { callback(null, metricNames.map(name => { diff 
--git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index 7e0758a253c..85139336dd4 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -1,6 +1,5 @@ /// -import angular from 'angular'; import _ from 'lodash'; import moment from 'moment'; @@ -98,6 +97,7 @@ export class PrometheusDatasource { activeTargets.push(target); var query: any = {}; + query.instant = target.instant; var interval = this.intervalSeconds(options.interval); // Minimum interval ("Min step"), if specified for the query. or same as interval otherwise @@ -130,12 +130,15 @@ export class PrometheusDatasource { } var allQueryPromise = _.map(queries, query => { - return this.performTimeSeriesQuery(query, start, end); + if (!query.instant) { + return this.performTimeSeriesQuery(query, start, end); + } else { + return this.performInstantQuery(query, end); + } }); return this.$q.all(allQueryPromise).then(responseList => { var result = []; - var index = 0; _.each(responseList, (response, index) => { if (response.status === 'error') { @@ -146,7 +149,11 @@ export class PrometheusDatasource { result.push(self.transformMetricDataToTable(response.data.data.result)); } else { for (let metricData of response.data.data.result) { - result.push(self.transformMetricData(metricData, activeTargets[index], start, end)); + if (response.data.data.resultType === 'matrix') { + result.push(self.transformMetricData(metricData, activeTargets[index], start, end)); + } else if (response.data.data.resultType === 'vector') { + result.push(self.transformInstantMetricData(metricData, activeTargets[index])); + } } } }); @@ -174,6 +181,11 @@ export class PrometheusDatasource { return this._request('GET', url, query.requestId); } + performInstantQuery(query, time) { + var url = '/api/v1/query?query=' + encodeURIComponent(query.expr) + '&time=' + time; + return this._request('GET', url, query.requestId); + } + performSuggestQuery(query, cache = false) { var url = '/api/v1/label/__name__/values'; @@ -335,6 +347,9 @@ export class PrometheusDatasource { // Populate rows, set value to empty string when label not present. 
_.each(md, function(series) { + if (series.value) { + series.values = [series.value]; + } if (series.values) { for (i = 0; i < series.values.length; i++) { var values = series.values[i]; @@ -358,6 +373,13 @@ export class PrometheusDatasource { return table; } + transformInstantMetricData(md, options) { + var dps = [], metricLabel = null; + metricLabel = this.createMetricLabel(md.metric, options); + dps.push([parseFloat(md.value[1]), md.value[0] * 1000]); + return { target: metricLabel, datapoints: dps }; + } + createMetricLabel(labelData, options) { if (_.isUndefined(options) || _.isEmpty(options.legendFormat)) { return this.getOriginalMetricName(labelData); diff --git a/public/app/plugins/datasource/prometheus/metric_find_query.js b/public/app/plugins/datasource/prometheus/metric_find_query.js index a33044fddf6..6bfd324cd6e 100644 --- a/public/app/plugins/datasource/prometheus/metric_find_query.js +++ b/public/app/plugins/datasource/prometheus/metric_find_query.js @@ -95,9 +95,7 @@ function (_) { PrometheusMetricFindQuery.prototype.queryResultQuery = function(query) { var end = this.datasource.getPrometheusTime(this.range.to, true); - var url = '/api/v1/query?query=' + encodeURIComponent(query) + '&time=' + end; - - return this.datasource._request('GET', url) + return this.datasource.performInstantQuery({ expr: query }, end) .then(function(result) { return _.map(result.data.data.result, function(metricData) { var text = metricData.metric.__name__ || ''; diff --git a/public/app/core/components/code_editor/mode-prometheus.js b/public/app/plugins/datasource/prometheus/mode-prometheus.js similarity index 100% rename from public/app/core/components/code_editor/mode-prometheus.js rename to public/app/plugins/datasource/prometheus/mode-prometheus.js diff --git a/public/app/plugins/datasource/prometheus/partials/query.editor.html b/public/app/plugins/datasource/prometheus/partials/query.editor.html index a34a752bd95..98fd01eee15 100644 --- a/public/app/plugins/datasource/prometheus/partials/query.editor.html +++ b/public/app/plugins/datasource/prometheus/partials/query.editor.html @@ -1,7 +1,7 @@
[markup-only hunk: HTML content lost in extraction]
@@ -45,6 +45,8 @@
[markup-only hunk: HTML content lost in extraction]
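To close the loop on the Prometheus changes above: when `target.instant` is set, the query is sent to `/api/v1/query` instead of the range endpoint, and each vector result (one `[unixSeconds, "value"]` pair per series) flattens into a single Grafana datapoint. A condensed restatement of the added logic; the free-standing function names are mine, but the bodies mirror the hunks in datasource.ts:

```ts
// Condensed restatement of the additions to PrometheusDatasource above.
type PromVector = { metric: Record<string, string>; value: [number, string] };

function instantQueryUrl(expr: string, timeSeconds: number): string {
  // Mirrors performInstantQuery(): GET /api/v1/query?query=...&time=...
  return '/api/v1/query?query=' + encodeURIComponent(expr) + '&time=' + timeSeconds;
}

function toInstantSeries(md: PromVector, metricLabel: string) {
  // Mirrors transformInstantMetricData(): Prometheus returns
  // [unixSeconds, "value"]; Grafana datapoints are [value, unixMillis].
  return {
    target: metricLabel,
    datapoints: [[parseFloat(md.value[1]), md.value[0] * 1000]],
  };
}

// e.g. toInstantSeries({ metric: {}, value: [1505826363, "42"] }, 'up')
//      -> { target: 'up', datapoints: [[42, 1505826363000]] }
```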