Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)
merge with master
@@ -1,7 +1,7 @@
|
||||
[run]
|
||||
init_cmds = [
|
||||
["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"],
|
||||
["./bin/grafana-server"]
|
||||
["./bin/grafana-server", "cfg:app_mode=development"]
|
||||
]
|
||||
watch_all = true
|
||||
watch_dirs = [
|
||||
@@ -9,9 +9,9 @@ watch_dirs = [
|
||||
"$WORKDIR/public/views",
|
||||
"$WORKDIR/conf",
|
||||
]
|
||||
watch_exts = [".go", ".ini", ".toml", ".html"]
|
||||
watch_exts = [".go", ".ini", ".toml"]
|
||||
build_delay = 1500
|
||||
cmds = [
|
||||
["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"],
|
||||
["./bin/grafana-server"]
|
||||
["./bin/grafana-server", "cfg:app_mode=development"]
|
||||
]
|
||||
|
2
.gitignore
vendored
@@ -4,6 +4,8 @@ coverage/
|
||||
.aws-config.json
|
||||
awsconfig
|
||||
/dist
|
||||
/public/build
|
||||
/public/views/index.html
|
||||
/emails/dist
|
||||
/public_gen
|
||||
/public/vendor/npm
|
||||
|
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"browser": true,
|
||||
|
||||
"esversion": 6,
|
||||
"bitwise":false,
|
||||
"curly": true,
|
||||
"eqnull": true,
|
||||
|
@@ -17,10 +17,17 @@
|
||||
* **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/6710)
|
||||
* **CLI**: Make it possible to install plugins from any url [#5873](https://github.com/grafana/grafana/issues/5873)
|
||||
* **Prometheus**: Add support for instant queries [#5765](https://github.com/grafana/grafana/issues/5765), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Cloudwatch**: Add support for alerting using the cloudwatch datasource [#8050](https://github.com/grafana/grafana/pull/8050), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Pagerduty**: Include triggering series in pagerduty notification [#8479](https://github.com/grafana/grafana/issues/8479), thx [@rickymoorhouse](https://github.com/rickymoorhouse)
|
||||
|
||||
## Minor
|
||||
* **SMTP**: Make it possible to set a specific EHLO for the SMTP client. [#9319](https://github.com/grafana/grafana/issues/9319)
|
||||
* **Dataproxy**: Allow Grafana to renegotiate the TLS connection [#9250](https://github.com/grafana/grafana/issues/9250)
|
||||
* **HTTP**: Set net.Dialer.DualStack to true for all HTTP clients [#9367](https://github.com/grafana/grafana/pull/9367)
|
||||
* **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739)
|
||||
|
||||
## Tech
|
||||
* **Go**: Grafana is now built using golang 1.9
|
||||
|
||||
# 4.5.2 (2017-09-22)
|
||||
|
||||
|
@@ -31,7 +31,7 @@ module.exports = function (grunt) {
|
||||
require('load-grunt-tasks')(grunt);
|
||||
|
||||
// load task definitions
|
||||
grunt.loadTasks('tasks');
|
||||
grunt.loadTasks('./scripts/grunt');
|
||||
|
||||
// Utility function to load plugin settings into config
|
||||
function loadConfig(config,path) {
|
||||
@@ -46,7 +46,7 @@ module.exports = function (grunt) {
|
||||
}
|
||||
|
||||
// Merge that object with what with whatever we have here
|
||||
loadConfig(config,'./tasks/options/');
|
||||
loadConfig(config,'./scripts/grunt/options/');
|
||||
// pass the config to grunt
|
||||
grunt.initConfig(config);
|
||||
};
|
||||
|
22
README.md
@@ -24,7 +24,7 @@ the latest master builds [here](https://grafana.com/grafana/download)
|
||||
|
||||
### Dependencies
|
||||
|
||||
- Go 1.8.1
|
||||
- Go 1.9
|
||||
- NodeJS LTS
|
||||
|
||||
### Building the backend
|
||||
@@ -37,8 +37,7 @@ go run build.go build
|
||||
|
||||
### Building frontend assets
|
||||
|
||||
To build less to css for the frontend you will need a recent version of **node (v6+)**,
|
||||
npm (v2.5.0) and grunt (v0.4.5). Run the following:
|
||||
For this you need Node.js (v6+).
|
||||
|
||||
```bash
|
||||
npm install -g yarn
|
||||
@@ -46,13 +45,24 @@ yarn install --pure-lockfile
|
||||
npm run build
|
||||
```
|
||||
|
||||
To build the frontend assets only on changes:
|
||||
To rebuild frontend assets (TypeScript, Sass, etc.) as you change them, start the watcher:
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
npm run watch
|
||||
```
|
||||
|
||||
Run tests
|
||||
```bash
|
||||
npm run test
|
||||
```
|
||||
|
||||
Run tests in watch mode
|
||||
```bash
|
||||
npm run watch-test
|
||||
```
|
||||
|
||||
### Recompile backend on source change
|
||||
|
||||
To rebuild on source change.
|
||||
```bash
|
||||
go get github.com/Unknwon/bra
|
||||
@@ -69,6 +79,8 @@ You only need to add the options you want to override. Config files are applied
|
||||
1. grafana.ini
|
||||
1. custom.ini
|
||||
|
||||
In your custom.ini, uncomment (remove the leading `;` sign) and set `app_mode = development`.
|
||||
|
||||
## Contribute
|
||||
If you have an idea for an improvement or have found a bug, do not hesitate to open an issue.
|
||||
And if you have time, clone this repo and submit a pull request to help me make Grafana
|
||||
|
@@ -7,7 +7,7 @@ clone_folder: c:\gopath\src\github.com\grafana\grafana
|
||||
environment:
|
||||
nodejs_version: "6"
|
||||
GOPATH: c:\gopath
|
||||
GOVERSION: 1.8
|
||||
GOVERSION: 1.9
|
||||
|
||||
install:
|
||||
- rmdir c:\go /s /q
|
||||
|
@@ -9,7 +9,7 @@ machine:
|
||||
GOPATH: "/home/ubuntu/.go_workspace"
|
||||
ORG_PATH: "github.com/grafana"
|
||||
REPO_PATH: "${ORG_PATH}/grafana"
|
||||
GODIST: "go1.8.linux-amd64.tar.gz"
|
||||
GODIST: "go1.9.linux-amd64.tar.gz"
|
||||
post:
|
||||
- mkdir -p ~/download
|
||||
- mkdir -p ~/docker
|
||||
|
27
docs/sources/features/panels/alertlist.md
Normal file
@@ -0,0 +1,27 @@
|
||||
+++
|
||||
title = "Alert List"
|
||||
keywords = ["grafana", "alert list", "documentation", "panel", "alertlist"]
|
||||
type = "docs"
|
||||
aliases = ["/reference/alertlist/"]
|
||||
[menu.docs]
|
||||
name = "Alert list"
|
||||
parent = "panels"
|
||||
weight = 4
|
||||
+++
|
||||
|
||||
|
||||
# Alert List Panel
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/alert-list-panel.png" max-width="850px" >}}
|
||||
|
||||
The alert list panel allows you to display alerts from your dashboards. The list can be configured to show the current state or recent state changes. You can read more about alerts [here](http://docs.grafana.org/alerting/rules).
|
||||
|
||||
## Alert List Options
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/alert-list-options.png" max-width="600px" class="docs-image--no-shadow docs-image--right">}}
|
||||
|
||||
1. **Show**: Lets you choose between current state or recent state changes.
|
||||
2. **Max Items**: Max items sets the maximum number of items shown in the list.
|
||||
3. **Sort Order**: Lets you sort your list alphabetically (asc/desc) or by importance.
|
||||
4. **Alerts From This Dashboard**: Shows alerts only from the dashboard the alert list is in.
|
||||
5. **State Filter**: Here you can filter your list by one or more parameters.
|
@@ -12,7 +12,7 @@ weight = 4
|
||||
|
||||
# Dashboard List Panel
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/dashboard-list-panels.png" max-width= "800px" >}}
|
||||
{{< docs-imagebox img="/img/docs/v45/dashboard-list-panels.png" max-width="850px">}}
|
||||
|
||||
The dashboard list panel allows you to display dynamic links to other dashboards. The list can be configured to use starred dashboards, recently viewed dashboards, a search query and/or dashboard tags.
|
||||
|
||||
@@ -20,15 +20,17 @@ The dashboard list panel allows you to display dynamic links to other dashboards
|
||||
|
||||
## Dashboard List Options
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/dashboard-list-options.png" max-width="600px" class="docs-image--no-shadow">}}
|
||||
{{< docs-imagebox img="/img/docs/v45/dashboard-list-options.png" class="docs-image--no-shadow docs-image--right">}}
|
||||
|
||||
1. `Starred`: The starred dashboard selection displays starred dashboards in alphabetical order.
|
||||
2. `Recently Viewed`: The recently viewed dashboard selection displays recently viewed dashboards in alphabetical order.
|
||||
3. `Search`: The search dashboard selection displays dashboards by search query or tag(s).
|
||||
4. `Show Headings`: When show headings is ticked the choosen list selection(Starred, Recently Viewed, Search) is shown as a heading.
|
||||
5. `Max Items`: Max items set the maximum of items in a list.
|
||||
6. `Query`: Here is where you enter your query you want to search by. Queries are case-insensitive, and partial values are accepted.
|
||||
7. `Tags`: Here is where you enter your tag(s) you want to search by. Note that existing tags will not appear as you type, and *are* case sensitive. To see a list of existing tags, you can always return to the dashboard, open the Dashboard Picker at the top and click `tags` link in the search bar.
|
||||
1. **Starred**: The starred dashboard selection displays starred dashboards in alphabetical order.
|
||||
2. **Recently Viewed**: The recently viewed dashboard selection displays recently viewed dashboards in alphabetical order.
|
||||
3. **Search**: The search dashboard selection displays dashboards by search query or tag(s).
|
||||
4. **Show Headings**: When show headings is ticked, the chosen list selection (Starred, Recently Viewed, Search) is shown as a heading.
|
||||
5. **Max Items**: Max items sets the maximum number of items shown in the list.
|
||||
6. **Query**: Here is where you enter the query you want to search by. Queries are case-insensitive, and partial values are accepted.
|
||||
7. **Tags**: Here is where you enter the tag(s) you want to search by. Note that existing tags will not appear as you type, and *are* case sensitive. To see a list of existing tags, you can always return to the dashboard, open the Dashboard Picker at the top and click the `tags` link in the search bar.
|
||||
|
||||
<div class="clearfix"></div>
|
||||
|
||||
> When multiple tags and strings appear, the dashboard list will display those matching ALL conditions.
|
||||
|
||||
|
@@ -11,11 +11,12 @@ weight = 1
|
||||
|
||||
# Graph Panel
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/graph_overview.png" class="docs-image--no-shadow" max-width="850px" >}}
|
||||
|
||||
The main panel in Grafana is simply named Graph. It provides a very rich set of graphing options.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/graph_overview.png" class="docs-image--no-shadow" max-width= "900px" >}}
|
||||
|
||||
1. Clicking the title for a panel exposes a menu. The `edit` option opens additional configuration options for the panel.
|
||||
1. Clicking the title for a panel exposes a menu. The `edit` option opens additional configuration
|
||||
options for the panel.
|
||||
2. Click to open color & axis selection.
|
||||
3. Click to only show this series. Shift/Ctrl + click to hide series.
|
||||
|
||||
@@ -27,9 +28,9 @@ The general tab allows customization of a panel's appearance and menu options.
|
||||
|
||||
### General Options
|
||||
|
||||
- ``Title`` - The panel title on the dashboard
|
||||
- ``Span`` - The panel width in columns
|
||||
- ``Height`` - The panel contents height in pixels
|
||||
- **Title** - The panel title on the dashboard
|
||||
- **Span** - The panel width in columns
|
||||
- **Height** - The panel contents height in pixels
|
||||
|
||||
### Drilldown / detail link
|
||||
|
||||
@@ -72,7 +73,7 @@ There are three options:
|
||||
|
||||
- The `Series` option means that the data is grouped by series and not by time. The y-axis still represents the value.
|
||||
|
||||
<img src="/img/docs/v4/x_axis_mode_series.png" class="no-shadow">
|
||||
{{< docs-imagebox img="/img/docs/v45/graph-x-axis-mode-series.png" max-width="700px">}}
|
||||
|
||||
- The `Histogram` option converts the graph into a histogram. A Histogram is a kind of bar chart that groups numbers into ranges, often called buckets or bins. Taller bars show that more data falls in that range. Histograms and buckets are described in more detail [here](http://docs.grafana.org/features/panels/heatmap/#histograms-and-buckets).
|
||||
|
||||
@@ -157,6 +158,12 @@ There is an option under Series overrides to draw lines as dashes. Set Dashes to
|
||||
|
||||
## Time Range
|
||||
|
||||
<<<<<<< HEAD
|
||||
The time range tab allows you to override the dashboard time range and specify a panel-specific time, either through a relative from-now time option or through a timeshift.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/graph-time-range.png" max-width= "900px" >}}
|
||||
||||||| merged common ancestors
|
||||

|
||||
=======
|
||||
<img src="/img/docs/v45/graph-time-range.png" class="no-shadow">
|
||||
>>>>>>> 0a65100eaf64cd57b38110001bf614630821610c
|
||||
|
@@ -12,15 +12,15 @@ weight = 2
|
||||
|
||||
# Singlestat Panel
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-panel.png" max-width="900px" >}}
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-panel.png" class="docs-image--no-shadow" max-width="900px" >}}
|
||||
|
||||
The Singlestat Panel allows you to show the one main summary stat of a SINGLE series. It reduces the series into a single number (by looking at the max, min, average, or sum of values in the series). Singlestat also provides thresholds to color the stat or the Panel background. It can also translate the single number into a text value, and show a sparkline summary of the series.
|
||||
|
||||
### Singlestat Panel Configuration
|
||||
|
||||
The singlestat panel has a normal query editor to allow you define your exact metric queries like many other Panels. Through the Options tab, you can access the Singlestat-specific functionality.
|
||||
The singlestat panel has a normal query editor to allow you to define your exact metric queries like many other Panels. In the Options tab, you can access the Singlestat-specific functionality.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-value-options.png" class="docs-image--no-shadow" max-width= "900px" >}}
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-value-options.png" class="docs-image--no-shadow" max-width="900px" >}}
|
||||
|
||||
1. `Stats`: The Stats field lets you set the function (min, max, average, current, total, first, delta, range) used to reduce your entire query into a single value. This single summary value is what is displayed.
|
||||
* `min` - The smallest value in the series
|
||||
@@ -64,6 +64,7 @@ Sparklines are a great way of seeing the historical data related to the summary
|
||||
|
||||
> ***Pro-tip:*** Reduce the opacity on fill colors for nice looking panels.
|
||||
|
||||
<<<<<<< HEAD
|
||||
### Gauge
|
||||
|
||||
Gauges give a clear picture of how high a value is in its context. It's a great way to see if a value is close to the thresholds. The gauge uses the colors set in the color options.
|
||||
@@ -77,13 +78,39 @@ Gauges gives a clear picture of how high a value is in it's context. It's a grea
|
||||
|
||||
<div class="clearfix"></div>
|
||||
|
||||
||||||| merged common ancestors
|
||||
=======
|
||||
### Gauge
|
||||
|
||||
Gauges give a clear picture of how high a value is in its context. It's a great way to see if a value is close to the thresholds. The gauge uses the colors set in the color options.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-gauge-options.png" max-width="500px" class="docs-image--right docs-image--no-shadow">}}
|
||||
|
||||
1. `Show`: The show checkbox will toggle whether the gauge is shown in the panel. When unselected, only the Singlestat value will appear.
|
||||
2. `Min/Max`: This sets the start and end point for the gauge.
|
||||
3. `Threshold Labels`: Check if you want to show the threshold labels. Thresholds are set in the color options.
|
||||
4. `Threshold Markers`: Check if you want to have a second meter showing the thresholds.
|
||||
|
||||
>>>>>>> 0a65100eaf64cd57b38110001bf614630821610c
|
||||
### Value to text mapping
|
||||
|
||||
<<<<<<< HEAD
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-value-mapping.png" class="docs-image--right docs-image--no-shadow">}}
|
||||
|
||||
Value to text mapping allows you to translate the value of the summary stat into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the number of the main Singlestat value into a context-specific human-readable word or message.
|
||||
||||||| merged common ancestors
|
||||
Value to text mapping allows you to translate the value of the summary stat into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the number of the main Singlestat value into a context-specific human-readable word or message.
|
||||
=======
|
||||
{{< docs-imagebox img="/img/docs/v45/singlestat-value-mapping.png" class="docs-image--right docs-image--no-shadow">}}
|
||||
>>>>>>> 0a65100eaf64cd57b38110001bf614630821610c
|
||||
|
||||
<<<<<<< HEAD
|
||||
<div class="clearfix"></div>
|
||||
||||||| merged common ancestors
|
||||
<img class="no-shadow" src="/img/docs/v1/Singlestat-ValueMapping.png">
|
||||
=======
|
||||
Value to text mapping allows you to translate the value of the summary stat into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the number of the main Singlestat value into a context-specific human-readable word or message.
|
||||
>>>>>>> 0a65100eaf64cd57b38110001bf614630821610c
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
|
23
docs/sources/features/panels/text.md
Normal file
@@ -0,0 +1,23 @@
|
||||
+++
|
||||
title = "Text"
|
||||
keywords = ["grafana", "text", "documentation", "panel"]
|
||||
type = "docs"
|
||||
aliases = ["/reference/alertlist/"]
|
||||
[menu.docs]
|
||||
name = "Text"
|
||||
parent = "panels"
|
||||
weight = 4
|
||||
+++
|
||||
|
||||
|
||||
# Text Panel
|
||||
|
||||
The text panel lets you create information and description panels for your dashboards. There are three modes you can write in: markdown, HTML or plain text.
|
||||
|
||||
## Text Options
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v45/text-options.png" max-width="600px" class="docs-image--no-shadow">}}
|
||||
|
||||
1. `Mode`: Here you can choose between markdown, HTML or text.
|
||||
2. `Content`: Here you write your content.
|
||||
|
@@ -13,7 +13,7 @@ dev environment. Grafana ships with its own required backend server; also comple
|
||||
|
||||
## Dependencies
|
||||
|
||||
- [Go 1.8.1](https://golang.org/dl/)
|
||||
- [Go 1.9](https://golang.org/dl/)
|
||||
- [NodeJS LTS](https://nodejs.org/download/)
|
||||
- [Git](https://git-scm.com/downloads)
|
||||
|
||||
@@ -27,7 +27,7 @@ go get github.com/grafana/grafana
|
||||
|
||||
On Windows use setx instead of export and then restart your command prompt:
|
||||
```
|
||||
setx GOPATH %cd%
|
||||
setx GOPATH %cd%
|
||||
```
|
||||
|
||||
You may see an error such as: `package github.com/grafana/grafana: no buildable Go source files`. This is just a warning, and you can proceed with the directions.
|
||||
@@ -43,35 +43,25 @@ go run build.go build # (or 'go build ./pkg/cmd/grafana-server')
|
||||
The Grafana backend includes Sqlite3 which requires GCC to compile. So in order to compile Grafana on Windows you need
to install GCC. We recommend [TDM-GCC](http://tdm-gcc.tdragon.net/download).
|
||||
|
||||
[node-gyp](https://github.com/nodejs/node-gyp#installation) is the Node.js native addon build tool and it requires extra dependencies to be installed on Windows. In a command prompt which is run as administrator, run:
|
||||
[node-gyp](https://github.com/nodejs/node-gyp#installation) is the Node.js native addon build tool and it requires extra dependencies to be installed on Windows. In a command prompt which is run as administrator, run:
|
||||
|
||||
```
|
||||
npm --add-python-to-path='true' --debug install --global windows-build-tools
|
||||
```
|
||||
|
||||
## Build the Front-end Assets
|
||||
## Build the Frontend Assets
|
||||
|
||||
To build less to css for the frontend you will need a recent version of node (v0.12.0),
|
||||
npm (v2.5.0) and grunt (v0.4.5). Run the following:
|
||||
For this you need Node.js (v6+).
|
||||
|
||||
```
|
||||
npm install -g yarn
|
||||
yarn install --pure-lockfile
|
||||
npm install -g grunt-cli
|
||||
grunt
|
||||
npm run build
|
||||
```
|
||||
|
||||
## Recompile backend on source change
|
||||
To rebuild on source change
|
||||
```
|
||||
go get github.com/Unknwon/bra
|
||||
bra run
|
||||
```
|
||||
|
||||
If the `bra run` command does not work, make sure that the bin directory of your Go workspace, `$GOPATH/bin` (`%GOPATH%\bin` on Windows), is in your PATH.
|
||||
|
||||
## Running Grafana Locally
|
||||
You can run a local instance of Grafana by running:
|
||||
|
||||
```
|
||||
./bin/grafana-server
|
||||
```
|
||||
@@ -81,16 +71,21 @@ If you built it with `go build .`, run `./grafana`
|
||||
|
||||
Open grafana in your browser (default [http://localhost:3000](http://localhost:3000)) and login with admin user (default user/pass = admin/admin).
|
||||
|
||||
## Developing for Grafana
|
||||
To add features, customize your config, etc, you'll need to rebuild on source change.
|
||||
## Developing Grafana
|
||||
|
||||
To add features, customize your config, etc, you'll need to rebuild the backend when you change the source code. We use a tool named `bra` that
|
||||
does this.
|
||||
|
||||
```
|
||||
go get github.com/Unknwon/bra
|
||||
|
||||
bra run
|
||||
```
|
||||
You'll also need to run `grunt watch` to watch for changes to the front-end.
|
||||
|
||||
You'll also need to run `npm run watch` to watch for changes to the front-end (TypeScript, HTML, Sass).
|
||||
|
||||
## Creating optimized release packages
|
||||
|
||||
This step builds linux packages and requires that fpm is installed. Install fpm via `gem install fpm`.
|
||||
|
||||
```
|
||||
@@ -105,6 +100,10 @@ You only need to add the options you want to override. Config files are applied
|
||||
1. grafana.ini
|
||||
2. custom.ini
|
||||
|
||||
### Set app_mode to development
|
||||
|
||||
In your custom.ini, uncomment (remove the leading `;` sign) and set `app_mode = development`.
|
||||
|
||||
Learn more about Grafana config options in the [Configuration section](/installation/configuration/)
|
||||
|
||||
## Create a pull request
|
||||
@@ -119,7 +118,7 @@ Please contribute to the Grafana project and submit a pull request! Build new fe
|
||||
|
||||
**Problem**: When running `bra run` for the first time you get an error that it is not a recognized command.
|
||||
|
||||
**Solution**: Add the bin directory in your Go workspace directory to the path. Per default this is `$HOME/go/bin` on Linux and `%USERPROFILE%\go\bin` on Windows or `$GOPATH/bin` (`%GOPATH%\bin` on Windows) if you have set your own workspace directory.
|
||||
**Solution**: Add the bin directory in your Go workspace directory to the path. Per default this is `$HOME/go/bin` on Linux and `%USERPROFILE%\go\bin` on Windows or `$GOPATH/bin` (`%GOPATH%\bin` on Windows) if you have set your own workspace directory.
|
||||
<br><br>
|
||||
|
||||
**Problem**: When executing a `go get` command on Windows, you get an error about the git repository not existing.
|
||||
|
@@ -1,23 +1,30 @@
|
||||
var webpack = require('webpack');
|
||||
var path = require('path');
|
||||
var webpackTestConfig = require('./scripts/webpack/webpack.test.js');
|
||||
|
||||
module.exports = function(config) {
|
||||
|
||||
'use strict';
|
||||
|
||||
config.set({
|
||||
basePath: __dirname + '/public_gen',
|
||||
|
||||
frameworks: ['mocha', 'expect', 'sinon'],
|
||||
|
||||
// list of files / patterns to load in the browser
|
||||
files: [
|
||||
'vendor/npm/es6-shim/es6-shim.js',
|
||||
'vendor/npm/systemjs/dist/system.src.js',
|
||||
'test/test-main.js',
|
||||
|
||||
{pattern: '**/*.js', included: false},
|
||||
{ pattern: 'public/test/index.ts', watched: false }
|
||||
],
|
||||
|
||||
preprocessors: {
|
||||
'public/test/index.ts': ['webpack', 'sourcemap'],
|
||||
},
|
||||
|
||||
webpack: webpackTestConfig,
|
||||
webpackServer: {
|
||||
noInfo: true, // please don't spam the console when running in karma!
|
||||
},
|
||||
|
||||
// list of files to exclude
|
||||
exclude: [],
|
||||
|
||||
reporters: ['dots'],
|
||||
port: 9876,
|
||||
colors: true,
|
||||
@@ -26,9 +33,8 @@ module.exports = function(config) {
|
||||
browsers: ['PhantomJS'],
|
||||
captureTimeout: 20000,
|
||||
singleRun: true,
|
||||
autoWatchBatchDelay: 1000,
|
||||
browserNoActivityTimeout: 60000,
|
||||
|
||||
// autoWatchBatchDelay: 1000,
|
||||
// browserNoActivityTimeout: 60000,
|
||||
});
|
||||
|
||||
};
|
||||
|
103
package.json
@@ -10,14 +10,27 @@
|
||||
"url": "http://github.com/grafana/grafana.git"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/d3": "^4.10.1",
|
||||
"@types/enzyme": "^2.8.9",
|
||||
"@types/node": "^8.0.31",
|
||||
"@types/react": "^16.0.5",
|
||||
"@types/react-dom": "^15.5.4",
|
||||
"autoprefixer": "^6.4.0",
|
||||
"babel-core": "^6.26.0",
|
||||
"babel-loader": "^7.1.2",
|
||||
"babel-preset-es2015": "^6.24.1",
|
||||
"css-loader": "^0.28.7",
|
||||
"enzyme": "^3.0.0",
|
||||
"enzyme-adapter-react-16": "^1.0.0",
|
||||
"es6-promise": "^3.0.2",
|
||||
"es6-shim": "^0.35.1",
|
||||
"es6-shim": "^0.35.3",
|
||||
"expect.js": "~0.2.0",
|
||||
"expose-loader": "^0.7.3",
|
||||
"extract-text-webpack-plugin": "^3.0.0",
|
||||
"file-loader": "^0.11.2",
|
||||
"gaze": "^1.1.2",
|
||||
"glob": "~7.0.0",
|
||||
"grunt": "^0.4.5",
|
||||
"grunt": "1.0.1",
|
||||
"grunt-angular-templates": "^1.1.0",
|
||||
"grunt-cli": "~1.2.0",
|
||||
"grunt-contrib-clean": "~1.0.0",
|
||||
@@ -25,75 +38,89 @@
|
||||
"grunt-contrib-concat": "^1.0.1",
|
||||
"grunt-contrib-copy": "~1.0.0",
|
||||
"grunt-contrib-cssmin": "~1.0.2",
|
||||
"grunt-contrib-htmlmin": "~2.0.0",
|
||||
"grunt-contrib-jshint": "~1.1.0",
|
||||
"grunt-contrib-uglify": "~2.0.0",
|
||||
"grunt-contrib-watch": "^1.0.0",
|
||||
"grunt-exec": "^1.0.1",
|
||||
"grunt-filerev": "^2.3.1",
|
||||
"grunt-jscs": "3.0.1",
|
||||
"grunt-karma": "~2.0.0",
|
||||
"grunt-ng-annotate": "^3.0.0",
|
||||
"grunt-notify": "^0.4.5",
|
||||
"grunt-postcss": "^0.8.0",
|
||||
"grunt-sass": "^2.0.0",
|
||||
"grunt-string-replace": "~1.3.1",
|
||||
"grunt-systemjs-builder": "^0.2.7",
|
||||
"grunt-sass-lint": "^0.2.2",
|
||||
"grunt-usemin": "3.1.1",
|
||||
"grunt-webpack": "^3.0.2",
|
||||
"html-loader": "^0.5.1",
|
||||
"html-webpack-plugin": "^2.30.1",
|
||||
"jshint-stylish": "~2.2.1",
|
||||
"karma": "1.3.0",
|
||||
"karma-chrome-launcher": "~2.0.0",
|
||||
"json-loader": "^0.5.7",
|
||||
"karma": "1.7.0",
|
||||
"karma-chrome-launcher": "~2.2.0",
|
||||
"karma-coverage": "1.1.1",
|
||||
"karma-expect": "~1.1.3",
|
||||
"karma-mocha": "~1.3.0",
|
||||
"karma-phantomjs-launcher": "1.0.2",
|
||||
"karma-phantomjs-launcher": "1.0.4",
|
||||
"karma-sinon": "^1.0.5",
|
||||
"karma-sourcemap-loader": "^0.3.7",
|
||||
"karma-webpack": "^2.0.4",
|
||||
"load-grunt-tasks": "3.5.2",
|
||||
"mocha": "3.2.0",
|
||||
"phantomjs-prebuilt": "^2.1.14",
|
||||
"reflect-metadata": "0.1.8",
|
||||
"mocha": "3.5.0",
|
||||
"ng-annotate-loader": "^0.6.1",
|
||||
"ng-annotate-webpack-plugin": "^0.2.1-pre",
|
||||
"ngtemplate-loader": "^2.0.1",
|
||||
"phantomjs-prebuilt": "^2.1.15",
|
||||
"postcss-browser-reporter": "^0.5.0",
|
||||
"postcss-loader": "^2.0.6",
|
||||
"postcss-reporter": "^5.0.0",
|
||||
"react-test-renderer": "^16.0.0",
|
||||
"rxjs": "^5.4.3",
|
||||
"sass-lint": "^1.10.2",
|
||||
"systemjs": "0.19.41",
|
||||
"zone.js": "^0.7.2"
|
||||
"sass-loader": "^6.0.6",
|
||||
"sinon": "1.17.6",
|
||||
"systemjs": "0.20.19",
|
||||
"systemjs-plugin-css": "^0.1.36",
|
||||
"ts-loader": "^2.3.7",
|
||||
"tslint": "^5.7.0",
|
||||
"tslint-loader": "^3.5.3",
|
||||
"typescript": "^2.5.2",
|
||||
"webpack": "^3.6.0",
|
||||
"webpack-bundle-analyzer": "^2.9.0",
|
||||
"webpack-cleanup-plugin": "^0.5.1",
|
||||
"webpack-merge": "^4.1.0",
|
||||
"zone.js": "^0.7.2",
|
||||
"awesome-typescript-loader": "^3.2.3",
|
||||
"angular-mocks": "^1.6.6",
|
||||
"karma-sinon": "^1.0.5",
|
||||
"npm": "^5.4.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "./node_modules/grunt-cli/bin/grunt",
|
||||
"dev": "./node_modules/.bin/webpack --progress --colors --config scripts/webpack/webpack.dev.js",
|
||||
"watch": "./node_modules/.bin/webpack --progress --colors --watch --config scripts/webpack/webpack.dev.js",
|
||||
"build": "./node_modules/grunt-cli/bin/grunt build",
|
||||
"test": "./node_modules/grunt-cli/bin/grunt test",
|
||||
"dev": "./node_modules/grunt-cli/bin/grunt && ./node_modules/grunt-cli/bin/grunt watch"
|
||||
"watch-test": "./node_modules/grunt-cli/bin/grunt karma:dev"
|
||||
},
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@types/enzyme": "^2.8.8",
|
||||
"ace-builds": "^1.2.8",
|
||||
"babel-polyfill": "^6.26.0",
|
||||
"jquery": "^3.2.1",
|
||||
"angular": "^1.6.6",
|
||||
"angular-bindonce": "^0.3.1",
|
||||
"angular-mocks": "^1.6.6",
|
||||
"angular-native-dragdrop": "^1.2.2",
|
||||
"angular-route": "^1.6.6",
|
||||
"angular-sanitize": "^1.6.6",
|
||||
"brace": "^0.10.0",
|
||||
"clipboard": "^1.7.1",
|
||||
"eventemitter3": "^2.0.2",
|
||||
"gaze": "^1.1.2",
|
||||
"gridstack": "https://github.com/grafana/gridstack.js#grafana",
|
||||
"gemini-scrollbar": "https://github.com/grafana/gemini-scrollbar#grafana",
|
||||
"grunt-jscs": "3.0.1",
|
||||
"grunt-sass-lint": "^0.2.2",
|
||||
"grunt-sync": "^0.6.2",
|
||||
"jquery-ui-dist": "^1.12.1",
|
||||
"jquery": "^3.2.1",
|
||||
"karma-sinon": "^1.0.5",
|
||||
"file-saver": "^1.3.3",
|
||||
"lodash": "^4.17.4",
|
||||
"moment": "^2.18.1",
|
||||
"mousetrap": "^1.6.0",
|
||||
"ngreact": "^0.4.1",
|
||||
"react": "^15.6.1",
|
||||
"react-dom": "^15.6.1",
|
||||
"react-test-renderer": "^15.6.1",
|
||||
"react": "^16.0.0",
|
||||
"react-dom": "^16.0.0",
|
||||
"remarkable": "^1.7.1",
|
||||
"sinon": "1.17.6",
|
||||
"systemjs-builder": "^0.15.34",
|
||||
"tether": "^1.4.0",
|
||||
"tether-drop": "https://github.com/torkelo/drop",
|
||||
"tslint": "^5.7.0",
|
||||
"typescript": "^2.5.2",
|
||||
"virtual-scroll": "^1.1.1"
|
||||
"tether-drop": "https://github.com/torkelo/drop"
|
||||
}
|
||||
}
|
||||
|
@@ -25,6 +25,7 @@ var pluginProxyTransport = &http.Transport{
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 30 * time.Second,
|
||||
KeepAlive: 30 * time.Second,
|
||||
DualStack: true,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 10 * time.Second,
|
||||
}
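The same dialer tweak appears in several transports touched by this commit (plugin proxy, grafana.com proxy, metrics, webdav, webhook, datasource). As a minimal, self-contained sketch of what the added `DualStack: true` buys (illustrative names, not Grafana code): with DualStack enabled, the dialer performs RFC 6555 "Happy Eyeballs" dialing, racing IPv4 and IPv6 instead of stalling on one unreachable address family.

```go
package main

import (
	"net"
	"net/http"
	"time"
)

// Sketch of the pattern used throughout this commit: a shared transport
// whose dialer has DualStack enabled, so connections fall back between
// IPv4 and IPv6 (RFC 6555) instead of hanging when one family is unreachable.
var exampleTransport = &http.Transport{
	Proxy: http.ProxyFromEnvironment,
	Dial: (&net.Dialer{
		Timeout:   30 * time.Second,
		KeepAlive: 30 * time.Second,
		DualStack: true,
	}).Dial,
	TLSHandshakeTimeout: 10 * time.Second,
}

func main() {
	client := &http.Client{Transport: exampleTransport, Timeout: time.Minute}
	_ = client // use client.Get(...) etc. as usual
}
```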
|
||||
|
@@ -1,516 +0,0 @@
|
||||
package cloudwatch
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/awsutil"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials/endpointcreds"
|
||||
"github.com/aws/aws-sdk-go/aws/ec2metadata"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatch"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/aws/aws-sdk-go/service/sts"
|
||||
"github.com/grafana/grafana/pkg/metrics"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
)
|
||||
|
||||
type actionHandler func(*cwRequest, *middleware.Context)
|
||||
|
||||
var actionHandlers map[string]actionHandler
|
||||
|
||||
type cwRequest struct {
|
||||
Region string `json:"region"`
|
||||
Action string `json:"action"`
|
||||
Body []byte `json:"-"`
|
||||
DataSource *m.DataSource
|
||||
}
|
||||
|
||||
type datasourceInfo struct {
|
||||
Profile string
|
||||
Region string
|
||||
AuthType string
|
||||
AssumeRoleArn string
|
||||
Namespace string
|
||||
|
||||
AccessKey string
|
||||
SecretKey string
|
||||
}
|
||||
|
||||
func (req *cwRequest) GetDatasourceInfo() *datasourceInfo {
|
||||
authType := req.DataSource.JsonData.Get("authType").MustString()
|
||||
assumeRoleArn := req.DataSource.JsonData.Get("assumeRoleArn").MustString()
|
||||
accessKey := ""
|
||||
secretKey := ""
|
||||
|
||||
for key, value := range req.DataSource.SecureJsonData.Decrypt() {
|
||||
if key == "accessKey" {
|
||||
accessKey = value
|
||||
}
|
||||
if key == "secretKey" {
|
||||
secretKey = value
|
||||
}
|
||||
}
|
||||
|
||||
return &datasourceInfo{
|
||||
AuthType: authType,
|
||||
AssumeRoleArn: assumeRoleArn,
|
||||
Region: req.Region,
|
||||
Profile: req.DataSource.Database,
|
||||
AccessKey: accessKey,
|
||||
SecretKey: secretKey,
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
actionHandlers = map[string]actionHandler{
|
||||
"GetMetricStatistics": handleGetMetricStatistics,
|
||||
"ListMetrics": handleListMetrics,
|
||||
"DescribeAlarms": handleDescribeAlarms,
|
||||
"DescribeAlarmsForMetric": handleDescribeAlarmsForMetric,
|
||||
"DescribeAlarmHistory": handleDescribeAlarmHistory,
|
||||
"DescribeInstances": handleDescribeInstances,
|
||||
"__GetRegions": handleGetRegions,
|
||||
"__GetNamespaces": handleGetNamespaces,
|
||||
"__GetMetrics": handleGetMetrics,
|
||||
"__GetDimensions": handleGetDimensions,
|
||||
}
|
||||
}
|
||||
|
||||
type cache struct {
|
||||
credential *credentials.Credentials
|
||||
expiration *time.Time
|
||||
}
|
||||
|
||||
var awsCredentialCache map[string]cache = make(map[string]cache)
|
||||
var credentialCacheLock sync.RWMutex
|
||||
|
||||
func getCredentials(dsInfo *datasourceInfo) (*credentials.Credentials, error) {
|
||||
cacheKey := dsInfo.Profile + ":" + dsInfo.AssumeRoleArn
|
||||
credentialCacheLock.RLock()
|
||||
if _, ok := awsCredentialCache[cacheKey]; ok {
|
||||
if awsCredentialCache[cacheKey].expiration != nil &&
|
||||
(*awsCredentialCache[cacheKey].expiration).After(time.Now().UTC()) {
|
||||
result := awsCredentialCache[cacheKey].credential
|
||||
credentialCacheLock.RUnlock()
|
||||
return result, nil
|
||||
}
|
||||
}
|
||||
credentialCacheLock.RUnlock()
|
||||
|
||||
accessKeyId := ""
|
||||
secretAccessKey := ""
|
||||
sessionToken := ""
|
||||
var expiration *time.Time
|
||||
expiration = nil
|
||||
if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 {
|
||||
params := &sts.AssumeRoleInput{
|
||||
RoleArn: aws.String(dsInfo.AssumeRoleArn),
|
||||
RoleSessionName: aws.String("GrafanaSession"),
|
||||
DurationSeconds: aws.Int64(900),
|
||||
}
|
||||
|
||||
stsSess, err := session.NewSession()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stsCreds := credentials.NewChainCredentials(
|
||||
[]credentials.Provider{
|
||||
&credentials.EnvProvider{},
|
||||
&credentials.SharedCredentialsProvider{Filename: "", Profile: dsInfo.Profile},
|
||||
remoteCredProvider(stsSess),
|
||||
})
|
||||
stsConfig := &aws.Config{
|
||||
Region: aws.String(dsInfo.Region),
|
||||
Credentials: stsCreds,
|
||||
}
|
||||
|
||||
sess, err := session.NewSession(stsConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
svc := sts.New(sess, stsConfig)
|
||||
resp, err := svc.AssumeRole(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.Credentials != nil {
|
||||
accessKeyId = *resp.Credentials.AccessKeyId
|
||||
secretAccessKey = *resp.Credentials.SecretAccessKey
|
||||
sessionToken = *resp.Credentials.SessionToken
|
||||
expiration = resp.Credentials.Expiration
|
||||
}
|
||||
}
|
||||
|
||||
sess, err := session.NewSession()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
creds := credentials.NewChainCredentials(
|
||||
[]credentials.Provider{
|
||||
&credentials.StaticProvider{Value: credentials.Value{
|
||||
AccessKeyID: accessKeyId,
|
||||
SecretAccessKey: secretAccessKey,
|
||||
SessionToken: sessionToken,
|
||||
}},
|
||||
&credentials.EnvProvider{},
|
||||
&credentials.StaticProvider{Value: credentials.Value{
|
||||
AccessKeyID: dsInfo.AccessKey,
|
||||
SecretAccessKey: dsInfo.SecretKey,
|
||||
}},
|
||||
&credentials.SharedCredentialsProvider{Filename: "", Profile: dsInfo.Profile},
|
||||
remoteCredProvider(sess),
|
||||
})
|
||||
|
||||
credentialCacheLock.Lock()
|
||||
awsCredentialCache[cacheKey] = cache{
|
||||
credential: creds,
|
||||
expiration: expiration,
|
||||
}
|
||||
credentialCacheLock.Unlock()
|
||||
|
||||
return creds, nil
|
||||
}
|
||||
|
||||
func remoteCredProvider(sess *session.Session) credentials.Provider {
|
||||
ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
|
||||
|
||||
if len(ecsCredURI) > 0 {
|
||||
return ecsCredProvider(sess, ecsCredURI)
|
||||
}
|
||||
return ec2RoleProvider(sess)
|
||||
}
|
||||
|
||||
func ecsCredProvider(sess *session.Session, uri string) credentials.Provider {
|
||||
const host = `169.254.170.2`
|
||||
|
||||
c := ec2metadata.New(sess)
|
||||
return endpointcreds.NewProviderClient(
|
||||
c.Client.Config,
|
||||
c.Client.Handlers,
|
||||
fmt.Sprintf("http://%s%s", host, uri),
|
||||
func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute })
|
||||
}
|
||||
|
||||
func ec2RoleProvider(sess *session.Session) credentials.Provider {
|
||||
return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}
|
||||
}
|
||||
|
||||
func getAwsConfig(req *cwRequest) (*aws.Config, error) {
|
||||
creds, err := getCredentials(req.GetDatasourceInfo())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cfg := &aws.Config{
|
||||
Region: aws.String(req.Region),
|
||||
Credentials: creds,
|
||||
}
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func handleGetMetricStatistics(req *cwRequest, c *middleware.Context) {
|
||||
cfg, err := getAwsConfig(req)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
svc := cloudwatch.New(sess, cfg)
|
||||
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
Namespace string `json:"namespace"`
|
||||
MetricName string `json:"metricName"`
|
||||
Dimensions []*cloudwatch.Dimension `json:"dimensions"`
|
||||
Statistics []*string `json:"statistics"`
|
||||
ExtendedStatistics []*string `json:"extendedStatistics"`
|
||||
StartTime int64 `json:"startTime"`
|
||||
EndTime int64 `json:"endTime"`
|
||||
Period int64 `json:"period"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
|
||||
params := &cloudwatch.GetMetricStatisticsInput{
|
||||
Namespace: aws.String(reqParam.Parameters.Namespace),
|
||||
MetricName: aws.String(reqParam.Parameters.MetricName),
|
||||
Dimensions: reqParam.Parameters.Dimensions,
|
||||
StartTime: aws.Time(time.Unix(reqParam.Parameters.StartTime, 0)),
|
||||
EndTime: aws.Time(time.Unix(reqParam.Parameters.EndTime, 0)),
|
||||
Period: aws.Int64(reqParam.Parameters.Period),
|
||||
}
|
||||
if len(reqParam.Parameters.Statistics) != 0 {
|
||||
params.Statistics = reqParam.Parameters.Statistics
|
||||
}
|
||||
if len(reqParam.Parameters.ExtendedStatistics) != 0 {
|
||||
params.ExtendedStatistics = reqParam.Parameters.ExtendedStatistics
|
||||
}
|
||||
|
||||
resp, err := svc.GetMetricStatistics(params)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc()
|
||||
|
||||
c.JSON(200, resp)
|
||||
}
|
||||
|
||||
func handleListMetrics(req *cwRequest, c *middleware.Context) {
|
||||
cfg, err := getAwsConfig(req)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
svc := cloudwatch.New(sess, cfg)
|
||||
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
Namespace string `json:"namespace"`
|
||||
MetricName string `json:"metricName"`
|
||||
Dimensions []*cloudwatch.DimensionFilter `json:"dimensions"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
|
||||
params := &cloudwatch.ListMetricsInput{
|
||||
Namespace: aws.String(reqParam.Parameters.Namespace),
|
||||
MetricName: aws.String(reqParam.Parameters.MetricName),
|
||||
Dimensions: reqParam.Parameters.Dimensions,
|
||||
}
|
||||
|
||||
var resp cloudwatch.ListMetricsOutput
|
||||
err = svc.ListMetricsPages(params,
|
||||
func(page *cloudwatch.ListMetricsOutput, lastPage bool) bool {
|
||||
metrics.M_Aws_CloudWatch_ListMetrics.Inc()
|
||||
metrics, _ := awsutil.ValuesAtPath(page, "Metrics")
|
||||
for _, metric := range metrics {
|
||||
resp.Metrics = append(resp.Metrics, metric.(*cloudwatch.Metric))
|
||||
}
|
||||
return !lastPage
|
||||
})
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, resp)
|
||||
}
|
||||
|
||||
func handleDescribeAlarms(req *cwRequest, c *middleware.Context) {
|
||||
cfg, err := getAwsConfig(req)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
svc := cloudwatch.New(sess, cfg)
|
||||
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
ActionPrefix string `json:"actionPrefix"`
|
||||
AlarmNamePrefix string `json:"alarmNamePrefix"`
|
||||
AlarmNames []*string `json:"alarmNames"`
|
||||
StateValue string `json:"stateValue"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
|
||||
params := &cloudwatch.DescribeAlarmsInput{
|
||||
MaxRecords: aws.Int64(100),
|
||||
}
|
||||
if reqParam.Parameters.ActionPrefix != "" {
|
||||
params.ActionPrefix = aws.String(reqParam.Parameters.ActionPrefix)
|
||||
}
|
||||
if reqParam.Parameters.AlarmNamePrefix != "" {
|
||||
params.AlarmNamePrefix = aws.String(reqParam.Parameters.AlarmNamePrefix)
|
||||
}
|
||||
if len(reqParam.Parameters.AlarmNames) != 0 {
|
||||
params.AlarmNames = reqParam.Parameters.AlarmNames
|
||||
}
|
||||
if reqParam.Parameters.StateValue != "" {
|
||||
params.StateValue = aws.String(reqParam.Parameters.StateValue)
|
||||
}
|
||||
|
||||
resp, err := svc.DescribeAlarms(params)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, resp)
|
||||
}
|
||||
|
||||
func handleDescribeAlarmsForMetric(req *cwRequest, c *middleware.Context) {
|
||||
cfg, err := getAwsConfig(req)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
svc := cloudwatch.New(sess, cfg)
|
||||
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
Namespace string `json:"namespace"`
|
||||
MetricName string `json:"metricName"`
|
||||
Dimensions []*cloudwatch.Dimension `json:"dimensions"`
|
||||
Statistic string `json:"statistic"`
|
||||
ExtendedStatistic string `json:"extendedStatistic"`
|
||||
Period int64 `json:"period"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
|
||||
params := &cloudwatch.DescribeAlarmsForMetricInput{
|
||||
Namespace: aws.String(reqParam.Parameters.Namespace),
|
||||
MetricName: aws.String(reqParam.Parameters.MetricName),
|
||||
Period: aws.Int64(reqParam.Parameters.Period),
|
||||
}
|
||||
if len(reqParam.Parameters.Dimensions) != 0 {
|
||||
params.Dimensions = reqParam.Parameters.Dimensions
|
||||
}
|
||||
if reqParam.Parameters.Statistic != "" {
|
||||
params.Statistic = aws.String(reqParam.Parameters.Statistic)
|
||||
}
|
||||
if reqParam.Parameters.ExtendedStatistic != "" {
|
||||
params.ExtendedStatistic = aws.String(reqParam.Parameters.ExtendedStatistic)
|
||||
}
|
||||
|
||||
resp, err := svc.DescribeAlarmsForMetric(params)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, resp)
|
||||
}
|
||||
|
||||
func handleDescribeAlarmHistory(req *cwRequest, c *middleware.Context) {
|
||||
cfg, err := getAwsConfig(req)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
svc := cloudwatch.New(sess, cfg)
|
||||
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
AlarmName string `json:"alarmName"`
|
||||
HistoryItemType string `json:"historyItemType"`
|
||||
StartDate int64 `json:"startDate"`
|
||||
EndDate int64 `json:"endDate"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
|
||||
params := &cloudwatch.DescribeAlarmHistoryInput{
|
||||
AlarmName: aws.String(reqParam.Parameters.AlarmName),
|
||||
StartDate: aws.Time(time.Unix(reqParam.Parameters.StartDate, 0)),
|
||||
EndDate: aws.Time(time.Unix(reqParam.Parameters.EndDate, 0)),
|
||||
}
|
||||
if reqParam.Parameters.HistoryItemType != "" {
|
||||
params.HistoryItemType = aws.String(reqParam.Parameters.HistoryItemType)
|
||||
}
|
||||
|
||||
resp, err := svc.DescribeAlarmHistory(params)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, resp)
|
||||
}
|
||||
|
||||
func handleDescribeInstances(req *cwRequest, c *middleware.Context) {
|
||||
cfg, err := getAwsConfig(req)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
svc := ec2.New(sess, cfg)
|
||||
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
Filters []*ec2.Filter `json:"filters"`
|
||||
InstanceIds []*string `json:"instanceIds"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
|
||||
params := &ec2.DescribeInstancesInput{}
|
||||
if len(reqParam.Parameters.Filters) > 0 {
|
||||
params.Filters = reqParam.Parameters.Filters
|
||||
}
|
||||
if len(reqParam.Parameters.InstanceIds) > 0 {
|
||||
params.InstanceIds = reqParam.Parameters.InstanceIds
|
||||
}
|
||||
|
||||
var resp ec2.DescribeInstancesOutput
|
||||
err = svc.DescribeInstancesPages(params,
|
||||
func(page *ec2.DescribeInstancesOutput, lastPage bool) bool {
|
||||
reservations, _ := awsutil.ValuesAtPath(page, "Reservations")
|
||||
for _, reservation := range reservations {
|
||||
resp.Reservations = append(resp.Reservations, reservation.(*ec2.Reservation))
|
||||
}
|
||||
return !lastPage
|
||||
})
|
||||
if err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(200, resp)
|
||||
}
|
||||
|
||||
func HandleRequest(c *middleware.Context, ds *m.DataSource) {
|
||||
var req cwRequest
|
||||
req.Body, _ = ioutil.ReadAll(c.Req.Request.Body)
|
||||
req.DataSource = ds
|
||||
json.Unmarshal(req.Body, &req)
|
||||
|
||||
if handler, found := actionHandlers[req.Action]; !found {
|
||||
c.JsonApiErr(500, "Unexpected AWS Action", errors.New(req.Action))
|
||||
return
|
||||
} else {
|
||||
handler(&req, c)
|
||||
}
|
||||
}
|
@@ -19,6 +19,7 @@ var grafanaComProxyTransport = &http.Transport{
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 30 * time.Second,
|
||||
KeepAlive: 30 * time.Second,
|
||||
DualStack: true,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 10 * time.Second,
|
||||
}
|
||||
|
@@ -17,7 +17,6 @@ import (
|
||||
|
||||
"github.com/opentracing/opentracing-go"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/cloudwatch"
|
||||
"github.com/grafana/grafana/pkg/log"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
@@ -63,11 +62,6 @@ func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx
|
||||
}
|
||||
|
||||
func (proxy *DataSourceProxy) HandleRequest() {
|
||||
if proxy.ds.Type == m.DS_CLOUDWATCH {
|
||||
cloudwatch.HandleRequest(proxy.ctx, proxy.ds)
|
||||
return
|
||||
}
|
||||
|
||||
if err := proxy.validateRequest(); err != nil {
|
||||
proxy.ctx.JsonApiErr(403, err.Error(), nil)
|
||||
return
|
||||
|
@@ -30,6 +30,7 @@ func Init(version string) {
|
||||
DialContext: (&net.Dialer{
|
||||
Timeout: 30 * time.Second,
|
||||
KeepAlive: 30 * time.Second,
|
||||
DualStack: true,
|
||||
}).DialContext,
|
||||
MaxIdleConns: 100,
|
||||
IdleConnTimeout: 90 * time.Second,
|
||||
|
@@ -21,6 +21,7 @@ import (
|
||||
|
||||
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
|
||||
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/cloudwatch"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/graphite"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/influxdb"
|
||||
_ "github.com/grafana/grafana/pkg/tsdb/mysql"
|
||||
|
@@ -24,7 +24,8 @@ type WebdavUploader struct {
|
||||
var netTransport = &http.Transport{
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 60 * time.Second,
|
||||
Timeout: 60 * time.Second,
|
||||
DualStack: true,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 5 * time.Second,
|
||||
}
|
||||
|
@@ -54,6 +54,7 @@ func (ds *DataSource) GetHttpTransport() (*http.Transport, error) {
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 30 * time.Second,
|
||||
KeepAlive: 30 * time.Second,
|
||||
DualStack: true,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 10 * time.Second,
|
||||
ExpectContinueTimeout: 1 * time.Second,
|
||||
|
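The changelog entry "Dataproxy: Allow Grafana to renegotiate the TLS connection (#9250)" relates to this datasource transport, but the renegotiation setting itself is not visible in the hunk above. As a hedged illustration only (an assumption about the general Go mechanism, not the exact Grafana change), client-side renegotiation is opted into via `tls.Config.Renegotiation`:

```go
package main

import (
	"crypto/tls"
	"net/http"
)

// Hypothetical sketch: an http.Transport whose TLS client config permits the
// server to renegotiate the session, which stock Go clients reject by default.
func newRenegotiatingTransport() *http.Transport {
	return &http.Transport{
		TLSClientConfig: &tls.Config{
			Renegotiation: tls.RenegotiateFreelyAsClient,
		},
	}
}

func main() {
	client := &http.Client{Transport: newRenegotiatingTransport()}
	_ = client
}
```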
@@ -94,6 +94,53 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float {
|
||||
value = (values[(length/2)-1] + values[length/2]) / 2
|
||||
}
|
||||
}
|
||||
case "diff":
|
||||
var (
|
||||
points = series.Points
|
||||
first float64
|
||||
i int
|
||||
)
|
||||
// get the newest point
|
||||
for i = len(points) - 1; i >= 0; i-- {
|
||||
if points[i][0].Valid {
|
||||
allNull = false
|
||||
first = points[i][0].Float64
|
||||
break
|
||||
}
|
||||
}
|
||||
// get other points
|
||||
points = points[0:i]
|
||||
for i := len(points) - 1; i >= 0; i-- {
|
||||
if points[i][0].Valid {
|
||||
allNull = false
|
||||
value = first - points[i][0].Float64
|
||||
break
|
||||
}
|
||||
}
|
||||
case "percent_diff":
|
||||
var (
|
||||
points = series.Points
|
||||
first float64
|
||||
i int
|
||||
)
|
||||
// get the newest point
|
||||
for i = len(points) - 1; i >= 0; i-- {
|
||||
if points[i][0].Valid {
|
||||
allNull = false
|
||||
first = points[i][0].Float64
|
||||
break
|
||||
}
|
||||
}
|
||||
// get other points
|
||||
points = points[0:i]
|
||||
for i := len(points) - 1; i >= 0; i-- {
|
||||
if points[i][0].Valid {
|
||||
allNull = false
|
||||
val := (first - points[i][0].Float64) / points[i][0].Float64 * 100
|
||||
value = math.Abs(val)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if allNull {
|
||||
|
@@ -80,6 +80,17 @@ func TestSimpleReducer(t *testing.T) {
|
||||
|
||||
So(reducer.Reduce(series).Float64, ShouldEqual, float64(3))
|
||||
})
|
||||
|
||||
Convey("diff", func() {
|
||||
result := testReducer("diff", 30, 40)
|
||||
So(result, ShouldEqual, float64(10))
|
||||
})
|
||||
|
||||
Convey("percent_diff", func() {
|
||||
result := testReducer("percent_diff", 30, 40)
|
||||
So(result, ShouldEqual, float64(33.33333333333333))
|
||||
})
|
||||
|
||||
})
|
||||
}
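For the series used in these tests (an older point of 30 and a newest point of 40), the two new reducers work out to: diff = 40 - 30 = 10, and percent_diff = |(40 - 30) / 30| * 100 = 33.33..., matching the expected values asserted above.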
|
||||
|
||||
|
@@ -3,6 +3,8 @@ package notifiers
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"fmt"
|
||||
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/log"
|
||||
@@ -72,6 +74,10 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
|
||||
if evalContext.Rule.State == m.AlertStateOK {
|
||||
eventType = "resolve"
|
||||
}
|
||||
customData := "Triggered metrics:\n\n"
|
||||
for _, evt := range evalContext.EvalMatches {
|
||||
customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
|
||||
}
|
||||
|
||||
this.log.Info("Notifying Pagerduty", "event_type", eventType)
|
||||
|
||||
@@ -79,6 +85,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
|
||||
bodyJSON.Set("service_key", this.Key)
|
||||
bodyJSON.Set("description", evalContext.Rule.Name+" - "+evalContext.Rule.Message)
|
||||
bodyJSON.Set("client", "Grafana")
|
||||
bodyJSON.Set("details", customData)
|
||||
bodyJSON.Set("event_type", eventType)
|
||||
bodyJSON.Set("incident_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10))
|
||||
|
||||
|
@@ -27,7 +27,8 @@ type Webhook struct {
|
||||
var netTransport = &http.Transport{
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
Dial: (&net.Dialer{
|
||||
Timeout: 30 * time.Second,
|
||||
Timeout: 30 * time.Second,
|
||||
DualStack: true,
|
||||
}).Dial,
|
||||
TLSHandshakeTimeout: 5 * time.Second,
|
||||
}
|
||||
|
@@ -264,12 +264,16 @@ func applyCommandLineDefaultProperties(props map[string]string) {
|
||||
|
||||
func applyCommandLineProperties(props map[string]string) {
|
||||
for _, section := range Cfg.Sections() {
|
||||
sectionName := section.Name() + "."
|
||||
if section.Name() == ini.DEFAULT_SECTION {
|
||||
sectionName = ""
|
||||
}
|
||||
for _, key := range section.Keys() {
|
||||
keyString := fmt.Sprintf("%s.%s", section.Name(), key.Name())
|
||||
keyString := sectionName + key.Name()
|
||||
value, exists := props[keyString]
|
||||
if exists {
|
||||
key.SetValue(value)
|
||||
appliedCommandLineProperties = append(appliedCommandLineProperties, fmt.Sprintf("%s=%s", keyString, value))
|
||||
key.SetValue(value)
|
||||
}
|
||||
}
|
||||
}
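The effect of switching from `fmt.Sprintf("%s.%s", section.Name(), key.Name())` to `sectionName + key.Name()` is that keys in the default (unnamed) ini section become addressable without a section prefix, which is presumably what lets overrides like the `cfg:app_mode=development` argument in `.bra.toml` above resolve. A simplified, hypothetical sketch of the key construction (the helper name is invented for illustration):

```go
package main

import "fmt"

// overrideKey mirrors the lookup key built in the hunk above: default-section
// keys get no prefix, while sectioned keys keep the "section.key" form.
func overrideKey(section, key string) string {
	if section == "" || section == "DEFAULT" { // stand-in for ini.DEFAULT_SECTION
		return key
	}
	return section + "." + key
}

func main() {
	fmt.Println(overrideKey("DEFAULT", "app_mode"))  // "app_mode" matches cfg:app_mode=development
	fmt.Println(overrideKey("server", "http_port"))  // "server.http_port"
}
```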
|
||||
@@ -449,16 +453,11 @@ func validateStaticRootPath() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if _, err := os.Stat(path.Join(StaticRootPath, "css")); err == nil {
|
||||
return nil
|
||||
if _, err := os.Stat(path.Join(StaticRootPath, "build")); err != nil {
|
||||
logger.Error("Failed to detect generated javascript files in public/build")
|
||||
}
|
||||
|
||||
if _, err := os.Stat(StaticRootPath + "_gen/css"); err == nil {
|
||||
StaticRootPath = StaticRootPath + "_gen"
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("Failed to detect generated css or javascript files in static root (%s), have you executed default grunt task?", StaticRootPath)
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewConfigContext(args *CommandLineArgs) error {
|
||||
@@ -656,4 +655,5 @@ func LogConfigurationInfo() {
|
||||
logger.Info("Path Data", "path", DataPath)
|
||||
logger.Info("Path Logs", "path", LogsPath)
|
||||
logger.Info("Path Plugins", "path", PluginsPath)
|
||||
logger.Info("App mode " + Env)
|
||||
}
|
||||
|
@@ -80,8 +80,8 @@ func internalInit(settings *TracingSettings) (io.Closer, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
logger.Info("Initialized jaeger tracer", "address", settings.Address)
|
||||
opentracing.InitGlobalTracer(tracer)
|
||||
logger.Info("Initializing Jaeger tracer", "address", settings.Address)
|
||||
return closer, nil
|
||||
}
|
||||
|
||||
|
220
pkg/tsdb/cloudwatch/annotation_query.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package cloudwatch
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatch"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
)
|
||||
|
||||
func (e *CloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
|
||||
result := &tsdb.Response{
|
||||
Results: make(map[string]*tsdb.QueryResult),
|
||||
}
|
||||
firstQuery := queryContext.Queries[0]
|
||||
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: firstQuery.RefId}
|
||||
|
||||
parameters := firstQuery.Model
|
||||
usePrefixMatch := parameters.Get("prefixMatching").MustBool(false)
|
||||
region := parameters.Get("region").MustString("")
|
||||
namespace := parameters.Get("namespace").MustString("")
|
||||
metricName := parameters.Get("metricName").MustString("")
|
||||
dimensions := parameters.Get("dimensions").MustMap()
|
||||
statistics, extendedStatistics, err := parseStatistics(parameters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
period := int64(parameters.Get("period").MustInt(0))
|
||||
if period == 0 && !usePrefixMatch {
|
||||
period = 300
|
||||
}
|
||||
actionPrefix := parameters.Get("actionPrefix").MustString("")
|
||||
alarmNamePrefix := parameters.Get("alarmNamePrefix").MustString("")
|
||||
|
||||
svc, err := e.getClient(region)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var alarmNames []*string
|
||||
if usePrefixMatch {
|
||||
params := &cloudwatch.DescribeAlarmsInput{
|
||||
MaxRecords: aws.Int64(100),
|
||||
ActionPrefix: aws.String(actionPrefix),
|
||||
AlarmNamePrefix: aws.String(alarmNamePrefix),
|
||||
}
|
||||
resp, err := svc.DescribeAlarms(params)
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call cloudwatch:DescribeAlarms")
|
||||
}
|
||||
alarmNames = filterAlarms(resp, namespace, metricName, dimensions, statistics, extendedStatistics, period)
|
||||
} else {
|
||||
if region == "" || namespace == "" || metricName == "" || len(statistics) == 0 {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
var qd []*cloudwatch.Dimension
|
||||
for k, v := range dimensions {
|
||||
if vv, ok := v.(string); ok {
|
||||
qd = append(qd, &cloudwatch.Dimension{
|
||||
Name: aws.String(k),
|
||||
Value: aws.String(vv),
|
||||
})
|
||||
}
|
||||
}
|
||||
for _, s := range statistics {
|
||||
params := &cloudwatch.DescribeAlarmsForMetricInput{
|
||||
Namespace: aws.String(namespace),
|
||||
MetricName: aws.String(metricName),
|
||||
Dimensions: qd,
|
||||
Statistic: aws.String(s),
|
||||
Period: aws.Int64(int64(period)),
|
||||
}
|
||||
resp, err := svc.DescribeAlarmsForMetric(params)
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call cloudwatch:DescribeAlarmsForMetric")
|
||||
}
|
||||
for _, alarm := range resp.MetricAlarms {
|
||||
alarmNames = append(alarmNames, alarm.AlarmName)
|
||||
}
|
||||
}
|
||||
for _, s := range extendedStatistics {
|
||||
params := &cloudwatch.DescribeAlarmsForMetricInput{
|
||||
Namespace: aws.String(namespace),
|
||||
MetricName: aws.String(metricName),
|
||||
Dimensions: qd,
|
||||
ExtendedStatistic: aws.String(s),
|
||||
Period: aws.Int64(int64(period)),
|
||||
}
|
||||
resp, err := svc.DescribeAlarmsForMetric(params)
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call cloudwatch:DescribeAlarmsForMetric")
|
||||
}
|
||||
for _, alarm := range resp.MetricAlarms {
|
||||
alarmNames = append(alarmNames, alarm.AlarmName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
startTime, err := queryContext.TimeRange.ParseFrom()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
endTime, err := queryContext.TimeRange.ParseTo()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
annotations := make([]map[string]string, 0)
|
||||
for _, alarmName := range alarmNames {
|
||||
params := &cloudwatch.DescribeAlarmHistoryInput{
|
||||
AlarmName: alarmName,
|
||||
StartDate: aws.Time(startTime),
|
||||
EndDate: aws.Time(endTime),
|
||||
MaxRecords: aws.Int64(100),
|
||||
}
|
||||
resp, err := svc.DescribeAlarmHistory(params)
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call cloudwatch:DescribeAlarmHistory")
|
||||
}
|
||||
for _, history := range resp.AlarmHistoryItems {
|
||||
annotation := make(map[string]string)
|
||||
annotation["time"] = history.Timestamp.UTC().Format(time.RFC3339)
|
||||
annotation["title"] = *history.AlarmName
|
||||
annotation["tags"] = *history.HistoryItemType
|
||||
annotation["text"] = *history.HistorySummary
|
||||
annotations = append(annotations, annotation)
|
||||
}
|
||||
}
|
||||
|
||||
transformAnnotationToTable(annotations, queryResult)
|
||||
result.Results[firstQuery.RefId] = queryResult
|
||||
return result, err
|
||||
}
|
||||
|
||||
func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) {
|
||||
table := &tsdb.Table{
|
||||
Columns: make([]tsdb.TableColumn, 4),
|
||||
Rows: make([]tsdb.RowValues, 0),
|
||||
}
|
||||
table.Columns[0].Text = "time"
|
||||
table.Columns[1].Text = "title"
|
||||
table.Columns[2].Text = "tags"
|
||||
table.Columns[3].Text = "text"
|
||||
|
||||
for _, r := range data {
|
||||
values := make([]interface{}, 4)
|
||||
values[0] = r["time"]
|
||||
values[1] = r["title"]
|
||||
values[2] = r["tags"]
|
||||
values[3] = r["text"]
|
||||
table.Rows = append(table.Rows, values)
|
||||
}
|
||||
result.Tables = append(result.Tables, table)
|
||||
result.Meta.Set("rowCount", len(data))
|
||||
}
|
||||
|
||||
func filterAlarms(alarms *cloudwatch.DescribeAlarmsOutput, namespace string, metricName string, dimensions map[string]interface{}, statistics []string, extendedStatistics []string, period int64) []*string {
|
||||
alarmNames := make([]*string, 0)
|
||||
|
||||
for _, alarm := range alarms.MetricAlarms {
|
||||
if namespace != "" && *alarm.Namespace != namespace {
|
||||
continue
|
||||
}
|
||||
if metricName != "" && *alarm.MetricName != metricName {
|
||||
continue
|
||||
}
|
||||
|
||||
match := true
|
||||
if len(dimensions) == 0 {
|
||||
// all match
|
||||
} else if len(alarm.Dimensions) != len(dimensions) {
|
||||
match = false
|
||||
} else {
|
||||
for _, d := range alarm.Dimensions {
|
||||
if _, ok := dimensions[*d.Name]; !ok {
|
||||
match = false
|
||||
}
|
||||
}
|
||||
}
|
||||
if !match {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(statistics) != 0 {
|
||||
found := false
|
||||
for _, s := range statistics {
|
||||
if *alarm.Statistic == s {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if len(extendedStatistics) != 0 {
|
||||
found := false
|
||||
for _, s := range extendedStatistics {
|
||||
if *alarm.Statistic == s {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if period != 0 && *alarm.Period != period {
|
||||
continue
|
||||
}
|
||||
|
||||
alarmNames = append(alarmNames, alarm.AlarmName)
|
||||
}
|
||||
|
||||
return alarmNames
|
||||
}
|
361 pkg/tsdb/cloudwatch/cloudwatch.go Normal file
@@ -0,0 +1,361 @@
|
||||
package cloudwatch
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/log"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/request"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatch"
|
||||
"github.com/grafana/grafana/pkg/components/null"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/metrics"
|
||||
)
|
||||
|
||||
type CloudWatchExecutor struct {
|
||||
*models.DataSource
|
||||
}
|
||||
|
||||
type DatasourceInfo struct {
|
||||
Profile string
|
||||
Region string
|
||||
AuthType string
|
||||
AssumeRoleArn string
|
||||
Namespace string
|
||||
|
||||
AccessKey string
|
||||
SecretKey string
|
||||
}
|
||||
|
||||
func NewCloudWatchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
|
||||
return &CloudWatchExecutor{}, nil
|
||||
}
|
||||
|
||||
var (
|
||||
plog log.Logger
|
||||
standardStatistics map[string]bool
|
||||
aliasFormat *regexp.Regexp
|
||||
)
|
||||
|
||||
func init() {
|
||||
plog = log.New("tsdb.cloudwatch")
|
||||
tsdb.RegisterTsdbQueryEndpoint("cloudwatch", NewCloudWatchExecutor)
|
||||
standardStatistics = map[string]bool{
|
||||
"Average": true,
|
||||
"Maximum": true,
|
||||
"Minimum": true,
|
||||
"Sum": true,
|
||||
"SampleCount": true,
|
||||
}
|
||||
aliasFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
|
||||
var result *tsdb.Response
|
||||
e.DataSource = dsInfo
|
||||
queryType := queryContext.Queries[0].Model.Get("type").MustString("")
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case "metricFindQuery":
|
||||
result, err = e.executeMetricFindQuery(ctx, queryContext)
|
||||
break
|
||||
case "annotationQuery":
|
||||
result, err = e.executeAnnotationQuery(ctx, queryContext)
|
||||
break
|
||||
case "timeSeriesQuery":
|
||||
fallthrough
|
||||
default:
|
||||
result, err = e.executeTimeSeriesQuery(ctx, queryContext)
|
||||
break
|
||||
}
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
|
||||
result := &tsdb.Response{
|
||||
Results: make(map[string]*tsdb.QueryResult),
|
||||
}
|
||||
|
||||
errCh := make(chan error, 1)
|
||||
resCh := make(chan *tsdb.QueryResult, 1)
|
||||
|
||||
currentlyExecuting := 0
|
||||
for i, model := range queryContext.Queries {
|
||||
queryType := model.Model.Get("type").MustString()
|
||||
if queryType != "timeSeriesQuery" && queryType != "" {
|
||||
continue
|
||||
}
|
||||
currentlyExecuting++
|
||||
go func(refId string, index int) {
|
||||
queryRes, err := e.executeQuery(ctx, queryContext.Queries[index].Model, queryContext)
|
||||
currentlyExecuting--
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
} else {
|
||||
queryRes.RefId = refId
|
||||
resCh <- queryRes
|
||||
}
|
||||
}(model.RefId, i)
|
||||
}
|
||||
|
||||
for currentlyExecuting != 0 {
|
||||
select {
|
||||
case res := <-resCh:
|
||||
result.Results[res.RefId] = res
|
||||
case err := <-errCh:
|
||||
return result, err
|
||||
case <-ctx.Done():
|
||||
return result, ctx.Err()
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) {
|
||||
query, err := parseQuery(parameters)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
client, err := e.getClient(query.Region)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
startTime, err := queryContext.TimeRange.ParseFrom()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
endTime, err := queryContext.TimeRange.ParseTo()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
params := &cloudwatch.GetMetricStatisticsInput{
|
||||
Namespace: aws.String(query.Namespace),
|
||||
MetricName: aws.String(query.MetricName),
|
||||
Dimensions: query.Dimensions,
|
||||
Period: aws.Int64(int64(query.Period)),
|
||||
StartTime: aws.Time(startTime),
|
||||
EndTime: aws.Time(endTime),
|
||||
}
|
||||
if len(query.Statistics) > 0 {
|
||||
params.Statistics = query.Statistics
|
||||
}
|
||||
if len(query.ExtendedStatistics) > 0 {
|
||||
params.ExtendedStatistics = query.ExtendedStatistics
|
||||
}
|
||||
|
||||
if setting.Env == setting.DEV {
|
||||
plog.Debug("CloudWatch query", "raw query", params)
|
||||
}
|
||||
|
||||
resp, err := client.GetMetricStatisticsWithContext(ctx, params, request.WithResponseReadTimeout(10*time.Second))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc()
|
||||
|
||||
queryRes, err := parseResponse(resp, query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return queryRes, nil
|
||||
}
|
||||
|
||||
func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) {
|
||||
var result []*cloudwatch.Dimension
|
||||
|
||||
for k, v := range model.Get("dimensions").MustMap() {
|
||||
kk := k
|
||||
if vv, ok := v.(string); ok {
|
||||
result = append(result, &cloudwatch.Dimension{
|
||||
Name: &kk,
|
||||
Value: &vv,
|
||||
})
|
||||
} else {
|
||||
return nil, errors.New("failed to parse")
|
||||
}
|
||||
}
|
||||
|
||||
sort.Slice(result, func(i, j int) bool {
|
||||
return *result[i].Name < *result[j].Name
|
||||
})
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func parseStatistics(model *simplejson.Json) ([]string, []string, error) {
|
||||
var statistics []string
|
||||
var extendedStatistics []string
|
||||
|
||||
for _, s := range model.Get("statistics").MustArray() {
|
||||
if ss, ok := s.(string); ok {
|
||||
if _, isStandard := standardStatistics[ss]; isStandard {
|
||||
statistics = append(statistics, ss)
|
||||
} else {
|
||||
extendedStatistics = append(extendedStatistics, ss)
|
||||
}
|
||||
} else {
|
||||
return nil, nil, errors.New("failed to parse")
|
||||
}
|
||||
}
|
||||
|
||||
return statistics, extendedStatistics, nil
|
||||
}
|
||||
|
||||
func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) {
|
||||
region, err := model.Get("region").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
namespace, err := model.Get("namespace").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
metricName, err := model.Get("metricName").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dimensions, err := parseDimensions(model)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
statistics, extendedStatistics, err := parseStatistics(model)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := model.Get("period").MustString("")
|
||||
if p == "" {
|
||||
if namespace == "AWS/EC2" {
|
||||
p = "300"
|
||||
} else {
|
||||
p = "60"
|
||||
}
|
||||
}
|
||||
|
||||
period := 300
|
||||
if regexp.MustCompile(`^\d+$`).Match([]byte(p)) {
|
||||
period, err = strconv.Atoi(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
d, err := time.ParseDuration(p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
period = int(d.Seconds())
|
||||
}
|
||||
|
||||
alias := model.Get("alias").MustString("{{metric}}_{{stat}}")
|
||||
|
||||
return &CloudWatchQuery{
|
||||
Region: region,
|
||||
Namespace: namespace,
|
||||
MetricName: metricName,
|
||||
Dimensions: dimensions,
|
||||
Statistics: aws.StringSlice(statistics),
|
||||
ExtendedStatistics: aws.StringSlice(extendedStatistics),
|
||||
Period: period,
|
||||
Alias: alias,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string {
|
||||
data := map[string]string{}
|
||||
data["region"] = query.Region
|
||||
data["namespace"] = query.Namespace
|
||||
data["metric"] = query.MetricName
|
||||
data["stat"] = stat
|
||||
for k, v := range dimensions {
|
||||
data[k] = v
|
||||
}
|
||||
|
||||
result := aliasFormat.ReplaceAllFunc([]byte(query.Alias), func(in []byte) []byte {
|
||||
labelName := strings.Replace(string(in), "{{", "", 1)
|
||||
labelName = strings.Replace(labelName, "}}", "", 1)
|
||||
labelName = strings.TrimSpace(labelName)
|
||||
if val, exists := data[labelName]; exists {
|
||||
return []byte(val)
|
||||
}
|
||||
|
||||
return in
|
||||
})
|
||||
|
||||
return string(result)
|
||||
}
|
||||
|
||||
func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) {
|
||||
queryRes := tsdb.NewQueryResult()
|
||||
|
||||
var value float64
|
||||
for _, s := range append(query.Statistics, query.ExtendedStatistics...) {
|
||||
series := tsdb.TimeSeries{
|
||||
Tags: map[string]string{},
|
||||
}
|
||||
for _, d := range query.Dimensions {
|
||||
series.Tags[*d.Name] = *d.Value
|
||||
}
|
||||
series.Name = formatAlias(query, *s, series.Tags)
|
||||
|
||||
lastTimestamp := make(map[string]time.Time)
|
||||
sort.Slice(resp.Datapoints, func(i, j int) bool {
|
||||
return (*resp.Datapoints[i].Timestamp).Before(*resp.Datapoints[j].Timestamp)
|
||||
})
|
||||
for _, v := range resp.Datapoints {
|
||||
switch *s {
|
||||
case "Average":
|
||||
value = *v.Average
|
||||
case "Maximum":
|
||||
value = *v.Maximum
|
||||
case "Minimum":
|
||||
value = *v.Minimum
|
||||
case "Sum":
|
||||
value = *v.Sum
|
||||
case "SampleCount":
|
||||
value = *v.SampleCount
|
||||
default:
|
||||
if strings.Index(*s, "p") == 0 && v.ExtendedStatistics[*s] != nil {
|
||||
value = *v.ExtendedStatistics[*s]
|
||||
}
|
||||
}
|
||||
|
||||
// terminate gap of data points
|
||||
timestamp := *v.Timestamp
|
||||
if _, ok := lastTimestamp[*s]; ok {
|
||||
nextTimestampFromLast := lastTimestamp[*s].Add(time.Duration(query.Period) * time.Second)
|
||||
for timestamp.After(nextTimestampFromLast) {
|
||||
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(nextTimestampFromLast.Unix()*1000)))
|
||||
nextTimestampFromLast = nextTimestampFromLast.Add(time.Duration(query.Period) * time.Second)
|
||||
}
|
||||
}
|
||||
lastTimestamp[*s] = timestamp
|
||||
|
||||
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timestamp.Unix()*1000)))
|
||||
}
|
||||
|
||||
queryRes.Series = append(queryRes.Series, &series)
|
||||
}
|
||||
|
||||
return queryRes, nil
|
||||
}
|
181 pkg/tsdb/cloudwatch/cloudwatch_test.go Normal file
@@ -0,0 +1,181 @@
|
||||
package cloudwatch
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatch"
|
||||
"github.com/grafana/grafana/pkg/components/null"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestCloudWatch(t *testing.T) {
|
||||
Convey("CloudWatch", t, func() {
|
||||
|
||||
Convey("can parse cloudwatch json model", func() {
|
||||
json := `
|
||||
{
|
||||
"region": "us-east-1",
|
||||
"namespace": "AWS/ApplicationELB",
|
||||
"metricName": "TargetResponseTime",
|
||||
"dimensions": {
|
||||
"LoadBalancer": "lb",
|
||||
"TargetGroup": "tg"
|
||||
},
|
||||
"statistics": [
|
||||
"Average",
|
||||
"Maximum",
|
||||
"p50.00",
|
||||
"p90.00"
|
||||
],
|
||||
"period": "60",
|
||||
"alias": "{{metric}}_{{stat}}"
|
||||
}
|
||||
`
|
||||
modelJson, err := simplejson.NewJson([]byte(json))
|
||||
So(err, ShouldBeNil)
|
||||
|
||||
res, err := parseQuery(modelJson)
|
||||
So(err, ShouldBeNil)
|
||||
So(res.Region, ShouldEqual, "us-east-1")
|
||||
So(res.Namespace, ShouldEqual, "AWS/ApplicationELB")
|
||||
So(res.MetricName, ShouldEqual, "TargetResponseTime")
|
||||
So(len(res.Dimensions), ShouldEqual, 2)
|
||||
So(*res.Dimensions[0].Name, ShouldEqual, "LoadBalancer")
|
||||
So(*res.Dimensions[0].Value, ShouldEqual, "lb")
|
||||
So(*res.Dimensions[1].Name, ShouldEqual, "TargetGroup")
|
||||
So(*res.Dimensions[1].Value, ShouldEqual, "tg")
|
||||
So(len(res.Statistics), ShouldEqual, 2)
|
||||
So(*res.Statistics[0], ShouldEqual, "Average")
|
||||
So(*res.Statistics[1], ShouldEqual, "Maximum")
|
||||
So(len(res.ExtendedStatistics), ShouldEqual, 2)
|
||||
So(*res.ExtendedStatistics[0], ShouldEqual, "p50.00")
|
||||
So(*res.ExtendedStatistics[1], ShouldEqual, "p90.00")
|
||||
So(res.Period, ShouldEqual, 60)
|
||||
So(res.Alias, ShouldEqual, "{{metric}}_{{stat}}")
|
||||
})
|
||||
|
||||
Convey("can parse cloudwatch response", func() {
|
||||
timestamp := time.Unix(0, 0)
|
||||
resp := &cloudwatch.GetMetricStatisticsOutput{
|
||||
Label: aws.String("TargetResponseTime"),
|
||||
Datapoints: []*cloudwatch.Datapoint{
|
||||
{
|
||||
Timestamp: aws.Time(timestamp),
|
||||
Average: aws.Float64(10.0),
|
||||
Maximum: aws.Float64(20.0),
|
||||
ExtendedStatistics: map[string]*float64{
|
||||
"p50.00": aws.Float64(30.0),
|
||||
"p90.00": aws.Float64(40.0),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
query := &CloudWatchQuery{
|
||||
Region: "us-east-1",
|
||||
Namespace: "AWS/ApplicationELB",
|
||||
MetricName: "TargetResponseTime",
|
||||
Dimensions: []*cloudwatch.Dimension{
|
||||
{
|
||||
Name: aws.String("LoadBalancer"),
|
||||
Value: aws.String("lb"),
|
||||
},
|
||||
{
|
||||
Name: aws.String("TargetGroup"),
|
||||
Value: aws.String("tg"),
|
||||
},
|
||||
},
|
||||
Statistics: []*string{aws.String("Average"), aws.String("Maximum")},
|
||||
ExtendedStatistics: []*string{aws.String("p50.00"), aws.String("p90.00")},
|
||||
Period: 60,
|
||||
Alias: "{{namespace}}_{{metric}}_{{stat}}",
|
||||
}
|
||||
|
||||
queryRes, err := parseResponse(resp, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(queryRes.Series[0].Name, ShouldEqual, "AWS/ApplicationELB_TargetResponseTime_Average")
|
||||
So(queryRes.Series[0].Tags["LoadBalancer"], ShouldEqual, "lb")
|
||||
So(queryRes.Series[0].Tags["TargetGroup"], ShouldEqual, "tg")
|
||||
So(queryRes.Series[0].Points[0][0].String(), ShouldEqual, null.FloatFrom(10.0).String())
|
||||
So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
|
||||
So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
|
||||
So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
|
||||
})
|
||||
|
||||
Convey("terminate gap of data points", func() {
|
||||
timestamp := time.Unix(0, 0)
|
||||
resp := &cloudwatch.GetMetricStatisticsOutput{
|
||||
Label: aws.String("TargetResponseTime"),
|
||||
Datapoints: []*cloudwatch.Datapoint{
|
||||
{
|
||||
Timestamp: aws.Time(timestamp),
|
||||
Average: aws.Float64(10.0),
|
||||
Maximum: aws.Float64(20.0),
|
||||
ExtendedStatistics: map[string]*float64{
|
||||
"p50.00": aws.Float64(30.0),
|
||||
"p90.00": aws.Float64(40.0),
|
||||
},
|
||||
},
|
||||
{
|
||||
Timestamp: aws.Time(timestamp.Add(60 * time.Second)),
|
||||
Average: aws.Float64(20.0),
|
||||
Maximum: aws.Float64(30.0),
|
||||
ExtendedStatistics: map[string]*float64{
|
||||
"p50.00": aws.Float64(40.0),
|
||||
"p90.00": aws.Float64(50.0),
|
||||
},
|
||||
},
|
||||
{
|
||||
Timestamp: aws.Time(timestamp.Add(180 * time.Second)),
|
||||
Average: aws.Float64(30.0),
|
||||
Maximum: aws.Float64(40.0),
|
||||
ExtendedStatistics: map[string]*float64{
|
||||
"p50.00": aws.Float64(50.0),
|
||||
"p90.00": aws.Float64(60.0),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
query := &CloudWatchQuery{
|
||||
Region: "us-east-1",
|
||||
Namespace: "AWS/ApplicationELB",
|
||||
MetricName: "TargetResponseTime",
|
||||
Dimensions: []*cloudwatch.Dimension{
|
||||
{
|
||||
Name: aws.String("LoadBalancer"),
|
||||
Value: aws.String("lb"),
|
||||
},
|
||||
{
|
||||
Name: aws.String("TargetGroup"),
|
||||
Value: aws.String("tg"),
|
||||
},
|
||||
},
|
||||
Statistics: []*string{aws.String("Average"), aws.String("Maximum")},
|
||||
ExtendedStatistics: []*string{aws.String("p50.00"), aws.String("p90.00")},
|
||||
Period: 60,
|
||||
Alias: "{{namespace}}_{{metric}}_{{stat}}",
|
||||
}
|
||||
|
||||
queryRes, err := parseResponse(resp, query)
|
||||
So(err, ShouldBeNil)
|
||||
So(queryRes.Series[0].Points[0][0].String(), ShouldEqual, null.FloatFrom(10.0).String())
|
||||
So(queryRes.Series[1].Points[0][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
|
||||
So(queryRes.Series[2].Points[0][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
|
||||
So(queryRes.Series[3].Points[0][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
|
||||
So(queryRes.Series[0].Points[1][0].String(), ShouldEqual, null.FloatFrom(20.0).String())
|
||||
So(queryRes.Series[1].Points[1][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
|
||||
So(queryRes.Series[2].Points[1][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
|
||||
So(queryRes.Series[3].Points[1][0].String(), ShouldEqual, null.FloatFrom(50.0).String())
|
||||
So(queryRes.Series[0].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
|
||||
So(queryRes.Series[1].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
|
||||
So(queryRes.Series[2].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
|
||||
So(queryRes.Series[3].Points[2][0].String(), ShouldEqual, null.FloatFromPtr(nil).String())
|
||||
So(queryRes.Series[0].Points[3][0].String(), ShouldEqual, null.FloatFrom(30.0).String())
|
||||
So(queryRes.Series[1].Points[3][0].String(), ShouldEqual, null.FloatFrom(40.0).String())
|
||||
So(queryRes.Series[2].Points[3][0].String(), ShouldEqual, null.FloatFrom(50.0).String())
|
||||
So(queryRes.Series[3].Points[3][0].String(), ShouldEqual, null.FloatFrom(60.0).String())
|
||||
})
|
||||
})
|
||||
}
|
196 pkg/tsdb/cloudwatch/credentials.go Normal file
@@ -0,0 +1,196 @@
|
||||
package cloudwatch
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials/endpointcreds"
|
||||
"github.com/aws/aws-sdk-go/aws/ec2metadata"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatch"
|
||||
"github.com/aws/aws-sdk-go/service/sts"
|
||||
)
|
||||
|
||||
type cache struct {
|
||||
credential *credentials.Credentials
|
||||
expiration *time.Time
|
||||
}
|
||||
|
||||
var awsCredentialCache map[string]cache = make(map[string]cache)
|
||||
var credentialCacheLock sync.RWMutex
|
||||
|
||||
func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
|
||||
cacheKey := dsInfo.AccessKey + ":" + dsInfo.Profile + ":" + dsInfo.AssumeRoleArn
|
||||
credentialCacheLock.RLock()
|
||||
if _, ok := awsCredentialCache[cacheKey]; ok {
|
||||
if awsCredentialCache[cacheKey].expiration != nil &&
|
||||
(*awsCredentialCache[cacheKey].expiration).After(time.Now().UTC()) {
|
||||
result := awsCredentialCache[cacheKey].credential
|
||||
credentialCacheLock.RUnlock()
|
||||
return result, nil
|
||||
}
|
||||
}
|
||||
credentialCacheLock.RUnlock()
|
||||
|
||||
accessKeyId := ""
|
||||
secretAccessKey := ""
|
||||
sessionToken := ""
|
||||
var expiration *time.Time
|
||||
expiration = nil
|
||||
if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 {
|
||||
params := &sts.AssumeRoleInput{
|
||||
RoleArn: aws.String(dsInfo.AssumeRoleArn),
|
||||
RoleSessionName: aws.String("GrafanaSession"),
|
||||
DurationSeconds: aws.Int64(900),
|
||||
}
|
||||
|
||||
stsSess, err := session.NewSession()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
stsCreds := credentials.NewChainCredentials(
|
||||
[]credentials.Provider{
|
||||
&credentials.EnvProvider{},
|
||||
&credentials.SharedCredentialsProvider{Filename: "", Profile: dsInfo.Profile},
|
||||
remoteCredProvider(stsSess),
|
||||
})
|
||||
stsConfig := &aws.Config{
|
||||
Region: aws.String(dsInfo.Region),
|
||||
Credentials: stsCreds,
|
||||
}
|
||||
|
||||
sess, err := session.NewSession(stsConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
svc := sts.New(sess, stsConfig)
|
||||
resp, err := svc.AssumeRole(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.Credentials != nil {
|
||||
accessKeyId = *resp.Credentials.AccessKeyId
|
||||
secretAccessKey = *resp.Credentials.SecretAccessKey
|
||||
sessionToken = *resp.Credentials.SessionToken
|
||||
expiration = resp.Credentials.Expiration
|
||||
}
|
||||
} else {
|
||||
now := time.Now()
|
||||
e := now.Add(5 * time.Minute)
|
||||
expiration = &e
|
||||
}
|
||||
|
||||
sess, err := session.NewSession()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
creds := credentials.NewChainCredentials(
|
||||
[]credentials.Provider{
|
||||
&credentials.StaticProvider{Value: credentials.Value{
|
||||
AccessKeyID: accessKeyId,
|
||||
SecretAccessKey: secretAccessKey,
|
||||
SessionToken: sessionToken,
|
||||
}},
|
||||
&credentials.EnvProvider{},
|
||||
&credentials.StaticProvider{Value: credentials.Value{
|
||||
AccessKeyID: dsInfo.AccessKey,
|
||||
SecretAccessKey: dsInfo.SecretKey,
|
||||
}},
|
||||
&credentials.SharedCredentialsProvider{Filename: "", Profile: dsInfo.Profile},
|
||||
remoteCredProvider(sess),
|
||||
})
|
||||
|
||||
credentialCacheLock.Lock()
|
||||
awsCredentialCache[cacheKey] = cache{
|
||||
credential: creds,
|
||||
expiration: expiration,
|
||||
}
|
||||
credentialCacheLock.Unlock()
|
||||
|
||||
return creds, nil
|
||||
}
|
||||
|
||||
func remoteCredProvider(sess *session.Session) credentials.Provider {
|
||||
ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
|
||||
|
||||
if len(ecsCredURI) > 0 {
|
||||
return ecsCredProvider(sess, ecsCredURI)
|
||||
}
|
||||
return ec2RoleProvider(sess)
|
||||
}
|
||||
|
||||
func ecsCredProvider(sess *session.Session, uri string) credentials.Provider {
|
||||
const host = `169.254.170.2`
|
||||
|
||||
c := ec2metadata.New(sess)
|
||||
return endpointcreds.NewProviderClient(
|
||||
c.Client.Config,
|
||||
c.Client.Handlers,
|
||||
fmt.Sprintf("http://%s%s", host, uri),
|
||||
func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute })
|
||||
}
|
||||
|
||||
func ec2RoleProvider(sess *session.Session) credentials.Provider {
|
||||
return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) getDsInfo(region string) *DatasourceInfo {
|
||||
authType := e.DataSource.JsonData.Get("authType").MustString()
|
||||
assumeRoleArn := e.DataSource.JsonData.Get("assumeRoleArn").MustString()
|
||||
accessKey := ""
|
||||
secretKey := ""
|
||||
for key, value := range e.DataSource.SecureJsonData.Decrypt() {
|
||||
if key == "accessKey" {
|
||||
accessKey = value
|
||||
}
|
||||
if key == "secretKey" {
|
||||
secretKey = value
|
||||
}
|
||||
}
|
||||
|
||||
datasourceInfo := &DatasourceInfo{
|
||||
Region: region,
|
||||
Profile: e.DataSource.Database,
|
||||
AuthType: authType,
|
||||
AssumeRoleArn: assumeRoleArn,
|
||||
AccessKey: accessKey,
|
||||
SecretKey: secretKey,
|
||||
}
|
||||
|
||||
return datasourceInfo
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) getAwsConfig(dsInfo *DatasourceInfo) (*aws.Config, error) {
|
||||
creds, err := GetCredentials(dsInfo)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cfg := &aws.Config{
|
||||
Region: aws.String(dsInfo.Region),
|
||||
Credentials: creds,
|
||||
}
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) getClient(region string) (*cloudwatch.CloudWatch, error) {
|
||||
datasourceInfo := e.getDsInfo(region)
|
||||
cfg, err := e.getAwsConfig(datasourceInfo)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
client := cloudwatch.New(sess, cfg)
|
||||
return client, nil
|
||||
}
|
@@ -1,7 +1,9 @@
|
||||
package cloudwatch
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"context"
|
||||
"errors"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
@@ -11,14 +13,20 @@ import (
|
||||
"github.com/aws/aws-sdk-go/aws/awsutil"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/cloudwatch"
|
||||
"github.com/aws/aws-sdk-go/service/ec2"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/metrics"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
"github.com/grafana/grafana/pkg/tsdb"
|
||||
)
|
||||
|
||||
var metricsMap map[string][]string
|
||||
var dimensionsMap map[string][]string
|
||||
|
||||
type suggestData struct {
|
||||
Text string
|
||||
Value string
|
||||
}
|
||||
|
||||
type CustomMetricsCache struct {
|
||||
Expire time.Time
|
||||
Cache []string
|
||||
@@ -144,117 +152,355 @@ func init() {
|
||||
customMetricsDimensionsMap = make(map[string]map[string]map[string]*CustomMetricsCache)
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
|
||||
result := &tsdb.Response{
|
||||
Results: make(map[string]*tsdb.QueryResult),
|
||||
}
|
||||
firstQuery := queryContext.Queries[0]
|
||||
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: firstQuery.RefId}
|
||||
|
||||
parameters := firstQuery.Model
|
||||
subType := firstQuery.Model.Get("subtype").MustString()
|
||||
var data []suggestData
|
||||
var err error
|
||||
switch subType {
|
||||
case "regions":
|
||||
data, err = e.handleGetRegions(ctx, parameters, queryContext)
|
||||
break
|
||||
case "namespaces":
|
||||
data, err = e.handleGetNamespaces(ctx, parameters, queryContext)
|
||||
break
|
||||
case "metrics":
|
||||
data, err = e.handleGetMetrics(ctx, parameters, queryContext)
|
||||
break
|
||||
case "dimension_keys":
|
||||
data, err = e.handleGetDimensions(ctx, parameters, queryContext)
|
||||
break
|
||||
case "dimension_values":
|
||||
data, err = e.handleGetDimensionValues(ctx, parameters, queryContext)
|
||||
break
|
||||
case "ebs_volume_ids":
|
||||
data, err = e.handleGetEbsVolumeIds(ctx, parameters, queryContext)
|
||||
break
|
||||
case "ec2_instance_attribute":
|
||||
data, err = e.handleGetEc2InstanceAttribute(ctx, parameters, queryContext)
|
||||
break
|
||||
}
|
||||
|
||||
transformToTable(data, queryResult)
|
||||
result.Results[firstQuery.RefId] = queryResult
|
||||
return result, err
|
||||
}
|
||||
|
||||
func transformToTable(data []suggestData, result *tsdb.QueryResult) {
|
||||
table := &tsdb.Table{
|
||||
Columns: make([]tsdb.TableColumn, 2),
|
||||
Rows: make([]tsdb.RowValues, 0),
|
||||
}
|
||||
table.Columns[0].Text = "text"
|
||||
table.Columns[1].Text = "value"
|
||||
|
||||
for _, r := range data {
|
||||
values := make([]interface{}, 2)
|
||||
values[0] = r.Text
|
||||
values[1] = r.Value
|
||||
table.Rows = append(table.Rows, values)
|
||||
}
|
||||
result.Tables = append(result.Tables, table)
|
||||
result.Meta.Set("rowCount", len(data))
|
||||
}
|
||||
|
||||
// Whenever this list is updated, frontend list should also be updated.
|
||||
// Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html
|
||||
func handleGetRegions(req *cwRequest, c *middleware.Context) {
|
||||
func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
regions := []string{
|
||||
"ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1",
|
||||
"eu-central-1", "eu-west-1", "eu-west-2", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2",
|
||||
}
|
||||
|
||||
result := []interface{}{}
|
||||
result := make([]suggestData, 0)
|
||||
for _, region := range regions {
|
||||
result = append(result, util.DynMap{"text": region, "value": region})
|
||||
result = append(result, suggestData{Text: region, Value: region})
|
||||
}
|
||||
|
||||
c.JSON(200, result)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func handleGetNamespaces(req *cwRequest, c *middleware.Context) {
|
||||
func (e *CloudWatchExecutor) handleGetNamespaces(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
keys := []string{}
|
||||
for key := range metricsMap {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
|
||||
customNamespaces := req.DataSource.JsonData.Get("customMetricsNamespaces").MustString()
|
||||
customNamespaces := e.DataSource.JsonData.Get("customMetricsNamespaces").MustString()
|
||||
if customNamespaces != "" {
|
||||
keys = append(keys, strings.Split(customNamespaces, ",")...)
|
||||
}
|
||||
|
||||
sort.Sort(sort.StringSlice(keys))
|
||||
|
||||
result := []interface{}{}
|
||||
result := make([]suggestData, 0)
|
||||
for _, key := range keys {
|
||||
result = append(result, util.DynMap{"text": key, "value": key})
|
||||
result = append(result, suggestData{Text: key, Value: key})
|
||||
}
|
||||
|
||||
c.JSON(200, result)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func handleGetMetrics(req *cwRequest, c *middleware.Context) {
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
Namespace string `json:"namespace"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
func (e *CloudWatchExecutor) handleGetMetrics(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
region := parameters.Get("region").MustString()
|
||||
namespace := parameters.Get("namespace").MustString()
|
||||
|
||||
var namespaceMetrics []string
|
||||
if !isCustomMetrics(reqParam.Parameters.Namespace) {
|
||||
if !isCustomMetrics(namespace) {
|
||||
var exists bool
|
||||
if namespaceMetrics, exists = metricsMap[reqParam.Parameters.Namespace]; !exists {
|
||||
c.JsonApiErr(404, "Unable to find namespace "+reqParam.Parameters.Namespace, nil)
|
||||
return
|
||||
if namespaceMetrics, exists = metricsMap[namespace]; !exists {
|
||||
return nil, errors.New("Unable to find namespace " + namespace)
|
||||
}
|
||||
} else {
|
||||
var err error
|
||||
cwData := req.GetDatasourceInfo()
|
||||
cwData.Namespace = reqParam.Parameters.Namespace
|
||||
dsInfo := e.getDsInfo(region)
|
||||
dsInfo.Namespace = namespace
|
||||
|
||||
if namespaceMetrics, err = getMetricsForCustomMetrics(cwData, getAllMetrics); err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
if namespaceMetrics, err = getMetricsForCustomMetrics(dsInfo, getAllMetrics); err != nil {
|
||||
return nil, errors.New("Unable to call AWS API")
|
||||
}
|
||||
}
|
||||
sort.Sort(sort.StringSlice(namespaceMetrics))
|
||||
|
||||
result := []interface{}{}
|
||||
result := make([]suggestData, 0)
|
||||
for _, name := range namespaceMetrics {
|
||||
result = append(result, util.DynMap{"text": name, "value": name})
|
||||
result = append(result, suggestData{Text: name, Value: name})
|
||||
}
|
||||
|
||||
c.JSON(200, result)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func handleGetDimensions(req *cwRequest, c *middleware.Context) {
|
||||
reqParam := &struct {
|
||||
Parameters struct {
|
||||
Namespace string `json:"namespace"`
|
||||
} `json:"parameters"`
|
||||
}{}
|
||||
|
||||
json.Unmarshal(req.Body, reqParam)
|
||||
func (e *CloudWatchExecutor) handleGetDimensions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
region := parameters.Get("region").MustString()
|
||||
namespace := parameters.Get("namespace").MustString()
|
||||
|
||||
var dimensionValues []string
|
||||
if !isCustomMetrics(reqParam.Parameters.Namespace) {
|
||||
if !isCustomMetrics(namespace) {
|
||||
var exists bool
|
||||
if dimensionValues, exists = dimensionsMap[reqParam.Parameters.Namespace]; !exists {
|
||||
c.JsonApiErr(404, "Unable to find dimension "+reqParam.Parameters.Namespace, nil)
|
||||
return
|
||||
if dimensionValues, exists = dimensionsMap[namespace]; !exists {
|
||||
return nil, errors.New("Unable to find dimension " + namespace)
|
||||
}
|
||||
} else {
|
||||
var err error
|
||||
dsInfo := req.GetDatasourceInfo()
|
||||
dsInfo.Namespace = reqParam.Parameters.Namespace
|
||||
dsInfo := e.getDsInfo(region)
|
||||
dsInfo.Namespace = namespace
|
||||
|
||||
if dimensionValues, err = getDimensionsForCustomMetrics(dsInfo, getAllMetrics); err != nil {
|
||||
c.JsonApiErr(500, "Unable to call AWS API", err)
|
||||
return
|
||||
return nil, errors.New("Unable to call AWS API")
|
||||
}
|
||||
}
|
||||
sort.Sort(sort.StringSlice(dimensionValues))
|
||||
|
||||
result := []interface{}{}
|
||||
result := make([]suggestData, 0)
|
||||
for _, name := range dimensionValues {
|
||||
result = append(result, util.DynMap{"text": name, "value": name})
|
||||
result = append(result, suggestData{Text: name, Value: name})
|
||||
}
|
||||
|
||||
c.JSON(200, result)
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func getAllMetrics(cwData *datasourceInfo) (cloudwatch.ListMetricsOutput, error) {
|
||||
creds, err := getCredentials(cwData)
|
||||
func (e *CloudWatchExecutor) handleGetDimensionValues(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
region := parameters.Get("region").MustString()
|
||||
namespace := parameters.Get("namespace").MustString()
|
||||
metricName := parameters.Get("metricName").MustString()
|
||||
dimensionKey := parameters.Get("dimensionKey").MustString()
|
||||
dimensionsJson := parameters.Get("dimensions").MustMap()
|
||||
|
||||
var dimensions []*cloudwatch.DimensionFilter
|
||||
for k, v := range dimensionsJson {
|
||||
if vv, ok := v.(string); ok {
|
||||
dimensions = append(dimensions, &cloudwatch.DimensionFilter{
|
||||
Name: aws.String(k),
|
||||
Value: aws.String(vv),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
metrics, err := e.cloudwatchListMetrics(region, namespace, metricName, dimensions)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]suggestData, 0)
|
||||
dupCheck := make(map[string]bool)
|
||||
for _, metric := range metrics.Metrics {
|
||||
for _, dim := range metric.Dimensions {
|
||||
if *dim.Name == dimensionKey {
|
||||
if _, exists := dupCheck[*dim.Value]; exists {
|
||||
continue
|
||||
}
|
||||
dupCheck[*dim.Value] = true
|
||||
result = append(result, suggestData{Text: *dim.Value, Value: *dim.Value})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Slice(result, func(i, j int) bool {
|
||||
return result[i].Text < result[j].Text
|
||||
})
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) handleGetEbsVolumeIds(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
region := parameters.Get("region").MustString()
|
||||
instanceId := parameters.Get("instanceId").MustString()
|
||||
|
||||
instanceIds := []*string{aws.String(instanceId)}
|
||||
instances, err := e.ec2DescribeInstances(region, nil, instanceIds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]suggestData, 0)
|
||||
for _, mapping := range instances.Reservations[0].Instances[0].BlockDeviceMappings {
|
||||
result = append(result, suggestData{Text: *mapping.Ebs.VolumeId, Value: *mapping.Ebs.VolumeId})
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
|
||||
region := parameters.Get("region").MustString()
|
||||
attributeName := parameters.Get("attributeName").MustString()
|
||||
filterJson := parameters.Get("filters").MustMap()
|
||||
|
||||
var filters []*ec2.Filter
|
||||
for k, v := range filterJson {
|
||||
if vv, ok := v.([]string); ok {
|
||||
var vvvv []*string
|
||||
for _, vvv := range vv {
|
||||
vvvv = append(vvvv, &vvv)
|
||||
}
|
||||
filters = append(filters, &ec2.Filter{
|
||||
Name: aws.String(k),
|
||||
Values: vvvv,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
instances, err := e.ec2DescribeInstances(region, filters, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]suggestData, 0)
|
||||
dupCheck := make(map[string]bool)
|
||||
for _, reservation := range instances.Reservations {
|
||||
for _, instance := range reservation.Instances {
|
||||
tags := make(map[string]string)
|
||||
for _, tag := range instance.Tags {
|
||||
tags[*tag.Key] = *tag.Value
|
||||
}
|
||||
|
||||
var data string
|
||||
if strings.Index(attributeName, "Tags.") == 0 {
|
||||
tagName := attributeName[5:]
|
||||
data = tags[tagName]
|
||||
} else {
|
||||
attributePath := strings.Split(attributeName, ".")
|
||||
v := reflect.ValueOf(instance)
|
||||
for _, key := range attributePath {
|
||||
if v.Kind() == reflect.Ptr {
|
||||
v = v.Elem()
|
||||
}
|
||||
if v.Kind() != reflect.Struct {
|
||||
return nil, errors.New("invalid attribute path")
|
||||
}
|
||||
v = v.FieldByName(key)
|
||||
}
|
||||
if attr, ok := v.Interface().(*string); ok {
|
||||
data = *attr
|
||||
} else {
|
||||
return nil, errors.New("invalid attribute path")
|
||||
}
|
||||
}
|
||||
|
||||
if _, exists := dupCheck[data]; exists {
|
||||
continue
|
||||
}
|
||||
dupCheck[data] = true
|
||||
result = append(result, suggestData{Text: data, Value: data})
|
||||
}
|
||||
}
|
||||
|
||||
sort.Slice(result, func(i, j int) bool {
|
||||
return result[i].Text < result[j].Text
|
||||
})
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) cloudwatchListMetrics(region string, namespace string, metricName string, dimensions []*cloudwatch.DimensionFilter) (*cloudwatch.ListMetricsOutput, error) {
|
||||
svc, err := e.getClient(region)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
params := &cloudwatch.ListMetricsInput{
|
||||
Namespace: aws.String(namespace),
|
||||
MetricName: aws.String(metricName),
|
||||
Dimensions: dimensions,
|
||||
}
|
||||
|
||||
var resp cloudwatch.ListMetricsOutput
|
||||
err = svc.ListMetricsPages(params,
|
||||
func(page *cloudwatch.ListMetricsOutput, lastPage bool) bool {
|
||||
metrics.M_Aws_CloudWatch_ListMetrics.Inc()
|
||||
metrics, _ := awsutil.ValuesAtPath(page, "Metrics")
|
||||
for _, metric := range metrics {
|
||||
resp.Metrics = append(resp.Metrics, metric.(*cloudwatch.Metric))
|
||||
}
|
||||
return !lastPage
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call cloudwatch:ListMetrics")
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
func (e *CloudWatchExecutor) ec2DescribeInstances(region string, filters []*ec2.Filter, instanceIds []*string) (*ec2.DescribeInstancesOutput, error) {
|
||||
dsInfo := e.getDsInfo(region)
|
||||
cfg, err := e.getAwsConfig(dsInfo)
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call ec2:DescribeInstances")
|
||||
}
|
||||
sess, err := session.NewSession(cfg)
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call ec2:DescribeInstances")
|
||||
}
|
||||
svc := ec2.New(sess, cfg)
|
||||
|
||||
params := &ec2.DescribeInstancesInput{
|
||||
Filters: filters,
|
||||
InstanceIds: instanceIds,
|
||||
}
|
||||
|
||||
var resp ec2.DescribeInstancesOutput
|
||||
err = svc.DescribeInstancesPages(params,
|
||||
func(page *ec2.DescribeInstancesOutput, lastPage bool) bool {
|
||||
reservations, _ := awsutil.ValuesAtPath(page, "Reservations")
|
||||
for _, reservation := range reservations {
|
||||
resp.Reservations = append(resp.Reservations, reservation.(*ec2.Reservation))
|
||||
}
|
||||
return !lastPage
|
||||
})
|
||||
if err != nil {
|
||||
return nil, errors.New("Failed to call ec2:DescribeInstances")
|
||||
}
|
||||
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
func getAllMetrics(cwData *DatasourceInfo) (cloudwatch.ListMetricsOutput, error) {
|
||||
creds, err := GetCredentials(cwData)
|
||||
if err != nil {
|
||||
return cloudwatch.ListMetricsOutput{}, err
|
||||
}
|
||||
@@ -291,7 +537,7 @@ func getAllMetrics(cwData *datasourceInfo) (cloudwatch.ListMetricsOutput, error)
|
||||
|
||||
var metricsCacheLock sync.Mutex
|
||||
|
||||
func getMetricsForCustomMetrics(dsInfo *datasourceInfo, getAllMetrics func(*datasourceInfo) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
|
||||
func getMetricsForCustomMetrics(dsInfo *DatasourceInfo, getAllMetrics func(*DatasourceInfo) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
|
||||
metricsCacheLock.Lock()
|
||||
defer metricsCacheLock.Unlock()
|
||||
|
||||
@@ -328,7 +574,7 @@ func getMetricsForCustomMetrics(dsInfo *datasourceInfo, getAllMetrics func(*data
|
||||
|
||||
var dimensionsCacheLock sync.Mutex
|
||||
|
||||
func getDimensionsForCustomMetrics(dsInfo *datasourceInfo, getAllMetrics func(*datasourceInfo) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
|
||||
func getDimensionsForCustomMetrics(dsInfo *DatasourceInfo, getAllMetrics func(*DatasourceInfo) (cloudwatch.ListMetricsOutput, error)) ([]string, error) {
|
||||
dimensionsCacheLock.Lock()
|
||||
defer dimensionsCacheLock.Unlock()
|
||||
|
@@ -11,13 +11,13 @@ import (
func TestCloudWatchMetrics(t *testing.T) {

Convey("When calling getMetricsForCustomMetrics", t, func() {
dsInfo := &datasourceInfo{
dsInfo := &DatasourceInfo{
Region: "us-east-1",
Namespace: "Foo",
Profile: "default",
AssumeRoleArn: "",
}
f := func(dsInfo *datasourceInfo) (cloudwatch.ListMetricsOutput, error) {
f := func(dsInfo *DatasourceInfo) (cloudwatch.ListMetricsOutput, error) {
return cloudwatch.ListMetricsOutput{
Metrics: []*cloudwatch.Metric{
{
@@ -39,13 +39,13 @@ func TestCloudWatchMetrics(t *testing.T) {
})

Convey("When calling getDimensionsForCustomMetrics", t, func() {
dsInfo := &datasourceInfo{
dsInfo := &DatasourceInfo{
Region: "us-east-1",
Namespace: "Foo",
Profile: "default",
AssumeRoleArn: "",
}
f := func(dsInfo *datasourceInfo) (cloudwatch.ListMetricsOutput, error) {
f := func(dsInfo *DatasourceInfo) (cloudwatch.ListMetricsOutput, error) {
return cloudwatch.ListMetricsOutput{
Metrics: []*cloudwatch.Metric{
{

16 pkg/tsdb/cloudwatch/types.go Normal file
@@ -0,0 +1,16 @@
package cloudwatch

import (
"github.com/aws/aws-sdk-go/service/cloudwatch"
)

type CloudWatchQuery struct {
Region string
Namespace string
MetricName string
Dimensions []*cloudwatch.Dimension
Statistics []*string
ExtendedStatistics []*string
Period int
Alias string
}

@@ -1,23 +1,32 @@
///<reference path="headers/common.d.ts" />

import 'bootstrap';
import 'vendor/filesaver';
import 'lodash-src';
import 'angular-strap';
import 'babel-polyfill';
import 'file-saver';
import 'lodash';
import 'jquery';
import 'angular';
import 'angular-route';
import 'angular-sanitize';
import 'angular-dragdrop';
import 'angular-native-dragdrop';
import 'angular-bindonce';
import 'angular-ui';
import 'react';
import 'react-dom';
import 'ngreact';

import 'vendor/bootstrap/bootstrap';
import 'vendor/angular-ui/ui-bootstrap-tpls';
import 'vendor/angular-other/angular-strap';

import $ from 'jquery';
import angular from 'angular';
import config from 'app/core/config';
import _ from 'lodash';
import moment from 'moment';

// add move to lodash for backward compatabiltiy
_.move = function (array, fromIndex, toIndex) {
array.splice(toIndex, 0, array.splice(fromIndex, 1)[0]);
return array;
};

import {coreModule} from './core/core';

export class GrafanaApp {

@@ -1,19 +0,0 @@
(function bootGrafana() {
'use strict';

var systemLocate = System.locate;
System.locate = function(load) {
var System = this;
return Promise.resolve(systemLocate.call(this, load)).then(function(address) {
return address + System.cacheBust;
});
};
System.cacheBust = '?bust=' + Date.now();

System.import('app/app').then(function(app) {
app.default.init();
}).catch(function(err) {
console.log('Loading app module failed: ', err);
});

})();

@@ -1,4 +1,4 @@
import * as React from 'react';
import React from 'react';
import coreModule from '../core_module';

export interface IProps {
@@ -15,16 +15,16 @@ export class PasswordStrength extends React.Component<IProps, any> {
let strengthText = "strength: strong like a bull.";
let strengthClass = "password-strength-good";

if (this.props.password.length < 4) {
strengthText = "strength: weak sauce.";
strengthClass = "password-strength-bad";
}

if (this.props.password.length <= 8) {
strengthText = "strength: you can do better.";
strengthClass = "password-strength-ok";
}

if (this.props.password.length < 4) {
strengthText = "strength: weak sauce.";
strengthClass = "password-strength-bad";
}

return (
<div className={`password-strength small ${strengthClass}`}>
<em>{strengthText}</em>
@@ -36,3 +36,4 @@ export class PasswordStrength extends React.Component<IProps, any> {
coreModule.directive('passwordStrength', function(reactDirective) {
return reactDirective(PasswordStrength, ['password']);
});

@@ -26,60 +26,31 @@
* Ctrl-Enter (Command-Enter): run onChange() function
*/

///<reference path="../../../headers/common.d.ts" />
import _ from 'lodash';
import coreModule from 'app/core/core_module';
import config from 'app/core/config';
import ace from 'ace';
import ace from 'brace';
import './theme-grafana-dark';
import 'brace/ext/language_tools';
import 'brace/theme/textmate';
import 'brace/mode/text';
import 'brace/snippets/text';
import 'brace/mode/sql';
import 'brace/snippets/sql';

const ACE_SRC_BASE = "public/vendor/npm/ace-builds/src-noconflict/";
const DEFAULT_THEME_DARK = "grafana-dark";
const DEFAULT_THEME_LIGHT = "textmate";
const DEFAULT_THEME_DARK = "ace/theme/grafana-dark";
const DEFAULT_THEME_LIGHT = "ace/theme/textmate";
const DEFAULT_MODE = "text";
const DEFAULT_MAX_LINES = 10;
const DEFAULT_TAB_SIZE = 2;
const DEFAULT_BEHAVIOURS = true;

const GRAFANA_MODULES = ['theme-grafana-dark'];
const GRAFANA_MODULE_BASE = "public/app/core/components/code_editor/";

// Trick for loading additional modules
function setModuleUrl(moduleType, name, pluginBaseUrl = null) {
let baseUrl = ACE_SRC_BASE;
let aceModeName = `ace/${moduleType}/${name}`;
let moduleName = `${moduleType}-${name}`;
let componentName = `${moduleName}.js`;

if (_.includes(GRAFANA_MODULES, moduleName)) {
baseUrl = GRAFANA_MODULE_BASE;
}

if (pluginBaseUrl) {
baseUrl = pluginBaseUrl + '/';
}

if (moduleType === 'snippets') {
componentName = `${moduleType}/${name}.js`;
}

ace.config.setModuleUrl(aceModeName, baseUrl + componentName);
}

setModuleUrl("ext", "language_tools");
setModuleUrl("mode", "text");
setModuleUrl("snippets", "text");

let editorTemplate = `<div></div>`;

function link(scope, elem, attrs) {
let lightTheme = config.bootData.user.lightTheme;
let default_theme = lightTheme ? DEFAULT_THEME_LIGHT : DEFAULT_THEME_DARK;

// Options
let langMode = attrs.mode || DEFAULT_MODE;
let maxLines = attrs.maxLines || DEFAULT_MAX_LINES;
let showGutter = attrs.showGutter !== undefined;
let theme = attrs.theme || default_theme;
let tabSize = attrs.tabSize || DEFAULT_TAB_SIZE;
let behavioursEnabled = attrs.behavioursEnabled ? attrs.behavioursEnabled === 'true' : DEFAULT_BEHAVIOURS;

@@ -103,10 +74,10 @@ function link(scope, elem, attrs) {
// disable depreacation warning
codeEditor.$blockScrolling = Infinity;
// Padding hacks
codeEditor.renderer.setScrollMargin(15, 15);
(<any>codeEditor.renderer).setScrollMargin(15, 15);
codeEditor.renderer.setPadding(10);

setThemeMode(theme);
setThemeMode();
setLangMode(langMode);
setEditorContent(scope.content);

@@ -162,44 +133,31 @@ function link(scope, elem, attrs) {
});

function setLangMode(lang) {
let aceModeName = `ace/mode/${lang}`;
setModuleUrl("mode", lang, scope.datasource.meta.baseUrl || null);
setModuleUrl("snippets", lang, scope.datasource.meta.baseUrl || null);
editorSession.setMode(aceModeName);

ace.config.loadModule("ace/ext/language_tools", (language_tools) => {
codeEditor.setOptions({
enableBasicAutocompletion: true,
enableLiveAutocompletion: true,
enableSnippets: true
});

if (scope.getCompleter()) {
// make copy of array as ace seems to share completers array between instances
codeEditor.completers = codeEditor.completers.slice();
codeEditor.completers.push(scope.getCompleter());
}
ace.acequire("ace/ext/language_tools");
codeEditor.setOptions({
enableBasicAutocompletion: true,
enableLiveAutocompletion: true,
enableSnippets: true
});

if (scope.getCompleter()) {
// make copy of array as ace seems to share completers array between instances
const anyEditor = <any>codeEditor;
anyEditor.completers = anyEditor.completers.slice();
anyEditor.completers.push(scope.getCompleter());
}

let aceModeName = `ace/mode/${lang}`;
editorSession.setMode(aceModeName);
}

function setThemeMode(theme) {
setModuleUrl("theme", theme);
let themeModule = `ace/theme/${theme}`;
ace.config.loadModule(themeModule, (theme_module) => {
// Check is theme light or dark and fix if needed
let lightTheme = config.bootData.user.lightTheme;
let fixedTheme = theme;
if (lightTheme && theme_module.isDark) {
fixedTheme = DEFAULT_THEME_LIGHT;
} else if (!lightTheme && !theme_module.isDark) {
fixedTheme = DEFAULT_THEME_DARK;
}
setModuleUrl("theme", fixedTheme);
themeModule = `ace/theme/${fixedTheme}`;
codeEditor.setTheme(themeModule);
function setThemeMode() {
let theme = DEFAULT_THEME_DARK;
if (config.bootData.user.lightTheme) {
theme = DEFAULT_THEME_LIGHT;
}

elem.addClass("gf-code-editor--theme-loaded");
});
codeEditor.setTheme(theme);
}

function setEditorContent(value) {

@@ -1,6 +1,6 @@
/* jshint ignore:start */

ace.define("ace/theme/grafana-dark",["require","exports","module","ace/lib/dom"], function(require, exports, module) {
ace.define("ace/theme/grafana-dark",["require","exports","module","ace/lib/dom"], function(acequire, exports, module) {
"use strict";

exports.isDark = true;
@@ -109,7 +109,7 @@ ace.define("ace/theme/grafana-dark",["require","exports","module","ace/lib/dom"]
background: url(data:image/png;base64,ivborw0kggoaaaansuheugaaaaeaaaaccayaaaczgbynaaaaekleqvqimwpq0fd0zxbzd/wpaajvaoxesgneaaaaaelftksuqmcc) right repeat-y\
}";

var dom = require("../lib/dom");
var dom = acequire("../lib/dom");
dom.importCssString(exports.cssText, exports.cssClass);
});
@@ -1,6 +1,3 @@
|
||||
///<reference path="../headers/common.d.ts" />
|
||||
///<reference path="./mod_defs.d.ts" />
|
||||
|
||||
import "./directives/dash_class";
|
||||
import "./directives/confirm_click";
|
||||
import "./directives/dash_edit_link";
|
||||
@@ -11,7 +8,6 @@ import "./directives/ng_model_on_blur";
|
||||
import "./directives/spectrum_picker";
|
||||
import "./directives/tags";
|
||||
import "./directives/value_select_dropdown";
|
||||
import "./directives/plugin_component";
|
||||
import "./directives/rebuild_on_change";
|
||||
import "./directives/give_focus";
|
||||
import "./directives/diff-view";
|
||||
|
@@ -1,4 +1,2 @@
|
||||
///<reference path="../headers/common.d.ts" />
|
||||
|
||||
import angular from 'angular';
|
||||
export default angular.module('grafana.core', ['ngRoute']);
|
||||
|
@@ -1,7 +1,4 @@
|
||||
///<reference path="../../headers/common.d.ts" />
|
||||
|
||||
import $ from 'jquery';
|
||||
|
||||
import coreModule from '../core_module';
|
||||
|
||||
function getBlockNodes(nodes) {
|
||||
@@ -21,6 +18,7 @@ function getBlockNodes(nodes) {
|
||||
return blockNodes || nodes;
|
||||
}
|
||||
|
||||
/** @ngInject **/
|
||||
function rebuildOnChange($animate) {
|
||||
|
||||
return {
|
||||
|
@@ -1,7 +1,7 @@
|
||||
define([
|
||||
'angular',
|
||||
'../core_module',
|
||||
'spectrum',
|
||||
'vendor/spectrum',
|
||||
],
|
||||
function (angular, coreModule) {
|
||||
'use strict';
|
||||
|
@@ -2,7 +2,7 @@ define([
|
||||
'angular',
|
||||
'jquery',
|
||||
'../core_module',
|
||||
'bootstrap-tagsinput',
|
||||
'vendor/tagsinput/bootstrap-tagsinput.js',
|
||||
],
|
||||
function (angular, $, coreModule) {
|
||||
'use strict';
|
||||
|
@@ -57,7 +57,8 @@ coreModule.filter('noXml', function() {
|
||||
};
|
||||
});
|
||||
|
||||
coreModule.filter('interpolateTemplateVars', function (templateSrv) {
|
||||
/** @ngInject */
|
||||
function interpolateTemplateVars(templateSrv) {
|
||||
var filterFunc: any = function(text, scope) {
|
||||
var scopedVars;
|
||||
if (scope.ctrl) {
|
||||
@@ -71,6 +72,7 @@ coreModule.filter('interpolateTemplateVars', function (templateSrv) {
|
||||
|
||||
filterFunc.$stateful = true;
|
||||
return filterFunc;
|
||||
});
|
||||
}
|
||||
|
||||
coreModule.filter('interpolateTemplateVars', interpolateTemplateVars);
|
||||
export default {};
|
||||
|
@@ -3,7 +3,7 @@
|
||||
import _ from 'lodash';
|
||||
import config from 'app/core/config';
|
||||
|
||||
import {Observable} from 'vendor/npm/rxjs/Observable';
|
||||
import {Observable} from 'rxjs/Observable';
|
||||
|
||||
export class LiveSrv {
|
||||
conn: any;
|
||||
|
@@ -1,2 +0,0 @@
|
||||
define([
|
||||
], function () {});
|
4
public/app/core/partials.ts
Normal file
@@ -0,0 +1,4 @@
var templates = (<any>require).context('../', true, /\.html$/);
templates.keys().forEach(function(key) {
templates(key);
});
@@ -1,5 +1,3 @@
|
||||
///<reference path="../headers/common.d.ts" />
|
||||
|
||||
import $ from 'jquery';
|
||||
import angular from 'angular';
|
||||
|
||||
|
@@ -1,5 +1,3 @@
|
||||
///<reference path="../../headers/common.d.ts" />
|
||||
|
||||
export class BundleLoader {
|
||||
lazy: any;
|
||||
|
||||
|
@@ -1,18 +1,27 @@
|
||||
///<reference path="../../headers/common.d.ts" />
|
||||
|
||||
import './dashboard_loaders';
|
||||
|
||||
import coreModule from 'app/core/core_module';
|
||||
import {BundleLoader} from './bundle_loader';
|
||||
|
||||
/** @ngInject **/
|
||||
function setupAngularRoutes($routeProvider, $locationProvider) {
|
||||
$locationProvider.html5Mode(true);
|
||||
|
||||
var loadOrgBundle = new BundleLoader('app/features/org/all');
|
||||
var loadPluginsBundle = new BundleLoader('app/features/plugins/all');
|
||||
var loadAdminBundle = new BundleLoader('app/features/admin/admin');
|
||||
var loadAlertingBundle = new BundleLoader('app/features/alerting/all');
|
||||
var loadOrgBundle = {
|
||||
lazy: ["$q", "$route", "$rootScope", ($q, $route, $rootScope) => {
|
||||
return System.import('app/features/org/all');
|
||||
}]
|
||||
};
|
||||
|
||||
var loadAdminBundle = {
|
||||
lazy: ["$q", "$route", "$rootScope", ($q, $route, $rootScope) => {
|
||||
return System.import('app/features/admin/admin');
|
||||
}]
|
||||
};
|
||||
|
||||
var loadAlertingBundle = {
|
||||
lazy: ["$q", "$route", "$rootScope", ($q, $route, $rootScope) => {
|
||||
return System.import('app/features/alerting/all');
|
||||
}]
|
||||
};
|
||||
|
||||
$routeProvider
|
||||
.when('/', {
|
||||
@@ -53,19 +62,16 @@ function setupAngularRoutes($routeProvider, $locationProvider) {
|
||||
templateUrl: 'public/app/features/plugins/partials/ds_list.html',
|
||||
controller : 'DataSourcesCtrl',
|
||||
controllerAs: 'ctrl',
|
||||
resolve: loadPluginsBundle,
|
||||
})
|
||||
.when('/datasources/edit/:id', {
|
||||
templateUrl: 'public/app/features/plugins/partials/ds_edit.html',
|
||||
controller : 'DataSourceEditCtrl',
|
||||
controllerAs: 'ctrl',
|
||||
resolve: loadPluginsBundle,
|
||||
})
|
||||
.when('/datasources/new', {
|
||||
templateUrl: 'public/app/features/plugins/partials/ds_edit.html',
|
||||
controller : 'DataSourceEditCtrl',
|
||||
controllerAs: 'ctrl',
|
||||
resolve: loadPluginsBundle,
|
||||
})
|
||||
.when('/org', {
|
||||
templateUrl: 'public/app/features/org/partials/orgDetails.html',
|
||||
@@ -193,19 +199,16 @@ function setupAngularRoutes($routeProvider, $locationProvider) {
|
||||
templateUrl: 'public/app/features/plugins/partials/plugin_list.html',
|
||||
controller: 'PluginListCtrl',
|
||||
controllerAs: 'ctrl',
|
||||
resolve: loadPluginsBundle,
|
||||
})
|
||||
.when('/plugins/:pluginId/edit', {
|
||||
templateUrl: 'public/app/features/plugins/partials/plugin_edit.html',
|
||||
controller: 'PluginEditCtrl',
|
||||
controllerAs: 'ctrl',
|
||||
resolve: loadPluginsBundle,
|
||||
})
|
||||
.when('/plugins/:pluginId/page/:slug', {
|
||||
templateUrl: 'public/app/features/plugins/partials/plugin_page.html',
|
||||
controller: 'AppPageCtrl',
|
||||
controllerAs: 'ctrl',
|
||||
resolve: loadPluginsBundle,
|
||||
})
|
||||
.when('/styleguide/:page?', {
|
||||
controller: 'StyleGuideCtrl',
|
||||
|
@@ -1,7 +1,6 @@
|
||||
define([
|
||||
'./alert_srv',
|
||||
'./util_srv',
|
||||
'./datasource_srv',
|
||||
'./context_srv',
|
||||
'./timer',
|
||||
'./keyboard_manager',
|
||||
|
@@ -1,14 +1,25 @@
|
||||
// import React from 'react';
|
||||
// import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
|
||||
// import {shallow} from 'enzyme';
|
||||
//
|
||||
// import {PasswordStrength} from '../components/PasswordStrength';
|
||||
//
|
||||
// describe('PasswordStrength', () => {
|
||||
//
|
||||
// it.skip('should have class bad if length below 4', () => {
|
||||
// const wrapper = shallow(<PasswordStrength password="asd" />);
|
||||
// expect(wrapper.find(".password-strength-bad")).to.have.length(3);
|
||||
// });
|
||||
// });
|
||||
//
|
||||
import React from 'react';
|
||||
import {describe, it, expect} from 'test/lib/common';
|
||||
import {shallow} from 'enzyme';
|
||||
|
||||
import {PasswordStrength} from '../components/PasswordStrength';
|
||||
|
||||
describe('PasswordStrength', () => {
|
||||
|
||||
it('should have class bad if length below 4', () => {
|
||||
const wrapper = shallow(<PasswordStrength password="asd" />);
|
||||
expect(wrapper.find(".password-strength-bad")).to.have.length(1);
|
||||
});
|
||||
|
||||
it('should have class ok if length below 8', () => {
|
||||
const wrapper = shallow(<PasswordStrength password="asdasd" />);
|
||||
expect(wrapper.find(".password-strength-ok")).to.have.length(1);
|
||||
});
|
||||
|
||||
it('should have class good if length above 8', () => {
|
||||
const wrapper = shallow(<PasswordStrength password="asdaasdda" />);
|
||||
expect(wrapper.find(".password-strength-good")).to.have.length(1);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
29
public/app/core/specs/backend_srv_specs.ts
Normal file
@@ -0,0 +1,29 @@
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
import 'app/core/services/backend_srv';

describe('backend_srv', function() {
var _backendSrv;
var _http;
var _httpBackend;

beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module('grafana.services'));
beforeEach(angularMocks.inject(function ($httpBackend, $http, backendSrv) {
_httpBackend = $httpBackend;
_http = $http;
_backendSrv = backendSrv;
}));

describe('when handling errors', function() {
it('should return the http status code', function(done) {
_httpBackend.whenGET('gateway-error').respond(502);
_backendSrv.datasourceRequest({
url: 'gateway-error'
}).catch(function(err) {
expect(err.status).to.be(502);
done();
});
_httpBackend.flush();
});
});
});
167
public/app/core/specs/value_select_dropdown_specs.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
import {describe, beforeEach, it, expect, angularMocks, sinon} from 'test/lib/common';
|
||||
import 'app/core/directives/value_select_dropdown';
|
||||
|
||||
describe("SelectDropdownCtrl", function() {
|
||||
var scope;
|
||||
var ctrl;
|
||||
var tagValuesMap: any = {};
|
||||
var rootScope;
|
||||
var q;
|
||||
|
||||
beforeEach(angularMocks.module('grafana.core'));
|
||||
beforeEach(angularMocks.inject(function($controller, $rootScope, $q, $httpBackend) {
|
||||
rootScope = $rootScope;
|
||||
q = $q;
|
||||
scope = $rootScope.$new();
|
||||
ctrl = $controller('ValueSelectDropdownCtrl', {$scope: scope});
|
||||
ctrl.onUpdated = sinon.spy();
|
||||
$httpBackend.when('GET', /\.html$/).respond('');
|
||||
}));
|
||||
|
||||
describe("Given simple variable", function() {
|
||||
beforeEach(function() {
|
||||
ctrl.variable = {
|
||||
current: {text: 'hej', value: 'hej' },
|
||||
getValuesForTag: function(key) {
|
||||
return q.when(tagValuesMap[key]);
|
||||
},
|
||||
};
|
||||
ctrl.init();
|
||||
});
|
||||
|
||||
it("Should init labelText and linkText", function() {
|
||||
expect(ctrl.linkText).to.be("hej");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Given variable with tags and dropdown is opened", function() {
|
||||
beforeEach(function() {
|
||||
ctrl.variable = {
|
||||
current: {text: 'server-1', value: 'server-1'},
|
||||
options: [
|
||||
{text: 'server-1', value: 'server-1', selected: true},
|
||||
{text: 'server-2', value: 'server-2'},
|
||||
{text: 'server-3', value: 'server-3'},
|
||||
],
|
||||
tags: ["key1", "key2", "key3"],
|
||||
getValuesForTag: function(key) {
|
||||
return q.when(tagValuesMap[key]);
|
||||
},
|
||||
multi: true
|
||||
};
|
||||
tagValuesMap.key1 = ['server-1', 'server-3'];
|
||||
tagValuesMap.key2 = ['server-2', 'server-3'];
|
||||
tagValuesMap.key3 = ['server-1', 'server-2', 'server-3'];
|
||||
ctrl.init();
|
||||
ctrl.show();
|
||||
});
|
||||
|
||||
it("should init tags model", function() {
|
||||
expect(ctrl.tags.length).to.be(3);
|
||||
expect(ctrl.tags[0].text).to.be("key1");
|
||||
});
|
||||
|
||||
it("should init options model", function() {
|
||||
expect(ctrl.options.length).to.be(3);
|
||||
});
|
||||
|
||||
it("should init selected values array", function() {
|
||||
expect(ctrl.selectedValues.length).to.be(1);
|
||||
});
|
||||
|
||||
it("should set linkText", function() {
|
||||
expect(ctrl.linkText).to.be('server-1');
|
||||
});
|
||||
|
||||
describe('after additional value is selected', function() {
|
||||
beforeEach(function() {
|
||||
ctrl.selectValue(ctrl.options[2], {});
|
||||
ctrl.commitChanges();
|
||||
});
|
||||
|
||||
it('should update link text', function() {
|
||||
expect(ctrl.linkText).to.be('server-1 + server-3');
|
||||
});
|
||||
});
|
||||
|
||||
describe('When tag is selected', function() {
|
||||
beforeEach(function() {
|
||||
ctrl.selectTag(ctrl.tags[0]);
|
||||
rootScope.$digest();
|
||||
ctrl.commitChanges();
|
||||
});
|
||||
|
||||
it("should select tag", function() {
|
||||
expect(ctrl.selectedTags.length).to.be(1);
|
||||
});
|
||||
|
||||
it("should select values", function() {
|
||||
expect(ctrl.options[0].selected).to.be(true);
|
||||
expect(ctrl.options[2].selected).to.be(true);
|
||||
});
|
||||
|
||||
it("link text should not include tag values", function() {
|
||||
expect(ctrl.linkText).to.be('');
|
||||
});
|
||||
|
||||
describe('and then dropdown is opened and closed without changes', function() {
|
||||
beforeEach(function() {
|
||||
ctrl.show();
|
||||
ctrl.commitChanges();
|
||||
rootScope.$digest();
|
||||
});
|
||||
|
||||
it("should still have selected tag", function() {
|
||||
expect(ctrl.selectedTags.length).to.be(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('and then unselected', function() {
|
||||
beforeEach(function() {
|
||||
ctrl.selectTag(ctrl.tags[0]);
|
||||
rootScope.$digest();
|
||||
});
|
||||
|
||||
it("should deselect tag", function() {
|
||||
expect(ctrl.selectedTags.length).to.be(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('and then value is unselected', function() {
|
||||
beforeEach(function() {
|
||||
ctrl.selectValue(ctrl.options[0], {});
|
||||
});
|
||||
|
||||
it("should deselect tag", function() {
|
||||
expect(ctrl.selectedTags.length).to.be(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Given variable with selected tags", function() {
|
||||
beforeEach(function() {
|
||||
ctrl.variable = {
|
||||
current: {text: 'server-1', value: 'server-1', tags: [{text: 'key1', selected: true}] },
|
||||
options: [
|
||||
{text: 'server-1', value: 'server-1'},
|
||||
{text: 'server-2', value: 'server-2'},
|
||||
{text: 'server-3', value: 'server-3'},
|
||||
],
|
||||
tags: ["key1", "key2", "key3"],
|
||||
getValuesForTag: function(key) {
|
||||
return q.when(tagValuesMap[key]);
|
||||
},
|
||||
multi: true
|
||||
};
|
||||
ctrl.init();
|
||||
ctrl.show();
|
||||
});
|
||||
|
||||
it("should set tag as selected", function() {
|
||||
expect(ctrl.tags[0].selected).to.be(true);
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
@@ -401,6 +401,7 @@ function($, _, moment) {
|
||||
kbn.valueFormats.currencyJPY = kbn.formatBuilders.currency('¥');
|
||||
kbn.valueFormats.currencyRUB = kbn.formatBuilders.currency('₽');
|
||||
kbn.valueFormats.currencyUAH = kbn.formatBuilders.currency('₴');
|
||||
kbn.valueFormats.currencyBRL = kbn.formatBuilders.currency('R$');
|
||||
|
||||
// Data (Binary)
|
||||
kbn.valueFormats.bits = kbn.formatBuilders.binarySIPrefix('b');
|
||||
@@ -754,6 +755,7 @@ function($, _, moment) {
|
||||
{text: 'Yen (¥)', value: 'currencyJPY'},
|
||||
{text: 'Rubles (₽)', value: 'currencyRUB'},
|
||||
{text: 'Hryvnias (₴)', value: 'currencyUAH'},
|
||||
{text: 'Real (R$)', value: 'currencyBRL'},
|
||||
]
|
||||
},
|
||||
{
|
||||
|
@@ -49,6 +49,8 @@ var reducerTypes = [
|
||||
{text: 'count()', value: 'count'},
|
||||
{text: 'last()', value: 'last'},
|
||||
{text: 'median()', value: 'median'},
|
||||
{text: 'diff()', value: 'diff'},
|
||||
{text: 'percent_diff()', value: 'percent_diff'},
|
||||
];
|
||||
|
||||
var noDataModes = [
|
||||
|
@@ -34,9 +34,10 @@
|
||||
<div class="card-item-header">
|
||||
<div class="card-item-type">
|
||||
<a class="card-item-cog" bs-tooltip="'Pausing an alert rule prevents it from executing'" ng-click="ctrl.pauseAlertRule(alert.id)">
|
||||
<i class="fa fa-pause"></i>
|
||||
</a>
|
||||
<a class="card-item-cog" href="dashboard/{{alert.dashboardUri}}?panelId={{alert.panelId}}&fullscreen&edit&tab=alert" bs-tooltip="'Edit alert rule'">
|
||||
<i ng-show="alert.state !== 'paused'" class="fa fa-pause"></i>
|
||||
<i ng-show="alert.state === 'paused'" class="fa fa-play"></i>
|
||||
</a>
|
||||
<a class="card-item-cog" href="dashboard/{{alert.dashboardUri}}?panelId={{alert.panelId}}&fullscreen&edit&tab=alert" bs-tooltip="'Edit alert rule'">
|
||||
<i class="icon-gf icon-gf-settings"></i>
|
||||
</a>
|
||||
</div>
|
||||
|
@@ -12,7 +12,7 @@
|
||||
<li ng-class="{active: ctrl.subTabIndex === 2}">
|
||||
<a ng-click="ctrl.changeTabIndex(2)">State history</a>
|
||||
</li>
|
||||
<li>
|
||||
<li>
|
||||
<a ng-click="ctrl.delete()">Delete</a>
|
||||
</li>
|
||||
</ul>
|
||||
@@ -41,10 +41,10 @@
|
||||
<metric-segment-model css-class="query-keyword width-5" ng-if="$index" property="conditionModel.operator.type" options="ctrl.evalOperators" custom="false"></metric-segment-model>
|
||||
<span class="gf-form-label query-keyword width-5" ng-if="$index===0">WHEN</span>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<div class="gf-form">
|
||||
<query-part-editor class="gf-form-label query-part width-6" part="conditionModel.reducerPart" handle-event="ctrl.handleReducerPartEvent(conditionModel, $event)">
|
||||
</query-part-editor>
|
||||
<span class="gf-form-label query-keyword">OF</span>
|
||||
<span class="gf-form-label query-keyword">OF</span>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<query-part-editor class="gf-form-label query-part" part="conditionModel.queryPart" handle-event="ctrl.handleQueryPartEvent(conditionModel, $event)">
|
||||
@@ -53,8 +53,8 @@
|
||||
<div class="gf-form">
|
||||
<metric-segment-model property="conditionModel.evaluator.type" options="ctrl.evalFunctions" custom="false" css-class="query-keyword" on-change="ctrl.evaluatorTypeChanged(conditionModel.evaluator)"></metric-segment-model>
|
||||
<input class="gf-form-input max-width-9" type="number" step="any" ng-hide="conditionModel.evaluator.params.length === 0" ng-model="conditionModel.evaluator.params[0]" ng-change="ctrl.evaluatorParamsChanged()"></input>
|
||||
<label class="gf-form-label query-keyword" ng-show="conditionModel.evaluator.params.length === 2">TO</label>
|
||||
<input class="gf-form-input max-width-9" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()"></input>
|
||||
<label class="gf-form-label query-keyword" ng-show="conditionModel.evaluator.params.length === 2">TO</label>
|
||||
<input class="gf-form-input max-width-9" type="number" step="any" ng-if="conditionModel.evaluator.params.length === 2" ng-model="conditionModel.evaluator.params[1]" ng-change="ctrl.evaluatorParamsChanged()"></input>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<label class="gf-form-label">
|
||||
@@ -77,13 +77,12 @@
|
||||
</ul>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="gf-form-group">
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-18">If no data or all values are null</span>
|
||||
<span class="gf-form-label query-keyword">SET STATE TO</span>
|
||||
<span class="gf-form-label width-18">If no data or all values are null</span>
|
||||
<span class="gf-form-label query-keyword">SET STATE TO</span>
|
||||
<div class="gf-form-select-wrapper">
|
||||
<select class="gf-form-input" ng-model="ctrl.alert.noDataState" ng-options="f.value as f.text for f in ctrl.noDataModes">
|
||||
</select>
|
||||
@@ -91,8 +90,8 @@
|
||||
</div>
|
||||
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-18">If execution error or timeout</span>
|
||||
<span class="gf-form-label query-keyword">SET STATE TO</span>
|
||||
<span class="gf-form-label width-18">If execution error or timeout</span>
|
||||
<span class="gf-form-label query-keyword">SET STATE TO</span>
|
||||
<div class="gf-form-select-wrapper">
|
||||
<select class="gf-form-input" ng-model="ctrl.alert.executionErrorState" ng-options="f.value as f.text for f in ctrl.executionErrorModes">
|
||||
</select>
|
||||
@@ -135,35 +134,31 @@
|
||||
|
||||
<div class="gf-form-group" style="max-width: 720px;" ng-if="ctrl.subTabIndex === 2">
|
||||
<button class="btn btn-mini btn-danger pull-right" ng-click="ctrl.clearHistory()"><i class="fa fa-trash"></i> Clear history</button>
|
||||
<h5 class="section-heading" style="white-space: nowrap">
<h5 class="section-heading" style="white-space: nowrap">
|
||||
State history <span class="muted small">(last 50 state changes)</span>
|
||||
</h5>
|
||||
|
||||
<div ng-show="ctrl.alertHistory.length === 0">
|
||||
<br>
|
||||
<i>No state changes recorded</i>
|
||||
</div>
|
||||
<div ng-show="ctrl.alertHistory.length === 0">
|
||||
<br>
|
||||
<i>No state changes recorded</i>
|
||||
</div>
|
||||
|
||||
<section class="card-section card-list-layout-list">
|
||||
<ol class="card-list" >
|
||||
<li class="card-item-wrapper" ng-repeat="ah in ctrl.alertHistory">
|
||||
<div class="card-item card-item--alert">
|
||||
<div class="card-item-header">
|
||||
<div class="card-item-type">
|
||||
</div>
|
||||
</div>
|
||||
<div class="card-item-body">
|
||||
<div class="card-item-details">
|
||||
<div class="card-item-sub-name">
|
||||
<span class="alert-list-item-state {{ah.stateModel.stateClass}}">
|
||||
<i class="{{ah.stateModel.iconClass}}"></i>
|
||||
{{ah.stateModel.text}}
|
||||
</span> {{ah.time}}
|
||||
</div>
|
||||
<div class="card-item-sub-name">
|
||||
{{ah.info}}
|
||||
</div>
|
||||
<div class="alert-list card-item card-item--alert">
|
||||
<div class="alert-list-body">
|
||||
<div class="alert-list-icon alert-list-item-state {{ah.stateModel.stateClass}}">
|
||||
<i class="{{ah.stateModel.iconClass}}"></i>
|
||||
</div>
|
||||
<div class="alert-list-main alert-list-text">
|
||||
<span class="alert-list-state {{ah.stateModel.stateClass}}">{{ah.stateModel.text}}</span>
|
||||
<span class="alert-list-info">{{ah.info}}</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="alert-list-footer alert-list-text">
|
||||
<span>{{ah.time}}</span>
|
||||
<span><!--Img Link--></span>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
|
@@ -3,6 +3,7 @@ define([
|
||||
'./dashlinks/module',
|
||||
'./annotations/all',
|
||||
'./templating/all',
|
||||
'./plugins/all',
|
||||
'./dashboard/all',
|
||||
'./playlist/all',
|
||||
'./snapshot/all',
|
||||
|
@@ -1,211 +1,211 @@
|
||||
///<reference path="../../../headers/common.d.ts" />
|
||||
|
||||
import coreModule from 'app/core/core_module';
|
||||
import {CELL_HEIGHT, CELL_VMARGIN} from '../model';
|
||||
|
||||
import 'jquery-ui';
|
||||
import 'gridstack';
|
||||
import 'gridstack.jquery-ui';
|
||||
|
||||
const template = `
|
||||
<div class="grid-stack">
|
||||
<dash-grid-item ng-repeat="panel in ctrl.dashboard.panels track by panel.id"
|
||||
class="grid-stack-item"
|
||||
grid-ctrl="ctrl"
|
||||
panel="panel">
|
||||
<plugin-component type="panel" class="grid-stack-item-content">
|
||||
</plugin-component>
|
||||
</dash-grid-item>
|
||||
</div>
|
||||
`;
|
||||
|
||||
var rowIndex = 0;
|
||||
|
||||
export class GridCtrl {
|
||||
options: any;
|
||||
dashboard: any;
|
||||
panels: any;
|
||||
gridstack: any;
|
||||
gridElem: any;
|
||||
isInitialized: boolean;
|
||||
isDestroyed: boolean;
|
||||
index: number;
|
||||
changeRenderPromise: any;
|
||||
|
||||
/** @ngInject */
|
||||
constructor(private $scope, private $element, private $timeout) {
|
||||
console.log(this.dashboard);
|
||||
this.index = rowIndex;
|
||||
rowIndex += 1;
|
||||
}
|
||||
|
||||
init() {
|
||||
this.gridElem = this.$element.find('.grid-stack');
|
||||
|
||||
this.gridstack = this.gridElem.gridstack({
|
||||
animate: true,
|
||||
cellHeight: CELL_HEIGHT,
|
||||
verticalMargin: CELL_VMARGIN,
|
||||
acceptWidgets: '.grid-stack-item',
|
||||
handle: '.grid-drag-handle'
|
||||
}).data('gridstack');
|
||||
|
||||
this.isInitialized = true;
|
||||
|
||||
this.gridElem.on('added', (e, items) => {
|
||||
for (let item of items) {
|
||||
this.onGridStackItemAdded(item);
|
||||
}
|
||||
});
|
||||
|
||||
this.gridElem.on('removed', (e, items) => {
|
||||
for (let item of items) {
|
||||
this.onGridStackItemRemoved(item);
|
||||
}
|
||||
});
|
||||
|
||||
this.gridElem.on('change', (e, items) => {
|
||||
this.$timeout(() => this.onGridStackItemsChanged(items), 50);
|
||||
});
|
||||
}
|
||||
|
||||
onGridStackItemAdded(item) {
|
||||
console.log('row: ' + this.index + ' item added', item);
|
||||
}
|
||||
|
||||
onGridStackItemRemoved(item) {
|
||||
console.log('row: ' + this.index + ' item removed', item.id, item);
|
||||
}
|
||||
|
||||
onGridStackItemsChanged(items) {
|
||||
console.log('onGridStackItemsChanged');
|
||||
|
||||
for (let item of items) {
|
||||
// find panel
|
||||
var panel = this.dashboard.getPanelById(parseInt(item.id));
|
||||
|
||||
if (!panel) {
|
||||
console.log('item change but no panel found for item', item);
|
||||
continue;
|
||||
}
|
||||
|
||||
// update panel model position
|
||||
panel.x = item.x;
|
||||
panel.y = item.y;
|
||||
panel.width = item.width;
|
||||
panel.height = item.height;
|
||||
|
||||
console.log('updating panel: ' + panel.id + ' x: ' + panel.x + ' y: ' + panel.y);
|
||||
}
|
||||
|
||||
this.dashboard.panels.sort(function (a, b) {
|
||||
let aScore = a.x + (a.y * 12);
|
||||
let bScore = b.x + (b.y * 12);
|
||||
if (aScore < bScore) { return -1; }
|
||||
if (aScore > bScore) { return 1; }
|
||||
return 0;
|
||||
});
|
||||
|
||||
if (this.changeRenderPromise) {
|
||||
this.$timeout.cancel(this.changeRenderPromise);
|
||||
}
|
||||
|
||||
this.changeRenderPromise = this.$timeout(() => {
|
||||
console.log('broadcasting render');
|
||||
this.$scope.$broadcast('render');
|
||||
});
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.gridstack.destroy();
|
||||
this.gridstack = null;
|
||||
this.isDestroyed = true;
|
||||
}
|
||||
}
|
||||
|
||||
/** @ngInject **/
|
||||
export function dashGrid($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: template,
|
||||
controller: GridCtrl,
|
||||
bindToController: true,
|
||||
controllerAs: 'ctrl',
|
||||
scope: {
|
||||
dashboard: "=",
|
||||
},
|
||||
link: function(scope, elem, attrs, ctrl) {
|
||||
$timeout(function() {
|
||||
ctrl.init();
|
||||
});
|
||||
|
||||
scope.$on('$destroy', () => {
|
||||
ctrl.destroy();
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/** @ngInject **/
|
||||
export function dashGridItem($timeout, $rootScope) {
|
||||
return {
|
||||
restrict: "E",
|
||||
scope: {
|
||||
panel: '=',
|
||||
gridCtrl: '='
|
||||
},
|
||||
link: function (scope, element, attrs) {
|
||||
let gridCtrl = scope.gridCtrl;
|
||||
let panel = scope.panel;
|
||||
let gridStackNode = null;
|
||||
|
||||
element.attr({
|
||||
'data-gs-id': panel.id,
|
||||
'data-gs-x': panel.x,
|
||||
'data-gs-y': panel.y,
|
||||
'data-gs-width': panel.width,
|
||||
'data-gs-height': panel.height,
|
||||
'data-gs-no-resize': panel.type === 'row',
|
||||
});
|
||||
|
||||
$rootScope.onAppEvent('panel-fullscreen-exit', (evt, payload) => {
|
||||
if (panel.id !== payload.panelId) {
|
||||
return;
|
||||
}
|
||||
gridCtrl.gridstack.locked(element, false);
|
||||
element.removeClass('panel-fullscreen');
|
||||
}, scope);
|
||||
|
||||
$rootScope.onAppEvent('panel-fullscreen-enter', (evt, payload) => {
|
||||
if (panel.id !== payload.panelId) {
|
||||
return;
|
||||
}
|
||||
element.addClass('panel-fullscreen');
|
||||
}, scope);
|
||||
|
||||
scope.$on('$destroy', () => {
|
||||
console.log('grid-item scope $destroy');
|
||||
if (gridCtrl.isDestroyed) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (gridStackNode) {
|
||||
console.log('grid-item scope $destroy removeWidget');
|
||||
gridStackNode._grid.removeWidget(element);
|
||||
}
|
||||
});
|
||||
|
||||
if (gridCtrl.isInitialized) {
|
||||
gridCtrl.gridstack.makeWidget(element);
|
||||
gridStackNode = element.data('_gridstack_node');
|
||||
} else {
|
||||
setTimeout(function() {
|
||||
gridStackNode = element.data('_gridstack_node');
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
coreModule.directive('dashGrid', dashGrid);
|
||||
coreModule.directive('dashGridItem', dashGridItem);
|
||||
// ///<reference path="../../../headers/common.d.ts" />
|
||||
//
|
||||
// import coreModule from 'app/core/core_module';
|
||||
// import {CELL_HEIGHT, CELL_VMARGIN} from '../model';
|
||||
//
|
||||
// import 'jquery-ui';
|
||||
// import 'gridstack/dist/jquery.jQueryUI';
|
||||
// import 'gridstack';
|
||||
//
|
||||
// const template = `
|
||||
// <div class="grid-stack">
|
||||
// <dash-grid-item ng-repeat="panel in ctrl.dashboard.panels track by panel.id"
|
||||
// class="grid-stack-item"
|
||||
// grid-ctrl="ctrl"
|
||||
// panel="panel">
|
||||
// <plugin-component type="panel" class="grid-stack-item-content">
|
||||
// </plugin-component>
|
||||
// </dash-grid-item>
|
||||
// </div>
|
||||
// `;
|
||||
//
|
||||
// var rowIndex = 0;
|
||||
//
|
||||
// export class GridCtrl {
|
||||
// options: any;
|
||||
// dashboard: any;
|
||||
// panels: any;
|
||||
// gridstack: any;
|
||||
// gridElem: any;
|
||||
// isInitialized: boolean;
|
||||
// isDestroyed: boolean;
|
||||
// index: number;
|
||||
// changeRenderPromise: any;
|
||||
//
|
||||
// #<{(|* @ngInject |)}>#
|
||||
// constructor(private $scope, private $element, private $timeout) {
|
||||
// console.log(this.dashboard);
|
||||
// this.index = rowIndex;
|
||||
// rowIndex += 1;
|
||||
// }
|
||||
//
|
||||
// init() {
|
||||
// this.gridElem = this.$element.find('.grid-stack');
|
||||
//
|
||||
// this.gridstack = this.gridElem.gridstack({
|
||||
// animate: true,
|
||||
// cellHeight: CELL_HEIGHT,
|
||||
// verticalMargin: CELL_VMARGIN,
|
||||
// acceptWidgets: '.grid-stack-item',
|
||||
// handle: '.grid-drag-handle'
|
||||
// }).data('gridstack');
|
||||
//
|
||||
// this.isInitialized = true;
|
||||
//
|
||||
// this.gridElem.on('added', (e, items) => {
|
||||
// for (let item of items) {
|
||||
// this.onGridStackItemAdded(item);
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// this.gridElem.on('removed', (e, items) => {
|
||||
// for (let item of items) {
|
||||
// this.onGridStackItemRemoved(item);
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// this.gridElem.on('change', (e, items) => {
|
||||
// this.$timeout(() => this.onGridStackItemsChanged(items), 50);
|
||||
// });
|
||||
// }
|
||||
//
|
||||
// onGridStackItemAdded(item) {
|
||||
// console.log('row: ' + this.index + ' item added', item);
|
||||
// }
|
||||
//
|
||||
// onGridStackItemRemoved(item) {
|
||||
// console.log('row: ' + this.index + ' item removed', item.id, item);
|
||||
// }
|
||||
//
|
||||
// onGridStackItemsChanged(items) {
|
||||
// console.log('onGridStackItemsChanged');
|
||||
//
|
||||
// for (let item of items) {
|
||||
// // find panel
|
||||
// var panel = this.dashboard.getPanelById(parseInt(item.id));
|
||||
//
|
||||
// if (!panel) {
|
||||
// console.log('item change but no panel found for item', item);
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// // update panel model position
|
||||
// panel.x = item.x;
|
||||
// panel.y = item.y;
|
||||
// panel.width = item.width;
|
||||
// panel.height = item.height;
|
||||
//
|
||||
// console.log('updating panel: ' + panel.id + ' x: ' + panel.x + ' y: ' + panel.y);
|
||||
// }
|
||||
//
|
||||
// this.dashboard.panels.sort(function (a, b) {
|
||||
// let aScore = a.x + (a.y * 12);
|
||||
// let bScore = b.x + (b.y * 12);
|
||||
// if (aScore < bScore) { return -1; }
|
||||
// if (aScore > bScore) { return 1; }
|
||||
// return 0;
|
||||
// });
|
||||
//
|
||||
// if (this.changeRenderPromise) {
|
||||
// this.$timeout.cancel(this.changeRenderPromise);
|
||||
// }
|
||||
//
|
||||
// this.changeRenderPromise = this.$timeout(() => {
|
||||
// console.log('broadcasting render');
|
||||
// this.$scope.$broadcast('render');
|
||||
// });
|
||||
// }
|
||||
//
|
||||
// destroy() {
|
||||
// this.gridstack.destroy();
|
||||
// this.gridstack = null;
|
||||
// this.isDestroyed = true;
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// #<{(|* @ngInject *|)}>#
|
||||
// export function dashGrid($timeout) {
|
||||
// return {
|
||||
// restrict: 'E',
|
||||
// template: template,
|
||||
// controller: GridCtrl,
|
||||
// bindToController: true,
|
||||
// controllerAs: 'ctrl',
|
||||
// scope: {
|
||||
// dashboard: "=",
|
||||
// },
|
||||
// link: function(scope, elem, attrs, ctrl) {
|
||||
// $timeout(function() {
|
||||
// ctrl.init();
|
||||
// });
|
||||
//
|
||||
// scope.$on('$destroy', () => {
|
||||
// ctrl.destroy();
|
||||
// });
|
||||
// }
|
||||
// };
|
||||
// }
|
||||
//
|
||||
// #<{(|* @ngInject *|)}>#
|
||||
// export function dashGridItem($timeout, $rootScope) {
|
||||
// return {
|
||||
// restrict: "E",
|
||||
// scope: {
|
||||
// panel: '=',
|
||||
// gridCtrl: '='
|
||||
// },
|
||||
// link: function (scope, element, attrs) {
|
||||
// let gridCtrl = scope.gridCtrl;
|
||||
// let panel = scope.panel;
|
||||
// let gridStackNode = null;
|
||||
//
|
||||
// element.attr({
|
||||
// 'data-gs-id': panel.id,
|
||||
// 'data-gs-x': panel.x,
|
||||
// 'data-gs-y': panel.y,
|
||||
// 'data-gs-width': panel.width,
|
||||
// 'data-gs-height': panel.height,
|
||||
// 'data-gs-no-resize': panel.type === 'row',
|
||||
// });
|
||||
//
|
||||
// $rootScope.onAppEvent('panel-fullscreen-exit', (evt, payload) => {
|
||||
// if (panel.id !== payload.panelId) {
|
||||
// return;
|
||||
// }
|
||||
// gridCtrl.gridstack.locked(element, false);
|
||||
// element.removeClass('panel-fullscreen');
|
||||
// }, scope);
|
||||
//
|
||||
// $rootScope.onAppEvent('panel-fullscreen-enter', (evt, payload) => {
|
||||
// if (panel.id !== payload.panelId) {
|
||||
// return;
|
||||
// }
|
||||
// element.addClass('panel-fullscreen');
|
||||
// }, scope);
|
||||
//
|
||||
// scope.$on('$destroy', () => {
|
||||
// console.log('grid-item scope $destroy');
|
||||
// if (gridCtrl.isDestroyed) {
|
||||
// return;
|
||||
// }
|
||||
//
|
||||
// if (gridStackNode) {
|
||||
// console.log('grid-item scope $destroy removeWidget');
|
||||
// gridStackNode._grid.removeWidget(element);
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// if (gridCtrl.isInitialized) {
|
||||
// gridCtrl.gridstack.makeWidget(element);
|
||||
// gridStackNode = element.data('_gridstack_node');
|
||||
// } else {
|
||||
// setTimeout(function() {
|
||||
// gridStackNode = element.data('_gridstack_node');
|
||||
// }, 500);
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
// }
|
||||
//
|
||||
// coreModule.directive('dashGrid', dashGrid);
|
||||
// coreModule.directive('dashGridItem', dashGridItem);
|
||||
|
@@ -23,7 +23,7 @@
|
||||
<input type="text" class="gf-form-input" ng-model="ctrl.dateTimeFormat">
|
||||
</div>
|
||||
<gf-form-switch class="gf-form"
|
||||
label="Export To Excel" label-class="width-12" switch-class="max-width-6"
|
||||
label="Excel CSV Dialect" label-class="width-10" switch-class="max-width-6"
|
||||
checked="ctrl.excel">
|
||||
</gf-form-switch>
|
||||
</div>
|
||||
|
@@ -9,7 +9,8 @@ var template = `
|
||||
</div>
|
||||
`;
|
||||
|
||||
coreModule.directive('dashRepeatOption', function(variableSrv) {
|
||||
/** @ngInject **/
|
||||
function dashRepeatOptionDirective(variableSrv) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: template,
|
||||
@@ -30,5 +31,6 @@ coreModule.directive('dashRepeatOption', function(variableSrv) {
|
||||
scope.variables.unshift({text: 'Disabled', value: null});
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
coreModule.directive('dashRepeatOption', dashRepeatOptionDirective);
|
||||
|
@@ -2,7 +2,7 @@ import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/co
|
||||
|
||||
import _ from 'lodash';
|
||||
import {HistoryListCtrl} from 'app/features/dashboard/history/history';
|
||||
import { versions, compare, restore } from 'test/mocks/history-mocks';
|
||||
import {versions, compare, restore} from './history_mocks';
|
||||
|
||||
describe('HistoryListCtrl', function() {
|
||||
var RESTORE_ID = 4;
|
||||
|
193
public/app/features/dashboard/specs/history_mocks.ts
Normal file
@@ -0,0 +1,193 @@
|
||||
|
||||
export function versions() {
|
||||
return [{
|
||||
id: 4,
|
||||
dashboardId: 1,
|
||||
parentVersion: 3,
|
||||
restoredFrom: 0,
|
||||
version: 4,
|
||||
created: '2017-02-22T17:43:01-08:00',
|
||||
createdBy: 'admin',
|
||||
message: '',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
dashboardId: 1,
|
||||
parentVersion: 1,
|
||||
restoredFrom: 1,
|
||||
version: 3,
|
||||
created: '2017-02-22T17:43:01-08:00',
|
||||
createdBy: 'admin',
|
||||
message: '',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
dashboardId: 1,
|
||||
parentVersion: 0,
|
||||
restoredFrom: -1,
|
||||
version: 2,
|
||||
created: '2017-02-22T17:29:52-08:00',
|
||||
createdBy: 'admin',
|
||||
message: '',
|
||||
},
|
||||
{
|
||||
id: 1,
|
||||
dashboardId: 1,
|
||||
parentVersion: 0,
|
||||
restoredFrom: -1,
|
||||
slug: 'history-dashboard',
|
||||
version: 1,
|
||||
created: '2017-02-22T17:06:37-08:00',
|
||||
createdBy: 'admin',
|
||||
message: '',
|
||||
}];
|
||||
}
|
||||
|
||||
export function compare(type) {
|
||||
return type === 'basic' ? '<div></div>' : '<pre><code></code></pre>';
|
||||
}
|
||||
|
||||
export function restore(version, restoredFrom?) {
|
||||
return {
|
||||
dashboard: {
|
||||
meta: {
|
||||
type: 'db',
|
||||
canSave: true,
|
||||
canEdit: true,
|
||||
canStar: true,
|
||||
slug: 'history-dashboard',
|
||||
expires: '0001-01-01T00:00:00Z',
|
||||
created: '2017-02-21T18:40:45-08:00',
|
||||
updated: '2017-04-11T21:31:22.59219665-07:00',
|
||||
updatedBy: 'admin',
|
||||
createdBy: 'admin',
|
||||
version: version,
|
||||
},
|
||||
dashboard: {
|
||||
annotations: {
|
||||
list: []
|
||||
},
|
||||
description: 'A random dashboard for implementing the history list',
|
||||
editable: true,
|
||||
gnetId: null,
|
||||
graphTooltip: 0,
|
||||
hideControls: false,
|
||||
id: 1,
|
||||
links: [],
|
||||
restoredFrom: restoredFrom,
|
||||
rows: [{
|
||||
collapse: false,
|
||||
height: '250px',
|
||||
panels: [{
|
||||
aliasColors: {},
|
||||
bars: false,
|
||||
datasource: null,
|
||||
fill: 1,
|
||||
id: 1,
|
||||
legend: {
|
||||
avg: false,
|
||||
current: false,
|
||||
max: false,
|
||||
min: false,
|
||||
show: true,
|
||||
total: false,
|
||||
values: false
|
||||
},
|
||||
lines: true,
|
||||
linewidth: 1,
|
||||
nullPointMode: "null",
|
||||
percentage: false,
|
||||
pointradius: 5,
|
||||
points: false,
|
||||
renderer: 'flot',
|
||||
seriesOverrides: [],
|
||||
span: 12,
|
||||
stack: false,
|
||||
steppedLine: false,
|
||||
targets: [{}],
|
||||
thresholds: [],
|
||||
timeFrom: null,
|
||||
timeShift: null,
|
||||
title: 'Panel Title',
|
||||
tooltip: {
|
||||
shared: true,
|
||||
sort: 0,
|
||||
value_type: 'individual'
|
||||
},
|
||||
type: 'graph',
|
||||
xaxis: {
|
||||
mode: 'time',
|
||||
name: null,
|
||||
show: true,
|
||||
values: []
|
||||
},
|
||||
yaxes: [{
|
||||
format: 'short',
|
||||
label: null,
|
||||
logBase: 1,
|
||||
max: null,
|
||||
min: null,
|
||||
show: true
|
||||
}, {
|
||||
format: 'short',
|
||||
label: null,
|
||||
logBase: 1,
|
||||
max: null,
|
||||
min: null,
|
||||
show: true
|
||||
}]
|
||||
}],
|
||||
repeat: null,
|
||||
repeatIteration: null,
|
||||
repeatRowId: null,
|
||||
showTitle: false,
|
||||
title: 'Dashboard Row',
|
||||
titleSize: 'h6'
|
||||
}
|
||||
],
|
||||
schemaVersion: 14,
|
||||
style: 'dark',
|
||||
tags: [
|
||||
'development'
|
||||
],
|
||||
templating: {
|
||||
'list': []
|
||||
},
|
||||
time: {
|
||||
from: 'now-6h',
|
||||
to: 'now'
|
||||
},
|
||||
timepicker: {
|
||||
refresh_intervals: [
|
||||
'5s',
|
||||
'10s',
|
||||
'30s',
|
||||
'1m',
|
||||
'5m',
|
||||
'15m',
|
||||
'30m',
|
||||
'1h',
|
||||
'2h',
|
||||
'1d',
|
||||
],
|
||||
time_options: [
|
||||
'5m',
|
||||
'15m',
|
||||
'1h',
|
||||
'6h',
|
||||
'12h',
|
||||
'24h',
|
||||
'2d',
|
||||
'7d',
|
||||
'30d'
|
||||
]
|
||||
},
|
||||
timezone: 'utc',
|
||||
title: 'History Dashboard',
|
||||
version: version,
|
||||
}
|
||||
},
|
||||
message: 'Dashboard restored to version ' + version,
|
||||
version: version
|
||||
};
|
||||
}
|
@@ -2,7 +2,7 @@ import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
|
||||
|
||||
import helpers from 'test/specs/helpers';
|
||||
import '../history/history_srv';
|
||||
import {versions, restore} from 'test/mocks/history-mocks';
|
||||
import {versions, restore} from './history_mocks';
|
||||
|
||||
describe('historySrv', function() {
|
||||
var ctx = new helpers.ServiceTestContext();
|
||||
|
110
public/app/features/dashboard/specs/share_modal_ctrl_specs.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import {describe, beforeEach, it, expect, sinon, angularMocks} from 'test/lib/common';
|
||||
import helpers from 'test/specs/helpers';
|
||||
import '../shareModalCtrl';
|
||||
import config from 'app/core/config';
|
||||
import 'app/features/panellinks/linkSrv';
|
||||
|
||||
describe('ShareModalCtrl', function() {
|
||||
var ctx = new helpers.ControllerTestContext();
|
||||
|
||||
function setTime(range) {
|
||||
ctx.timeSrv.timeRange = sinon.stub().returns(range);
|
||||
}
|
||||
|
||||
beforeEach(function() {
|
||||
config.bootData = {
|
||||
user: {
|
||||
orgId: 1
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
setTime({ from: new Date(1000), to: new Date(2000) });
|
||||
|
||||
beforeEach(angularMocks.module('grafana.controllers'));
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
beforeEach(angularMocks.module(function($compileProvider) {
|
||||
$compileProvider.preAssignBindingsEnabled(true);
|
||||
}));
|
||||
|
||||
beforeEach(ctx.providePhase());
|
||||
|
||||
beforeEach(ctx.createControllerPhase('ShareModalCtrl'));
|
||||
|
||||
describe('shareUrl with current time range and panel', function() {
|
||||
it('should generate share url absolute time', function() {
|
||||
ctx.$location.path('/test');
|
||||
ctx.scope.panel = { id: 22 };
|
||||
|
||||
ctx.scope.init();
|
||||
expect(ctx.scope.shareUrl).to.be('http://server/#!/test?from=1000&to=2000&orgId=1&panelId=22&fullscreen');
|
||||
});
|
||||
|
||||
it('should generate render url', function() {
|
||||
ctx.$location.$$absUrl = 'http://dashboards.grafana.com/dashboard/db/my-dash';
|
||||
|
||||
ctx.scope.panel = { id: 22 };
|
||||
|
||||
ctx.scope.init();
|
||||
var base = 'http://dashboards.grafana.com/render/dashboard-solo/db/my-dash';
|
||||
var params = '?from=1000&to=2000&orgId=1&panelId=22&width=1000&height=500&tz=UTC';
|
||||
expect(ctx.scope.imageUrl).to.contain(base + params);
|
||||
});
|
||||
|
||||
it('should remove panel id when no panel in scope', function() {
|
||||
ctx.$location.path('/test');
|
||||
ctx.scope.options.forCurrent = true;
|
||||
ctx.scope.panel = null;
|
||||
|
||||
ctx.scope.init();
|
||||
expect(ctx.scope.shareUrl).to.be('http://server/#!/test?from=1000&to=2000&orgId=1');
|
||||
});
|
||||
|
||||
it('should add theme when specified', function() {
|
||||
ctx.$location.path('/test');
|
||||
ctx.scope.options.theme = 'light';
|
||||
ctx.scope.panel = null;
|
||||
|
||||
ctx.scope.init();
|
||||
expect(ctx.scope.shareUrl).to.be('http://server/#!/test?from=1000&to=2000&orgId=1&theme=light');
|
||||
});
|
||||
|
||||
it('should remove fullscreen from image url when is first param in querystring and modeSharePanel is true', function() {
|
||||
ctx.$location.url('/test?fullscreen&edit');
|
||||
ctx.scope.modeSharePanel = true;
|
||||
ctx.scope.panel = { id: 1 };
|
||||
|
||||
ctx.scope.buildUrl();
|
||||
|
||||
expect(ctx.scope.shareUrl).to.contain('?fullscreen&edit&from=1000&to=2000&orgId=1&panelId=1');
|
||||
expect(ctx.scope.imageUrl).to.contain('?from=1000&to=2000&orgId=1&panelId=1&width=1000&height=500&tz=UTC');
|
||||
|
||||
});
|
||||
|
||||
it('should remove edit from image url when is first param in querystring and modeSharePanel is true', function() {
|
||||
ctx.$location.url('/test?edit&fullscreen');
|
||||
ctx.scope.modeSharePanel = true;
|
||||
ctx.scope.panel = { id: 1 };
|
||||
|
||||
ctx.scope.buildUrl();
|
||||
|
||||
expect(ctx.scope.shareUrl).to.contain('?edit&fullscreen&from=1000&to=2000&orgId=1&panelId=1');
|
||||
expect(ctx.scope.imageUrl).to.contain('?from=1000&to=2000&orgId=1&panelId=1&width=1000&height=500&tz=UTC');
|
||||
|
||||
});
|
||||
|
||||
it('should include template variables in url', function() {
|
||||
ctx.$location.path('/test');
|
||||
ctx.scope.options.includeTemplateVars = true;
|
||||
|
||||
ctx.templateSrv.fillVariableValuesForUrl = function(params) {
|
||||
params['var-app'] = 'mupp';
|
||||
params['var-server'] = 'srv-01';
|
||||
};
|
||||
|
||||
ctx.scope.buildUrl();
|
||||
expect(ctx.scope.shareUrl).to.be('http://server/#!/test?from=1000&to=2000&orgId=1&var-app=mupp&var-server=srv-01');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@@ -0,0 +1,82 @@
|
||||
import {describe, beforeEach, it, expect, sinon, angularMocks} from 'test/lib/common';
|
||||
import 'app/features/dashboard/unsavedChangesSrv';
|
||||
import 'app/features/dashboard/dashboard_srv';
|
||||
|
||||
describe("unsavedChangesSrv", function() {
|
||||
var _unsavedChangesSrv;
|
||||
var _dashboardSrv;
|
||||
var _location;
|
||||
var _contextSrvStub = { isEditor: true };
|
||||
var _rootScope;
|
||||
var tracker;
|
||||
var dash;
|
||||
var scope;
|
||||
|
||||
beforeEach(angularMocks.module('grafana.core'));
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
beforeEach(angularMocks.module(function($provide) {
|
||||
$provide.value('contextSrv', _contextSrvStub);
|
||||
$provide.value('$window', {});
|
||||
}));
|
||||
|
||||
beforeEach(angularMocks.inject(function(unsavedChangesSrv, $location, $rootScope, dashboardSrv) {
|
||||
_unsavedChangesSrv = unsavedChangesSrv;
|
||||
_dashboardSrv = dashboardSrv;
|
||||
_location = $location;
|
||||
_rootScope = $rootScope;
|
||||
}));
|
||||
|
||||
beforeEach(function() {
|
||||
dash = _dashboardSrv.create({
|
||||
refresh: false,
|
||||
rows: [
|
||||
{
|
||||
panels: [{ test: "asd", legend: { } }]
|
||||
}
|
||||
]
|
||||
});
|
||||
scope = _rootScope.$new();
|
||||
scope.appEvent = sinon.spy();
|
||||
scope.onAppEvent = sinon.spy();
|
||||
|
||||
tracker = new _unsavedChangesSrv.Tracker(dash, scope);
|
||||
});
|
||||
|
||||
it('No changes should not have changes', function() {
|
||||
expect(tracker.hasChanges()).to.be(false);
|
||||
});
|
||||
|
||||
it('Simple change should be registered', function() {
|
||||
dash.property = "google";
|
||||
expect(tracker.hasChanges()).to.be(true);
|
||||
});
|
||||
|
||||
it('Should ignore a lot of changes', function() {
|
||||
dash.time = {from: '1h'};
|
||||
dash.refresh = true;
|
||||
dash.schemaVersion = 10;
|
||||
expect(tracker.hasChanges()).to.be(false);
|
||||
});
|
||||
|
||||
it('Should ignore row collapse change', function() {
|
||||
dash.rows[0].collapse = true;
|
||||
expect(tracker.hasChanges()).to.be(false);
|
||||
});
|
||||
|
||||
it('Should ignore panel legend changes', function() {
|
||||
dash.rows[0].panels[0].legend.sortDesc = true;
|
||||
dash.rows[0].panels[0].legend.sort = "avg";
|
||||
expect(tracker.hasChanges()).to.be(false);
|
||||
});
|
||||
|
||||
it('Should ignore panel repeats', function() {
|
||||
dash.rows[0].panels.push({repeatPanelId: 10});
|
||||
expect(tracker.hasChanges()).to.be(false);
|
||||
});
|
||||
|
||||
it('Should ignore row repeats', function() {
|
||||
dash.addEmptyRow();
|
||||
dash.rows[1].repeatRowId = 10;
|
||||
expect(tracker.hasChanges()).to.be(false);
|
||||
});
|
||||
});
|
53
public/app/features/dashboard/specs/viewstate_srv_specs.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
|
||||
import 'app/features/dashboard/viewStateSrv';
|
||||
import config from 'app/core/config';
|
||||
|
||||
describe('when updating view state', function() {
|
||||
var viewState, location;
|
||||
var timeSrv = {};
|
||||
var templateSrv = {};
|
||||
var contextSrv = {
|
||||
user: {
|
||||
orgId: 19
|
||||
}
|
||||
};
|
||||
beforeEach(function() {
|
||||
config.bootData = {
|
||||
user: {
|
||||
orgId: 1
|
||||
}
|
||||
};
|
||||
});
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
beforeEach(angularMocks.module(function($provide) {
|
||||
$provide.value('timeSrv', timeSrv);
|
||||
$provide.value('templateSrv', templateSrv);
|
||||
$provide.value('contextSrv', contextSrv);
|
||||
}));
|
||||
|
||||
beforeEach(angularMocks.inject(function(dashboardViewStateSrv, $location, $rootScope) {
|
||||
$rootScope.onAppEvent = function() {};
|
||||
$rootScope.dashboard = {meta: {}};
|
||||
viewState = dashboardViewStateSrv.create($rootScope);
|
||||
location = $location;
|
||||
}));
|
||||
|
||||
describe('to fullscreen true and edit true', function() {
|
||||
it('should update querystring and view state', function() {
|
||||
var updateState = {fullscreen: true, edit: true, panelId: 1};
|
||||
viewState.update(updateState);
|
||||
expect(location.search()).to.eql({fullscreen: true, edit: true, panelId: 1, orgId: 1});
|
||||
expect(viewState.dashboard.meta.fullscreen).to.be(true);
|
||||
expect(viewState.state.fullscreen).to.be(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('to fullscreen false', function() {
|
||||
it('should remove params from query string', function() {
|
||||
viewState.update({fullscreen: true, panelId: 1, edit: true});
|
||||
viewState.update({fullscreen: false});
|
||||
expect(viewState.dashboard.meta.fullscreen).to.be(false);
|
||||
expect(viewState.state.fullscreen).to.be(null);
|
||||
});
|
||||
});
|
||||
});
|
@@ -3,74 +3,78 @@
|
||||
|
||||
<div ng-repeat="link in dashboard.links">
|
||||
|
||||
<div class="gf-form-group">
|
||||
<div class="gf-form-inline">
|
||||
<div class="gf-form-group gf-form-inline">
|
||||
<div class="section">
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-6">Type</span>
|
||||
<span class="gf-form-label width-8">Type</span>
|
||||
<div class="gf-form-select-wrapper width-10">
|
||||
<select class="gf-form-input" ng-model="link.type" ng-options="f for f in ['dashboards','link']" ng-change="updated()"></select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="gf-form" ng-show="link.type === 'dashboards'">
|
||||
<span class="gf-form-label">With tags</span>
|
||||
<bootstrap-tagsinput ng-model="link.tags" tagclass="label label-tag" placeholder="add tags"></bootstrap-tagsinput>
|
||||
<span class="gf-form-label width-8">With tags</span>
|
||||
<bootstrap-tagsinput ng-model="link.tags" class="width-10" tagclass="label label-tag" placeholder="add tags" style="margin-right: .25rem"></bootstrap-tagsinput>
|
||||
</div>
|
||||
|
||||
<div class="gf-form" ng-show="link.type === 'dashboards'">
|
||||
<editor-checkbox text="As dropdown" model="link.asDropdown" change="updated()"></editor-checkbox>
|
||||
</div>
|
||||
|
||||
<div class="gf-form max-width-30" ng-show="link.type === 'link'">
|
||||
<li class="gf-form-label width-6">Url</li>
|
||||
<input type="text" ng-model="link.url" class="gf-form-input" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
|
||||
<div class="gf-form">
|
||||
<button class="btn btn-inverse btn-mini" ng-click="moveLink($index, -1)" ng-hide="$first"><i class="fa fa-arrow-up"></i></button>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<button class="btn btn-inverse btn-mini" ng-click="moveLink($index, 1)" ng-hide="$last"><i class="fa fa-arrow-down"></i></button>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<button class="btn btn-inverse btn-mini" ng-click="deleteLink($index)"><i class="fa fa-trash" ></i></button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="gf-form" ng-show="link.type === 'dashboards' && link.asDropdown">
|
||||
<span class="gf-form-label width-6">Title</span>
|
||||
<input type="text" ng-model="link.title" class="gf-form-input max-width-25" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
|
||||
<div class="gf-form-inline" ng-show="link.type === 'link'">
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-6">Title</span>
|
||||
<gf-form-switch ng-show="link.type === 'dashboards'" class="gf-form" label="As dropdown" checked="link.asDropdown" switch-class="max-width-4" label-class="width-8"></gf-form-switch>
|
||||
<div class="gf-form" ng-show="link.type === 'dashboards' && link.asDropdown">
|
||||
<span class="gf-form-label width-8">Title</span>
|
||||
<input type="text" ng-model="link.title" class="gf-form-input max-width-10" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
<div ng-show="link.type === 'link'">
|
||||
<div class="gf-form">
|
||||
<li class="gf-form-label width-8">Url</li>
|
||||
<input type="text" ng-model="link.url" class="gf-form-input width-20" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-6">Tooltip</span>
|
||||
<input type="text" ng-model="link.tooltip" class="gf-form-input max-width-10" placeholder="Open dashboard" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-8">Title</span>
|
||||
<input type="text" ng-model="link.title" class="gf-form-input width-20" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-6">Icon</span>
|
||||
<div class="gf-form-select-wrapper max-width-10">
|
||||
<select class="gf-form-input" ng-model="link.icon" ng-options="k as k for (k, v) in iconMap" ng-change="updated()"></select>
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-8">Tooltip</span>
|
||||
<input type="text" ng-model="link.tooltip" class="gf-form-input width-20" placeholder="Open dashboard" ng-model-onblur ng-change="updated()">
|
||||
</div>
|
||||
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-8">Icon</span>
|
||||
<div class="gf-form-select-wrapper width-20">
|
||||
<select class="gf-form-input" ng-model="link.icon" ng-options="k as k for (k, v) in iconMap" ng-change="updated()"></select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="gf-form-inline">
|
||||
<div class="section gf-form-inline" style="display: flex">
|
||||
<div>
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-6">Include</span>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<gf-form-switch class="gf-form" label="Time range" checked="link.keepTime" switch-class="max-width-6" label-class="width-9"></gf-form-switch>
|
||||
<gf-form-switch class="gf-form" label="Variable values" checked="link.includeVars" switch-class="max-width-6" label-class="width-9"></gf-form-switch>
|
||||
<gf-form-switch class="gf-form" label="Open in new tab" checked="link.targetBlank" switch-class="max-width-6" label-class="width-9"></gf-form-switch>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div style="display:flex; flex-direction:column; justify-content:flex-start">
|
||||
<div class="gf-form">
|
||||
<span class="gf-form-label width-6">Include</span>
|
||||
<editor-checkbox text="Time range" model="link.keepTime" change="updated()"></editor-checkbox>
|
||||
<editor-checkbox text="Variable values" model="link.includeVars" change="updated()"></editor-checkbox>
|
||||
<editor-checkbox text="Open in new tab " model="link.targetBlank" change="updated()"></editor-checkbox>
|
||||
<button class="btn btn-inverse gf-form-btn width-4" ng-click="deleteLink($index)">
|
||||
<i class="fa fa-trash"></i>
|
||||
</button>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<button class="btn btn-inverse gf-form-btn width-4" ng-click="moveLink($index, -1)" ng-hide="$first"><i class="fa fa-arrow-up"></i></button>
|
||||
</div>
|
||||
<div class="gf-form">
|
||||
<button class="btn btn-inverse gf-form-btn width-4" ng-click="moveLink($index, 1)" ng-hide="$last"><i class="fa fa-arrow-down"></i></button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
<button class="btn btn-inverse" ng-click="addLink()"><i class="fa fa-plus"></i> Add link</button>
|
||||
|
@@ -1,5 +1,3 @@
///<reference path="../../headers/common.d.ts" />

import config from 'app/core/config';
import {coreModule} from 'app/core/core';

@@ -1,10 +1,8 @@
///<reference path="../../headers/common.d.ts" />

import config from 'app/core/config';
import $ from 'jquery';
import _ from 'lodash';
import kbn from 'app/core/utils/kbn';
import {PanelCtrl} from './panel_ctrl';
import {PanelCtrl} from 'app/features/panel/panel_ctrl';

import * as rangeUtil from 'app/core/utils/rangeutil';
import * as dateMath from 'app/core/utils/datemath';
@@ -1,5 +1,3 @@
///<reference path="../../headers/common.d.ts" />

import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
46
public/app/features/panellinks/specs/link_srv_specs.ts
Normal file
@@ -0,0 +1,46 @@
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
import 'app/features/panellinks/linkSrv';
import _ from 'lodash';

describe('linkSrv', function() {
var _linkSrv;

beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module('grafana.services'));

beforeEach(angularMocks.inject(function(linkSrv) {
_linkSrv = linkSrv;
}));

describe('when appending query strings', function() {

it('add ? to URL if not present', function() {
var url = _linkSrv.appendToQueryString('http://example.com', 'foo=bar');
expect(url).to.be('http://example.com?foo=bar');
});

it('do not add & to URL if ? is present but query string is empty', function() {
var url = _linkSrv.appendToQueryString('http://example.com?', 'foo=bar');
expect(url).to.be('http://example.com?foo=bar');
});

it('add & to URL if query string is present', function() {
var url = _linkSrv.appendToQueryString('http://example.com?foo=bar', 'hello=world');
expect(url).to.be('http://example.com?foo=bar&hello=world');
});

it('do not change the URL if there is nothing to append', function() {
_.each(['', undefined, null], function(toAppend) {
var url1 = _linkSrv.appendToQueryString('http://example.com', toAppend);
expect(url1).to.be('http://example.com');

var url2 = _linkSrv.appendToQueryString('http://example.com?', toAppend);
expect(url2).to.be('http://example.com?');

var url3 = _linkSrv.appendToQueryString('http://example.com?foo=bar', toAppend);
expect(url3).to.be('http://example.com?foo=bar');
});
});

});
});
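For reference, a minimal sketch of the behaviour these specs pin down; this is an assumption-laden illustration, not the actual linkSrv implementation in public/app/features/panellinks/linkSrv.

// Hypothetical sketch only: append params to a URL the way the specs above expect.
function appendToQueryString(url: string, toAppend: string): string {
  if (!toAppend) {
    return url;                      // nothing to append, leave the URL untouched
  }
  const pos = url.indexOf('?');
  if (pos === -1) {
    return url + '?' + toAppend;     // no query string yet, start one
  }
  if (pos === url.length - 1) {
    return url + toAppend;           // '?' present but query string empty, no '&' needed
  }
  return url + '&' + toAppend;       // existing query string, join with '&'
}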
@@ -4,3 +4,5 @@ import './plugin_list_ctrl';
import './import_list/import_list';
import './ds_edit_ctrl';
import './ds_list_ctrl';
import './datasource_srv';
import './plugin_component';
@@ -1,10 +1,11 @@
define([
'angular',
'lodash',
'../core_module',
'app/core/core_module',
'app/core/config',
'./plugin_loader',
],
function (angular, _, coreModule, config) {
function (angular, _, coreModule, config, pluginLoader) {
'use strict';

coreModule.default.service('datasourceSrv', function($q, $injector, $rootScope, templateSrv) {
@@ -41,7 +42,7 @@ function (angular, _, coreModule, config) {
var deferred = $q.defer();
var pluginDef = dsConfig.meta;

System.import(pluginDef.module).then(function(plugin) {
pluginLoader.importPluginModule(pluginDef.module).then(function(plugin) {
// check if its in cache now
if (self.datasources[name]) {
deferred.resolve(self.datasources[name]);
@@ -1,12 +1,12 @@
|
||||
///<reference path="../../headers/common.d.ts" />
|
||||
|
||||
import angular from 'angular';
|
||||
import _ from 'lodash';
|
||||
|
||||
import config from 'app/core/config';
|
||||
import coreModule from 'app/core/core_module';
|
||||
import {importPluginModule} from './plugin_loader';
|
||||
|
||||
import {UnknownPanelCtrl} from 'app/plugins/panel/unknown/module';
|
||||
import {DashboardRowCtrl} from '../components/row_ctrl';
|
||||
import {DashboardRowCtrl} from './row_ctrl';
|
||||
|
||||
/** @ngInject **/
|
||||
function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $templateCache) {
|
||||
@@ -74,7 +74,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
let panelInfo = config.panels[scope.panel.type];
|
||||
var panelCtrlPromise = Promise.resolve(UnknownPanelCtrl);
|
||||
if (panelInfo) {
|
||||
panelCtrlPromise = System.import(panelInfo.module).then(function(panelModule) {
|
||||
panelCtrlPromise = importPluginModule(panelInfo.module).then(function(panelModule) {
|
||||
return panelModule.PanelCtrl;
|
||||
});
|
||||
}
|
||||
@@ -114,7 +114,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
return datasourceSrv.get(datasource).then(ds => {
|
||||
scope.datasource = ds;
|
||||
|
||||
return System.import(ds.meta.module).then(dsModule => {
|
||||
return importPluginModule(ds.meta.module).then(dsModule => {
|
||||
return {
|
||||
baseUrl: ds.meta.baseUrl,
|
||||
name: 'query-ctrl-' + ds.meta.id,
|
||||
@@ -128,7 +128,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
// QueryOptionsCtrl
|
||||
case "query-options-ctrl": {
|
||||
return datasourceSrv.get(scope.ctrl.panel.datasource).then(ds => {
|
||||
return System.import(ds.meta.module).then((dsModule): any => {
|
||||
return importPluginModule(ds.meta.module).then((dsModule): any => {
|
||||
if (!dsModule.QueryOptionsCtrl) {
|
||||
return {notFound: true};
|
||||
}
|
||||
@@ -145,7 +145,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
}
|
||||
// Annotations
|
||||
case "annotations-query-ctrl": {
|
||||
return System.import(scope.ctrl.currentDatasource.meta.module).then(function(dsModule) {
|
||||
return importPluginModule(scope.ctrl.currentDatasource.meta.module).then(function(dsModule) {
|
||||
return {
|
||||
baseUrl: scope.ctrl.currentDatasource.meta.baseUrl,
|
||||
name: 'annotations-query-ctrl-' + scope.ctrl.currentDatasource.meta.id,
|
||||
@@ -158,7 +158,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
// Datasource ConfigCtrl
|
||||
case 'datasource-config-ctrl': {
|
||||
var dsMeta = scope.ctrl.datasourceMeta;
|
||||
return System.import(dsMeta.module).then(function(dsModule): any {
|
||||
return importPluginModule(dsMeta.module).then(function(dsModule): any {
|
||||
if (!dsModule.ConfigCtrl) {
|
||||
return {notFound: true};
|
||||
}
|
||||
@@ -175,7 +175,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
// AppConfigCtrl
|
||||
case 'app-config-ctrl': {
|
||||
let model = scope.ctrl.model;
|
||||
return System.import(model.module).then(function(appModule) {
|
||||
return importPluginModule(model.module).then(function(appModule) {
|
||||
return {
|
||||
baseUrl: model.baseUrl,
|
||||
name: 'app-config-' + model.id,
|
||||
@@ -188,7 +188,7 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
|
||||
// App Page
|
||||
case 'app-page': {
|
||||
let appModel = scope.ctrl.appModel;
|
||||
return System.import(appModel.module).then(function(appModule) {
|
||||
return importPluginModule(appModel.module).then(function(appModule) {
|
||||
return {
|
||||
baseUrl: appModel.baseUrl,
|
||||
name: 'app-page-' + appModel.id + '-' + scope.ctrl.page.slug,
|
128
public/app/features/plugins/plugin_loader.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import System from 'systemjs/dist/system.js';
|
||||
import _ from 'lodash';
|
||||
import * as sdk from 'app/plugins/sdk';
|
||||
import kbn from 'app/core/utils/kbn';
|
||||
import moment from 'moment';
|
||||
import angular from 'angular';
|
||||
import jquery from 'jquery';
|
||||
import config from 'app/core/config';
|
||||
import TimeSeries from 'app/core/time_series2';
|
||||
import * as datemath from 'app/core/utils/datemath';
|
||||
|
||||
import * as graphitePlugin from 'app/plugins/datasource/graphite/module';
|
||||
import * as cloudwatchPlugin from 'app/plugins/datasource/cloudwatch/module';
|
||||
import * as elasticsearchPlugin from 'app/plugins/datasource/elasticsearch/module';
|
||||
import * as opentsdbPlugin from 'app/plugins/datasource/opentsdb/module';
|
||||
import * as grafanaPlugin from 'app/plugins/datasource/grafana/module';
|
||||
import * as influxdbPlugin from 'app/plugins/datasource/influxdb/module';
|
||||
import * as mixedPlugin from 'app/plugins/datasource/mixed/module';
|
||||
import * as mysqlPlugin from 'app/plugins/datasource/mysql/module';
|
||||
import * as prometheusPlugin from 'app/plugins/datasource/prometheus/module';
|
||||
|
||||
import * as textPanel from 'app/plugins/panel/text/module';
|
||||
import * as graphPanel from 'app/plugins/panel/graph/module';
|
||||
import * as dashListPanel from 'app/plugins/panel/dashlist/module';
|
||||
import * as pluginsListPanel from 'app/plugins/panel/pluginlist/module';
|
||||
import * as alertListPanel from 'app/plugins/panel/alertlist/module';
|
||||
import * as heatmapPanel from 'app/plugins/panel/heatmap/module';
|
||||
import * as tablePanel from 'app/plugins/panel/table/module';
|
||||
import * as singlestatPanel from 'app/plugins/panel/singlestat/module';
|
||||
import * as gettingStartedPanel from 'app/plugins/panel/gettingstarted/module';
|
||||
import * as testDataAppPlugin from 'app/plugins/app/testdata/module';
|
||||
import * as testDataDSPlugin from 'app/plugins/app/testdata/datasource/module';
|
||||
|
||||
let builtInPlugins = {
|
||||
"app/plugins/datasource/graphite/module": graphitePlugin,
|
||||
"app/plugins/datasource/cloudwatch/module": cloudwatchPlugin,
|
||||
"app/plugins/datasource/elasticsearch/module": elasticsearchPlugin,
|
||||
"app/plugins/datasource/opentsdb/module": opentsdbPlugin,
|
||||
"app/plugins/datasource/grafana/module": grafanaPlugin,
|
||||
"app/plugins/datasource/influxdb/module": influxdbPlugin,
|
||||
"app/plugins/datasource/mixed/module": mixedPlugin,
|
||||
"app/plugins/datasource/mysql/module": mysqlPlugin,
|
||||
"app/plugins/datasource/prometheus/module": prometheusPlugin,
|
||||
"app/plugins/app/testdata/module": testDataAppPlugin,
|
||||
"app/plugins/app/testdata/datasource/module": testDataDSPlugin,
|
||||
|
||||
"app/plugins/panel/text/module": textPanel,
|
||||
"app/plugins/panel/graph/module": graphPanel,
|
||||
"app/plugins/panel/dashlist/module": dashListPanel,
|
||||
"app/plugins/panel/pluginlist/module": pluginsListPanel,
|
||||
"app/plugins/panel/alertlist/module": alertListPanel,
|
||||
"app/plugins/panel/heatmap/module": heatmapPanel,
|
||||
"app/plugins/panel/table/module": tablePanel,
|
||||
"app/plugins/panel/singlestat/module": singlestatPanel,
|
||||
"app/plugins/panel/gettingstarted/module": gettingStartedPanel,
|
||||
};
|
||||
|
||||
System.config({
|
||||
baseURL: 'public',
|
||||
defaultExtension: 'js',
|
||||
packages: {
|
||||
'plugins': {
|
||||
defaultExtension: 'js'
|
||||
}
|
||||
},
|
||||
map: {
|
||||
text: 'vendor/plugin-text/text.js',
|
||||
css: 'vendor/plugin-css/css.js'
|
||||
},
|
||||
});
|
||||
|
||||
// add cache busting
|
||||
var systemLocate = System.locate;
|
||||
System.cacheBust = '?bust=' + Date.now();
|
||||
System.locate = function(load) {
|
||||
var System = this;
|
||||
return Promise.resolve(systemLocate.call(this, load)).then(function(address) {
|
||||
return address + System.cacheBust;
|
||||
});
|
||||
};
|
||||
|
||||
function exposeToPlugin(name: string, component: any) {
|
||||
System.registerDynamic(name, [], true, function(require, exports, module) {
|
||||
module.exports = component;
|
||||
});
|
||||
}
|
||||
|
||||
exposeToPlugin('lodash', _);
|
||||
exposeToPlugin('moment', moment);
|
||||
exposeToPlugin('jquery', jquery);
|
||||
exposeToPlugin('angular', angular);
|
||||
exposeToPlugin('app/plugins/sdk', sdk);
|
||||
exposeToPlugin('app/core/utils/datemath', datemath);
|
||||
exposeToPlugin('app/core/utils/kbn', kbn);
|
||||
exposeToPlugin('app/core/config', config);
|
||||
exposeToPlugin('app/core/time_series', TimeSeries);
|
||||
exposeToPlugin('app/core/time_series2', TimeSeries);
|
||||
|
||||
import 'vendor/flot/jquery.flot';
|
||||
import 'vendor/flot/jquery.flot.selection';
|
||||
import 'vendor/flot/jquery.flot.time';
|
||||
import 'vendor/flot/jquery.flot.stack';
|
||||
import 'vendor/flot/jquery.flot.pie';
|
||||
import 'vendor/flot/jquery.flot.stackpercent';
|
||||
import 'vendor/flot/jquery.flot.fillbelow';
|
||||
import 'vendor/flot/jquery.flot.crosshair';
|
||||
import 'vendor/flot/jquery.flot.dashes';
|
||||
|
||||
for (let flotDep of ['jquery.flot', 'jquery.flot.pie', 'jquery.flot.time']) {
|
||||
System.registerDynamic(flotDep, [], true, function(require, exports, module) { module.exports = {fakeDep: 1}; });
|
||||
}
|
||||
|
||||
export function importPluginModule(path: string): Promise<any> {
|
||||
let builtIn = builtInPlugins[path];
|
||||
if (builtIn) {
|
||||
return Promise.resolve(builtIn);
|
||||
}
|
||||
return System.import(path);
|
||||
}
|
||||
|
||||
export function loadPluginCss(options) {
|
||||
if (config.bootData.user.lightTheme) {
|
||||
System.import(options.light + '!css');
|
||||
} else {
|
||||
System.import(options.dark + '!css');
|
||||
}
|
||||
}
|
||||
|
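In short, the new plugin_loader.ts resolves bundled plugins from the builtInPlugins map and only falls back to SystemJS for external ones. A hedged usage sketch follows; the CSS paths are placeholders, not real plugin assets.

import {importPluginModule, loadPluginCss} from 'app/features/plugins/plugin_loader';

// Built-in module paths resolve from the map; anything else goes through System.import.
importPluginModule('app/plugins/datasource/prometheus/module').then(mod => {
  console.log('prometheus module loaded', Object.keys(mod));
});

// Theme-aware CSS loading; light/dark paths below are illustrative only.
loadPluginCss({
  light: 'plugins/my-panel/css/panel.light.css',
  dark: 'plugins/my-panel/css/panel.dark.css',
});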
60
public/app/features/plugins/specs/datasource_srv_specs.ts
Normal file
@@ -0,0 +1,60 @@
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
import config from 'app/core/config';
import 'app/features/plugins/datasource_srv';

describe('datasource_srv', function() {
var _datasourceSrv;
var metricSources;
var templateSrv = {};

beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module(function($provide) {
$provide.value('templateSrv', templateSrv);
}));
beforeEach(angularMocks.module('grafana.services'));
beforeEach(angularMocks.inject(function(datasourceSrv) {
_datasourceSrv = datasourceSrv;
}));

describe('when loading metric sources', function() {
var unsortedDatasources = {
'mmm': {
type: 'test-db',
meta: { metrics: {m: 1} }
},
'--Grafana--': {
type: 'grafana',
meta: {builtIn: true, metrics: {m: 1}, id: "grafana"}
},
'--Mixed--': {
type: 'test-db',
meta: {builtIn: true, metrics: {m: 1}, id: "mixed"}
},
'ZZZ': {
type: 'test-db',
meta: {metrics: {m: 1} }
},
'aaa': {
type: 'test-db',
meta: { metrics: {m: 1} }
},
'BBB': {
type: 'test-db',
meta: { metrics: {m: 1} }
},
};
beforeEach(function() {
config.datasources = unsortedDatasources;
metricSources = _datasourceSrv.getMetricSources({skipVariables: true});
});

it('should return a list of sources sorted case insensitively with builtin sources last', function() {
expect(metricSources[0].name).to.be('aaa');
expect(metricSources[1].name).to.be('BBB');
expect(metricSources[2].name).to.be('mmm');
expect(metricSources[3].name).to.be('ZZZ');
expect(metricSources[4].name).to.be('--Grafana--');
expect(metricSources[5].name).to.be('--Mixed--');
});
});
});
@@ -36,7 +36,7 @@ class StyleGuideCtrl {
|
||||
}
|
||||
|
||||
loadColors() {
|
||||
this.$http.get('public/sass/styleguide.json').then(res => {
|
||||
this.$http.get('public/build/styleguide.json').then(res => {
|
||||
this.colors = _.map(res.data[this.theme], (value, key) => {
|
||||
return {name: key, value: value};
|
||||
});
|
||||
|
@@ -10,6 +10,7 @@ describe('templateSrv', function() {
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
beforeEach(angularMocks.module($provide => {
|
||||
$provide.value('timeSrv', {});
|
||||
$provide.value('datasourceSrv', {});
|
||||
}));
|
||||
|
||||
beforeEach(angularMocks.inject(function(variableSrv, templateSrv) {
|
||||
|
1
public/app/headers/common.d.ts
vendored
@@ -81,3 +81,4 @@ declare module 'ace' {
var ace: any;
export default ace;
}

3
public/app/index.ts
Normal file
@@ -0,0 +1,3 @@
import app from './app';
app.init();
@@ -1,2 +0,0 @@
declare var test: any;
export default test;
@@ -1,106 +0,0 @@
|
||||
define([
|
||||
'lodash',
|
||||
],
|
||||
function (_) {
|
||||
'use strict';
|
||||
|
||||
function CloudWatchAnnotationQuery(datasource, annotation, $q, templateSrv) {
|
||||
this.datasource = datasource;
|
||||
this.annotation = annotation;
|
||||
this.$q = $q;
|
||||
this.templateSrv = templateSrv;
|
||||
}
|
||||
|
||||
CloudWatchAnnotationQuery.prototype.process = function(from, to) {
|
||||
var self = this;
|
||||
var usePrefixMatch = this.annotation.prefixMatching;
|
||||
var region = this.templateSrv.replace(this.annotation.region);
|
||||
var namespace = this.templateSrv.replace(this.annotation.namespace);
|
||||
var metricName = this.templateSrv.replace(this.annotation.metricName);
|
||||
var dimensions = this.datasource.convertDimensionFormat(this.annotation.dimensions);
|
||||
var statistics = _.map(this.annotation.statistics, function(s) { return self.templateSrv.replace(s); });
|
||||
var defaultPeriod = usePrefixMatch ? '' : '300';
|
||||
var period = this.annotation.period || defaultPeriod;
|
||||
period = parseInt(period, 10);
|
||||
var actionPrefix = this.annotation.actionPrefix || '';
|
||||
var alarmNamePrefix = this.annotation.alarmNamePrefix || '';
|
||||
|
||||
var d = this.$q.defer();
|
||||
var allQueryPromise;
|
||||
if (usePrefixMatch) {
|
||||
allQueryPromise = [
|
||||
this.datasource.performDescribeAlarms(region, actionPrefix, alarmNamePrefix, [], '').then(function(alarms) {
|
||||
alarms.MetricAlarms = self.filterAlarms(alarms, namespace, metricName, dimensions, statistics, period);
|
||||
return alarms;
|
||||
})
|
||||
];
|
||||
} else {
|
||||
if (!region || !namespace || !metricName || _.isEmpty(statistics)) { return this.$q.when([]); }
|
||||
|
||||
allQueryPromise = _.map(statistics, function(statistic) {
|
||||
return self.datasource.performDescribeAlarmsForMetric(region, namespace, metricName, dimensions, statistic, period);
|
||||
});
|
||||
}
|
||||
this.$q.all(allQueryPromise).then(function(alarms) {
|
||||
var eventList = [];
|
||||
|
||||
var start = self.datasource.convertToCloudWatchTime(from, false);
|
||||
var end = self.datasource.convertToCloudWatchTime(to, true);
|
||||
_.chain(alarms)
|
||||
.map('MetricAlarms')
|
||||
.flatten()
|
||||
.each(function(alarm) {
|
||||
if (!alarm) {
|
||||
d.resolve(eventList);
|
||||
return;
|
||||
}
|
||||
|
||||
self.datasource.performDescribeAlarmHistory(region, alarm.AlarmName, start, end).then(function(history) {
|
||||
_.each(history.AlarmHistoryItems, function(h) {
|
||||
var event = {
|
||||
annotation: self.annotation,
|
||||
time: Date.parse(h.Timestamp),
|
||||
title: h.AlarmName,
|
||||
tags: [h.HistoryItemType],
|
||||
text: h.HistorySummary
|
||||
};
|
||||
|
||||
eventList.push(event);
|
||||
});
|
||||
|
||||
d.resolve(eventList);
|
||||
});
|
||||
})
|
||||
.value();
|
||||
});
|
||||
|
||||
return d.promise;
|
||||
};
|
||||
|
||||
CloudWatchAnnotationQuery.prototype.filterAlarms = function(alarms, namespace, metricName, dimensions, statistics, period) {
|
||||
return _.filter(alarms.MetricAlarms, function(alarm) {
|
||||
if (!_.isEmpty(namespace) && alarm.Namespace !== namespace) {
|
||||
return false;
|
||||
}
|
||||
if (!_.isEmpty(metricName) && alarm.MetricName !== metricName) {
|
||||
return false;
|
||||
}
|
||||
var sd = function(d) {
|
||||
return d.Name;
|
||||
};
|
||||
var isSameDimensions = JSON.stringify(_.sortBy(alarm.Dimensions, sd)) === JSON.stringify(_.sortBy(dimensions, sd));
|
||||
if (!_.isEmpty(dimensions) && !isSameDimensions) {
|
||||
return false;
|
||||
}
|
||||
if (!_.isEmpty(statistics) && !_.includes(statistics, alarm.Statistic)) {
|
||||
return false;
|
||||
}
|
||||
if (!_.isNaN(period) && alarm.Period !== period) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
};
|
||||
|
||||
return CloudWatchAnnotationQuery;
|
||||
});
|
@@ -1,3 +1,3 @@
|
||||
declare var CloudWatchDatasource: any;
|
||||
export {CloudWatchDatasource};
|
||||
export default CloudWatchDatasource;
|
||||
|
||||
|
@@ -5,18 +5,18 @@ define([
|
||||
'app/core/utils/datemath',
|
||||
'app/core/utils/kbn',
|
||||
'app/features/templating/variable',
|
||||
'./annotation_query',
|
||||
],
|
||||
function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnotationQuery) {
|
||||
function (angular, _, moment, dateMath, kbn, templatingVariable) {
|
||||
'use strict';
|
||||
|
||||
/** @ngInject */
|
||||
function CloudWatchDatasource(instanceSettings, $q, backendSrv, templateSrv) {
|
||||
function CloudWatchDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv) {
|
||||
this.type = 'cloudwatch';
|
||||
this.name = instanceSettings.name;
|
||||
this.supportMetrics = true;
|
||||
this.proxyUrl = instanceSettings.url;
|
||||
this.defaultRegion = instanceSettings.jsonData.defaultRegion;
|
||||
this.instanceSettings = instanceSettings;
|
||||
this.standardStatistics = [
|
||||
'Average',
|
||||
'Maximum',
|
||||
@@ -27,31 +27,30 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
|
||||
var self = this;
|
||||
this.query = function(options) {
|
||||
var start = self.convertToCloudWatchTime(options.range.from, false);
|
||||
var end = self.convertToCloudWatchTime(options.range.to, true);
|
||||
|
||||
var queries = [];
|
||||
options = angular.copy(options);
|
||||
options.targets = this.expandTemplateVariable(options.targets, options.scopedVars, templateSrv);
|
||||
_.each(options.targets, function(target) {
|
||||
if (target.hide || !target.namespace || !target.metricName || _.isEmpty(target.statistics)) {
|
||||
return;
|
||||
}
|
||||
|
||||
var query = {};
|
||||
query.region = templateSrv.replace(target.region, options.scopedVars);
|
||||
query.namespace = templateSrv.replace(target.namespace, options.scopedVars);
|
||||
query.metricName = templateSrv.replace(target.metricName, options.scopedVars);
|
||||
query.dimensions = self.convertDimensionFormat(target.dimensions, options.scopedVars);
|
||||
query.statistics = target.statistics;
|
||||
var queries = _.filter(options.targets, function (item) {
|
||||
return item.hide !== true &&
|
||||
!!item.region &&
|
||||
!!item.namespace &&
|
||||
!!item.metricName &&
|
||||
!_.isEmpty(item.statistics);
|
||||
}).map(function (item) {
|
||||
item.region = templateSrv.replace(item.region, options.scopedVars);
|
||||
item.namespace = templateSrv.replace(item.namespace, options.scopedVars);
|
||||
item.metricName = templateSrv.replace(item.metricName, options.scopedVars);
|
||||
item.dimensions = self.convertDimensionFormat(item.dimensions, options.scopeVars);
|
||||
item.period = self.getPeriod(item, options);
|
||||
|
||||
var now = Math.round(Date.now() / 1000);
|
||||
var period = this.getPeriod(target, query, options, start, end, now);
|
||||
target.period = period;
|
||||
query.period = period;
|
||||
|
||||
queries.push(query);
|
||||
}.bind(this));
|
||||
return _.extend({
|
||||
refId: item.refId,
|
||||
intervalMs: options.intervalMs,
|
||||
maxDataPoints: options.maxDataPoints,
|
||||
datasourceId: self.instanceSettings.id,
|
||||
type: 'timeSeriesQuery',
|
||||
}, item);
|
||||
});
|
||||
|
||||
// No valid targets, return the empty result to save a round trip.
|
||||
if (_.isEmpty(queries)) {
|
||||
@@ -60,23 +59,20 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
return d.promise;
|
||||
}
|
||||
|
||||
var allQueryPromise = _.map(queries, function(query) {
|
||||
return this.performTimeSeriesQuery(query, start, end);
|
||||
}.bind(this));
|
||||
var request = {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: queries
|
||||
};
|
||||
|
||||
return $q.all(allQueryPromise).then(function(allResponse) {
|
||||
var result = [];
|
||||
|
||||
_.each(allResponse, function(response, index) {
|
||||
var metrics = transformMetricData(response, options.targets[index], options.scopedVars);
|
||||
result = result.concat(metrics);
|
||||
});
|
||||
|
||||
return {data: result};
|
||||
});
|
||||
return this.performTimeSeriesQuery(request);
|
||||
};
|
||||
|
||||
this.getPeriod = function(target, query, options, start, end, now) {
|
||||
this.getPeriod = function(target, options, now) {
|
||||
var start = this.convertToCloudWatchTime(options.range.from, false);
|
||||
var end = this.convertToCloudWatchTime(options.range.to, true);
|
||||
now = Math.round((now || Date.now()) / 1000);
|
||||
|
||||
var period;
|
||||
var range = end - start;
|
||||
|
||||
@@ -85,7 +81,7 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
var periodUnit = 60;
|
||||
if (!target.period) {
|
||||
if (now - start <= (daySec * 15)) { // until 15 days ago
|
||||
if (query.namespace === 'AWS/EC2') {
|
||||
if (target.namespace === 'AWS/EC2') {
|
||||
periodUnit = period = 300;
|
||||
} else {
|
||||
periodUnit = period = 60;
|
||||
@@ -114,85 +110,93 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
return period;
|
||||
};
|
||||
|
||||
this.performTimeSeriesQuery = function(query, start, end) {
|
||||
var statistics = _.filter(query.statistics, function(s) { return _.includes(self.standardStatistics, s); });
|
||||
var extendedStatistics = _.reject(query.statistics, function(s) { return _.includes(self.standardStatistics, s); });
|
||||
return this.awsRequest({
|
||||
region: query.region,
|
||||
action: 'GetMetricStatistics',
|
||||
parameters: {
|
||||
namespace: query.namespace,
|
||||
metricName: query.metricName,
|
||||
dimensions: query.dimensions,
|
||||
statistics: statistics,
|
||||
extendedStatistics: extendedStatistics,
|
||||
startTime: start,
|
||||
endTime: end,
|
||||
period: query.period
|
||||
this.performTimeSeriesQuery = function(request) {
|
||||
return backendSrv.post('/api/tsdb/query', request).then(function (res) {
|
||||
var data = [];
|
||||
|
||||
if (res.results) {
|
||||
_.forEach(res.results, function (queryRes) {
|
||||
_.forEach(queryRes.series, function (series) {
|
||||
data.push({target: series.name, datapoints: series.points});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return {data: data};
|
||||
});
|
||||
};
|
||||
|
||||
this.getRegions = function() {
|
||||
return this.awsRequest({action: '__GetRegions'});
|
||||
function transformSuggestDataFromTable(suggestData) {
|
||||
return _.map(suggestData.results['metricFindQuery'].tables[0].rows, function (v) {
|
||||
return {
|
||||
text: v[0],
|
||||
value: v[1]
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
this.doMetricQueryRequest = function (subtype, parameters) {
|
||||
var range = timeSrv.timeRange();
|
||||
return backendSrv.post('/api/tsdb/query', {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: [
|
||||
_.extend({
|
||||
refId: 'metricFindQuery',
|
||||
intervalMs: 1, // dummy
|
||||
maxDataPoints: 1, // dummy
|
||||
datasourceId: this.instanceSettings.id,
|
||||
type: 'metricFindQuery',
|
||||
subtype: subtype
|
||||
}, parameters)
|
||||
]
|
||||
}).then(function (r) { return transformSuggestDataFromTable(r); });
|
||||
};
|
||||
|
||||
this.getRegions = function () {
|
||||
return this.doMetricQueryRequest('regions', null);
|
||||
};
|
||||
|
||||
this.getNamespaces = function() {
|
||||
return this.awsRequest({action: '__GetNamespaces'});
|
||||
return this.doMetricQueryRequest('namespaces', null);
|
||||
};
|
||||
|
||||
this.getMetrics = function(namespace, region) {
|
||||
return this.awsRequest({
|
||||
action: '__GetMetrics',
|
||||
region: region,
|
||||
parameters: {
|
||||
namespace: templateSrv.replace(namespace)
|
||||
}
|
||||
this.getMetrics = function (namespace, region) {
|
||||
return this.doMetricQueryRequest('metrics', {
|
||||
region: templateSrv.replace(region),
|
||||
namespace: templateSrv.replace(namespace)
|
||||
});
|
||||
};
|
||||
|
||||
this.getDimensionKeys = function(namespace, region) {
|
||||
return this.awsRequest({
|
||||
action: '__GetDimensions',
|
||||
region: region,
|
||||
parameters: {
|
||||
namespace: templateSrv.replace(namespace)
|
||||
}
|
||||
return this.doMetricQueryRequest('dimension_keys', {
|
||||
region: templateSrv.replace(region),
|
||||
namespace: templateSrv.replace(namespace)
|
||||
});
|
||||
};
|
||||
|
||||
this.getDimensionValues = function(region, namespace, metricName, dimensionKey, filterDimensions) {
|
||||
var request = {
|
||||
return this.doMetricQueryRequest('dimension_values', {
|
||||
region: templateSrv.replace(region),
|
||||
action: 'ListMetrics',
|
||||
parameters: {
|
||||
namespace: templateSrv.replace(namespace),
|
||||
metricName: templateSrv.replace(metricName),
|
||||
dimensions: this.convertDimensionFormat(filterDimensions, {}),
|
||||
}
|
||||
};
|
||||
|
||||
return this.awsRequest(request).then(function(result) {
|
||||
return _.chain(result.Metrics)
|
||||
.map('Dimensions')
|
||||
.flatten()
|
||||
.filter(function(dimension) {
|
||||
return dimension !== null && dimension.Name === dimensionKey;
|
||||
})
|
||||
.map('Value')
|
||||
.uniq()
|
||||
.sortBy()
|
||||
.map(function(value) {
|
||||
return {value: value, text: value};
|
||||
}).value();
|
||||
namespace: templateSrv.replace(namespace),
|
||||
metricName: templateSrv.replace(metricName),
|
||||
dimensionKey: templateSrv.replace(dimensionKey),
|
||||
dimensions: this.convertDimensionFormat(filterDimensions, {}),
|
||||
});
|
||||
};
|
||||
|
||||
this.performEC2DescribeInstances = function(region, filters, instanceIds) {
|
||||
return this.awsRequest({
|
||||
region: region,
|
||||
action: 'DescribeInstances',
|
||||
parameters: { filters: filters, instanceIds: instanceIds }
|
||||
this.getEbsVolumeIds = function(region, instanceId) {
|
||||
return this.doMetricQueryRequest('ebs_volume_ids', {
|
||||
region: templateSrv.replace(region),
|
||||
instanceId: templateSrv.replace(instanceId)
|
||||
});
|
||||
};
|
||||
|
||||
this.getEc2InstanceAttribute = function(region, attributeName, filters) {
|
||||
return this.doMetricQueryRequest('ec2_instance_attribute', {
|
||||
region: templateSrv.replace(region),
|
||||
attributeName: templateSrv.replace(attributeName),
|
||||
filters: filters
|
||||
});
|
||||
};
|
||||
|
||||
@@ -201,12 +205,6 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
var namespace;
|
||||
var metricName;
|
||||
|
||||
var transformSuggestData = function(suggestData) {
|
||||
return _.map(suggestData, function(v) {
|
||||
return { text: v };
|
||||
});
|
||||
};
|
||||
|
||||
var regionQuery = query.match(/^regions\(\)/);
|
||||
if (regionQuery) {
|
||||
return this.getRegions();
|
||||
@@ -219,114 +217,98 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
|
||||
var metricNameQuery = query.match(/^metrics\(([^\)]+?)(,\s?([^,]+?))?\)/);
|
||||
if (metricNameQuery) {
|
||||
return this.getMetrics(templateSrv.replace(metricNameQuery[1]), templateSrv.replace(metricNameQuery[3]));
|
||||
namespace = metricNameQuery[1];
|
||||
region = metricNameQuery[3];
|
||||
return this.getMetrics(namespace, region);
|
||||
}
|
||||
|
||||
var dimensionKeysQuery = query.match(/^dimension_keys\(([^\)]+?)(,\s?([^,]+?))?\)/);
|
||||
if (dimensionKeysQuery) {
|
||||
return this.getDimensionKeys(templateSrv.replace(dimensionKeysQuery[1]), templateSrv.replace(dimensionKeysQuery[3]));
|
||||
namespace = dimensionKeysQuery[1];
|
||||
region = dimensionKeysQuery[3];
|
||||
return this.getDimensionKeys(namespace, region);
|
||||
}
|
||||
|
||||
var dimensionValuesQuery = query.match(/^dimension_values\(([^,]+?),\s?([^,]+?),\s?([^,]+?),\s?([^,]+?)\)/);
|
||||
if (dimensionValuesQuery) {
|
||||
region = templateSrv.replace(dimensionValuesQuery[1]);
|
||||
namespace = templateSrv.replace(dimensionValuesQuery[2]);
|
||||
metricName = templateSrv.replace(dimensionValuesQuery[3]);
|
||||
var dimensionKey = templateSrv.replace(dimensionValuesQuery[4]);
|
||||
region = dimensionValuesQuery[1];
|
||||
namespace = dimensionValuesQuery[2];
|
||||
metricName = dimensionValuesQuery[3];
|
||||
var dimensionKey = dimensionValuesQuery[4];
|
||||
|
||||
return this.getDimensionValues(region, namespace, metricName, dimensionKey, {});
|
||||
}
|
||||
|
||||
var ebsVolumeIdsQuery = query.match(/^ebs_volume_ids\(([^,]+?),\s?([^,]+?)\)/);
|
||||
if (ebsVolumeIdsQuery) {
|
||||
region = templateSrv.replace(ebsVolumeIdsQuery[1]);
|
||||
var instanceId = templateSrv.replace(ebsVolumeIdsQuery[2]);
|
||||
var instanceIds = [
|
||||
instanceId
|
||||
];
|
||||
|
||||
return this.performEC2DescribeInstances(region, [], instanceIds).then(function(result) {
|
||||
var volumeIds = _.map(result.Reservations[0].Instances[0].BlockDeviceMappings, function(mapping) {
|
||||
return mapping.Ebs.VolumeId;
|
||||
});
|
||||
|
||||
return transformSuggestData(volumeIds);
|
||||
});
|
||||
region = ebsVolumeIdsQuery[1];
|
||||
var instanceId = ebsVolumeIdsQuery[2];
|
||||
return this.getEbsVolumeIds(region, instanceId);
|
||||
}
|
||||
|
||||
var ec2InstanceAttributeQuery = query.match(/^ec2_instance_attribute\(([^,]+?),\s?([^,]+?),\s?(.+?)\)/);
|
||||
if (ec2InstanceAttributeQuery) {
|
||||
region = templateSrv.replace(ec2InstanceAttributeQuery[1]);
|
||||
region = ec2InstanceAttributeQuery[1];
|
||||
var targetAttributeName = ec2InstanceAttributeQuery[2];
|
||||
var filterJson = JSON.parse(templateSrv.replace(ec2InstanceAttributeQuery[3]));
|
||||
var filters = _.map(filterJson, function(values, name) {
|
||||
return {
|
||||
Name: name,
|
||||
Values: values
|
||||
};
|
||||
});
|
||||
var targetAttributeName = templateSrv.replace(ec2InstanceAttributeQuery[2]);
|
||||
|
||||
return this.performEC2DescribeInstances(region, filters, null).then(function(result) {
|
||||
var attributes = _.chain(result.Reservations)
|
||||
.map(function(reservations) {
|
||||
return _.map(reservations.Instances, function(instance) {
|
||||
var tags = {};
|
||||
_.each(instance.Tags, function(tag) {
|
||||
tags[tag.Key] = tag.Value;
|
||||
});
|
||||
instance.Tags = tags;
|
||||
return instance;
|
||||
});
|
||||
})
|
||||
.map(function(instances) {
|
||||
return _.map(instances, targetAttributeName);
|
||||
})
|
||||
.flatten().uniq().sortBy().value();
|
||||
return transformSuggestData(attributes);
|
||||
});
|
||||
return this.getEc2InstanceAttribute(region, targetAttributeName, filterJson);
|
||||
}
|
||||
|
||||
return $q.when([]);
|
||||
};
|
||||
|
||||
this.performDescribeAlarms = function(region, actionPrefix, alarmNamePrefix, alarmNames, stateValue) {
|
||||
return this.awsRequest({
|
||||
region: region,
|
||||
action: 'DescribeAlarms',
|
||||
parameters: { actionPrefix: actionPrefix, alarmNamePrefix: alarmNamePrefix, alarmNames: alarmNames, stateValue: stateValue }
|
||||
this.annotationQuery = function (options) {
|
||||
var annotation = options.annotation;
|
||||
var statistics = _.map(annotation.statistics, function (s) { return templateSrv.replace(s); });
|
||||
var defaultPeriod = annotation.prefixMatching ? '' : '300';
|
||||
var period = annotation.period || defaultPeriod;
|
||||
period = parseInt(period, 10);
|
||||
var parameters = {
|
||||
prefixMatching: annotation.prefixMatching,
|
||||
region: templateSrv.replace(annotation.region),
|
||||
namespace: templateSrv.replace(annotation.namespace),
|
||||
metricName: templateSrv.replace(annotation.metricName),
|
||||
dimensions: this.convertDimensionFormat(annotation.dimensions, {}),
|
||||
statistics: statistics,
|
||||
period: period,
|
||||
actionPrefix: annotation.actionPrefix || '',
|
||||
alarmNamePrefix: annotation.alarmNamePrefix || ''
|
||||
};
|
||||
|
||||
return backendSrv.post('/api/tsdb/query', {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: [
|
||||
_.extend({
|
||||
refId: 'annotationQuery',
|
||||
intervalMs: 1, // dummy
|
||||
maxDataPoints: 1, // dummy
|
||||
datasourceId: this.instanceSettings.id,
|
||||
type: 'annotationQuery'
|
||||
}, parameters)
|
||||
]
|
||||
}).then(function (r) {
|
||||
return _.map(r.results['annotationQuery'].tables[0].rows, function (v) {
|
||||
return {
|
||||
annotation: annotation,
|
||||
time: Date.parse(v[0]),
|
||||
title: v[1],
|
||||
tags: [v[2]],
|
||||
text: v[3]
|
||||
};
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
this.performDescribeAlarmsForMetric = function(region, namespace, metricName, dimensions, statistic, period) {
|
||||
var s = _.includes(self.standardStatistics, statistic) ? statistic : '';
|
||||
var es = _.includes(self.standardStatistics, statistic) ? '' : statistic;
|
||||
return this.awsRequest({
|
||||
region: region,
|
||||
action: 'DescribeAlarmsForMetric',
|
||||
parameters: {
|
||||
namespace: namespace,
|
||||
metricName: metricName,
|
||||
dimensions: dimensions,
|
||||
statistic: s,
|
||||
extendedStatistic: es,
|
||||
period: period
|
||||
}
|
||||
this.targetContainsTemplate = function(target) {
|
||||
return templateSrv.variableExists(target.region) ||
|
||||
templateSrv.variableExists(target.namespace) ||
|
||||
templateSrv.variableExists(target.metricName) ||
|
||||
_.find(target.dimensions, function(v, k) {
|
||||
return templateSrv.variableExists(k) || templateSrv.variableExists(v);
|
||||
});
|
||||
};
|
||||
|
||||
this.performDescribeAlarmHistory = function(region, alarmName, startDate, endDate) {
|
||||
return this.awsRequest({
|
||||
region: region,
|
||||
action: 'DescribeAlarmHistory',
|
||||
parameters: { alarmName: alarmName, startDate: startDate, endDate: endDate }
|
||||
});
|
||||
};
|
||||
|
||||
this.annotationQuery = function(options) {
|
||||
var annotationQuery = new CloudWatchAnnotationQuery(this, options.annotation, $q, templateSrv);
|
||||
return annotationQuery.process(options.range.from, options.range.to);
|
||||
};
|
||||
|
||||
this.testDatasource = function() {
|
||||
/* use billing metrics for test */
|
||||
var region = this.defaultRegion;
|
||||
@@ -355,62 +337,6 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
return this.defaultRegion;
|
||||
};
|
||||
|
||||
function transformMetricData(md, options, scopedVars) {
|
||||
var aliasRegex = /\{\{(.+?)\}\}/g;
|
||||
var aliasPattern = options.alias || '{{metric}}_{{stat}}';
|
||||
var aliasData = {
|
||||
region: templateSrv.replace(options.region, scopedVars),
|
||||
namespace: templateSrv.replace(options.namespace, scopedVars),
|
||||
metric: templateSrv.replace(options.metricName, scopedVars),
|
||||
};
|
||||
|
||||
var aliasDimensions = {};
|
||||
|
||||
_.each(_.keys(options.dimensions), function(origKey) {
|
||||
var key = templateSrv.replace(origKey, scopedVars);
|
||||
var value = templateSrv.replace(options.dimensions[origKey], scopedVars);
|
||||
aliasDimensions[key] = value;
|
||||
});
|
||||
|
||||
_.extend(aliasData, aliasDimensions);
|
||||
|
||||
var periodMs = options.period * 1000;
|
||||
|
||||
return _.map(options.statistics, function(stat) {
|
||||
var extended = !_.includes(self.standardStatistics, stat);
|
||||
var dps = [];
|
||||
var lastTimestamp = null;
|
||||
_.chain(md.Datapoints)
|
||||
.sortBy(function(dp) {
|
||||
return dp.Timestamp;
|
||||
})
|
||||
.each(function(dp) {
|
||||
var timestamp = new Date(dp.Timestamp).getTime();
|
||||
while (lastTimestamp && (timestamp - lastTimestamp) > periodMs) {
|
||||
dps.push([null, lastTimestamp + periodMs]);
|
||||
lastTimestamp = lastTimestamp + periodMs;
|
||||
}
|
||||
lastTimestamp = timestamp;
|
||||
if (!extended) {
|
||||
dps.push([dp[stat], timestamp]);
|
||||
} else {
|
||||
dps.push([dp.ExtendedStatistics[stat], timestamp]);
|
||||
}
|
||||
})
|
||||
.value();
|
||||
|
||||
aliasData.stat = stat;
|
||||
var seriesName = aliasPattern.replace(aliasRegex, function(match, g1) {
|
||||
if (aliasData[g1]) {
|
||||
return aliasData[g1];
|
||||
}
|
||||
return g1;
|
||||
});
|
||||
|
||||
return {target: seriesName, datapoints: dps};
|
||||
});
|
||||
}
|
||||
|
||||
this.getExpandedVariables = function(target, dimensionKey, variable, templateSrv) {
|
||||
/* if the all checkbox is marked we should add all values to the targets */
|
||||
var allSelected = _.find(variable.options, {'selected': true, 'text': 'All'});
|
||||
@@ -461,17 +387,14 @@ function (angular, _, moment, dateMath, kbn, templatingVariable, CloudWatchAnnot
|
||||
};
|
||||
|
||||
this.convertDimensionFormat = function(dimensions, scopedVars) {
|
||||
return _.map(dimensions, function(value, key) {
|
||||
return {
|
||||
Name: templateSrv.replace(key, scopedVars),
|
||||
Value: templateSrv.replace(value, scopedVars)
|
||||
};
|
||||
var convertedDimensions = {};
|
||||
_.each(dimensions, function (value, key) {
|
||||
convertedDimensions[templateSrv.replace(key, scopedVars)] = templateSrv.replace(value, scopedVars);
|
||||
});
|
||||
return convertedDimensions;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
return {
|
||||
CloudWatchDatasource: CloudWatchDatasource
|
||||
};
|
||||
return CloudWatchDatasource;
|
||||
});
|
||||
|
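To summarize the rewrite above: instead of calling CloudWatch APIs such as GetMetricStatistics through the proxy, the datasource now posts one batched request to Grafana's /api/tsdb/query backend. A rough sketch of the payload assembled in this.query(); field values are illustrative only.

// Shape of the request body built above; concrete values are made up for illustration.
var request = {
  from: options.range.from.valueOf().toString(),   // epoch milliseconds as strings
  to: options.range.to.valueOf().toString(),
  queries: [{
    refId: 'A',
    intervalMs: options.intervalMs,
    maxDataPoints: options.maxDataPoints,
    datasourceId: 42,                               // instanceSettings.id
    type: 'timeSeriesQuery',
    region: 'us-east-1',
    namespace: 'AWS/EC2',
    metricName: 'CPUUtilization',
    dimensions: { InstanceId: 'i-12345678' },       // key/value map from convertDimensionFormat
    statistics: ['Average'],
    period: 300,
  }],
};
// backendSrv.post('/api/tsdb/query', request) then flattens res.results[*].series
// into {target, datapoints} pairs for the panel.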
@@ -1,6 +1,6 @@
|
||||
import './query_parameter_ctrl';
|
||||
|
||||
import {CloudWatchDatasource} from './datasource';
|
||||
import CloudWatchDatasource from './datasource';
|
||||
import {CloudWatchQueryCtrl} from './query_ctrl';
|
||||
import {CloudWatchConfigCtrl} from './config_ctrl';
|
||||
|
||||
|
@@ -4,6 +4,7 @@
|
||||
"id": "cloudwatch",
|
||||
|
||||
"metrics": true,
|
||||
"alerting": true,
|
||||
"annotations": true,
|
||||
|
||||
"info": {
|
||||
|
@@ -1,81 +0,0 @@
|
||||
import "../datasource";
|
||||
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
|
||||
import moment from 'moment';
|
||||
import helpers from 'test/specs/helpers';
|
||||
import {CloudWatchDatasource} from "../datasource";
|
||||
import CloudWatchAnnotationQuery from '../annotation_query';
|
||||
|
||||
describe('CloudWatchAnnotationQuery', function() {
|
||||
var ctx = new helpers.ServiceTestContext();
|
||||
var instanceSettings = {
|
||||
jsonData: {defaultRegion: 'us-east-1', access: 'proxy'},
|
||||
};
|
||||
|
||||
beforeEach(angularMocks.module('grafana.core'));
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
beforeEach(angularMocks.module('grafana.controllers'));
|
||||
beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
|
||||
|
||||
beforeEach(angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
|
||||
ctx.$q = $q;
|
||||
ctx.$httpBackend = $httpBackend;
|
||||
ctx.$rootScope = $rootScope;
|
||||
ctx.ds = $injector.instantiate(CloudWatchDatasource, {instanceSettings: instanceSettings});
|
||||
}));
|
||||
|
||||
describe('When performing annotationQuery', function() {
|
||||
var parameter = {
|
||||
annotation: {
|
||||
region: 'us-east-1',
|
||||
namespace: 'AWS/EC2',
|
||||
metricName: 'CPUUtilization',
|
||||
dimensions: {
|
||||
InstanceId: 'i-12345678'
|
||||
},
|
||||
statistics: ['Average'],
|
||||
period: 300
|
||||
},
|
||||
range: {
|
||||
from: moment(1443438674760),
|
||||
to: moment(1443460274760)
|
||||
}
|
||||
};
|
||||
var alarmResponse = {
|
||||
MetricAlarms: [
|
||||
{
|
||||
AlarmName: 'test_alarm_name'
|
||||
}
|
||||
]
|
||||
};
|
||||
var historyResponse = {
|
||||
AlarmHistoryItems: [
|
||||
{
|
||||
Timestamp: '2015-01-01T00:00:00.000Z',
|
||||
HistoryItemType: 'StateUpdate',
|
||||
AlarmName: 'test_alarm_name',
|
||||
HistoryData: '{}',
|
||||
HistorySummary: 'test_history_summary'
|
||||
}
|
||||
]
|
||||
};
|
||||
beforeEach(function() {
|
||||
ctx.backendSrv.datasourceRequest = function(params) {
|
||||
switch (params.data.action) {
|
||||
case 'DescribeAlarmsForMetric':
|
||||
return ctx.$q.when({data: alarmResponse});
|
||||
case 'DescribeAlarmHistory':
|
||||
return ctx.$q.when({data: historyResponse});
|
||||
}
|
||||
};
|
||||
});
|
||||
it('should return annotation list', function(done) {
|
||||
var annotationQuery = new CloudWatchAnnotationQuery(ctx.ds, parameter.annotation, ctx.$q, ctx.templateSrv);
|
||||
annotationQuery.process(parameter.range.from, parameter.range.to).then(function(result) {
|
||||
expect(result[0].title).to.be('test_alarm_name');
|
||||
expect(result[0].text).to.be('test_history_summary');
|
||||
done();
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
});
|
||||
});
|
||||
});
|
@@ -1,8 +1,7 @@
|
||||
|
||||
import "../datasource";
|
||||
import {describe, beforeEach, it, expect, angularMocks} from 'test/lib/common';
|
||||
import helpers from 'test/specs/helpers';
|
||||
import {CloudWatchDatasource} from "../datasource";
|
||||
import CloudWatchDatasource from "../datasource";
|
||||
|
||||
describe('CloudWatchDatasource', function() {
|
||||
var ctx = new helpers.ServiceTestContext();
|
||||
@@ -28,6 +27,7 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
var query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
region: 'us-east-1',
|
||||
@@ -43,37 +43,41 @@ describe('CloudWatchDatasource', function() {
|
||||
};
|
||||
|
||||
var response = {
|
||||
Datapoints: [
|
||||
{
|
||||
Average: 1,
|
||||
Timestamp: 'Wed Dec 31 1969 16:00:00 GMT-0800 (PST)'
|
||||
},
|
||||
{
|
||||
Average: 2,
|
||||
Timestamp: 'Wed Dec 31 1969 16:05:00 GMT-0800 (PST)'
|
||||
},
|
||||
{
|
||||
Average: 5,
|
||||
Timestamp: 'Wed Dec 31 1969 16:15:00 GMT-0800 (PST)'
|
||||
timings: [null],
|
||||
results: {
|
||||
A: {
|
||||
error: '',
|
||||
refId: 'A',
|
||||
series: [
|
||||
{
|
||||
name: 'CPUUtilization_Average',
|
||||
points: [
|
||||
[1, 1483228800000],
|
||||
[2, 1483229100000],
|
||||
[5, 1483229700000],
|
||||
],
|
||||
tags: {
|
||||
InstanceId: 'i-12345678'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
Label: 'CPUUtilization'
|
||||
}
|
||||
};
|
||||
|
||||
beforeEach(function() {
|
||||
ctx.backendSrv.datasourceRequest = function(params) {
|
||||
ctx.backendSrv.post = function(path, params) {
|
||||
requestParams = params;
|
||||
return ctx.$q.when({data: response});
|
||||
return ctx.$q.when(response);
|
||||
};
|
||||
});
|
||||
|
||||
it('should generate the correct query', function(done) {
|
||||
ctx.ds.query(query).then(function() {
|
||||
var params = requestParams.data.parameters;
|
||||
var params = requestParams.queries[0];
|
||||
expect(params.namespace).to.be(query.targets[0].namespace);
|
||||
expect(params.metricName).to.be(query.targets[0].metricName);
|
||||
expect(params.dimensions[0].Name).to.be(Object.keys(query.targets[0].dimensions)[0]);
|
||||
expect(params.dimensions[0].Value).to.be(query.targets[0].dimensions[Object.keys(query.targets[0].dimensions)[0]]);
|
||||
expect(params.dimensions['InstanceId']).to.be('i-12345678');
|
||||
expect(params.statistics).to.eql(query.targets[0].statistics);
|
||||
expect(params.period).to.be(query.targets[0].period);
|
||||
done();
|
||||
@@ -88,6 +92,7 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
var query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
region: 'us-east-1',
|
||||
@@ -103,7 +108,7 @@ describe('CloudWatchDatasource', function() {
|
||||
};
|
||||
|
||||
ctx.ds.query(query).then(function() {
|
||||
var params = requestParams.data.parameters;
|
||||
var params = requestParams.queries[0];
|
||||
expect(params.period).to.be(600);
|
||||
done();
|
||||
});
|
||||
@@ -112,16 +117,8 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
it('should return series list', function(done) {
|
||||
ctx.ds.query(query).then(function(result) {
|
||||
expect(result.data[0].target).to.be('CPUUtilization_Average');
|
||||
expect(result.data[0].datapoints[0][0]).to.be(response.Datapoints[0]['Average']);
|
||||
done();
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
});
|
||||
|
||||
it('should return null for missing data point', function(done) {
|
||||
ctx.ds.query(query).then(function(result) {
|
||||
expect(result.data[0].datapoints[2][0]).to.be(null);
|
||||
expect(result.data[0].target).to.be(response.results.A.series[0].name);
|
||||
expect(result.data[0].datapoints[0][0]).to.be(response.results.A.series[0].points[0][0]);
|
||||
done();
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
@@ -173,6 +170,7 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
var query = {
|
||||
range: { from: 'now-1h', to: 'now' },
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
region: 'us-east-1',
|
||||
@@ -189,40 +187,40 @@ describe('CloudWatchDatasource', function() {
|
||||
};
|
||||
|
||||
var response = {
|
||||
Datapoints: [
|
||||
{
|
||||
ExtendedStatistics: {
|
||||
'p90.00': 1
|
||||
},
|
||||
Timestamp: 'Wed Dec 31 1969 16:00:00 GMT-0800 (PST)'
|
||||
},
|
||||
{
|
||||
ExtendedStatistics: {
|
||||
'p90.00': 2
|
||||
},
|
||||
Timestamp: 'Wed Dec 31 1969 16:05:00 GMT-0800 (PST)'
|
||||
},
|
||||
{
|
||||
ExtendedStatistics: {
|
||||
'p90.00': 5
|
||||
},
|
||||
Timestamp: 'Wed Dec 31 1969 16:15:00 GMT-0800 (PST)'
|
||||
timings: [null],
|
||||
results: {
|
||||
A: {
|
||||
error: '',
|
||||
refId: 'A',
|
||||
series: [
|
||||
{
|
||||
name: 'TargetResponseTime_p90.00',
|
||||
points: [
|
||||
[1, 1483228800000],
|
||||
[2, 1483229100000],
|
||||
[5, 1483229700000],
|
||||
],
|
||||
tags: {
|
||||
LoadBalancer: 'lb',
|
||||
TargetGroup: 'tg'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
Label: 'TargetResponseTime'
|
||||
}
|
||||
};
|
||||
|
||||
beforeEach(function() {
|
||||
ctx.backendSrv.datasourceRequest = function(params) {
|
||||
ctx.backendSrv.post = function(path, params) {
|
||||
requestParams = params;
|
||||
return ctx.$q.when({data: response});
|
||||
return ctx.$q.when(response);
|
||||
};
|
||||
});
|
||||
|
||||
it('should return series list', function(done) {
|
||||
ctx.ds.query(query).then(function(result) {
|
||||
expect(result.data[0].target).to.be('TargetResponseTime_p90.00');
|
||||
expect(result.data[0].datapoints[0][0]).to.be(response.Datapoints[0].ExtendedStatistics['p90.00']);
|
||||
expect(result.data[0].target).to.be(response.results.A.series[0].name);
|
||||
expect(result.data[0].datapoints[0][0]).to.be(response.results.A.series[0].points[0][0]);
|
||||
done();
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
@@ -237,7 +235,11 @@ describe('CloudWatchDatasource', function() {
|
||||
setupCallback();
|
||||
ctx.backendSrv.datasourceRequest = args => {
|
||||
scenario.request = args;
|
||||
return ctx.$q.when({data: scenario.requestResponse });
|
||||
return ctx.$q.when({ data: scenario.requestResponse });
|
||||
};
|
||||
ctx.backendSrv.post = (path, args) => {
|
||||
scenario.request = args;
|
||||
return ctx.$q.when(scenario.requestResponse);
|
||||
};
|
||||
ctx.ds.metricFindQuery(query).then(args => {
|
||||
scenario.result = args;
|
||||
@@ -252,135 +254,178 @@ describe('CloudWatchDatasource', function() {
|
||||
|
||||
describeMetricFindQuery('regions()', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = [{text: 'us-east-1'}];
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
metricFindQuery: {
|
||||
tables: [
|
||||
{ rows: [['us-east-1', 'us-east-1']] }
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should call __GetRegions and return result', () => {
|
||||
expect(scenario.result[0].text).to.contain('us-east-1');
|
||||
expect(scenario.request.data.action).to.be('__GetRegions');
|
||||
expect(scenario.request.queries[0].type).to.be('metricFindQuery');
|
||||
expect(scenario.request.queries[0].subtype).to.be('regions');
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('namespaces()', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = [{text: 'AWS/EC2'}];
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
metricFindQuery: {
|
||||
tables: [
|
||||
{ rows: [['AWS/EC2', 'AWS/EC2']] }
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should call __GetNamespaces and return result', () => {
|
||||
expect(scenario.result[0].text).to.contain('AWS/EC2');
|
||||
expect(scenario.request.data.action).to.be('__GetNamespaces');
|
||||
expect(scenario.request.queries[0].type).to.be('metricFindQuery');
|
||||
expect(scenario.request.queries[0].subtype).to.be('namespaces');
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('metrics(AWS/EC2)', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = [{text: 'CPUUtilization'}];
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
metricFindQuery: {
|
||||
tables: [
|
||||
{ rows: [['CPUUtilization', 'CPUUtilization']] }
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should call __GetMetrics and return result', () => {
|
||||
expect(scenario.result[0].text).to.be('CPUUtilization');
|
||||
expect(scenario.request.data.action).to.be('__GetMetrics');
|
||||
expect(scenario.request.queries[0].type).to.be('metricFindQuery');
|
||||
expect(scenario.request.queries[0].subtype).to.be('metrics');
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('dimension_keys(AWS/EC2)', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = [{text: 'InstanceId'}];
|
||||
scenario.requestResponse = {
|
||||
results: {
|
||||
metricFindQuery: {
|
||||
tables: [
|
||||
{ rows: [['InstanceId', 'InstanceId']] }
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should call __GetDimensions and return result', () => {
|
||||
expect(scenario.result[0].text).to.be('InstanceId');
|
||||
expect(scenario.request.data.action).to.be('__GetDimensions');
|
||||
expect(scenario.request.queries[0].type).to.be('metricFindQuery');
|
||||
expect(scenario.request.queries[0].subtype).to.be('dimension_keys');
|
||||
});
|
||||
});
|
||||
|
||||
describeMetricFindQuery('dimension_values(us-east-1,AWS/EC2,CPUUtilization,InstanceId)', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
Metrics: [
|
||||
{
|
||||
Namespace: 'AWS/EC2',
|
||||
MetricName: 'CPUUtilization',
|
||||
Dimensions: [
|
||||
{
|
||||
Name: 'InstanceId',
|
||||
Value: 'i-12345678'
|
||||
}
|
||||
results: {
|
||||
metricFindQuery: {
|
||||
tables: [
|
||||
{ rows: [['i-12345678', 'i-12345678']] }
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
it('should call __ListMetrics and return result', () => {
|
||||
expect(scenario.result[0].text).to.be('i-12345678');
|
||||
expect(scenario.request.data.action).to.be('ListMetrics');
|
||||
expect(scenario.result[0].text).to.contain('i-12345678');
|
||||
expect(scenario.request.queries[0].type).to.be('metricFindQuery');
|
||||
expect(scenario.request.queries[0].subtype).to.be('dimension_values');
|
||||
});
|
||||
});
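// A minimal sketch (not the datasource implementation itself): the scenarios above
// assume the backend returns metricFindQuery results as a table and that the
// datasource flattens each row into { text, value } suggestions. The interface and
// function names below are hypothetical, chosen only to illustrate that mapping.
interface BackendTable { rows: any[][]; }
interface MetricFindSuggestion { text: string; value: string; }

function rowsToSuggestions(table: BackendTable): MetricFindSuggestion[] {
  // each row is assumed to be [text, value], matching the fixtures above
  return table.rows.map(row => ({ text: String(row[0]), value: String(row[1]) }));
}

// against the regions() fixture:
// rowsToSuggestions({ rows: [['us-east-1', 'us-east-1']] })
//   -> [{ text: 'us-east-1', value: 'us-east-1' }]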
|
||||
|
||||
it('should calculate the correct period', function () {
|
||||
var hourSec = 60 * 60;
|
||||
var daySec = hourSec * 24;
|
||||
var start = 1483196400;
|
||||
var start = 1483196400 * 1000;
|
||||
var testData: any[] = [
|
||||
[{ period: 60 }, { namespace: 'AWS/EC2' }, {}, start, start + 3600, (hourSec * 3), 60],
|
||||
[{ period: null }, { namespace: 'AWS/EC2' }, {}, start, start + 3600, (hourSec * 3), 300],
|
||||
[{ period: 60 }, { namespace: 'AWS/ELB' }, {}, start, start + 3600, (hourSec * 3), 60],
|
||||
[{ period: null }, { namespace: 'AWS/ELB' }, {}, start, start + 3600, (hourSec * 3), 60],
|
||||
[{ period: 1 }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 1440 - 1, (hourSec * 3 - 1), 1],
|
||||
[{ period: 1 }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (hourSec * 3 - 1), 60],
|
||||
[{ period: 60 }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (hourSec * 3), 60],
|
||||
[{ period: null }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (hourSec * 3 - 1), 60],
|
||||
[{ period: null }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (hourSec * 3), 60],
|
||||
[{ period: null }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (daySec * 15), 60],
|
||||
[{ period: null }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (daySec * 63), 300],
|
||||
[{ period: null }, { namespace: 'CustomMetricsNamespace' }, {}, start, start + 3600, (daySec * 455), 3600]
|
||||
[
|
||||
{ period: 60, namespace: 'AWS/EC2' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3), 60
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'AWS/EC2' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3), 300
|
||||
],
|
||||
[
|
||||
{ period: 60, namespace: 'AWS/ELB' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3), 60
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'AWS/ELB' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3), 60
|
||||
],
|
||||
[
|
||||
{ period: 1, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + (1440 - 1) * 1000) } },
|
||||
(hourSec * 3 - 1), 1
|
||||
],
|
||||
[
|
||||
{ period: 1, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3 - 1), 60
|
||||
],
|
||||
[
|
||||
{ period: 60, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3), 60
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3 - 1), 60
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(hourSec * 3), 60
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(daySec * 15), 60
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(daySec * 63), 300
|
||||
],
|
||||
[
|
||||
{ period: null, namespace: 'CustomMetricsNamespace' },
|
||||
{ range: { from: new Date(start), to: new Date(start + 3600 * 1000) } },
|
||||
(daySec * 455), 3600
|
||||
]
|
||||
];
|
||||
for (let t of testData) {
|
||||
let target = t[0];
|
||||
let query = t[1];
|
||||
let options = t[2];
|
||||
let start = t[3];
|
||||
let end = t[4];
|
||||
let now = start + t[5];
|
||||
let expected = t[6];
|
||||
let actual = ctx.ds.getPeriod(target, query, options, start, end, now);
|
||||
let options = t[1];
|
||||
let now = new Date(options.range.from.valueOf() + t[2] * 1000);
|
||||
let expected = t[3];
|
||||
let actual = ctx.ds.getPeriod(target, options, now);
|
||||
expect(actual).to.be(expected);
|
||||
}
|
||||
});
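// A hedged reading of the fixtures above, not the datasource's actual getPeriod:
// an explicit target period is honored unless the range or data age forces a
// coarser one; with no period set, AWS/EC2 defaults to 300s and other namespaces
// to 60s; CloudWatch returns at most ~1440 datapoints per query, so a too-fine
// period is rounded up to whole minutes; and retention pushes old ranges onto
// 60s/300s/3600s periods. The function name and signature are made up for this sketch.
function sketchGetPeriod(requested: number | null, namespace: string, rangeSec: number, ageSec: number): number {
  const daySec = 24 * 60 * 60;
  // default period when none is requested
  let period = requested || (namespace === 'AWS/EC2' ? 300 : 60);
  // keep the number of datapoints under the ~1440 limit
  if (rangeSec / period > 1440) {
    period = Math.ceil(rangeSec / 1440 / 60) * 60;
  }
  // retention: older data is only available at coarser resolutions
  if (ageSec >= daySec * 455) {
    period = Math.max(period, 3600);
  } else if (ageSec >= daySec * 63) {
    period = Math.max(period, 300);
  } else if (ageSec >= daySec * 15) {
    period = Math.max(period, 60);
  }
  return period;
}
// e.g. sketchGetPeriod(null, 'AWS/EC2', 3600, 3 * 3600) -> 300, matching the second row above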
|
||||
|
||||
describeMetricFindQuery('ec2_instance_attribute(us-east-1, Tags.Name, { "tag:team": [ "sysops" ] })', scenario => {
|
||||
scenario.setup(() => {
|
||||
scenario.requestResponse = {
|
||||
Reservations: [
|
||||
{
|
||||
Instances: [
|
||||
{
|
||||
Tags: [
|
||||
{ Key: 'InstanceId', Value: 'i-123456' },
|
||||
{ Key: 'Name', Value: 'Sysops Dev Server' },
|
||||
{ Key: 'env', Value: 'dev' },
|
||||
{ Key: 'team', Value: 'sysops' }
|
||||
]
|
||||
},
|
||||
{
|
||||
Tags: [
|
||||
{ Key: 'InstanceId', Value: 'i-789012' },
|
||||
{ Key: 'Name', Value: 'Sysops Staging Server' },
|
||||
{ Key: 'env', Value: 'staging' },
|
||||
{ Key: 'team', Value: 'sysops' }
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
};
|
||||
});
|
||||
|
||||
it('should return the "Name" tag for each instance', function() {
|
||||
expect(scenario.result[0].text).to.be('Sysops Dev Server');
|
||||
expect(scenario.result[1].text).to.be('Sysops Staging Server');
|
||||
});
|
||||
});
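// A minimal sketch of the mapping this fixture exercises (not the real datasource
// code): ec2_instance_attribute(region, 'Tags.Name', filters) is expected to walk
// Reservations -> Instances and resolve the requested tag for each instance. The
// interface and function names here are hypothetical.
interface Ec2Tag { Key: string; Value: string; }
interface Ec2Instance { Tags: Ec2Tag[]; }
interface Ec2Reservation { Instances: Ec2Instance[]; }

function instanceNameTags(reservations: Ec2Reservation[]): string[] {
  const names: string[] = [];
  for (const reservation of reservations) {
    for (const instance of reservation.Instances) {
      // pick the tag the query asked for; here that is the "Name" tag
      const nameTag = instance.Tags.find(tag => tag.Key === 'Name');
      if (nameTag) {
        names.push(nameTag.Value);
      }
    }
  }
  return names;
}
// against the fixture above: ['Sysops Dev Server', 'Sysops Staging Server']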
|
||||
|
||||
});
|
||||
|
@@ -1,3 +0,0 @@
|
||||
declare var ElasticDatasource: any;
|
||||
export {ElasticDatasource};
|
||||
|
@@ -1,372 +0,0 @@
|
||||
define([
|
||||
'angular',
|
||||
'lodash',
|
||||
'moment',
|
||||
'app/core/utils/kbn',
|
||||
'./query_builder',
|
||||
'./index_pattern',
|
||||
'./elastic_response',
|
||||
'./query_ctrl',
|
||||
],
|
||||
function (angular, _, moment, kbn, ElasticQueryBuilder, IndexPattern, ElasticResponse) {
|
||||
'use strict';
|
||||
|
||||
ElasticResponse = ElasticResponse.ElasticResponse;
|
||||
|
||||
/** @ngInject */
|
||||
function ElasticDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv) {
|
||||
this.basicAuth = instanceSettings.basicAuth;
|
||||
this.withCredentials = instanceSettings.withCredentials;
|
||||
this.url = instanceSettings.url;
|
||||
this.name = instanceSettings.name;
|
||||
this.index = instanceSettings.index;
|
||||
this.timeField = instanceSettings.jsonData.timeField;
|
||||
this.esVersion = instanceSettings.jsonData.esVersion;
|
||||
this.indexPattern = new IndexPattern(instanceSettings.index, instanceSettings.jsonData.interval);
|
||||
this.interval = instanceSettings.jsonData.timeInterval;
|
||||
this.queryBuilder = new ElasticQueryBuilder({
|
||||
timeField: this.timeField,
|
||||
esVersion: this.esVersion,
|
||||
});
|
||||
|
||||
this._request = function(method, url, data) {
|
||||
var options = {
|
||||
url: this.url + "/" + url,
|
||||
method: method,
|
||||
data: data
|
||||
};
|
||||
|
||||
if (this.basicAuth || this.withCredentials) {
|
||||
options.withCredentials = true;
|
||||
}
|
||||
if (this.basicAuth) {
|
||||
options.headers = {
|
||||
"Authorization": this.basicAuth
|
||||
};
|
||||
}
|
||||
|
||||
return backendSrv.datasourceRequest(options);
|
||||
};
|
||||
|
||||
this._get = function(url) {
|
||||
var range = timeSrv.timeRange();
|
||||
var index_list = this.indexPattern.getIndexList(range.from.valueOf(), range.to.valueOf());
|
||||
if (_.isArray(index_list) && index_list.length) {
|
||||
return this._request('GET', index_list[0] + url).then(function(results) {
|
||||
results.data.$$config = results.config;
|
||||
return results.data;
|
||||
});
|
||||
} else {
|
||||
return this._request('GET', this.indexPattern.getIndexForToday() + url).then(function(results) {
|
||||
results.data.$$config = results.config;
|
||||
return results.data;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
this._post = function(url, data) {
|
||||
return this._request('POST', url, data).then(function(results) {
|
||||
results.data.$$config = results.config;
|
||||
return results.data;
|
||||
});
|
||||
};
|
||||
|
||||
this.annotationQuery = function(options) {
|
||||
var annotation = options.annotation;
|
||||
var timeField = annotation.timeField || '@timestamp';
|
||||
var queryString = annotation.query || '*';
|
||||
var tagsField = annotation.tagsField || 'tags';
|
||||
var titleField = annotation.titleField || 'desc';
|
||||
var textField = annotation.textField || null;
|
||||
|
||||
var range = {};
|
||||
range[timeField]= {
|
||||
from: options.range.from.valueOf(),
|
||||
to: options.range.to.valueOf(),
|
||||
format: "epoch_millis",
|
||||
};
|
||||
|
||||
var queryInterpolated = templateSrv.replace(queryString, {}, 'lucene');
|
||||
var query = {
|
||||
"bool": {
|
||||
"filter": [
|
||||
{ "range": range },
|
||||
{
|
||||
"query_string": {
|
||||
"query": queryInterpolated
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var data = {
|
||||
"query" : query,
|
||||
"size": 10000
|
||||
};
|
||||
|
||||
// the 'fields' request parameter is not supported on ES 5.x
|
||||
if (this.esVersion < 5) {
|
||||
data["fields"] = [timeField, "_source"];
|
||||
}
|
||||
|
||||
var header = {search_type: "query_then_fetch", "ignore_unavailable": true};
|
||||
|
||||
// old elastic annotations had index specified on them
|
||||
if (annotation.index) {
|
||||
header.index = annotation.index;
|
||||
} else {
|
||||
header.index = this.indexPattern.getIndexList(options.range.from, options.range.to);
|
||||
}
|
||||
|
||||
var payload = angular.toJson(header) + '\n' + angular.toJson(data) + '\n';
|
||||
|
||||
return this._post('_msearch', payload).then(function(res) {
|
||||
var list = [];
|
||||
var hits = res.responses[0].hits.hits;
|
||||
|
||||
var getFieldFromSource = function(source, fieldName) {
|
||||
if (!fieldName) { return; }
|
||||
|
||||
var fieldNames = fieldName.split('.');
|
||||
var fieldValue = source;
|
||||
|
||||
for (var i = 0; i < fieldNames.length; i++) {
|
||||
fieldValue = fieldValue[fieldNames[i]];
|
||||
if (!fieldValue) {
|
||||
console.log('could not find field in annotation: ', fieldName);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
if (_.isArray(fieldValue)) {
|
||||
fieldValue = fieldValue.join(', ');
|
||||
}
|
||||
return fieldValue;
|
||||
};
|
||||
|
||||
for (var i = 0; i < hits.length; i++) {
|
||||
var source = hits[i]._source;
|
||||
var time = source[timeField];
|
||||
if (typeof hits[i].fields !== 'undefined') {
|
||||
var fields = hits[i].fields;
|
||||
if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) {
|
||||
time = fields[timeField];
|
||||
}
|
||||
}
|
||||
|
||||
var event = {
|
||||
annotation: annotation,
|
||||
time: moment.utc(time).valueOf(),
|
||||
title: getFieldFromSource(source, titleField),
|
||||
tags: getFieldFromSource(source, tagsField),
|
||||
text: getFieldFromSource(source, textField)
|
||||
};
|
||||
|
||||
list.push(event);
|
||||
}
|
||||
return list;
|
||||
});
|
||||
};
|
||||
|
||||
this.testDatasource = function() {
|
||||
timeSrv.setTime({ from: 'now-1m', to: 'now' }, true);
|
||||
// validate that the index exists and has a date field
|
||||
return this.getFields({type: 'date'}).then(function(dateFields) {
|
||||
var timeField = _.find(dateFields, {text: this.timeField});
|
||||
if (!timeField) {
|
||||
return { status: "error", message: "No date field named " + this.timeField + ' found' };
|
||||
}
|
||||
return { status: "success", message: "Index OK. Time field name OK." };
|
||||
}.bind(this), function(err) {
|
||||
console.log(err);
|
||||
if (err.data && err.data.error) {
|
||||
var message = angular.toJson(err.data.error);
|
||||
if (err.data.error.reason) {
|
||||
message = err.data.error.reason;
|
||||
}
|
||||
return { status: "error", message: message };
|
||||
} else {
|
||||
return { status: "error", message: err.status };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
this.getQueryHeader = function(searchType, timeFrom, timeTo) {
|
||||
var header = {search_type: searchType, "ignore_unavailable": true};
|
||||
header.index = this.indexPattern.getIndexList(timeFrom, timeTo);
|
||||
return angular.toJson(header);
|
||||
};
|
||||
|
||||
this.query = function(options) {
|
||||
var payload = "";
|
||||
var target;
|
||||
var sentTargets = [];
|
||||
|
||||
// add global adhoc filters to timeFilter
|
||||
var adhocFilters = templateSrv.getAdhocFilters(this.name);
|
||||
|
||||
for (var i = 0; i < options.targets.length; i++) {
|
||||
target = options.targets[i];
|
||||
if (target.hide) {continue;}
|
||||
|
||||
var queryString = templateSrv.replace(target.query || '*', options.scopedVars, 'lucene');
|
||||
var queryObj = this.queryBuilder.build(target, adhocFilters, queryString);
|
||||
var esQuery = angular.toJson(queryObj);
|
||||
|
||||
var searchType = (queryObj.size === 0 && this.esVersion < 5) ? 'count' : 'query_then_fetch';
|
||||
var header = this.getQueryHeader(searchType, options.range.from, options.range.to);
|
||||
payload += header + '\n';
|
||||
|
||||
payload += esQuery + '\n';
|
||||
sentTargets.push(target);
|
||||
}
|
||||
|
||||
if (sentTargets.length === 0) {
|
||||
return $q.when([]);
|
||||
}
|
||||
|
||||
payload = payload.replace(/\$timeFrom/g, options.range.from.valueOf());
|
||||
payload = payload.replace(/\$timeTo/g, options.range.to.valueOf());
|
||||
payload = templateSrv.replace(payload, options.scopedVars);
|
||||
|
||||
return this._post('_msearch', payload).then(function(res) {
|
||||
return new ElasticResponse(sentTargets, res).getTimeSeries();
|
||||
});
|
||||
};
|
||||
|
||||
this.getFields = function(query) {
|
||||
return this._get('/_mapping').then(function(result) {
|
||||
|
||||
var typeMap = {
|
||||
'float': 'number',
|
||||
'double': 'number',
|
||||
'integer': 'number',
|
||||
'long': 'number',
|
||||
'date': 'date',
|
||||
'string': 'string',
|
||||
'text': 'string',
|
||||
'scaled_float': 'number',
|
||||
'nested': 'nested'
|
||||
};
|
||||
|
||||
function shouldAddField(obj, key, query) {
|
||||
if (key[0] === '_') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!query.type) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// the field type matches the query's type filter directly, or via the typeMap translation
|
||||
return query.type === obj.type || query.type === typeMap[obj.type];
|
||||
}
|
||||
|
||||
// Store subfield names: [system, process, cpu, total] -> system.process.cpu.total
|
||||
var fieldNameParts = [];
|
||||
var fields = {};
|
||||
|
||||
function getFieldsRecursively(obj) {
|
||||
for (var key in obj) {
|
||||
var subObj = obj[key];
|
||||
|
||||
// Check mapping field for nested fields
|
||||
if (_.isObject(subObj.properties)) {
|
||||
fieldNameParts.push(key);
|
||||
getFieldsRecursively(subObj.properties);
|
||||
}
|
||||
|
||||
if (_.isObject(subObj.fields)) {
|
||||
fieldNameParts.push(key);
|
||||
getFieldsRecursively(subObj.fields);
|
||||
}
|
||||
|
||||
if (_.isString(subObj.type)) {
|
||||
var fieldName = fieldNameParts.concat(key).join('.');
|
||||
|
||||
// Hide meta-fields and check field type
|
||||
if (shouldAddField(subObj, key, query)) {
|
||||
fields[fieldName] = {
|
||||
text: fieldName,
|
||||
type: subObj.type
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
fieldNameParts.pop();
|
||||
}
|
||||
|
||||
for (var indexName in result) {
|
||||
var index = result[indexName];
|
||||
if (index && index.mappings) {
|
||||
var mappings = index.mappings;
|
||||
for (var typeName in mappings) {
|
||||
var properties = mappings[typeName].properties;
|
||||
getFieldsRecursively(properties);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// transform to array
|
||||
return _.map(fields, function(value) {
|
||||
return value;
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
this.getTerms = function(queryDef) {
|
||||
var range = timeSrv.timeRange();
|
||||
var searchType = this.esVersion >= 5 ? 'query_then_fetch' : 'count' ;
|
||||
var header = this.getQueryHeader(searchType, range.from, range.to);
|
||||
var esQuery = angular.toJson(this.queryBuilder.getTermsQuery(queryDef));
|
||||
|
||||
esQuery = esQuery.replace(/\$timeFrom/g, range.from.valueOf());
|
||||
esQuery = esQuery.replace(/\$timeTo/g, range.to.valueOf());
|
||||
esQuery = header + '\n' + esQuery + '\n';
|
||||
|
||||
return this._post('_msearch?search_type=' + searchType, esQuery).then(function(res) {
|
||||
if (!res.responses[0].aggregations) {
|
||||
return [];
|
||||
}
|
||||
|
||||
var buckets = res.responses[0].aggregations["1"].buckets;
|
||||
return _.map(buckets, function(bucket) {
|
||||
return {
|
||||
text: bucket.key_as_string || bucket.key,
|
||||
value: bucket.key
|
||||
};
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
this.metricFindQuery = function(query) {
|
||||
query = angular.fromJson(query);
|
||||
if (!query) {
|
||||
return $q.when([]);
|
||||
}
|
||||
|
||||
if (query.find === 'fields') {
|
||||
query.field = templateSrv.replace(query.field, {}, 'lucene');
|
||||
return this.getFields(query);
|
||||
}
|
||||
|
||||
if (query.find === 'terms') {
|
||||
query.query = templateSrv.replace(query.query || '*', {}, 'lucene');
|
||||
return this.getTerms(query);
|
||||
}
|
||||
};
|
||||
|
||||
this.getTagKeys = function() {
|
||||
return this.getFields({});
|
||||
};
|
||||
|
||||
this.getTagValues = function(options) {
|
||||
return this.getTerms({field: options.key, query: '*'});
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
ElasticDatasource: ElasticDatasource
|
||||
};
|
||||
});
|
376
public/app/plugins/datasource/elasticsearch/datasource.ts
Normal file
@@ -0,0 +1,376 @@
|
||||
///<reference path="../../../headers/common.d.ts" />
|
||||
|
||||
import angular from 'angular';
|
||||
import _ from 'lodash';
|
||||
import moment from 'moment';
|
||||
import {ElasticQueryBuilder} from './query_builder';
|
||||
import {IndexPattern} from './index_pattern';
|
||||
import {ElasticResponse} from './elastic_response';
|
||||
|
||||
export class ElasticDatasource {
|
||||
basicAuth: string;
|
||||
withCredentials: boolean;
|
||||
url: string;
|
||||
name: string;
|
||||
index: string;
|
||||
timeField: string;
|
||||
esVersion: number;
|
||||
interval: string;
|
||||
queryBuilder: ElasticQueryBuilder;
|
||||
indexPattern: IndexPattern;
|
||||
|
||||
/** @ngInject */
|
||||
constructor(instanceSettings, private $q, private backendSrv, private templateSrv, private timeSrv) {
|
||||
this.basicAuth = instanceSettings.basicAuth;
|
||||
this.withCredentials = instanceSettings.withCredentials;
|
||||
this.url = instanceSettings.url;
|
||||
this.name = instanceSettings.name;
|
||||
this.index = instanceSettings.index;
|
||||
this.timeField = instanceSettings.jsonData.timeField;
|
||||
this.esVersion = instanceSettings.jsonData.esVersion;
|
||||
this.indexPattern = new IndexPattern(instanceSettings.index, instanceSettings.jsonData.interval);
|
||||
this.interval = instanceSettings.jsonData.timeInterval;
|
||||
this.queryBuilder = new ElasticQueryBuilder({
|
||||
timeField: this.timeField,
|
||||
esVersion: this.esVersion,
|
||||
});
|
||||
}
|
||||
|
||||
private request(method, url, data?) {
|
||||
var options: any = {
|
||||
url: this.url + "/" + url,
|
||||
method: method,
|
||||
data: data
|
||||
};
|
||||
|
||||
if (this.basicAuth || this.withCredentials) {
|
||||
options.withCredentials = true;
|
||||
}
|
||||
if (this.basicAuth) {
|
||||
options.headers = {
|
||||
"Authorization": this.basicAuth
|
||||
};
|
||||
}
|
||||
|
||||
return this.backendSrv.datasourceRequest(options);
|
||||
}
|
||||
|
||||
private get(url) {
|
||||
var range = this.timeSrv.timeRange();
|
||||
var index_list = this.indexPattern.getIndexList(range.from.valueOf(), range.to.valueOf());
|
||||
if (_.isArray(index_list) && index_list.length) {
|
||||
return this.request('GET', index_list[0] + url).then(function(results) {
|
||||
results.data.$$config = results.config;
|
||||
return results.data;
|
||||
});
|
||||
} else {
|
||||
return this.request('GET', this.indexPattern.getIndexForToday() + url).then(function(results) {
|
||||
results.data.$$config = results.config;
|
||||
return results.data;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private post(url, data) {
|
||||
return this.request('POST', url, data).then(function(results) {
|
||||
results.data.$$config = results.config;
|
||||
return results.data;
|
||||
});
|
||||
}
|
||||
|
||||
annotationQuery(options) {
|
||||
var annotation = options.annotation;
|
||||
var timeField = annotation.timeField || '@timestamp';
|
||||
var queryString = annotation.query || '*';
|
||||
var tagsField = annotation.tagsField || 'tags';
|
||||
var titleField = annotation.titleField || 'desc';
|
||||
var textField = annotation.textField || null;
|
||||
|
||||
var range = {};
|
||||
range[timeField]= {
|
||||
from: options.range.from.valueOf(),
|
||||
to: options.range.to.valueOf(),
|
||||
format: "epoch_millis",
|
||||
};
|
||||
|
||||
var queryInterpolated = this.templateSrv.replace(queryString, {}, 'lucene');
|
||||
var query = {
|
||||
"bool": {
|
||||
"filter": [
|
||||
{ "range": range },
|
||||
{
|
||||
"query_string": {
|
||||
"query": queryInterpolated
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var data = {
|
||||
"query" : query,
|
||||
"size": 10000
|
||||
};
|
||||
|
||||
// the 'fields' request parameter is not supported on ES 5.x
|
||||
if (this.esVersion < 5) {
|
||||
data["fields"] = [timeField, "_source"];
|
||||
}
|
||||
|
||||
var header: any = {search_type: "query_then_fetch", "ignore_unavailable": true};
|
||||
|
||||
// old elastic annotations had index specified on them
|
||||
if (annotation.index) {
|
||||
header.index = annotation.index;
|
||||
} else {
|
||||
header.index = this.indexPattern.getIndexList(options.range.from, options.range.to);
|
||||
}
|
||||
|
||||
var payload = angular.toJson(header) + '\n' + angular.toJson(data) + '\n';
|
||||
|
||||
return this.post('_msearch', payload).then(res => {
|
||||
var list = [];
|
||||
var hits = res.responses[0].hits.hits;
|
||||
|
||||
var getFieldFromSource = function(source, fieldName) {
|
||||
if (!fieldName) { return; }
|
||||
|
||||
var fieldNames = fieldName.split('.');
|
||||
var fieldValue = source;
|
||||
|
||||
for (var i = 0; i < fieldNames.length; i++) {
|
||||
fieldValue = fieldValue[fieldNames[i]];
|
||||
if (!fieldValue) {
|
||||
console.log('could not find field in annotation: ', fieldName);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
if (_.isArray(fieldValue)) {
|
||||
fieldValue = fieldValue.join(', ');
|
||||
}
|
||||
return fieldValue;
|
||||
};
|
||||
|
||||
for (var i = 0; i < hits.length; i++) {
|
||||
var source = hits[i]._source;
|
||||
var time = source[timeField];
|
||||
if (typeof hits[i].fields !== 'undefined') {
|
||||
var fields = hits[i].fields;
|
||||
if (_.isString(fields[timeField]) || _.isNumber(fields[timeField])) {
|
||||
time = fields[timeField];
|
||||
}
|
||||
}
|
||||
|
||||
var event = {
|
||||
annotation: annotation,
|
||||
time: moment.utc(time).valueOf(),
|
||||
title: getFieldFromSource(source, titleField),
|
||||
tags: getFieldFromSource(source, tagsField),
|
||||
text: getFieldFromSource(source, textField)
|
||||
};
|
||||
|
||||
list.push(event);
|
||||
}
|
||||
return list;
|
||||
});
|
||||
};
|
||||
|
||||
testDatasource() {
|
||||
this.timeSrv.setTime({ from: 'now-1m', to: 'now' }, true);
|
||||
// validate that the index exists and has a date field
|
||||
return this.getFields({type: 'date'}).then(function(dateFields) {
|
||||
var timeField = _.find(dateFields, {text: this.timeField});
|
||||
if (!timeField) {
|
||||
return { status: "error", message: "No date field named " + this.timeField + ' found' };
|
||||
}
|
||||
return { status: "success", message: "Index OK. Time field name OK." };
|
||||
}.bind(this), function(err) {
|
||||
console.log(err);
|
||||
if (err.data && err.data.error) {
|
||||
var message = angular.toJson(err.data.error);
|
||||
if (err.data.error.reason) {
|
||||
message = err.data.error.reason;
|
||||
}
|
||||
return { status: "error", message: message };
|
||||
} else {
|
||||
return { status: "error", message: err.status };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getQueryHeader(searchType, timeFrom, timeTo) {
|
||||
return angular.toJson({
|
||||
search_type: searchType,
|
||||
"ignore_unavailable": true,
|
||||
index: this.indexPattern.getIndexList(timeFrom, timeTo),
|
||||
});
|
||||
}
|
||||
|
||||
query(options) {
|
||||
var payload = "";
|
||||
var target;
|
||||
var sentTargets = [];
|
||||
|
||||
// add global adhoc filters to timeFilter
|
||||
var adhocFilters = this.templateSrv.getAdhocFilters(this.name);
|
||||
|
||||
for (var i = 0; i < options.targets.length; i++) {
|
||||
target = options.targets[i];
|
||||
if (target.hide) {continue;}
|
||||
|
||||
var queryString = this.templateSrv.replace(target.query || '*', options.scopedVars, 'lucene');
|
||||
var queryObj = this.queryBuilder.build(target, adhocFilters, queryString);
|
||||
var esQuery = angular.toJson(queryObj);
|
||||
|
||||
var searchType = (queryObj.size === 0 && this.esVersion < 5) ? 'count' : 'query_then_fetch';
|
||||
var header = this.getQueryHeader(searchType, options.range.from, options.range.to);
|
||||
payload += header + '\n';
|
||||
|
||||
payload += esQuery + '\n';
|
||||
sentTargets.push(target);
|
||||
}
|
||||
|
||||
if (sentTargets.length === 0) {
|
||||
return this.$q.when([]);
|
||||
}
|
||||
|
||||
payload = payload.replace(/\$timeFrom/g, options.range.from.valueOf());
|
||||
payload = payload.replace(/\$timeTo/g, options.range.to.valueOf());
|
||||
payload = this.templateSrv.replace(payload, options.scopedVars);
|
||||
|
||||
return this.post('_msearch', payload).then(function(res) {
|
||||
return new ElasticResponse(sentTargets, res).getTimeSeries();
|
||||
});
|
||||
};
|
||||
|
||||
getFields(query) {
|
||||
return this.get('/_mapping').then(function(result) {
|
||||
|
||||
var typeMap = {
|
||||
'float': 'number',
|
||||
'double': 'number',
|
||||
'integer': 'number',
|
||||
'long': 'number',
|
||||
'date': 'date',
|
||||
'string': 'string',
|
||||
'text': 'string',
|
||||
'scaled_float': 'number',
|
||||
'nested': 'nested'
|
||||
};
|
||||
|
||||
function shouldAddField(obj, key, query) {
|
||||
if (key[0] === '_') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!query.type) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// the field type matches the query's type filter directly, or via the typeMap translation
|
||||
return query.type === obj.type || query.type === typeMap[obj.type];
|
||||
}
|
||||
|
||||
// Store subfield names: [system, process, cpu, total] -> system.process.cpu.total
|
||||
var fieldNameParts = [];
|
||||
var fields = {};
|
||||
|
||||
function getFieldsRecursively(obj) {
|
||||
for (var key in obj) {
|
||||
var subObj = obj[key];
|
||||
|
||||
// Check mapping field for nested fields
|
||||
if (_.isObject(subObj.properties)) {
|
||||
fieldNameParts.push(key);
|
||||
getFieldsRecursively(subObj.properties);
|
||||
}
|
||||
|
||||
if (_.isObject(subObj.fields)) {
|
||||
fieldNameParts.push(key);
|
||||
getFieldsRecursively(subObj.fields);
|
||||
}
|
||||
|
||||
if (_.isString(subObj.type)) {
|
||||
var fieldName = fieldNameParts.concat(key).join('.');
|
||||
|
||||
// Hide meta-fields and check field type
|
||||
if (shouldAddField(subObj, key, query)) {
|
||||
fields[fieldName] = {
|
||||
text: fieldName,
|
||||
type: subObj.type
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
fieldNameParts.pop();
|
||||
}
|
||||
|
||||
for (var indexName in result) {
|
||||
var index = result[indexName];
|
||||
if (index && index.mappings) {
|
||||
var mappings = index.mappings;
|
||||
for (var typeName in mappings) {
|
||||
var properties = mappings[typeName].properties;
|
||||
getFieldsRecursively(properties);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// transform to array
|
||||
return _.map(fields, function(value) {
|
||||
return value;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
getTerms(queryDef) {
|
||||
var range = this.timeSrv.timeRange();
|
||||
var searchType = this.esVersion >= 5 ? 'query_then_fetch' : 'count' ;
|
||||
var header = this.getQueryHeader(searchType, range.from, range.to);
|
||||
var esQuery = angular.toJson(this.queryBuilder.getTermsQuery(queryDef));
|
||||
|
||||
esQuery = esQuery.replace(/\$timeFrom/g, range.from.valueOf());
|
||||
esQuery = esQuery.replace(/\$timeTo/g, range.to.valueOf());
|
||||
esQuery = header + '\n' + esQuery + '\n';
|
||||
|
||||
return this.post('_msearch?search_type=' + searchType, esQuery).then(function(res) {
|
||||
if (!res.responses[0].aggregations) {
|
||||
return [];
|
||||
}
|
||||
|
||||
var buckets = res.responses[0].aggregations["1"].buckets;
|
||||
return _.map(buckets, function(bucket) {
|
||||
return {
|
||||
text: bucket.key_as_string || bucket.key,
|
||||
value: bucket.key
|
||||
};
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
metricFindQuery(query) {
|
||||
query = angular.fromJson(query);
|
||||
if (!query) {
|
||||
return this.$q.when([]);
|
||||
}
|
||||
|
||||
if (query.find === 'fields') {
|
||||
query.field = this.templateSrv.replace(query.field, {}, 'lucene');
|
||||
return this.getFields(query);
|
||||
}
|
||||
|
||||
if (query.find === 'terms') {
|
||||
query.query = this.templateSrv.replace(query.query || '*', {}, 'lucene');
|
||||
return this.getTerms(query);
|
||||
}
|
||||
}
|
||||
|
||||
getTagKeys() {
|
||||
return this.getFields({});
|
||||
}
|
||||
|
||||
getTagValues(options) {
|
||||
return this.getTerms({field: options.key, query: '*'});
|
||||
}
|
||||
}
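// A small illustration of the _msearch payload shape that query() above builds
// (a sketch, not the class itself): one JSON header line plus one JSON query line
// per visible target, newline separated, with $timeFrom/$timeTo interpolated after
// the payload is assembled. The function name and input shape are made up.
function buildMsearchPayload(pairs: { header: object; body: object }[], timeFrom: number, timeTo: number): string {
  let payload = '';
  for (const pair of pairs) {
    // the header line selects the index list and search_type for the query line that follows
    payload += JSON.stringify(pair.header) + '\n';
    payload += JSON.stringify(pair.body) + '\n';
  }
  // time placeholders are replaced once, over the whole payload
  return payload
    .replace(/\$timeFrom/g, String(timeFrom))
    .replace(/\$timeTo/g, String(timeTo));
}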
|
@@ -1,130 +1,55 @@
|
||||
///<reference path="../../../headers/common.d.ts" />
|
||||
|
||||
import _ from 'lodash';
|
||||
import queryDef from "./query_def";
|
||||
import * as queryDef from "./query_def";
|
||||
import TableModel from 'app/core/table_model';
|
||||
|
||||
export function ElasticResponse(targets, response) {
|
||||
this.targets = targets;
|
||||
this.response = response;
|
||||
}
|
||||
export class ElasticResponse {
|
||||
|
||||
ElasticResponse.prototype.processMetrics = function(esAgg, target, seriesList, props) {
|
||||
var metric, y, i, newSeries, bucket, value;
|
||||
constructor(private targets, private response) {
|
||||
this.targets = targets;
|
||||
this.response = response;
|
||||
}
|
||||
|
||||
for (y = 0; y < target.metrics.length; y++) {
|
||||
metric = target.metrics[y];
|
||||
if (metric.hide) {
|
||||
continue;
|
||||
}
|
||||
processMetrics(esAgg, target, seriesList, props) {
|
||||
var metric, y, i, newSeries, bucket, value;
|
||||
|
||||
switch (metric.type) {
|
||||
case 'count': {
|
||||
newSeries = { datapoints: [], metric: 'count', props: props};
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
value = bucket.doc_count;
|
||||
newSeries.datapoints.push([value, bucket.key]);
|
||||
}
|
||||
seriesList.push(newSeries);
|
||||
break;
|
||||
for (y = 0; y < target.metrics.length; y++) {
|
||||
metric = target.metrics[y];
|
||||
if (metric.hide) {
|
||||
continue;
|
||||
}
|
||||
case 'percentiles': {
|
||||
if (esAgg.buckets.length === 0) {
|
||||
|
||||
switch (metric.type) {
|
||||
case 'count': {
|
||||
newSeries = { datapoints: [], metric: 'count', props: props};
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
value = bucket.doc_count;
|
||||
newSeries.datapoints.push([value, bucket.key]);
|
||||
}
|
||||
seriesList.push(newSeries);
|
||||
break;
|
||||
}
|
||||
|
||||
var firstBucket = esAgg.buckets[0];
|
||||
var percentiles = firstBucket[metric.id].values;
|
||||
|
||||
for (var percentileName in percentiles) {
|
||||
newSeries = {datapoints: [], metric: 'p' + percentileName, props: props, field: metric.field};
|
||||
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
var values = bucket[metric.id].values;
|
||||
newSeries.datapoints.push([values[percentileName], bucket.key]);
|
||||
}
|
||||
seriesList.push(newSeries);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
case 'extended_stats': {
|
||||
for (var statName in metric.meta) {
|
||||
if (!metric.meta[statName]) {
|
||||
continue;
|
||||
case 'percentiles': {
|
||||
if (esAgg.buckets.length === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
newSeries = {datapoints: [], metric: statName, props: props, field: metric.field};
|
||||
var firstBucket = esAgg.buckets[0];
|
||||
var percentiles = firstBucket[metric.id].values;
|
||||
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
var stats = bucket[metric.id];
|
||||
for (var percentileName in percentiles) {
|
||||
newSeries = {datapoints: [], metric: 'p' + percentileName, props: props, field: metric.field};
|
||||
|
||||
// add stats that are in nested obj to top level obj
|
||||
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
|
||||
stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
|
||||
|
||||
newSeries.datapoints.push([stats[statName], bucket.key]);
|
||||
}
|
||||
|
||||
seriesList.push(newSeries);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
newSeries = { datapoints: [], metric: metric.type, field: metric.field, props: props};
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
|
||||
value = bucket[metric.id];
|
||||
if (value !== undefined) {
|
||||
if (value.normalized_value) {
|
||||
newSeries.datapoints.push([value.normalized_value, bucket.key]);
|
||||
} else {
|
||||
newSeries.datapoints.push([value.value, bucket.key]);
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
var values = bucket[metric.id].values;
|
||||
newSeries.datapoints.push([values[percentileName], bucket.key]);
|
||||
}
|
||||
seriesList.push(newSeries);
|
||||
}
|
||||
|
||||
}
|
||||
seriesList.push(newSeries);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ElasticResponse.prototype.processAggregationDocs = function(esAgg, aggDef, target, table, props) {
|
||||
// add columns
|
||||
if (table.columns.length === 0) {
|
||||
for (let propKey of _.keys(props)) {
|
||||
table.addColumn({text: propKey, filterable: true});
|
||||
}
|
||||
table.addColumn({text: aggDef.field, filterable: true});
|
||||
}
|
||||
|
||||
// helper func to add values to value array
|
||||
let addMetricValue = (values, metricName, value) => {
|
||||
table.addColumn({text: metricName});
|
||||
values.push(value);
|
||||
};
|
||||
|
||||
for (let bucket of esAgg.buckets) {
|
||||
let values = [];
|
||||
|
||||
for (let propValues of _.values(props)) {
|
||||
values.push(propValues);
|
||||
}
|
||||
|
||||
// add bucket key (value)
|
||||
values.push(bucket.key);
|
||||
|
||||
for (let metric of target.metrics) {
|
||||
switch (metric.type) {
|
||||
case "count": {
|
||||
addMetricValue(values, this._getMetricName(metric.type), bucket.doc_count);
|
||||
break;
|
||||
}
|
||||
case 'extended_stats': {
|
||||
@@ -133,228 +58,304 @@ ElasticResponse.prototype.processAggregationDocs = function(esAgg, aggDef, targe
|
||||
continue;
|
||||
}
|
||||
|
||||
var stats = bucket[metric.id];
|
||||
// add stats that are in nested obj to top level obj
|
||||
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
|
||||
stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
|
||||
newSeries = {datapoints: [], metric: statName, props: props, field: metric.field};
|
||||
|
||||
addMetricValue(values, this._getMetricName(statName), stats[statName]);
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
var stats = bucket[metric.id];
|
||||
|
||||
// add stats that are in nested obj to top level obj
|
||||
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
|
||||
stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
|
||||
|
||||
newSeries.datapoints.push([stats[statName], bucket.key]);
|
||||
}
|
||||
|
||||
seriesList.push(newSeries);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
let metricName = this._getMetricName(metric.type);
|
||||
let otherMetrics = _.filter(target.metrics, {type: metric.type});
|
||||
default: {
|
||||
newSeries = { datapoints: [], metric: metric.type, field: metric.field, props: props};
|
||||
for (i = 0; i < esAgg.buckets.length; i++) {
|
||||
bucket = esAgg.buckets[i];
|
||||
|
||||
value = bucket[metric.id];
|
||||
if (value !== undefined) {
|
||||
if (value.normalized_value) {
|
||||
newSeries.datapoints.push([value.normalized_value, bucket.key]);
|
||||
} else {
|
||||
newSeries.datapoints.push([value.value, bucket.key]);
|
||||
}
|
||||
}
|
||||
|
||||
// if there is more than one metric of the same type, include the field name in the metric name
|
||||
if (otherMetrics.length > 1) {
|
||||
metricName += ' ' + metric.field;
|
||||
}
|
||||
|
||||
addMetricValue(values, metricName, bucket[metric.id].value);
|
||||
seriesList.push(newSeries);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
table.rows.push(values);
|
||||
}
|
||||
};
|
||||
|
||||
// This is quite complex
|
||||
// need to recurse down the nested buckets to build the series
|
||||
ElasticResponse.prototype.processBuckets = function(aggs, target, seriesList, table, props, depth) {
|
||||
var bucket, aggDef, esAgg, aggId;
|
||||
var maxDepth = target.bucketAggs.length-1;
|
||||
|
||||
for (aggId in aggs) {
|
||||
aggDef = _.find(target.bucketAggs, {id: aggId});
|
||||
esAgg = aggs[aggId];
|
||||
|
||||
if (!aggDef) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (depth === maxDepth) {
|
||||
if (aggDef.type === 'date_histogram') {
|
||||
this.processMetrics(esAgg, target, seriesList, props);
|
||||
} else {
|
||||
this.processAggregationDocs(esAgg, aggDef, target, table, props);
|
||||
}
|
||||
} else {
|
||||
for (var nameIndex in esAgg.buckets) {
|
||||
bucket = esAgg.buckets[nameIndex];
|
||||
props = _.clone(props);
|
||||
if (bucket.key !== void 0) {
|
||||
props[aggDef.field] = bucket.key;
|
||||
} else {
|
||||
props["filter"] = nameIndex;
|
||||
}
|
||||
if (bucket.key_as_string) {
|
||||
props[aggDef.field] = bucket.key_as_string;
|
||||
}
|
||||
this.processBuckets(bucket, target, seriesList, table, props, depth+1);
|
||||
processAggregationDocs(esAgg, aggDef, target, table, props) {
|
||||
// add columns
|
||||
if (table.columns.length === 0) {
|
||||
for (let propKey of _.keys(props)) {
|
||||
table.addColumn({text: propKey, filterable: true});
|
||||
}
|
||||
table.addColumn({text: aggDef.field, filterable: true});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ElasticResponse.prototype._getMetricName = function(metric) {
|
||||
var metricDef = _.find(queryDef.metricAggTypes, {value: metric});
|
||||
if (!metricDef) {
|
||||
metricDef = _.find(queryDef.extendedStats, {value: metric});
|
||||
}
|
||||
|
||||
return metricDef ? metricDef.text : metric;
|
||||
};
|
||||
|
||||
ElasticResponse.prototype._getSeriesName = function(series, target, metricTypeCount) {
|
||||
var metricName = this._getMetricName(series.metric);
|
||||
|
||||
if (target.alias) {
|
||||
var regex = /\{\{([\s\S]+?)\}\}/g;
|
||||
|
||||
return target.alias.replace(regex, function(match, g1, g2) {
|
||||
var group = g1 || g2;
|
||||
|
||||
if (group.indexOf('term ') === 0) { return series.props[group.substring(5)]; }
|
||||
if (series.props[group] !== void 0) { return series.props[group]; }
|
||||
if (group === 'metric') { return metricName; }
|
||||
if (group === 'field') { return series.field; }
|
||||
|
||||
return match;
|
||||
});
|
||||
}
|
||||
|
||||
if (series.field && queryDef.isPipelineAgg(series.metric)) {
|
||||
var appliedAgg = _.find(target.metrics, { id: series.field });
|
||||
if (appliedAgg) {
|
||||
metricName += ' ' + queryDef.describeMetric(appliedAgg);
|
||||
} else {
|
||||
metricName = 'Unset';
|
||||
}
|
||||
} else if (series.field) {
|
||||
metricName += ' ' + series.field;
|
||||
}
|
||||
|
||||
var propKeys = _.keys(series.props);
|
||||
if (propKeys.length === 0) {
|
||||
return metricName;
|
||||
}
|
||||
|
||||
var name = '';
|
||||
for (var propName in series.props) {
|
||||
name += series.props[propName] + ' ';
|
||||
}
|
||||
|
||||
if (metricTypeCount === 1) {
|
||||
return name.trim();
|
||||
}
|
||||
|
||||
return name.trim() + ' ' + metricName;
|
||||
};
|
||||
|
||||
ElasticResponse.prototype.nameSeries = function(seriesList, target) {
|
||||
var metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
|
||||
var fieldNameCount = _.uniq(_.map(seriesList, 'field')).length;
|
||||
|
||||
for (var i = 0; i < seriesList.length; i++) {
|
||||
var series = seriesList[i];
|
||||
series.target = this._getSeriesName(series, target, metricTypeCount, fieldNameCount);
|
||||
}
|
||||
};
|
||||
|
||||
ElasticResponse.prototype.processHits = function(hits, seriesList) {
|
||||
var series = {target: 'docs', type: 'docs', datapoints: [], total: hits.total, filterable: true};
|
||||
var propName, hit, doc, i;
|
||||
|
||||
for (i = 0; i < hits.hits.length; i++) {
|
||||
hit = hits.hits[i];
|
||||
doc = {
|
||||
_id: hit._id,
|
||||
_type: hit._type,
|
||||
_index: hit._index
|
||||
// helper func to add values to value array
|
||||
let addMetricValue = (values, metricName, value) => {
|
||||
table.addColumn({text: metricName});
|
||||
values.push(value);
|
||||
};
|
||||
|
||||
if (hit._source) {
|
||||
for (propName in hit._source) {
|
||||
doc[propName] = hit._source[propName];
|
||||
}
|
||||
}
|
||||
for (let bucket of esAgg.buckets) {
|
||||
let values = [];
|
||||
|
||||
for (propName in hit.fields) {
|
||||
doc[propName] = hit.fields[propName];
|
||||
}
|
||||
series.datapoints.push(doc);
|
||||
}
|
||||
|
||||
seriesList.push(series);
|
||||
};
|
||||
|
||||
ElasticResponse.prototype.trimDatapoints = function(aggregations, target) {
|
||||
var histogram = _.find(target.bucketAggs, { type: 'date_histogram'});
|
||||
|
||||
var shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
|
||||
if (shouldDropFirstAndLast) {
|
||||
var trim = histogram.settings.trimEdges;
|
||||
for (var prop in aggregations) {
|
||||
var points = aggregations[prop];
|
||||
if (points.datapoints.length > trim * 2) {
|
||||
points.datapoints = points.datapoints.slice(trim, points.datapoints.length - trim);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ElasticResponse.prototype.getErrorFromElasticResponse = function(response, err) {
|
||||
var result: any = {};
|
||||
result.data = JSON.stringify(err, null, 4);
|
||||
if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
|
||||
result.message = err.root_cause[0].reason;
|
||||
} else {
|
||||
result.message = err.reason || 'Unknown elastic error response';
|
||||
}
|
||||
|
||||
if (response.$$config) {
|
||||
result.config = response.$$config;
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
ElasticResponse.prototype.getTimeSeries = function() {
|
||||
var seriesList = [];
|
||||
|
||||
for (var i = 0; i < this.response.responses.length; i++) {
|
||||
var response = this.response.responses[i];
|
||||
if (response.error) {
|
||||
throw this.getErrorFromElasticResponse(this.response, response.error);
|
||||
}
|
||||
|
||||
if (response.hits && response.hits.hits.length > 0) {
|
||||
this.processHits(response.hits, seriesList);
|
||||
}
|
||||
|
||||
if (response.aggregations) {
|
||||
var aggregations = response.aggregations;
|
||||
var target = this.targets[i];
|
||||
var tmpSeriesList = [];
|
||||
var table = new TableModel();
|
||||
|
||||
this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
|
||||
this.trimDatapoints(tmpSeriesList, target);
|
||||
this.nameSeries(tmpSeriesList, target);
|
||||
|
||||
for (var y = 0; y < tmpSeriesList.length; y++) {
|
||||
seriesList.push(tmpSeriesList[y]);
|
||||
for (let propValues of _.values(props)) {
|
||||
values.push(propValues);
|
||||
}
|
||||
|
||||
if (table.rows.length > 0) {
|
||||
seriesList.push(table);
|
||||
// add bucket key (value)
|
||||
values.push(bucket.key);
|
||||
|
||||
for (let metric of target.metrics) {
|
||||
switch (metric.type) {
|
||||
case "count": {
|
||||
addMetricValue(values, this.getMetricName(metric.type), bucket.doc_count);
|
||||
break;
|
||||
}
|
||||
case 'extended_stats': {
|
||||
for (var statName in metric.meta) {
|
||||
if (!metric.meta[statName]) {
|
||||
continue;
|
||||
}
|
||||
|
||||
var stats = bucket[metric.id];
|
||||
// add stats that are in nested obj to top level obj
|
||||
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
|
||||
stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
|
||||
|
||||
addMetricValue(values, this.getMetricName(statName), stats[statName]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
let metricName = this.getMetricName(metric.type);
|
||||
let otherMetrics = _.filter(target.metrics, {type: metric.type});
|
||||
|
||||
// if there is more than one metric of the same type, include the field name in the metric name
|
||||
if (otherMetrics.length > 1) {
|
||||
metricName += ' ' + metric.field;
|
||||
}
|
||||
|
||||
addMetricValue(values, metricName, bucket[metric.id].value);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
table.rows.push(values);
|
||||
}
|
||||
}
|
||||
|
||||
// This is quite complex
|
||||
// need to recurse down the nested buckets to build the series
|
||||
processBuckets(aggs, target, seriesList, table, props, depth) {
|
||||
var bucket, aggDef, esAgg, aggId;
|
||||
var maxDepth = target.bucketAggs.length-1;
|
||||
|
||||
for (aggId in aggs) {
|
||||
aggDef = _.find(target.bucketAggs, {id: aggId});
|
||||
esAgg = aggs[aggId];
|
||||
|
||||
if (!aggDef) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (depth === maxDepth) {
|
||||
if (aggDef.type === 'date_histogram') {
|
||||
this.processMetrics(esAgg, target, seriesList, props);
|
||||
} else {
|
||||
this.processAggregationDocs(esAgg, aggDef, target, table, props);
|
||||
}
|
||||
} else {
|
||||
for (var nameIndex in esAgg.buckets) {
|
||||
bucket = esAgg.buckets[nameIndex];
|
||||
props = _.clone(props);
|
||||
if (bucket.key !== void 0) {
|
||||
props[aggDef.field] = bucket.key;
|
||||
} else {
|
||||
props["filter"] = nameIndex;
|
||||
}
|
||||
if (bucket.key_as_string) {
|
||||
props[aggDef.field] = bucket.key_as_string;
|
||||
}
|
||||
this.processBuckets(bucket, target, seriesList, table, props, depth+1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { data: seriesList };
|
||||
};
|
||||
private getMetricName(metric) {
|
||||
var metricDef = _.find(queryDef.metricAggTypes, {value: metric});
|
||||
if (!metricDef) {
|
||||
metricDef = _.find(queryDef.extendedStats, {value: metric});
|
||||
}
|
||||
|
||||
return metricDef ? metricDef.text : metric;
|
||||
}
|
||||
|
||||
private getSeriesName(series, target, metricTypeCount) {
|
||||
var metricName = this.getMetricName(series.metric);
|
||||
|
||||
if (target.alias) {
|
||||
var regex = /\{\{([\s\S]+?)\}\}/g;
|
||||
|
||||
return target.alias.replace(regex, function(match, g1, g2) {
|
||||
var group = g1 || g2;
|
||||
|
||||
if (group.indexOf('term ') === 0) { return series.props[group.substring(5)]; }
|
||||
if (series.props[group] !== void 0) { return series.props[group]; }
|
||||
if (group === 'metric') { return metricName; }
|
||||
if (group === 'field') { return series.field; }
|
||||
|
||||
return match;
|
||||
});
|
||||
}
|
||||
|
||||
if (series.field && queryDef.isPipelineAgg(series.metric)) {
|
||||
var appliedAgg = _.find(target.metrics, { id: series.field });
|
||||
if (appliedAgg) {
|
||||
metricName += ' ' + queryDef.describeMetric(appliedAgg);
|
||||
} else {
|
||||
metricName = 'Unset';
|
||||
}
|
||||
} else if (series.field) {
|
||||
metricName += ' ' + series.field;
|
||||
}
|
||||
|
||||
var propKeys = _.keys(series.props);
|
||||
if (propKeys.length === 0) {
|
||||
return metricName;
|
||||
}
|
||||
|
||||
var name = '';
|
||||
for (var propName in series.props) {
|
||||
name += series.props[propName] + ' ';
|
||||
}
|
||||
|
||||
if (metricTypeCount === 1) {
|
||||
return name.trim();
|
||||
}
|
||||
|
||||
return name.trim() + ' ' + metricName;
|
||||
}
|
||||
|
||||
nameSeries(seriesList, target) {
|
||||
var metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
|
||||
|
||||
for (var i = 0; i < seriesList.length; i++) {
|
||||
var series = seriesList[i];
|
||||
series.target = this.getSeriesName(series, target, metricTypeCount);
|
||||
}
|
||||
}
|
||||
|
||||
processHits(hits, seriesList) {
|
||||
var series = {target: 'docs', type: 'docs', datapoints: [], total: hits.total, filterable: true};
|
||||
var propName, hit, doc, i;
|
||||
|
||||
for (i = 0; i < hits.hits.length; i++) {
|
||||
hit = hits.hits[i];
|
||||
doc = {
|
||||
_id: hit._id,
|
||||
_type: hit._type,
|
||||
_index: hit._index
|
||||
};
|
||||
|
||||
if (hit._source) {
|
||||
for (propName in hit._source) {
|
||||
doc[propName] = hit._source[propName];
|
||||
}
|
||||
}
|
||||
|
||||
for (propName in hit.fields) {
|
||||
doc[propName] = hit.fields[propName];
|
||||
}
|
||||
series.datapoints.push(doc);
|
||||
}
|
||||
|
||||
seriesList.push(series);
|
||||
}
|
||||
|
||||
trimDatapoints(aggregations, target) {
|
||||
var histogram = _.find(target.bucketAggs, { type: 'date_histogram'});
|
||||
|
||||
var shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
|
||||
if (shouldDropFirstAndLast) {
|
||||
var trim = histogram.settings.trimEdges;
|
||||
for (var prop in aggregations) {
|
||||
var points = aggregations[prop];
|
||||
if (points.datapoints.length > trim * 2) {
|
||||
points.datapoints = points.datapoints.slice(trim, points.datapoints.length - trim);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getErrorFromElasticResponse(response, err) {
|
||||
var result: any = {};
|
||||
result.data = JSON.stringify(err, null, 4);
|
||||
if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
|
||||
result.message = err.root_cause[0].reason;
|
||||
} else {
|
||||
result.message = err.reason || 'Unknown elastic error response';
|
||||
}
|
||||
|
||||
if (response.$$config) {
|
||||
result.config = response.$$config;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
getTimeSeries() {
|
||||
var seriesList = [];
|
||||
|
||||
for (var i = 0; i < this.response.responses.length; i++) {
|
||||
var response = this.response.responses[i];
|
||||
if (response.error) {
|
||||
throw this.getErrorFromElasticResponse(this.response, response.error);
|
||||
}
|
||||
|
||||
if (response.hits && response.hits.hits.length > 0) {
|
||||
this.processHits(response.hits, seriesList);
|
||||
}
|
||||
|
||||
if (response.aggregations) {
|
||||
var aggregations = response.aggregations;
|
||||
var target = this.targets[i];
|
||||
var tmpSeriesList = [];
|
||||
var table = new TableModel();
|
||||
|
||||
this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
|
||||
this.trimDatapoints(tmpSeriesList, target);
|
||||
this.nameSeries(tmpSeriesList, target);
|
||||
|
||||
for (var y = 0; y < tmpSeriesList.length; y++) {
|
||||
seriesList.push(tmpSeriesList[y]);
|
||||
}
|
||||
|
||||
if (table.rows.length > 0) {
|
||||
seriesList.push(table);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { data: seriesList };
|
||||
}
|
||||
}
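// A hedged usage sketch with a made-up single-target msearch response: the class
// above turns aggregation buckets into series of [value, timestamp] datapoints and
// wraps them in { data: [...] }; when the innermost bucket aggregation is not a
// date_histogram, a TableModel is pushed instead. The fixture values are invented.
const sketchTargets: any[] = [{
  refId: 'A',
  metrics: [{ type: 'count', id: '1' }],
  bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
}];
const sketchResponse: any = {
  responses: [{
    aggregations: {
      '2': { buckets: [{ key: 1000, doc_count: 4 }, { key: 2000, doc_count: 7 }] },
    },
  }],
};
// new ElasticResponse(sketchTargets, sketchResponse).getTimeSeries()
//   -> { data: [{ metric: 'count', datapoints: [[4, 1000], [7, 2000]], ... }] }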
|
||||
|
Some files were not shown because too many files have changed in this diff