Mirror of https://github.com/grafana/grafana.git

Commit 09b681326e: Merge branch 'master' into org-users-to-react
@@ -83,13 +83,14 @@ jobs:
       - checkout
       - run: 'go get -u github.com/alecthomas/gometalinter'
       - run: 'go get -u github.com/tsenart/deadcode'
+      - run: 'go get -u github.com/jgautheron/goconst/cmd/goconst'
       - run: 'go get -u github.com/gordonklaus/ineffassign'
       - run: 'go get -u github.com/opennota/check/cmd/structcheck'
       - run: 'go get -u github.com/mdempsky/unconvert'
       - run: 'go get -u github.com/opennota/check/cmd/varcheck'
       - run:
           name: run linters
-          command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...'
+          command: 'gometalinter --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=goconst --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...'
       - run:
           name: run go vet
           command: 'go vet ./pkg/...'
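The lint step above can also be reproduced outside CI. A minimal sketch, assuming Go is installed and `$GOPATH/bin` is on `PATH` (the tool list and flags are taken verbatim from the job definition above):

```bash
# Install the meta-linter and the newly added goconst linter, then run
# the same invocation as the CI "run linters" step.
go get -u github.com/alecthomas/gometalinter
go get -u github.com/jgautheron/goconst/cmd/goconst
gometalinter --enable-gc --vendor --deadline 10m --disable-all \
  --enable=deadcode --enable=goconst --enable=ineffassign \
  --enable=structcheck --enable=unconvert --enable=varcheck ./...
```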
CHANGELOG.md | 15
@@ -1,23 +1,26 @@
# 5.4.0 (unreleased)
# 5.3.0 (unreleased)

# 5.3.0-beta2 (2018-10-01)

### New Features

* **Annotations**: Enable template variables in tagged annotations queries [#9735](https://github.com/grafana/grafana/issues/9735)
* **Stackdriver**: Support for Google Stackdriver Datasource [#13289](https://github.com/grafana/grafana/pull/13289)

### Minor

* **Provisioning**: Dashboard provisioning now supports symlinks that change target [#12534](https://github.com/grafana/grafana/issues/12534), thx [@auhlig](https://github.com/auhlig)
* **OAuth**: Allow OAuth email attribute name to be configurable [#12986](https://github.com/grafana/grafana/issues/12986), thx [@bobmshannon](https://github.com/bobmshannon)
* **Tags**: Default sort order for GetDashboardTags [#11681](https://github.com/grafana/grafana/pull/11681), thx [@Jonnymcc](https://github.com/Jonnymcc)
* **Prometheus**: Label completion queries respect dashboard time range [#12251](https://github.com/grafana/grafana/pull/12251), thx [@mtanda](https://github.com/mtanda)
* **Prometheus**: Allow displaying annotations based on Prometheus series value [#10159](https://github.com/grafana/grafana/issues/10159), thx [@mtanda](https://github.com/mtanda)
* **Prometheus**: Ad-hoc filtering for Prometheus dashboards [#13212](https://github.com/grafana/grafana/issues/13212)
* **Singlestat**: Fix gauge display accuracy for percents [#13270](https://github.com/grafana/grafana/issues/13270), thx [@tianon](https://github.com/tianon)

# 5.3.0 (unreleased)

### Minor

* **Dashboard**: Prevent auto refresh from starting when loading dashboard with absolute time range [#12030](https://github.com/grafana/grafana/issues/12030)
* **Templating**: New templating variable type `Text box` that allows free text input [#3173](https://github.com/grafana/grafana/issues/3173)
* **Alerting**: Link to view full size image in Microsoft Teams alert notifier [#13121](https://github.com/grafana/grafana/issues/13121), thx [@holiiveira](https://github.com/holiiveira)
* **Alerting**: Fixes a bug where all alerts would send reminders after upgrade & restart [#13402](https://github.com/grafana/grafana/pull/13402)
* **Alerting**: Concurrent render limit for graphs used in notifications [#13401](https://github.com/grafana/grafana/pull/13401)
* **Postgres/MySQL/MSSQL**: Add support for replacing $__interval and $__interval_ms in alert queries [#11555](https://github.com/grafana/grafana/issues/11555), thx [@svenklemm](https://github.com/svenklemm)

# 5.3.0-beta1 (2018-09-06)
build.go | 17
@@ -22,6 +22,11 @@ import (
 	"time"
 )

+const (
+	windows = "windows"
+	linux   = "linux"
+)
+
 var (
 	//versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`)
 	goarch string
@@ -110,13 +115,13 @@ func main() {
 	case "package":
 		grunt(gruntBuildArg("build")...)
 		grunt(gruntBuildArg("package")...)
-		if goos == "linux" {
+		if goos == linux {
 			createLinuxPackages()
 		}

 	case "package-only":
 		grunt(gruntBuildArg("package")...)
-		if goos == "linux" {
+		if goos == linux {
 			createLinuxPackages()
 		}

@@ -378,7 +383,7 @@ func ensureGoPath() {
 }

 func grunt(params ...string) {
-	if runtime.GOOS == "windows" {
+	if runtime.GOOS == windows {
 		runPrint(`.\node_modules\.bin\grunt`, params...)
 	} else {
 		runPrint("./node_modules/.bin/grunt", params...)
@@ -420,7 +425,7 @@ func build(binaryName, pkg string, tags []string) {
 		binary = fmt.Sprintf("./bin/%s", binaryName)
 	}

-	if goos == "windows" {
+	if goos == windows {
 		binary += ".exe"
 	}

@@ -484,11 +489,11 @@ func clean() {

 func setBuildEnv() {
 	os.Setenv("GOOS", goos)
-	if goos == "windows" {
+	if goos == windows {
 		// require windows >=7
 		os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601")
 	}
-	if goarch != "amd64" || goos != "linux" {
+	if goarch != "amd64" || goos != linux {
 		// needed for all other archs
 		cgo = true
 	}
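The build.go change above is exactly the kind of refactor the newly enabled goconst linter asks for: repeated string literals become named constants. A self-contained sketch of the pattern (illustrative only, not Grafana code):

```go
package main

import (
	"fmt"
	"runtime"
)

// Named constants replace the repeated "windows"/"linux" literals, so a
// typo in an OS name becomes a compile error instead of a silent bug.
const (
	windows = "windows"
	linux   = "linux"
)

func main() {
	switch runtime.GOOS {
	case windows:
		fmt.Println("binary gets a .exe suffix")
	case linux:
		fmt.Println("linux packages can be built")
	default:
		fmt.Println("no special handling")
	}
}
```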
@@ -474,6 +474,10 @@ error_or_timeout = alerting
 # Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
 nodata_or_nullvalues = no_data

+# Alert notifications can include images, but rendering many images at the same time can overload the server
+# This limit will protect the server from render overloading and make sure notifications are sent out quickly
+concurrent_render_limit = 5
+
 #################################### Explore #############################
 [explore]
 # Enable the Explore section
@@ -393,6 +393,10 @@ log_queries =
 # Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
 ;nodata_or_nullvalues = no_data

+# Alert notifications can include images, but rendering many images at the same time can overload the server
+# This limit will protect the server from render overloading and make sure notifications are sent out quickly
+;concurrent_render_limit = 5
+
 #################################### Explore #############################
 [explore]
 # Enable the Explore section
@@ -431,7 +435,7 @@ log_queries =
 ;sampler_param = 1

 #################################### Grafana.com integration ##########################
-# Url used to to import dashboards directly from Grafana.com
+# Url used to import dashboards directly from Grafana.com
 [grafana_com]
 ;url = https://grafana.com
devenv/docker/ha_test/.gitignore | 1 (new, vendored)
@@ -0,0 +1 @@
+grafana/provisioning/dashboards/alerts/alert-*
devenv/docker/ha_test/README.md | 137 (new file)
@@ -0,0 +1,137 @@
# Grafana High Availability (HA) test setup

A set of Docker Compose services that together create a Grafana HA test setup, with the ability to easily
scale the number of Grafana instances up/down.

Included services

* Grafana
* MySQL - Grafana configuration database and session storage
* Prometheus - Monitors Grafana and is used as the datasource for provisioned alert rules
* Nginx - Reverse proxy for Grafana and Prometheus. Enables browsing the Grafana/Prometheus UI using a hostname

## Prerequisites

### Build grafana docker container

Build a Grafana Docker container from the current branch and commit, and tag it as grafana/grafana:dev.

```bash
$ cd <grafana repo>
$ make build-docker-full
```

### Virtual host names

#### Alternative 1 - Use dnsmasq

```bash
$ sudo apt-get install dnsmasq
$ echo 'address=/loc/127.0.0.1' | sudo tee /etc/dnsmasq.d/dnsmasq-loc.conf > /dev/null
$ sudo /etc/init.d/dnsmasq restart
$ ping whatever.loc
PING whatever.loc (127.0.0.1) 56(84) bytes of data.
64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.076 ms

--- whatever.loc ping statistics ---
1 packet transmitted, 1 received, 0% packet loss, time 1998ms
```

#### Alternative 2 - Manually update /etc/hosts

Update your `/etc/hosts` to be able to access Grafana and/or Prometheus UI using a hostname.

```bash
$ cat /etc/hosts
127.0.0.1       grafana.loc
127.0.0.1       prometheus.loc
```

## Start services

```bash
$ docker-compose up -d
```

Browse
* http://grafana.loc/
* http://prometheus.loc/

Check for any errors

```bash
$ docker-compose logs | grep error
```

### Scale Grafana instances up/down

Scale the number of Grafana instances to `<instances>`

```bash
$ docker-compose up --scale grafana=<instances> -d
# for example 3 instances
$ docker-compose up --scale grafana=3 -d
```

## Test alerting

### Create notification channels

Creates the default notification channels, if they don't already exist

```bash
$ ./alerts.sh setup
```

### Slack notifications

Disable

```bash
$ ./alerts.sh slack -d
```

Enable and configure the webhook url

```bash
$ ./alerts.sh slack -u https://hooks.slack.com/services/...
```

Enable, configure the webhook url, and enable reminders

```bash
$ ./alerts.sh slack -u https://hooks.slack.com/services/... -r -e 10m
```

### Provision alert dashboards with alert rules

Provision 1 dashboard/alert rule (default)

```bash
$ ./alerts.sh provision
```

Provision 10 dashboards/alert rules

```bash
$ ./alerts.sh provision -a 10
```

Provision 10 dashboards/alert rules and change the alert condition to `> 100`

```bash
$ ./alerts.sh provision -a 10 -c 100
```

### Pause/unpause all alert rules

Pause

```bash
$ ./alerts.sh pause
```

Unpause

```bash
$ ./alerts.sh unpause
```
devenv/docker/ha_test/alerts.sh | 156 (new executable file)
@@ -0,0 +1,156 @@
#!/bin/bash

requiresJsonnet() {
    if ! type "jsonnet" > /dev/null; then
        echo "you need to install jsonnet to run this script"
        echo "follow the instructions on https://github.com/google/jsonnet"
        exit 1
    fi
}

setup() {
    STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/1)
    if [ "$STATUS" -eq 200 ]; then
        echo "Email already exists, skipping..."
    else
        curl -H "Content-Type: application/json" \
            -d '{
                "name": "Email",
                "type": "email",
                "isDefault": false,
                "sendReminder": false,
                "uploadImage": true,
                "settings": {
                    "addresses": "user@test.com"
                }
            }' \
            http://admin:admin@grafana.loc/api/alert-notifications
    fi

    STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://admin:admin@grafana.loc/api/alert-notifications/2)
    if [ "$STATUS" -eq 200 ]; then
        echo "Slack already exists, skipping..."
    else
        curl -H "Content-Type: application/json" \
            -d '{
                "name": "Slack",
                "type": "slack",
                "isDefault": false,
                "sendReminder": false,
                "uploadImage": true
            }' \
            http://admin:admin@grafana.loc/api/alert-notifications
    fi
}

slack() {
    enabled=true
    url=''
    remind=false
    remindEvery='10m'

    while getopts ":e:u:dr" o; do
        case "${o}" in
            e)
                remindEvery=${OPTARG}
                ;;
            u)
                url=${OPTARG}
                ;;
            d)
                enabled=false
                ;;
            r)
                remind=true
                ;;
        esac
    done
    shift $((OPTIND-1))

    curl -X PUT \
        -H "Content-Type: application/json" \
        -d '{
            "id": 2,
            "name": "Slack",
            "type": "slack",
            "isDefault": '$enabled',
            "sendReminder": '$remind',
            "frequency": "'$remindEvery'",
            "uploadImage": true,
            "settings": {
                "url": "'$url'"
            }
        }' \
        http://admin:admin@grafana.loc/api/alert-notifications/2
}

provision() {
    alerts=1
    condition=65
    while getopts ":a:c:" o; do
        case "${o}" in
            a)
                alerts=${OPTARG}
                ;;
            c)
                condition=${OPTARG}
                ;;
        esac
    done
    shift $((OPTIND-1))

    requiresJsonnet

    rm -rf grafana/provisioning/dashboards/alerts/alert-*.json
    jsonnet -m grafana/provisioning/dashboards/alerts grafana/provisioning/alerts.jsonnet --ext-code alerts=$alerts --ext-code condition=$condition
}

pause() {
    curl -H "Content-Type: application/json" \
        -d '{"paused":true}' \
        http://admin:admin@grafana.loc/api/admin/pause-all-alerts
}

unpause() {
    curl -H "Content-Type: application/json" \
        -d '{"paused":false}' \
        http://admin:admin@grafana.loc/api/admin/pause-all-alerts
}

usage() {
    echo -e "Usage: ./alerts.sh COMMAND [OPTIONS]\n"
    echo -e "Commands"
    echo -e "  setup\t\t creates default alert notification channels"
    echo -e "  slack\t\t configure slack notification channel"
    echo -e "    [-d]\t\t\t disable notifier, default enabled"
    echo -e "    [-u]\t\t\t url"
    echo -e "    [-r]\t\t\t send reminders"
    echo -e "    [-e <remind every>]\t\t default 10m\n"
    echo -e "  provision\t provision alerts"
    echo -e "    [-a <alert rule count>]\t default 1"
    echo -e "    [-c <condition value>]\t default 65\n"
    echo -e "  pause\t\t pause all alerts"
    echo -e "  unpause\t unpause all alerts"
}

main() {
    local cmd=$1

    if [[ $cmd == "setup" ]]; then
        setup
    elif [[ $cmd == "slack" ]]; then
        slack "${@:2}"
    elif [[ $cmd == "provision" ]]; then
        provision "${@:2}"
    elif [[ $cmd == "pause" ]]; then
        pause
    elif [[ $cmd == "unpause" ]]; then
        unpause
    fi

    if [[ -z "$cmd" ]]; then
        usage
    fi
}

main "$@"
devenv/docker/ha_test/docker-compose.yaml | 57 (new file)
@@ -0,0 +1,57 @@
version: "2.1"

services:
  nginx-proxy:
    image: jwilder/nginx-proxy
    ports:
      - "80:80"
    volumes:
      - /var/run/docker.sock:/tmp/docker.sock:ro

  mysql:
    image: mysql
    environment:
      MYSQL_ROOT_PASSWORD: rootpass
      MYSQL_DATABASE: grafana
      MYSQL_USER: grafana
      MYSQL_PASSWORD: password
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
      timeout: 10s
      retries: 10

  grafana:
    image: grafana/grafana:dev
    volumes:
      - ./grafana/provisioning/:/etc/grafana/provisioning/
    environment:
      - VIRTUAL_HOST=grafana.loc
      - GF_SERVER_ROOT_URL=http://grafana.loc
      - GF_DATABASE_TYPE=mysql
      - GF_DATABASE_HOST=mysql:3306
      - GF_DATABASE_NAME=grafana
      - GF_DATABASE_USER=grafana
      - GF_DATABASE_PASSWORD=password
      - GF_SESSION_PROVIDER=mysql
      - GF_SESSION_PROVIDER_CONFIG=grafana:password@tcp(mysql:3306)/grafana?allowNativePasswords=true
    ports:
      - 3000
    depends_on:
      mysql:
        condition: service_healthy

  prometheus:
    image: prom/prometheus:v2.4.2
    volumes:
      - ./prometheus/:/etc/prometheus/
    environment:
      - VIRTUAL_HOST=prometheus.loc
    ports:
      - 9090

#  mysqld-exporter:
#    image: prom/mysqld-exporter
#    environment:
#      - DATA_SOURCE_NAME=grafana:password@(mysql:3306)/
#    ports:
#      - 9104
devenv/docker/ha_test/grafana/provisioning/alerts.jsonnet | 202 (new file)
@@ -0,0 +1,202 @@
local numAlerts = std.extVar('alerts');
local condition = std.extVar('condition');
local arr = std.range(1, numAlerts);

local alertDashboardTemplate = {
  "editable": true,
  "gnetId": null,
  "graphTooltip": 0,
  "id": null,
  "links": [],
  "panels": [
    {
      "alert": {
        "conditions": [
          {
            "evaluator": {
              "params": [
                65
              ],
              "type": "gt"
            },
            "operator": {
              "type": "and"
            },
            "query": {
              "params": [
                "A",
                "5m",
                "now"
              ]
            },
            "reducer": {
              "params": [],
              "type": "avg"
            },
            "type": "query"
          }
        ],
        "executionErrorState": "alerting",
        "frequency": "10s",
        "handler": 1,
        "name": "bulk alerting",
        "noDataState": "no_data",
        "notifications": [
          {
            "id": 2
          }
        ]
      },
      "aliasColors": {},
      "bars": false,
      "dashLength": 10,
      "dashes": false,
      "datasource": "Prometheus",
      "fill": 1,
      "gridPos": {
        "h": 9,
        "w": 12,
        "x": 0,
        "y": 0
      },
      "id": 2,
      "legend": {
        "avg": false,
        "current": false,
        "max": false,
        "min": false,
        "show": true,
        "total": false,
        "values": false
      },
      "lines": true,
      "linewidth": 1,
      "nullPointMode": "null",
      "percentage": false,
      "pointradius": 5,
      "points": false,
      "renderer": "flot",
      "seriesOverrides": [],
      "spaceLength": 10,
      "stack": false,
      "steppedLine": false,
      "targets": [
        {
          "$$hashKey": "object:117",
          "expr": "go_goroutines",
          "format": "time_series",
          "intervalFactor": 1,
          "refId": "A"
        }
      ],
      "thresholds": [
        {
          "colorMode": "critical",
          "fill": true,
          "line": true,
          "op": "gt",
          "value": 50
        }
      ],
      "timeFrom": null,
      "timeShift": null,
      "title": "Panel Title",
      "tooltip": {
        "shared": true,
        "sort": 0,
        "value_type": "individual"
      },
      "type": "graph",
      "xaxis": {
        "buckets": null,
        "mode": "time",
        "name": null,
        "show": true,
        "values": []
      },
      "yaxes": [
        {
          "format": "short",
          "label": null,
          "logBase": 1,
          "max": null,
          "min": null,
          "show": true
        },
        {
          "format": "short",
          "label": null,
          "logBase": 1,
          "max": null,
          "min": null,
          "show": true
        }
      ]
    }
  ],
  "schemaVersion": 16,
  "style": "dark",
  "tags": [],
  "templating": {
    "list": []
  },
  "time": {
    "from": "now-6h",
    "to": "now"
  },
  "timepicker": {
    "refresh_intervals": [
      "5s",
      "10s",
      "30s",
      "1m",
      "5m",
      "15m",
      "30m",
      "1h",
      "2h",
      "1d"
    ],
    "time_options": [
      "5m",
      "15m",
      "1h",
      "6h",
      "12h",
      "24h",
      "2d",
      "7d",
      "30d"
    ]
  },
  "timezone": "",
  "title": "New dashboard",
  "uid": null,
  "version": 0
};


{
  ['alert-' + std.toString(x) + '.json']:
    alertDashboardTemplate + {
      panels: [
        alertDashboardTemplate.panels[0] +
        {
          alert+: {
            name: 'Alert rule ' + x,
            conditions: [
              alertDashboardTemplate.panels[0].alert.conditions[0] +
              {
                evaluator+: {
                  params: [condition]
                }
              },
            ],
          },
        },
      ],
      uid: 'alert-' + x,
      title: 'Alert ' + x
    },
  for x in arr
}
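The closing object comprehension is what makes `jsonnet -m` emit one dashboard file per alert rule. A stripped-down sketch of the same multi-file output pattern (field values here are hypothetical):

```jsonnet
// Render with: jsonnet -m <outdir> this_file.jsonnet --ext-code alerts=3
// Each key of the top-level object becomes a separate output file.
local count = std.extVar('alerts');

{
  ['alert-' + std.toString(x) + '.json']: {
    uid: 'alert-' + x,
    title: 'Alert ' + x,
  },
  for x in std.range(1, count)
}
```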
@@ -0,0 +1,8 @@
apiVersion: 1

providers:
  - name: 'Alerts'
    folder: 'Alerts'
    type: file
    options:
      path: /etc/grafana/provisioning/dashboards/alerts
@@ -0,0 +1,172 @@
{
  "annotations": {
    "list": [
      {
        "builtIn": 1,
        "datasource": "-- Grafana --",
        "enable": true,
        "hide": true,
        "iconColor": "rgba(0, 211, 255, 1)",
        "name": "Annotations & Alerts",
        "type": "dashboard"
      }
    ]
  },
  "editable": true,
  "gnetId": null,
  "graphTooltip": 0,
  "links": [],
  "panels": [
    {
      "aliasColors": {
        "Active alerts": "#bf1b00"
      },
      "bars": false,
      "dashLength": 10,
      "dashes": false,
      "datasource": "Prometheus",
      "fill": 1,
      "gridPos": {
        "h": 12,
        "w": 24,
        "x": 0,
        "y": 0
      },
      "id": 2,
      "interval": "",
      "legend": {
        "alignAsTable": true,
        "avg": false,
        "current": true,
        "max": false,
        "min": false,
        "rightSide": true,
        "show": true,
        "total": false,
        "values": true
      },
      "lines": true,
      "linewidth": 2,
      "links": [],
      "nullPointMode": "null",
      "percentage": false,
      "pointradius": 5,
      "points": false,
      "renderer": "flot",
      "seriesOverrides": [
        {
          "alias": "Active grafana instances",
          "dashes": true,
          "fill": 0
        }
      ],
      "spaceLength": 10,
      "stack": false,
      "steppedLine": false,
      "targets": [
        {
          "expr": "sum(increase(grafana_alerting_notification_sent_total[1m])) by(job)",
          "format": "time_series",
          "instant": false,
          "interval": "1m",
          "intervalFactor": 1,
          "legendFormat": "Notifications sent",
          "refId": "A"
        },
        {
          "expr": "min(grafana_alerting_active_alerts) without(instance)",
          "format": "time_series",
          "interval": "1m",
          "intervalFactor": 1,
          "legendFormat": "Active alerts",
          "refId": "B"
        },
        {
          "expr": "count(up{job=\"grafana\"})",
          "format": "time_series",
          "intervalFactor": 1,
          "legendFormat": "Active grafana instances",
          "refId": "C"
        }
      ],
      "thresholds": [],
      "timeFrom": null,
      "timeShift": null,
      "title": "Notifications sent vs active alerts",
      "tooltip": {
        "shared": true,
        "sort": 0,
        "value_type": "individual"
      },
      "type": "graph",
      "xaxis": {
        "buckets": null,
        "mode": "time",
        "name": null,
        "show": true,
        "values": []
      },
      "yaxes": [
        {
          "format": "short",
          "label": null,
          "logBase": 1,
          "max": null,
          "min": "0",
          "show": true
        },
        {
          "format": "short",
          "label": null,
          "logBase": 1,
          "max": null,
          "min": null,
          "show": true
        }
      ],
      "yaxis": {
        "align": false,
        "alignLevel": 3
      }
    }
  ],
  "schemaVersion": 16,
  "style": "dark",
  "tags": [],
  "templating": {
    "list": []
  },
  "time": {
    "from": "now-1h",
    "to": "now"
  },
  "timepicker": {
    "refresh_intervals": [
      "5s",
      "10s",
      "30s",
      "1m",
      "5m",
      "15m",
      "30m",
      "1h",
      "2h",
      "1d"
    ],
    "time_options": [
      "5m",
      "15m",
      "1h",
      "6h",
      "12h",
      "24h",
      "2d",
      "7d",
      "30d"
    ]
  },
  "timezone": "",
  "title": "Overview",
  "uid": "xHy7-hAik",
  "version": 6
}
@@ -0,0 +1,11 @@
apiVersion: 1

datasources:
  - name: Prometheus
    type: prometheus
    access: proxy
    url: http://prometheus:9090
    jsonData:
      timeInterval: 10s
      queryTimeout: 30s
      httpMethod: POST
devenv/docker/ha_test/prometheus/prometheus.yml | 39 (new file)
@@ -0,0 +1,39 @@
# my global config
global:
  scrape_interval: 10s # Scrape targets every 10 seconds.
  evaluation_interval: 10s # Evaluate rules every 10 seconds.
  # scrape_timeout is set to the global default (10s).

# Load and evaluate rules in this file every 'evaluation_interval' seconds.
#rule_files:
#  - "alert.rules"
#  - "first.rules"
#  - "second.rules"

# alerting:
#   alertmanagers:
#   - scheme: http
#     static_configs:
#     - targets:
#       - "127.0.0.1:9093"

scrape_configs:
  - job_name: 'prometheus'
    static_configs:
      - targets: ['localhost:9090']

  - job_name: 'grafana'
    dns_sd_configs:
      - names:
          - 'grafana'
        type: 'A'
        port: 3000
        refresh_interval: 10s

#  - job_name: 'mysql'
#    dns_sd_configs:
#      - names:
#          - 'mysqld-exporter'
#        type: 'A'
#        port: 9104
#        refresh_interval: 10s
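With the `dns_sd_configs` block above, Prometheus resolves the Compose service name `grafana` via DNS and scrapes every replica, so scaled-up instances appear as targets automatically. A quick way to sanity-check this, assuming the nginx virtual host from the README and Prometheus's standard v1 HTTP API (the expression is the same one the provisioned Overview dashboard uses):

```bash
# Count the grafana targets that are currently up.
curl -s 'http://prometheus.loc/api/v1/query?query=count(up{job="grafana"})'
```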
@@ -11,7 +11,7 @@ bulkDashboard() {
 		let COUNTER=COUNTER+1
 	done

-	ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
+	ln -s -f ../../../devenv/bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
 }

 bulkAlertingDashboard() {
@@ -25,7 +25,7 @@ bulkAlertingDashboard() {
 		let COUNTER=COUNTER+1
 	done

-	ln -s -f -r ./bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
+	ln -s -f ../../../devenv/bulk_alerting_dashboards/bulk_alerting_dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
 }

 requiresJsonnet() {
@@ -200,7 +200,7 @@ providers:
     folder: ''
     type: file
     disableDeletion: false
-    updateIntervalSeconds: 3 # how often Grafana will scan for changed dashboards
+    updateIntervalSeconds: 10 # how often Grafana will scan for changed dashboards
     options:
       path: /var/lib/grafana/dashboards
 ```
@@ -181,6 +181,7 @@ group_search_filter = "(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU
+group_search_filter = "(|(member:1.2.840.113556.1.4.1941:=CN=%s,[user container/OU])(member:1.2.840.113556.1.4.1941:=CN=%s,[another user container/OU]))"
 group_search_filter_user_attribute = "cn"
 ```

 For more information on AD searches see [Microsoft's Search Filter Syntax](https://docs.microsoft.com/en-us/windows/desktop/adsi/search-filter-syntax) documentation.

 For troubleshooting, changing `member_of` in `[servers.attributes]` to "dn" will show more accurate group memberships when [debug is enabled](#troubleshooting).
docs/sources/features/datasources/stackdriver.md | 171 (new file)
@@ -0,0 +1,171 @@
+++
title = "Using Stackdriver in Grafana"
description = "Guide for using Stackdriver in Grafana"
keywords = ["grafana", "stackdriver", "google", "guide"]
type = "docs"
aliases = ["/datasources/stackdriver"]
[menu.docs]
name = "Stackdriver"
parent = "datasources"
weight = 11
+++

# Using Google Stackdriver in Grafana

> Only available in Grafana v5.3+.
> The datasource is currently a beta feature and is subject to change.

Grafana ships with built-in support for Google Stackdriver. Just add it as a datasource and you are ready to build dashboards for your Stackdriver metrics.

## Adding the data source to Grafana

1. Open the side menu by clicking the Grafana icon in the top header.
2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`.
3. Click the `+ Add data source` button in the top header.
4. Select `Stackdriver` from the *Type* dropdown.
5. Upload or paste in the Service Account Key file. See below for steps on how to create a Service Account Key file.

> NOTE: If you're not seeing the `Data Sources` link in your side menu it means that your current user does not have the `Admin` role for the current organization.

| Name                  | Description                                                                           |
| --------------------- | ------------------------------------------------------------------------------------- |
| _Name_                | The datasource name. This is how you refer to the datasource in panels & queries.     |
| _Default_             | Default datasource means that it will be pre-selected for new panels.                 |
| _Service Account Key_ | Service Account Key File for a GCP Project. Instructions below on how to create it.   |

## Authentication

### Service Account Credentials - Private Key File

To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to visualize data from multiple GCP Projects then you need to create one datasource per GCP Project.

#### Enable APIs

The following APIs need to be enabled first:

- [Monitoring API](https://console.cloud.google.com/apis/library/monitoring.googleapis.com)
- [Cloud Resource Manager API](https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com)

Click on the links above and click the `Enable` button:

![Enable GCP APIs](/img/docs/v53/stackdriver_enable_api.png)

#### Create a GCP Service Account for a Project

1. Navigate to the [APIs & Services Credentials page](https://console.cloud.google.com/apis/credentials).
2. Click on the `Create credentials` dropdown/button and choose the `Service account key` option.

![Create service account button](/img/docs/v53/stackdriver_create_service_account_button.png)
3. On the `Create service account key` page, choose key type `JSON`. Then in the `Service Account` dropdown, choose the `New service account` option:

![Create service account key](/img/docs/v53/stackdriver_create_service_account_key.png)
4. Some new fields will appear. Fill in a name for the service account in the `Service account name` field and then choose the `Monitoring Viewer` role from the `Role` dropdown:

![Choose role](/img/docs/v53/stackdriver_service_account_choose_role.png)
5. Click the Create button. A JSON key file will be created and downloaded to your computer. Store this file in a secure place as it allows access to your Stackdriver data.
6. Upload it to Grafana on the datasource Configuration page. You can either upload the file or paste in the contents of the file.

![Upload key](/img/docs/v53/stackdriver_grafana_upload_key.png)
7. The file contents will be encrypted and saved in the Grafana database. Don't forget to save after uploading the file!

![Key uploaded](/img/docs/v53/stackdriver_grafana_key_uploaded.png)

## Metric Query Editor

Choose a metric from the `Metric` dropdown.

To add a filter, click the plus icon and choose a field to filter by and enter a filter value e.g. `instance_name = grafana-1`

### Aggregation

The aggregation field lets you combine time series based on common statistics. Read more about this option [here](https://cloud.google.com/monitoring/charts/metrics-selector#aggregation-options).

The `Aligner` field allows you to align multiple time series after the same group by time interval. Read more about how it works [here](https://cloud.google.com/monitoring/charts/metrics-selector#alignment).

#### Alignment Period/Group by Time

The `Alignment Period` groups a metric by time if an aggregation is chosen. The default is to use the GCP Stackdriver default groupings (which allows you to compare graphs in Grafana with graphs in the Stackdriver UI).
The option is called `Stackdriver auto` and the defaults are:

- 1m for time ranges < 23 hours
- 5m for time ranges >= 23 hours and < 6 days
- 1h for time ranges >= 6 days

The other automatic option is `Grafana auto`. This will automatically set the group by time depending on the time range chosen and the width of the graph panel. Read more about the details [here](http://docs.grafana.org/reference/templating/#the-interval-variable).

It is also possible to choose fixed time intervals to group by, like `1h` or `1d`.

### Group By

Group by resource or metric labels to reduce the number of time series and to aggregate the results. E.g. group by instance_name to see an aggregated metric for a Compute instance.

### Alias Patterns

The Alias By field allows you to control the format of the legend keys. The default is to show the metric name and labels. This can be long and hard to read. Using the following patterns in the alias field, you can format the legend key the way you want it.

#### Metric Type Patterns

Alias Pattern         | Description                  | Example Result
--------------------- | ---------------------------- | -------------
`{{metric.type}}`     | returns the full Metric Type | `compute.googleapis.com/instance/cpu/utilization`
`{{metric.name}}`     | returns the metric name part | `instance/cpu/utilization`
`{{metric.service}}`  | returns the service part     | `compute`

#### Label Patterns

In the Group By dropdown, you can see a list of metric and resource labels for a metric. These can be included in the legend key using alias patterns.

Alias Pattern Format     | Description                      | Alias Pattern Example            | Example Result
------------------------ | -------------------------------- | -------------------------------- | -------------
`{{metric.label.xxx}}`   | returns the metric label value   | `{{metric.label.instance_name}}` | `grafana-1-prod`
`{{resource.label.xxx}}` | returns the resource label value | `{{resource.label.zone}}`        | `us-east1-b`

Example Alias By: `{{metric.type}} - {{metric.label.instance_name}}`

Example Result: `compute.googleapis.com/instance/cpu/usage_time - server1-prod`

## Templating

Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place.
Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data
being displayed in your dashboard.

Check out the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different
types of template variables.

### Query Variable

Writing variable queries is not supported yet.

### Using variables in queries

There are two syntaxes:

- `$<varname>` Example: rate(http_requests_total{job=~"$job"}[5m])
- `[[varname]]` Example: rate(http_requests_total{job=~"[[job]]"}[5m])

Why two ways? The first syntax is easier to read and write but does not allow you to use a variable in the middle of a word. When the *Multi-value* or *Include all value* options are enabled, Grafana converts the labels from plain text to a regex compatible string, which means you have to use `=~` instead of `=`.

## Annotations

[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation
queries via the Dashboard menu / Annotations view.

## Configure the Datasource with Provisioning

It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources)

Here is a provisioning example for this datasource.

```yaml
apiVersion: 1

datasources:
  - name: Stackdriver
    type: stackdriver
    jsonData:
      tokenUri: https://oauth2.googleapis.com/token
      clientEmail: stackdriver@myproject.iam.gserviceaccount.com
    secureJsonData:
      privateKey: "<contents of your Service Account JWT Key file>"
```
@@ -566,3 +566,11 @@ Default setting for new alert rules. Defaults to categorize error and timeouts a
 > Available in 5.3 and above

 Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
+
+# concurrent_render_limit
+
+> Available in 5.3 and above
+
+Alert notifications can include images, but rendering many images at the same time can overload the server.
+This limit will protect the server from render overloading and make sure notifications are sent out quickly. Default
+value is `5`.
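For reference, the two settings documented above map to `grafana.ini` keys; a minimal sketch showing the documented defaults (the `[alerting]` section name is taken from Grafana's stock configuration, as shown in the defaults.ini hunk earlier):

```ini
[alerting]
; How nodata or null values are handled: alerting, no_data, keep_state, ok
nodata_or_nullvalues = no_data
; Maximum number of alert notification images rendered at the same time
concurrent_render_limit = 5
```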
@@ -1,4 +1,5 @@
 [
+  { "version": "v5.3", "path": "/v5.3", "archived": false, "current": false },
   { "version": "v5.2", "path": "/", "archived": false, "current": true },
   { "version": "v5.1", "path": "/v5.1", "archived": true },
   { "version": "v5.0", "path": "/v5.0", "archived": true },
@@ -12,7 +12,7 @@
   "devDependencies": {
     "@types/d3": "^4.10.1",
     "@types/enzyme": "^3.1.13",
-    "@types/jest": "^21.1.4",
+    "@types/jest": "^23.3.2",
     "@types/node": "^8.0.31",
     "@types/react": "^16.4.14",
     "@types/react-custom-scrollbars": "^4.0.5",
@@ -97,15 +97,6 @@ type CacheServer struct {
 	cache *gocache.Cache
 }

-func (this *CacheServer) mustInt(r *http.Request, defaultValue int, keys ...string) (v int) {
-	for _, k := range keys {
-		if _, err := fmt.Sscanf(r.FormValue(k), "%d", &v); err == nil {
-			defaultValue = v
-		}
-	}
-	return defaultValue
-}
-
 func (this *CacheServer) Handler(ctx *macaron.Context) {
 	urlPath := ctx.Req.URL.Path
 	hash := urlPath[strings.LastIndex(urlPath, "/")+1:]
@@ -22,6 +22,10 @@ import (
 	"github.com/grafana/grafana/pkg/util"
 )

+const (
+	anonString = "Anonymous"
+)
+
 func isDashboardStarredByUser(c *m.ReqContext, dashID int64) (bool, error) {
 	if !c.IsSignedIn {
 		return false, nil
@@ -64,7 +68,7 @@ func GetDashboard(c *m.ReqContext) Response {
 	}

 	// Finding creator and last updater of the dashboard
-	updater, creator := "Anonymous", "Anonymous"
+	updater, creator := anonString, anonString
 	if dash.UpdatedBy > 0 {
 		updater = getUserLogin(dash.UpdatedBy)
 	}
@@ -128,7 +132,7 @@ func getUserLogin(userID int64) string {
 	query := m.GetUserByIdQuery{Id: userID}
 	err := bus.Dispatch(&query)
 	if err != nil {
-		return "Anonymous"
+		return anonString
 	}
 	return query.Result.Login
 }
@@ -403,7 +407,7 @@ func GetDashboardVersion(c *m.ReqContext) Response {
 		return Error(500, fmt.Sprintf("Dashboard version %d not found for dashboardId %d", query.Version, dashID), err)
 	}

-	creator := "Anonymous"
+	creator := anonString
 	if query.Result.CreatedBy > 0 {
 		creator = getUserLogin(query.Result.CreatedBy)
 	}
@@ -95,7 +95,7 @@ func toFolderDto(g guardian.DashboardGuardian, folder *m.Folder) dtos.Folder {
 	canAdmin, _ := g.CanAdmin()

 	// Finding creator and last updater of the folder
-	updater, creator := "Anonymous", "Anonymous"
+	updater, creator := anonString, anonString
 	if folder.CreatedBy > 0 {
 		creator = getUserLogin(folder.CreatedBy)
 	}
@@ -133,16 +133,6 @@ func TestFoldersApiEndpoint(t *testing.T) {
 		})
 }

-func callGetFolderByUID(sc *scenarioContext) {
-	sc.handlerFunc = GetFolderByUID
-	sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
-}
-
-func callDeleteFolder(sc *scenarioContext) {
-	sc.handlerFunc = DeleteFolder
-	sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
-}
-
 func callCreateFolder(sc *scenarioContext) {
 	sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
 }
@@ -11,6 +11,12 @@ import (
 	"github.com/grafana/grafana/pkg/setting"
 )

+const (
+	// Themes
+	lightName = "light"
+	darkName  = "dark"
+)
+
 func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
 	settings, err := getFrontendSettingsMap(c)
 	if err != nil {
@@ -60,7 +66,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
 		OrgRole:        c.OrgRole,
 		GravatarUrl:    dtos.GetGravatarUrl(c.Email),
 		IsGrafanaAdmin: c.IsGrafanaAdmin,
-		LightTheme:     prefs.Theme == "light",
+		LightTheme:     prefs.Theme == lightName,
 		Timezone:       prefs.Timezone,
 		Locale:         locale,
 		HelpFlags1:     c.HelpFlags1,
@@ -88,12 +94,12 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
 	}

 	themeURLParam := c.Query("theme")
-	if themeURLParam == "light" {
+	if themeURLParam == lightName {
 		data.User.LightTheme = true
-		data.Theme = "light"
-	} else if themeURLParam == "dark" {
+		data.Theme = lightName
+	} else if themeURLParam == darkName {
 		data.User.LightTheme = false
-		data.Theme = "dark"
+		data.Theme = darkName
 	}

 	if hasEditPermissionInFoldersQuery.Result {
@@ -37,9 +37,6 @@ func newHub() *hub {
 	}
 }

-func (h *hub) removeConnection() {
-}
-
 func (h *hub) run(ctx context.Context) {
 	for {
 		select {
pkg/api/pluginproxy/access_token_provider.go | 171 (new file)
@@ -0,0 +1,171 @@
package pluginproxy

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"strconv"
	"sync"
	"time"

	"golang.org/x/oauth2"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"golang.org/x/oauth2/jwt"
)

var (
	tokenCache = tokenCacheType{
		cache: map[string]*jwtToken{},
	}
	oauthJwtTokenCache = oauthJwtTokenCacheType{
		cache: map[string]*oauth2.Token{},
	}
)

type tokenCacheType struct {
	cache map[string]*jwtToken
	sync.Mutex
}

type oauthJwtTokenCacheType struct {
	cache map[string]*oauth2.Token
	sync.Mutex
}

type accessTokenProvider struct {
	route             *plugins.AppPluginRoute
	datasourceId      int64
	datasourceVersion int
}

type jwtToken struct {
	ExpiresOn       time.Time `json:"-"`
	ExpiresOnString string    `json:"expires_on"`
	AccessToken     string    `json:"access_token"`
}

func newAccessTokenProvider(ds *models.DataSource, pluginRoute *plugins.AppPluginRoute) *accessTokenProvider {
	return &accessTokenProvider{
		datasourceId:      ds.Id,
		datasourceVersion: ds.Version,
		route:             pluginRoute,
	}
}

func (provider *accessTokenProvider) getAccessToken(data templateData) (string, error) {
	tokenCache.Lock()
	defer tokenCache.Unlock()
	if cachedToken, found := tokenCache.cache[provider.getAccessTokenCacheKey()]; found {
		if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) {
			logger.Info("Using token from cache")
			return cachedToken.AccessToken, nil
		}
	}

	urlInterpolated, err := interpolateString(provider.route.TokenAuth.Url, data)
	if err != nil {
		return "", err
	}

	params := make(url.Values)
	for key, value := range provider.route.TokenAuth.Params {
		interpolatedParam, err := interpolateString(value, data)
		if err != nil {
			return "", err
		}
		params.Add(key, interpolatedParam)
	}

	getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode()))
	getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode())))

	resp, err := client.Do(getTokenReq)
	if err != nil {
		return "", err
	}

	defer resp.Body.Close()

	var token jwtToken
	if err := json.NewDecoder(resp.Body).Decode(&token); err != nil {
		return "", err
	}

	expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64)
	token.ExpiresOn = time.Unix(expiresOnEpoch, 0)
	tokenCache.cache[provider.getAccessTokenCacheKey()] = &token

	logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn)

	return token.AccessToken, nil
}

func (provider *accessTokenProvider) getJwtAccessToken(ctx context.Context, data templateData) (string, error) {
	oauthJwtTokenCache.Lock()
	defer oauthJwtTokenCache.Unlock()
	if cachedToken, found := oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()]; found {
		if cachedToken.Expiry.After(time.Now().Add(time.Second * 10)) {
			logger.Debug("Using token from cache")
			return cachedToken.AccessToken, nil
		}
	}

	conf := &jwt.Config{}

	if val, ok := provider.route.JwtTokenAuth.Params["client_email"]; ok {
		interpolatedVal, err := interpolateString(val, data)
		if err != nil {
			return "", err
		}
		conf.Email = interpolatedVal
	}

	if val, ok := provider.route.JwtTokenAuth.Params["private_key"]; ok {
		interpolatedVal, err := interpolateString(val, data)
		if err != nil {
			return "", err
		}
		conf.PrivateKey = []byte(interpolatedVal)
	}

	if val, ok := provider.route.JwtTokenAuth.Params["token_uri"]; ok {
		interpolatedVal, err := interpolateString(val, data)
		if err != nil {
			return "", err
		}
		conf.TokenURL = interpolatedVal
	}

	conf.Scopes = provider.route.JwtTokenAuth.Scopes

	token, err := getTokenSource(conf, ctx)
	if err != nil {
		return "", err
	}

	oauthJwtTokenCache.cache[provider.getAccessTokenCacheKey()] = token

	logger.Info("Got new access token", "ExpiresOn", token.Expiry)

	return token.AccessToken, nil
}

var getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
	tokenSrc := conf.TokenSource(ctx)
	token, err := tokenSrc.Token()
	if err != nil {
		return nil, err
	}

	return token, nil
}

func (provider *accessTokenProvider) getAccessTokenCacheKey() string {
	return fmt.Sprintf("%v_%v_%v_%v", provider.datasourceId, provider.datasourceVersion, provider.route.Path, provider.route.Method)
}
pkg/api/pluginproxy/access_token_provider_test.go | 94 (new file)
@@ -0,0 +1,94 @@
package pluginproxy

import (
	"context"
	"testing"
	"time"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	. "github.com/smartystreets/goconvey/convey"
	"golang.org/x/oauth2"
	"golang.org/x/oauth2/jwt"
)

func TestAccessToken(t *testing.T) {
	Convey("Plugin with JWT token auth route", t, func() {
		pluginRoute := &plugins.AppPluginRoute{
			Path:   "pathwithjwttoken1",
			Url:    "https://api.jwt.io/some/path",
			Method: "GET",
			JwtTokenAuth: &plugins.JwtTokenAuth{
				Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token",
				Scopes: []string{
					"https://www.testapi.com/auth/monitoring.read",
					"https://www.testapi.com/auth/cloudplatformprojects.readonly",
				},
				Params: map[string]string{
					"token_uri":    "{{.JsonData.tokenUri}}",
					"client_email": "{{.JsonData.clientEmail}}",
					"private_key":  "{{.SecureJsonData.privateKey}}",
				},
			},
		}

		templateData := templateData{
			JsonData: map[string]interface{}{
				"clientEmail": "test@test.com",
				"tokenUri":    "login.url.com/token",
			},
			SecureJsonData: map[string]string{
				"privateKey": "testkey",
			},
		}

		ds := &models.DataSource{Id: 1, Version: 2}

		Convey("should fetch token using jwt private key", func() {
			getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
				return &oauth2.Token{AccessToken: "abc"}, nil
			}
			provider := newAccessTokenProvider(ds, pluginRoute)
			token, err := provider.getJwtAccessToken(context.Background(), templateData)
			So(err, ShouldBeNil)

			So(token, ShouldEqual, "abc")
		})

		Convey("should set jwt config values", func() {
			getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
				So(conf.Email, ShouldEqual, "test@test.com")
				So(conf.PrivateKey, ShouldResemble, []byte("testkey"))
				So(len(conf.Scopes), ShouldEqual, 2)
				So(conf.Scopes[0], ShouldEqual, "https://www.testapi.com/auth/monitoring.read")
				So(conf.Scopes[1], ShouldEqual, "https://www.testapi.com/auth/cloudplatformprojects.readonly")
				So(conf.TokenURL, ShouldEqual, "login.url.com/token")

				return &oauth2.Token{AccessToken: "abc"}, nil
			}

			provider := newAccessTokenProvider(ds, pluginRoute)
			_, err := provider.getJwtAccessToken(context.Background(), templateData)
			So(err, ShouldBeNil)
		})

		Convey("should use cached token on second call", func() {
			getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
				return &oauth2.Token{
					AccessToken: "abc",
					Expiry:      time.Now().Add(1 * time.Minute)}, nil
			}
			provider := newAccessTokenProvider(ds, pluginRoute)
			token1, err := provider.getJwtAccessToken(context.Background(), templateData)
			So(err, ShouldBeNil)
			So(token1, ShouldEqual, "abc")

			getTokenSource = func(conf *jwt.Config, ctx context.Context) (*oauth2.Token, error) {
				return &oauth2.Token{AccessToken: "error: cache not used"}, nil
			}
			token2, err := provider.getJwtAccessToken(context.Background(), templateData)
			So(err, ShouldBeNil)
			So(token2, ShouldEqual, "abc")
		})
	})
}
pkg/api/pluginproxy/ds_auth_provider.go | 93 (new file)
@@ -0,0 +1,93 @@
package pluginproxy

import (
	"bytes"
	"context"
	"fmt"
	"net/http"
	"net/url"
	"strings"
	"text/template"

	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/util"
)

// ApplyRoute should use the plugin route data to set auth headers and custom headers
func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *m.DataSource) {
	proxyPath = strings.TrimPrefix(proxyPath, route.Path)

	data := templateData{
		JsonData:       ds.JsonData.Interface().(map[string]interface{}),
		SecureJsonData: ds.SecureJsonData.Decrypt(),
	}

	interpolatedURL, err := interpolateString(route.Url, data)
	if err != nil {
		logger.Error("Error interpolating proxy url", "error", err)
		return
	}

	routeURL, err := url.Parse(interpolatedURL)
	if err != nil {
		logger.Error("Error parsing plugin route url", "error", err)
		return
	}

	req.URL.Scheme = routeURL.Scheme
	req.URL.Host = routeURL.Host
	req.Host = routeURL.Host
	req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxyPath)

	if err := addHeaders(&req.Header, route, data); err != nil {
		logger.Error("Failed to render plugin headers", "error", err)
	}

	tokenProvider := newAccessTokenProvider(ds, route)

	if route.TokenAuth != nil {
		if token, err := tokenProvider.getAccessToken(data); err != nil {
			logger.Error("Failed to get access token", "error", err)
		} else {
			req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
		}
	}

	if route.JwtTokenAuth != nil {
		if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
			logger.Error("Failed to get access token", "error", err)
		} else {
			req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
		}
	}
	logger.Info("Requesting", "url", req.URL.String())
}

func interpolateString(text string, data templateData) (string, error) {
	t, err := template.New("content").Parse(text)
	if err != nil {
		return "", fmt.Errorf("could not parse template %s", text)
	}

	var contentBuf bytes.Buffer
	err = t.Execute(&contentBuf, data)
	if err != nil {
		return "", fmt.Errorf("failed to execute template %s", text)
	}

	return contentBuf.String(), nil
}

func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error {
	for _, header := range route.Headers {
		interpolated, err := interpolateString(header.Content, data)
		if err != nil {
			return err
		}
		reqHeaders.Add(header.Name, interpolated)
	}

	return nil
}
pkg/api/pluginproxy/ds_auth_provider_test.go | 21 (new file)
@@ -0,0 +1,21 @@
package pluginproxy

import (
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func TestDsAuthProvider(t *testing.T) {
	Convey("When interpolating string", t, func() {
		data := templateData{
			SecureJsonData: map[string]string{
				"Test": "0asd+asd",
			},
		}

		interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data)
		So(err, ShouldBeNil)
		So(interpolated, ShouldEqual, "0asd+asd")
	})
}
@ -2,7 +2,6 @@ package pluginproxy

import (
    "bytes"
    "encoding/json"
    "errors"
    "fmt"
    "io/ioutil"
@ -12,7 +11,6 @@ import (
    "net/url"
    "strconv"
    "strings"
    "text/template"
    "time"

    "github.com/opentracing/opentracing-go"
@ -25,17 +23,10 @@ import (
)

var (
    logger     = log.New("data-proxy-log")
    tokenCache = map[string]*jwtToken{}
    client     = newHTTPClient()
    logger = log.New("data-proxy-log")
    client = newHTTPClient()
)

type jwtToken struct {
    ExpiresOn       time.Time `json:"-"`
    ExpiresOnString string    `json:"expires_on"`
    AccessToken     string    `json:"access_token"`
}

type DataSourceProxy struct {
    ds  *m.DataSource
    ctx *m.ReqContext
@ -162,7 +153,6 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
        } else {
            req.URL.Path = util.JoinUrlFragments(proxy.targetUrl.Path, proxy.proxyPath)
        }

        if proxy.ds.BasicAuth {
            req.Header.Del("Authorization")
            req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword))
@ -219,7 +209,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
        }

        if proxy.route != nil {
            proxy.applyRoute(req)
            ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds)
        }
    }
}
@ -311,120 +301,3 @@ func checkWhiteList(c *m.ReqContext, host string) bool {

    return true
}

func (proxy *DataSourceProxy) applyRoute(req *http.Request) {
    proxy.proxyPath = strings.TrimPrefix(proxy.proxyPath, proxy.route.Path)

    data := templateData{
        JsonData:       proxy.ds.JsonData.Interface().(map[string]interface{}),
        SecureJsonData: proxy.ds.SecureJsonData.Decrypt(),
    }

    interpolatedURL, err := interpolateString(proxy.route.Url, data)
    if err != nil {
        logger.Error("Error interpolating proxy url", "error", err)
        return
    }

    routeURL, err := url.Parse(interpolatedURL)
    if err != nil {
        logger.Error("Error parsing plugin route url", "error", err)
        return
    }

    req.URL.Scheme = routeURL.Scheme
    req.URL.Host = routeURL.Host
    req.Host = routeURL.Host
    req.URL.Path = util.JoinUrlFragments(routeURL.Path, proxy.proxyPath)

    if err := addHeaders(&req.Header, proxy.route, data); err != nil {
        logger.Error("Failed to render plugin headers", "error", err)
    }

    if proxy.route.TokenAuth != nil {
        if token, err := proxy.getAccessToken(data); err != nil {
            logger.Error("Failed to get access token", "error", err)
        } else {
            req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
        }
    }

    logger.Info("Requesting", "url", req.URL.String())
}

func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) {
    if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found {
        if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) {
            logger.Info("Using token from cache")
            return cachedToken.AccessToken, nil
        }
    }

    urlInterpolated, err := interpolateString(proxy.route.TokenAuth.Url, data)
    if err != nil {
        return "", err
    }

    params := make(url.Values)
    for key, value := range proxy.route.TokenAuth.Params {
        interpolatedParam, err := interpolateString(value, data)
        if err != nil {
            return "", err
        }
        params.Add(key, interpolatedParam)
    }

    getTokenReq, _ := http.NewRequest("POST", urlInterpolated, bytes.NewBufferString(params.Encode()))
    getTokenReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
    getTokenReq.Header.Add("Content-Length", strconv.Itoa(len(params.Encode())))

    resp, err := client.Do(getTokenReq)
    if err != nil {
        return "", err
    }

    defer resp.Body.Close()

    var token jwtToken
    if err := json.NewDecoder(resp.Body).Decode(&token); err != nil {
        return "", err
    }

    expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64)
    token.ExpiresOn = time.Unix(expiresOnEpoch, 0)
    tokenCache[proxy.getAccessTokenCacheKey()] = &token

    logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn)
    return token.AccessToken, nil
}

func (proxy *DataSourceProxy) getAccessTokenCacheKey() string {
    return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method)
}

func interpolateString(text string, data templateData) (string, error) {
    t, err := template.New("content").Parse(text)
    if err != nil {
        return "", fmt.Errorf("could not parse template %s", text)
    }

    var contentBuf bytes.Buffer
    err = t.Execute(&contentBuf, data)
    if err != nil {
        return "", fmt.Errorf("failed to execute template %s", text)
    }

    return contentBuf.String(), nil
}

func addHeaders(reqHeaders *http.Header, route *plugins.AppPluginRoute, data templateData) error {
    for _, header := range route.Headers {
        interpolated, err := interpolateString(header.Content, data)
        if err != nil {
            return err
        }
        reqHeaders.Add(header.Name, interpolated)
    }

    return nil
}
@ -83,7 +83,7 @@ func TestDSRouteRule(t *testing.T) {
        Convey("When matching route path", func() {
            proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method")
            proxy.route = plugin.Routes[0]
            proxy.applyRoute(req)
            ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds)

            Convey("should add headers and update url", func() {
                So(req.URL.String(), ShouldEqual, "https://www.google.com/some/method")
@ -94,7 +94,7 @@ func TestDSRouteRule(t *testing.T) {
        Convey("When matching route path and has dynamic url", func() {
            proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method")
            proxy.route = plugin.Routes[3]
            proxy.applyRoute(req)
            ApplyRoute(proxy.ctx.Req.Context(), req, proxy.proxyPath, proxy.route, proxy.ds)

            Convey("should add headers and interpolate the url", func() {
                So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method")
@ -188,7 +188,7 @@ func TestDSRouteRule(t *testing.T) {
            client = newFakeHTTPClient(json)
            proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1")
            proxy1.route = plugin.Routes[0]
            proxy1.applyRoute(req)
            ApplyRoute(proxy1.ctx.Req.Context(), req, proxy1.proxyPath, proxy1.route, proxy1.ds)

            authorizationHeaderCall1 = req.Header.Get("Authorization")
            So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path")
@ -202,7 +202,7 @@ func TestDSRouteRule(t *testing.T) {
                client = newFakeHTTPClient(json2)
                proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2")
                proxy2.route = plugin.Routes[1]
                proxy2.applyRoute(req)
                ApplyRoute(proxy2.ctx.Req.Context(), req, proxy2.proxyPath, proxy2.route, proxy2.ds)

                authorizationHeaderCall2 = req.Header.Get("Authorization")

@ -217,7 +217,7 @@ func TestDSRouteRule(t *testing.T) {
                    client = newFakeHTTPClient([]byte{})
                    proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1")
                    proxy3.route = plugin.Routes[0]
                    proxy3.applyRoute(req)
                    ApplyRoute(proxy3.ctx.Req.Context(), req, proxy3.proxyPath, proxy3.route, proxy3.ds)

                    authorizationHeaderCall3 := req.Header.Get("Authorization")
                    So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path")
@ -331,18 +331,6 @@ func TestDSRouteRule(t *testing.T) {
            })
        })

        Convey("When interpolating string", func() {
            data := templateData{
                SecureJsonData: map[string]string{
                    "Test": "0asd+asd",
                },
            }

            interpolated, err := interpolateString("{{.SecureJsonData.Test}}", data)
            So(err, ShouldBeNil)
            So(interpolated, ShouldEqual, "0asd+asd")
        })

        Convey("When proxying a data source with custom headers specified", func() {
            plugin := &plugins.DataSourcePlugin{}

@ -41,15 +41,16 @@ func (hs *HTTPServer) RenderToPng(c *m.ReqContext) {
    }

    result, err := hs.RenderService.Render(c.Req.Context(), rendering.Opts{
        Width:    width,
        Height:   height,
        Timeout:  time.Duration(timeout) * time.Second,
        OrgId:    c.OrgId,
        UserId:   c.UserId,
        OrgRole:  c.OrgRole,
        Path:     c.Params("*") + queryParams,
        Timezone: queryReader.Get("tz", ""),
        Encoding: queryReader.Get("encoding", ""),
        Width:           width,
        Height:          height,
        Timeout:         time.Duration(timeout) * time.Second,
        OrgId:           c.OrgId,
        UserId:          c.UserId,
        OrgRole:         c.OrgRole,
        Path:            c.Params("*") + queryParams,
        Timezone:        queryReader.Get("tz", ""),
        Encoding:        queryReader.Get("encoding", ""),
        ConcurrentLimit: 30,
    })

    if err != nil && err == rendering.ErrTimeout {
@ -6,6 +6,7 @@ import (

    "github.com/codegangsta/cli"
    "github.com/fatih/color"
    "github.com/grafana/grafana/pkg/bus"
    "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
    "github.com/grafana/grafana/pkg/services/sqlstore"
    "github.com/grafana/grafana/pkg/setting"
@ -24,6 +25,7 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli

        engine := &sqlstore.SqlStore{}
        engine.Cfg = cfg
        engine.Bus = bus.GetBus()
        engine.Init()

        if err := command(cmd); err != nil {
@ -29,6 +29,7 @@ import (
    _ "github.com/grafana/grafana/pkg/tsdb/opentsdb"
    _ "github.com/grafana/grafana/pkg/tsdb/postgres"
    _ "github.com/grafana/grafana/pkg/tsdb/prometheus"
    _ "github.com/grafana/grafana/pkg/tsdb/stackdriver"
    _ "github.com/grafana/grafana/pkg/tsdb/testdata"
)

@ -103,7 +104,7 @@ func listenToSystemSignals(server *GrafanaServerImpl) {

    for {
        select {
        case _ = <-sighupChan:
        case <-sighupChan:
            log.Reload()
        case sig := <-signalChan:
            server.Shutdown(fmt.Sprintf("System signal: %s", sig))
@ -127,8 +127,6 @@ type xmlError struct {
const ms_date_layout = "Mon, 02 Jan 2006 15:04:05 GMT"
const version = "2017-04-17"

var client = &http.Client{}

type StorageClient struct {
    Auth      *Auth
    Transport http.RoundTripper
@ -2,12 +2,15 @@ package imguploader

import (
    "context"
    "fmt"
    "os"
    "time"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/credentials"
    "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds"
    "github.com/aws/aws-sdk-go/aws/credentials/endpointcreds"
    "github.com/aws/aws-sdk-go/aws/defaults"
    "github.com/aws/aws-sdk-go/aws/ec2metadata"
    "github.com/aws/aws-sdk-go/aws/endpoints"
    "github.com/aws/aws-sdk-go/aws/session"
@ -50,7 +53,7 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string,
            SecretAccessKey: u.secretKey,
        }},
        &credentials.EnvProvider{},
        &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute},
        remoteCredProvider(sess),
    })
    cfg := &aws.Config{
        Region: aws.String(u.region),
@ -85,3 +88,27 @@ func (u *S3Uploader) Upload(ctx context.Context, imageDiskPath string) (string,
    }
    return image_url, nil
}

func remoteCredProvider(sess *session.Session) credentials.Provider {
    ecsCredURI := os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")

    if len(ecsCredURI) > 0 {
        return ecsCredProvider(sess, ecsCredURI)
    }
    return ec2RoleProvider(sess)
}

func ecsCredProvider(sess *session.Session, uri string) credentials.Provider {
    const host = `169.254.170.2`

    d := defaults.Get()
    return endpointcreds.NewProviderClient(
        *d.Config,
        d.Handlers,
        fmt.Sprintf("http://%s%s", host, uri),
        func(p *endpointcreds.Provider) { p.ExpiryWindow = 5 * time.Minute })
}

func ec2RoleProvider(sess *session.Session) credentials.Provider {
    return &ec2rolecreds.EC2RoleProvider{Client: ec2metadata.New(sess), ExpiryWindow: 5 * time.Minute}
}
@ -8,6 +8,10 @@ import (
    "strconv"
)

const (
    nullString = "null"
)

// Float is a nullable float64.
// It does not consider zero values to be null.
// It will decode to null, not zero, if null.
@ -68,7 +72,7 @@ func (f *Float) UnmarshalJSON(data []byte) error {
// It will return an error if the input is not an integer, blank, or "null".
func (f *Float) UnmarshalText(text []byte) error {
    str := string(text)
    if str == "" || str == "null" {
    if str == "" || str == nullString {
        f.Valid = false
        return nil
    }
@ -82,7 +86,7 @@ func (f *Float) UnmarshalText(text []byte) error {
// It will encode null if this Float is null.
func (f Float) MarshalJSON() ([]byte, error) {
    if !f.Valid {
        return []byte("null"), nil
        return []byte(nullString), nil
    }
    return []byte(strconv.FormatFloat(f.Float64, 'f', -1, 64)), nil
}
@ -100,7 +104,7 @@ func (f Float) MarshalText() ([]byte, error) {
// It will encode a blank string if this Float is null.
func (f Float) String() string {
    if !f.Valid {
        return "null"
        return nullString
    }

    return fmt.Sprintf("%1.3f", f.Float64)
@ -109,7 +113,7 @@ func (f Float) String() string {
// FullString returns float as string in full precision
func (f Float) FullString() string {
    if !f.Valid {
        return "null"
        return nullString
    }

    return fmt.Sprintf("%f", f.Float64)
@ -435,11 +435,6 @@ func (sc *scenarioContext) withValidApiKey() *scenarioContext {
    return sc
}

func (sc *scenarioContext) withInvalidApiKey() *scenarioContext {
    sc.apiKey = "nvalidhhhhds"
    return sc
}

func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext {
    sc.authHeader = authHeader
    return sc
@ -98,7 +98,7 @@ type GetLatestNotificationQuery struct {
    AlertId    int64
    NotifierId int64

    Result *AlertNotificationJournal
    Result []AlertNotificationJournal
}

type CleanNotificationJournalCommand struct {
@ -22,6 +22,7 @@ const (
    DS_MSSQL         = "mssql"
    DS_ACCESS_DIRECT = "direct"
    DS_ACCESS_PROXY  = "proxy"
    DS_STACKDRIVER   = "stackdriver"
)

var (
@ -70,12 +71,12 @@ var knownDatasourcePlugins = map[string]bool{
    DS_POSTGRES:    true,
    DS_MYSQL:       true,
    DS_MSSQL:       true,
    DS_STACKDRIVER: true,
    "opennms":      true,
    "abhisant-druid-datasource": true,
    "dalmatinerdb-datasource":   true,
    "gnocci":                    true,
    "zabbix":                    true,
    "alexanderzobnin-zabbix-datasource": true,
    "newrelic-app":                      true,
    "grafana-datadog-datasource":        true,
    "grafana-simple-json":               true,
@ -88,6 +89,7 @@ var knownDatasourcePlugins = map[string]bool{
    "ayoungprogrammer-finance-datasource": true,
    "monasca-datasource":                  true,
    "vertamedia-clickhouse-datasource":    true,
    "alexanderzobnin-zabbix-datasource":   true,
}

func IsKnownDataSourcePlugin(dsType string) bool {
@ -23,12 +23,13 @@ type AppPlugin struct {
}

type AppPluginRoute struct {
    Path    string                 `json:"path"`
    Method  string                 `json:"method"`
    ReqRole models.RoleType        `json:"reqRole"`
    Url     string                 `json:"url"`
    Headers []AppPluginRouteHeader `json:"headers"`
    TokenAuth *JwtTokenAuth        `json:"tokenAuth"`
    Path         string                 `json:"path"`
    Method       string                 `json:"method"`
    ReqRole      models.RoleType        `json:"reqRole"`
    Url          string                 `json:"url"`
    Headers      []AppPluginRouteHeader `json:"headers"`
    TokenAuth    *JwtTokenAuth          `json:"tokenAuth"`
    JwtTokenAuth *JwtTokenAuth          `json:"jwtTokenAuth"`
}

type AppPluginRouteHeader struct {
@ -36,8 +37,11 @@ type AppPluginRouteHeader struct {
    Content string `json:"content"`
}

// JwtTokenAuth struct is both for normal Token Auth and JWT Token Auth with
// an uploaded JWT file.
type JwtTokenAuth struct {
    Url    string            `json:"url"`
    Scopes []string          `json:"scopes"`
    Params map[string]string `json:"params"`
}

@ -11,6 +11,7 @@ import (
    "github.com/grafana/grafana/pkg/log"
    "github.com/grafana/grafana/pkg/metrics"
    "github.com/grafana/grafana/pkg/services/rendering"
    "github.com/grafana/grafana/pkg/setting"

    m "github.com/grafana/grafana/pkg/models"
)
@ -67,7 +68,7 @@ func (n *notificationService) sendNotifications(evalContext *EvalContext, notifi

    // Verify that we can send the notification again
    // but this time within the same transaction.
    if !evalContext.IsTestRun && !not.ShouldNotify(context.Background(), evalContext) {
    if !evalContext.IsTestRun && !not.ShouldNotify(ctx, evalContext) {
        return nil
    }

@ -108,11 +109,12 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
    }

    renderOpts := rendering.Opts{
        Width:   1000,
        Height:  500,
        Timeout: alertTimeout / 2,
        OrgId:   context.Rule.OrgId,
        OrgRole: m.ROLE_ADMIN,
        Width:           1000,
        Height:          500,
        Timeout:         alertTimeout / 2,
        OrgId:           context.Rule.OrgId,
        OrgRole:         m.ROLE_ADMIN,
        ConcurrentLimit: setting.AlertingRenderLimit,
    }

    ref, err := context.GetDashboardUID()
@ -11,6 +11,10 @@ import (
    "github.com/grafana/grafana/pkg/services/alerting"
)

const (
    triggMetrString = "Triggered metrics:\n\n"
)

type NotifierBase struct {
    Name string
    Type string
@ -42,12 +46,21 @@ func NewNotifierBase(model *models.AlertNotification) NotifierBase {
    }
}

func defaultShouldNotify(context *alerting.EvalContext, sendReminder bool, frequency time.Duration, lastNotify time.Time) bool {
func defaultShouldNotify(context *alerting.EvalContext, sendReminder bool, frequency time.Duration, journals []models.AlertNotificationJournal) bool {
    // Only notify on state change.
    if context.PrevAlertState == context.Rule.State && !sendReminder {
        return false
    }

    // get last successfully sent notification
    lastNotify := time.Time{}
    for _, j := range journals {
        if j.Success {
            lastNotify = time.Unix(j.SentAt, 0)
            break
        }
    }

    // Do not notify if interval has not elapsed
    if sendReminder && !lastNotify.IsZero() && lastNotify.Add(frequency).After(time.Now()) {
        return false
@ -75,20 +88,12 @@ func (n *NotifierBase) ShouldNotify(ctx context.Context, c *alerting.EvalContext
    }

    err := bus.DispatchCtx(ctx, cmd)
    if err == models.ErrJournalingNotFound {
        return true
    }

    if err != nil {
        n.log.Error("Could not determine last time alert notifier fired", "Alert name", c.Rule.Name, "Error", err)
        return false
    }

    if !cmd.Result.Success {
        return true
    }

    return defaultShouldNotify(c, n.SendReminder, n.Frequency, time.Unix(cmd.Result.SentAt, 0))
    return defaultShouldNotify(c, n.SendReminder, n.Frequency, cmd.Result)
}

func (n *NotifierBase) GetType() string {
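A hedged sketch of the journal-based reminder check introduced above, runnable on its own; journalEntry and shouldRemind are simplified stand-ins for the Grafana models and defaultShouldNotify (the state-change branch is omitted here):

package main

import (
    "fmt"
    "time"
)

// journalEntry is an illustrative stand-in for models.AlertNotificationJournal.
type journalEntry struct {
    SentAt  int64
    Success bool
}

// shouldRemind finds the most recent successful journal entry (entries are
// assumed newest first) and suppresses the reminder if the configured
// frequency has not yet elapsed since it was sent.
func shouldRemind(frequency time.Duration, journals []journalEntry) bool {
    lastNotify := time.Time{}
    for _, j := range journals {
        if j.Success {
            lastNotify = time.Unix(j.SentAt, 0)
            break
        }
    }
    // Do not notify if the interval has not elapsed since the last success.
    if !lastNotify.IsZero() && lastNotify.Add(frequency).After(time.Now()) {
        return false
    }
    return true
}

func main() {
    recent := []journalEntry{{SentAt: time.Now().Add(-time.Minute).Unix(), Success: true}}
    fmt.Println(shouldRemind(10*time.Minute, recent)) // false: sent one minute ago
}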
@ -15,51 +15,105 @@ import (
)

func TestShouldSendAlertNotification(t *testing.T) {
    tnow := time.Now()

    tcs := []struct {
        name      string
        prevState m.AlertStateType
        newState  m.AlertStateType
        expected     bool
        sendReminder bool
        frequency    time.Duration
        journals     []m.AlertNotificationJournal

        expect bool
    }{
        {
            name:      "pending -> ok should not trigger an notification",
            newState:  m.AlertStatePending,
            prevState: m.AlertStateOK,
            expected:  false,
            name:         "pending -> ok should not trigger an notification",
            newState:     m.AlertStatePending,
            prevState:    m.AlertStateOK,
            sendReminder: false,
            journals:     []m.AlertNotificationJournal{},

            expect: false,
        },
        {
            name:      "ok -> alerting should trigger an notification",
            newState:  m.AlertStateOK,
            prevState: m.AlertStateAlerting,
            expected:  true,
            name:         "ok -> alerting should trigger an notification",
            newState:     m.AlertStateOK,
            prevState:    m.AlertStateAlerting,
            sendReminder: false,
            journals:     []m.AlertNotificationJournal{},

            expect: true,
        },
        {
            name:      "ok -> pending should not trigger an notification",
            newState:  m.AlertStateOK,
            prevState: m.AlertStatePending,
            expected:  false,
            name:         "ok -> pending should not trigger an notification",
            newState:     m.AlertStateOK,
            prevState:    m.AlertStatePending,
            sendReminder: false,
            journals:     []m.AlertNotificationJournal{},

            expect: false,
        },
        {
            name:         "ok -> ok should not trigger an notification",
            newState:     m.AlertStateOK,
            prevState:    m.AlertStateOK,
            expected:     false,
            sendReminder: false,
            journals:     []m.AlertNotificationJournal{},

            expect: false,
        },
        {
            name: "ok -> alerting should not trigger an notification",
            name:         "ok -> alerting should trigger an notification",
            newState:     m.AlertStateOK,
            prevState:    m.AlertStateAlerting,
            expected:     true,
            sendReminder: true,
            journals:     []m.AlertNotificationJournal{},

            expect: true,
        },
        {
            name:         "ok -> ok with reminder should not trigger an notification",
            newState:     m.AlertStateOK,
            prevState:    m.AlertStateOK,
            expected:     false,
            sendReminder: true,
            journals:     []m.AlertNotificationJournal{},

            expect: false,
        },
        {
            name:         "alerting -> alerting with reminder and no journaling should trigger",
            newState:     m.AlertStateAlerting,
            prevState:    m.AlertStateAlerting,
            frequency:    time.Minute * 10,
            sendReminder: true,
            journals:     []m.AlertNotificationJournal{},

            expect: true,
        },
        {
            name:         "alerting -> alerting with reminder and successful recent journal event should not trigger",
            newState:     m.AlertStateAlerting,
            prevState:    m.AlertStateAlerting,
            frequency:    time.Minute * 10,
            sendReminder: true,
            journals: []m.AlertNotificationJournal{
                {SentAt: tnow.Add(-time.Minute).Unix(), Success: true},
            },

            expect: false,
        },
        {
            name:         "alerting -> alerting with reminder and failed recent journal event should trigger",
            newState:     m.AlertStateAlerting,
            prevState:    m.AlertStateAlerting,
            frequency:    time.Minute * 10,
            sendReminder: true,
            expect:       true,
            journals: []m.AlertNotificationJournal{
                {SentAt: tnow.Add(-time.Minute).Unix(), Success: false}, // recent failed notification
                {SentAt: tnow.Add(-time.Hour).Unix(), Success: true},    // old successful notification
            },
        },
    }

@ -69,8 +123,8 @@ func TestShouldSendAlertNotification(t *testing.T) {
        })

        evalContext.Rule.State = tc.prevState
        if defaultShouldNotify(evalContext, true, 0, time.Now()) != tc.expected {
            t.Errorf("failed %s. expected %+v to return %v", tc.name, tc, tc.expected)
        if defaultShouldNotify(evalContext, true, tc.frequency, tc.journals) != tc.expect {
            t.Errorf("failed test %s.\n expected \n%+v \nto return: %v", tc.name, tc, tc.expect)
        }
    }
}
@ -87,16 +141,6 @@ func TestShouldNotifyWhenNoJournalingIsFound(t *testing.T) {
        })
        evalContext := alerting.NewEvalContext(context.TODO(), &alerting.Rule{})

        Convey("should notify if no journaling is found", func() {
            bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error {
                return m.ErrJournalingNotFound
            })

            if !notifier.ShouldNotify(context.Background(), evalContext) {
                t.Errorf("should send notifications when ErrJournalingNotFound is returned")
            }
        })

        Convey("should not notify query returns error", func() {
            bus.AddHandlerCtx("", func(ctx context.Context, q *m.GetLatestNotificationQuery) error {
                return errors.New("some kind of error unknown error")
@ -61,7 +61,7 @@ func (this *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error {

    state := evalContext.Rule.State

    customData := "Triggered metrics:\n\n"
    customData := triggMetrString
    for _, evt := range evalContext.EvalMatches {
        customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
    }
@ -95,7 +95,7 @@ func (this *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) err
        return err
    }

    customData := "Triggered metrics:\n\n"
    customData := triggMetrString
    for _, evt := range evalContext.EvalMatches {
        customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
    }
@ -76,7 +76,7 @@ func (this *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
    if evalContext.Rule.State == m.AlertStateOK {
        eventType = "resolve"
    }
    customData := "Triggered metrics:\n\n"
    customData := triggMetrString
    for _, evt := range evalContext.EvalMatches {
        customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
    }
@ -100,7 +100,7 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error {
            }
        }
    }
    handler.notifier.SendIfNeeded(evalContext)

    handler.notifier.SendIfNeeded(evalContext)
    return nil
}
@ -37,10 +37,6 @@ func NewTicker(last time.Time, initialOffset time.Duration, c clock.Clock) *Tick
    return t
}

func (t *Ticker) updateOffset(offset time.Duration) {
    t.newOffset <- offset
}

func (t *Ticker) run() {
    for {
        next := t.last.Add(time.Duration(1) * time.Second)
@ -9,12 +9,6 @@ import (
    . "github.com/smartystreets/goconvey/convey"
)

type testTriggeredAlert struct {
    ActualValue float64
    Name        string
    State       string
}

func TestNotifications(t *testing.T) {

    Convey("Given the notifications service", t, func() {
@ -83,7 +83,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) {
        }

        if dashboards[i].UpdateIntervalSeconds == 0 {
            dashboards[i].UpdateIntervalSeconds = 3
            dashboards[i].UpdateIntervalSeconds = 10
        }
    }

@ -70,7 +70,7 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) {
    So(len(ds.Options), ShouldEqual, 1)
    So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards")
    So(ds.DisableDeletion, ShouldBeTrue)
    So(ds.UpdateIntervalSeconds, ShouldEqual, 10)
    So(ds.UpdateIntervalSeconds, ShouldEqual, 15)

    ds2 := cfg[1]
    So(ds2.Name, ShouldEqual, "default")
@ -81,5 +81,5 @@ func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) {
    So(len(ds2.Options), ShouldEqual, 1)
    So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards")
    So(ds2.DisableDeletion, ShouldBeFalse)
    So(ds2.UpdateIntervalSeconds, ShouldEqual, 3)
    So(ds2.UpdateIntervalSeconds, ShouldEqual, 10)
}
@ -43,26 +43,6 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade
        log.Warn("[Deprecated] The folder property is deprecated. Please use path instead.")
    }

    if _, err := os.Stat(path); os.IsNotExist(err) {
        log.Error("Cannot read directory", "error", err)
    }

    copy := path
    path, err := filepath.Abs(path)
    if err != nil {
        log.Error("Could not create absolute path ", "path", path)
    }

    path, err = filepath.EvalSymlinks(path)
    if err != nil {
        log.Error("Failed to read content of symlinked path: %s", path)
    }

    if path == "" {
        path = copy
        log.Info("falling back to original path due to EvalSymlink/Abs failure")
    }

    return &fileReader{
        Cfg:  cfg,
        Path: path,
@ -99,7 +79,8 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error {
}

func (fr *fileReader) startWalkingDisk() error {
    if _, err := os.Stat(fr.Path); err != nil {
    resolvedPath := fr.resolvePath(fr.Path)
    if _, err := os.Stat(resolvedPath); err != nil {
        if os.IsNotExist(err) {
            return err
        }
@ -116,7 +97,7 @@ func (fr *fileReader) startWalkingDisk() error {
    }

    filesFoundOnDisk := map[string]os.FileInfo{}
    err = filepath.Walk(fr.Path, createWalkFn(filesFoundOnDisk))
    err = filepath.Walk(resolvedPath, createWalkFn(filesFoundOnDisk))
    if err != nil {
        return err
    }
@ -344,6 +325,29 @@ func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time,
    }, nil
}

func (fr *fileReader) resolvePath(path string) string {
    if _, err := os.Stat(path); os.IsNotExist(err) {
        fr.log.Error("Cannot read directory", "error", err)
    }

    copy := path
    path, err := filepath.Abs(path)
    if err != nil {
        fr.log.Error("Could not create absolute path ", "path", path)
    }

    path, err = filepath.EvalSymlinks(path)
    if err != nil {
        fr.log.Error("Failed to read content of symlinked path: %s", path)
    }

    if path == "" {
        path = copy
        fr.log.Info("falling back to original path due to EvalSymlink/Abs failure")
    }
    return path
}

type provisioningMetadata struct {
    uid   string
    title string
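A standalone sketch of the path resolution that resolvePath centralizes, assuming the same Abs/EvalSymlinks fallback behavior; fmt replaces the logger here and main is only for illustration:

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// resolvePath makes the path absolute, follows symlinks, and falls back to
// the original value if either step fails.
func resolvePath(path string) string {
    if _, err := os.Stat(path); os.IsNotExist(err) {
        fmt.Println("cannot read directory:", err)
    }
    original := path
    if abs, err := filepath.Abs(path); err == nil {
        path = abs
    }
    if resolved, err := filepath.EvalSymlinks(path); err == nil && resolved != "" {
        return resolved
    }
    // Fall back to the original path on EvalSymlinks/Abs failure.
    return original
}

func main() {
    fmt.Println(resolvePath("."))
}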
@ -30,10 +30,11 @@ func TestProvsionedSymlinkedFolder(t *testing.T) {
    want, err := filepath.Abs(containingId)

    if err != nil {
        t.Errorf("expected err to be nill")
        t.Errorf("expected err to be nil")
    }

    if reader.Path != want {
        t.Errorf("got %s want %s", reader.Path, want)
    resolvedPath := reader.resolvePath(reader.Path)
    if resolvedPath != want {
        t.Errorf("got %s want %s", resolvedPath, want)
    }
}

@ -67,7 +67,8 @@ func TestCreatingNewDashboardFileReader(t *testing.T) {
            reader, err := NewDashboardFileReader(cfg, log.New("test-logger"))
            So(err, ShouldBeNil)

            So(filepath.IsAbs(reader.Path), ShouldBeTrue)
            resolvedPath := reader.resolvePath(reader.Path)
            So(filepath.IsAbs(resolvedPath), ShouldBeTrue)
        })
    })
}
@ -6,7 +6,7 @@ providers:
    folder: 'developers'
    editable: true
    disableDeletion: true
    updateIntervalSeconds: 10
    updateIntervalSeconds: 15
    type: file
    options:
      path: /var/lib/grafana/dashboards

@ -3,7 +3,7 @@
    folder: 'developers'
    editable: true
    disableDeletion: true
    updateIntervalSeconds: 10
    updateIntervalSeconds: 15
    type: file
    options:
      path: /var/lib/grafana/dashboards
@ -13,15 +13,16 @@ var ErrNoRenderer = errors.New("No renderer plugin found nor is an external rend
var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found")

type Opts struct {
    Width    int
    Height   int
    Timeout  time.Duration
    OrgId    int64
    UserId   int64
    OrgRole  models.RoleType
    Path     string
    Encoding string
    Timezone string
    Width           int
    Height          int
    Timeout         time.Duration
    OrgId           int64
    UserId          int64
    OrgRole         models.RoleType
    Path            string
    Encoding        string
    Timezone        string
    ConcurrentLimit int
}

type RenderResult struct {
@ -24,12 +24,13 @@ func init() {
}

type RenderingService struct {
    log          log.Logger
    pluginClient *plugin.Client
    grpcPlugin   pluginModel.RendererPlugin
    pluginInfo   *plugins.RendererPlugin
    renderAction renderFunc
    domain       string
    log             log.Logger
    pluginClient    *plugin.Client
    grpcPlugin      pluginModel.RendererPlugin
    pluginInfo      *plugins.RendererPlugin
    renderAction    renderFunc
    domain          string
    inProgressCount int

    Cfg *setting.Cfg `inject:""`
}
@ -90,6 +91,18 @@ func (rs *RenderingService) Run(ctx context.Context) error {
}

func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) {
    if rs.inProgressCount > opts.ConcurrentLimit {
        return &RenderResult{
            FilePath: filepath.Join(setting.HomePath, "public/img/rendering_limit.png"),
        }, nil
    }

    defer func() {
        rs.inProgressCount -= 1
    }()

    rs.inProgressCount += 1

    if rs.renderAction != nil {
        return rs.renderAction(ctx, opts)
    } else {
@ -230,7 +230,7 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error {
}

func RecordNotificationJournal(ctx context.Context, cmd *m.RecordNotificationJournalCommand) error {
    return inTransactionCtx(ctx, func(sess *DBSession) error {
    return withDbSession(ctx, func(sess *DBSession) error {
        journalEntry := &m.AlertNotificationJournal{
            OrgId:   cmd.OrgId,
            AlertId: cmd.AlertId,
@ -245,21 +245,19 @@ func RecordNotificationJournal(ctx context.Context, cmd *m.RecordNotificationJou
}

func GetLatestNotification(ctx context.Context, cmd *m.GetLatestNotificationQuery) error {
    return inTransactionCtx(ctx, func(sess *DBSession) error {
        nj := &m.AlertNotificationJournal{}
    return withDbSession(ctx, func(sess *DBSession) error {
        nj := []m.AlertNotificationJournal{}

        _, err := sess.Desc("alert_notification_journal.sent_at").
            Limit(1).
            Where("alert_notification_journal.org_id = ? AND alert_notification_journal.alert_id = ? AND alert_notification_journal.notifier_id = ?", cmd.OrgId, cmd.AlertId, cmd.NotifierId).Get(nj)
        err := sess.Desc("alert_notification_journal.sent_at").
            Where("alert_notification_journal.org_id = ?", cmd.OrgId).
            Where("alert_notification_journal.alert_id = ?", cmd.AlertId).
            Where("alert_notification_journal.notifier_id = ?", cmd.NotifierId).
            Find(&nj)

        if err != nil {
            return err
        }

        if nj.AlertId == 0 && nj.Id == 0 && nj.NotifierId == 0 && nj.OrgId == 0 {
            return m.ErrJournalingNotFound
        }

        cmd.Result = nj
        return nil
    })
@ -15,16 +15,21 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
    InitTestDB(t)

    Convey("Alert notification journal", func() {
        var alertId int64 = 5
        var alertId int64 = 7
        var orgId int64 = 5
        var notifierId int64 = 5
        var notifierId int64 = 10

        Convey("Getting last journal should raise error if no one exists", func() {
            query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId}
            err := GetLatestNotification(context.Background(), query)
            So(err, ShouldEqual, m.ErrJournalingNotFound)
            GetLatestNotification(context.Background(), query)
            So(len(query.Result), ShouldEqual, 0)

            Convey("shoulbe be able to record two journaling events", func() {
            // recording an journal entry in another org to make sure org filter works as expected.
            journalInOtherOrg := &m.RecordNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: 10, Success: true, SentAt: 1}
            err := RecordNotificationJournal(context.Background(), journalInOtherOrg)
            So(err, ShouldBeNil)

            Convey("should be able to record two journaling events", func() {
                createCmd := &m.RecordNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId, Success: true, SentAt: 1}

                err := RecordNotificationJournal(context.Background(), createCmd)
@ -38,17 +43,20 @@ func TestAlertNotificationSQLAccess(t *testing.T) {
                Convey("get last journaling event", func() {
                    err := GetLatestNotification(context.Background(), query)
                    So(err, ShouldBeNil)
                    So(query.Result.SentAt, ShouldEqual, 1001)
                    So(len(query.Result), ShouldEqual, 2)
                    last := query.Result[0]
                    So(last.SentAt, ShouldEqual, 1001)

                    Convey("be able to clear all journaling for an notifier", func() {
                        cmd := &m.CleanNotificationJournalCommand{AlertId: alertId, NotifierId: notifierId, OrgId: orgId}
                        err := CleanNotificationJournal(context.Background(), cmd)
                        So(err, ShouldBeNil)

                        Convey("querying for last junaling should raise error", func() {
                        Convey("querying for last journaling should return no journal entries", func() {
                            query := &m.GetLatestNotificationQuery{AlertId: alertId, OrgId: orgId, NotifierId: notifierId}
                            err := GetLatestNotification(context.Background(), query)
                            So(err, ShouldEqual, m.ErrJournalingNotFound)
                            So(err, ShouldBeNil)
                            So(len(query.Result), ShouldEqual, 0)
                        })
                    })
                })
@ -932,29 +932,6 @@ func TestIntegratedDashboardService(t *testing.T) {
    })
}

type scenarioContext struct {
    dashboardGuardianMock *guardian.FakeDashboardGuardian
}

type scenarioFunc func(c *scenarioContext)

func dashboardGuardianScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) {
    Convey(desc, func() {
        origNewDashboardGuardian := guardian.New
        guardian.MockDashboardGuardian(mock)

        sc := &scenarioContext{
            dashboardGuardianMock: mock,
        }

        defer func() {
            guardian.New = origNewDashboardGuardian
        }()

        fn(sc)
    })
}

type dashboardPermissionScenarioContext struct {
    dashboardGuardianMock *guardian.FakeDashboardGuardian
}
@ -10,10 +10,6 @@ import (
    . "github.com/smartystreets/goconvey/convey"
)

type testQuery struct {
    result bool
}

var ProvokedError = errors.New("testing error.")

func TestTransaction(t *testing.T) {
@ -166,6 +166,7 @@ var (
    // Alerting
    AlertingEnabled            bool
    ExecuteAlerts              bool
    AlertingRenderLimit        int
    AlertingErrorOrTimeout     string
    AlertingNoDataOrNullValues string

@ -196,10 +197,13 @@ type Cfg struct {
    Smtp SmtpSettings

    // Rendering
    ImagesDir           string
    PhantomDir          string
    RendererUrl         string
    RendererCallbackUrl string
    ImagesDir             string
    PhantomDir            string
    RendererUrl           string
    RendererCallbackUrl   string
    RendererLimit         int
    RendererLimitAlerting int

    DisableBruteForceLoginProtection bool

    TempDataLifetime time.Duration
@ -677,6 +681,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
    alerting := iniFile.Section("alerting")
    AlertingEnabled = alerting.Key("enabled").MustBool(true)
    ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true)
    AlertingRenderLimit = alerting.Key("concurrent_render_limit").MustInt(5)
    AlertingErrorOrTimeout = alerting.Key("error_or_timeout").MustString("alerting")
    AlertingNoDataOrNullValues = alerting.Key("nodata_or_nullvalues").MustString("no_data")
@ -46,10 +46,14 @@ func (e *Error) Error() string {
    return e.s
}

const (
    grafanaCom = "grafana_com"
)

var (
    SocialBaseUrl = "/login/"
    SocialMap     = make(map[string]SocialConnector)
    allOauthes    = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"}
    allOauthes    = []string{"github", "gitlab", "google", "generic_oauth", "grafananet", grafanaCom}
)

func NewOAuthService() {
@ -82,7 +86,7 @@ func NewOAuthService() {
        }

        if name == "grafananet" {
            name = "grafana_com"
            name = grafanaCom
        }

        setting.OAuthService.OAuthInfos[name] = info
@ -159,7 +163,7 @@ func NewOAuthService() {
            }
        }

        if name == "grafana_com" {
        if name == grafanaCom {
            config = oauth2.Config{
                ClientID:     info.ClientId,
                ClientSecret: info.ClientSecret,
@ -171,7 +175,7 @@ func NewOAuthService() {
                Scopes: info.Scopes,
            }

            SocialMap["grafana_com"] = &SocialGrafanaCom{
            SocialMap[grafanaCom] = &SocialGrafanaCom{
                SocialBase: &SocialBase{
                    Config: &config,
                    log:    logger,
@ -194,7 +198,7 @@ var GetOAuthProviders = func(cfg *setting.Cfg) map[string]bool {

    for _, name := range allOauthes {
        if name == "grafananet" {
            name = "grafana_com"
            name = grafanaCom
        }

        sec := cfg.Raw.Section("auth." + name)
@ -42,8 +42,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
    accessKeyId := ""
    secretAccessKey := ""
    sessionToken := ""
    var expiration *time.Time
    expiration = nil
    var expiration *time.Time = nil
    if dsInfo.AuthType == "arn" && strings.Index(dsInfo.AssumeRoleArn, "arn:aws:iam:") == 0 {
        params := &sts.AssumeRoleInput{
            RoleArn: aws.String(dsInfo.AssumeRoleArn),
@ -235,7 +235,7 @@ func parseMultiSelectValue(input string) []string {
func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) {
    regions := []string{
        "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1",
        "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2",
        "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", "us-isob-east-1", "us-iso-east-1",
    }

    result := make([]suggestData, 0)
@ -13,6 +13,19 @@ import (
    "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)

const (
    // Metric types
    countType         = "count"
    percentilesType   = "percentiles"
    extendedStatsType = "extended_stats"
    // Bucket types
    dateHistType    = "date_histogram"
    histogramType   = "histogram"
    filtersType     = "filters"
    termsType       = "terms"
    geohashGridType = "geohash_grid"
)

type responseParser struct {
    Responses []*es.SearchResponse
    Targets   []*Query
@ -81,7 +94,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
        }

        if depth == maxDepth {
            if aggDef.Type == "date_histogram" {
            if aggDef.Type == dateHistType {
                err = rp.processMetrics(esAgg, target, series, props)
            } else {
                err = rp.processAggregationDocs(esAgg, aggDef, target, table, props)
@ -149,7 +162,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
        }

        switch metric.Type {
        case "count":
        case countType:
            newSeries := tsdb.TimeSeries{
                Tags: make(map[string]string),
            }
@ -164,10 +177,10 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
            for k, v := range props {
                newSeries.Tags[k] = v
            }
            newSeries.Tags["metric"] = "count"
            newSeries.Tags["metric"] = countType
            *series = append(*series, &newSeries)

        case "percentiles":
        case percentilesType:
            buckets := esAgg.Get("buckets").MustArray()
            if len(buckets) == 0 {
                break
@ -198,7 +211,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
                }
                *series = append(*series, &newSeries)
            }
        case "extended_stats":
        case extendedStatsType:
            buckets := esAgg.Get("buckets").MustArray()

            metaKeys := make([]string, 0)
@ -312,9 +325,9 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef

    for _, metric := range target.Metrics {
        switch metric.Type {
        case "count":
        case countType:
            addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count")))
        case "extended_stats":
        case extendedStatsType:
            metaKeys := make([]string, 0)
            meta := metric.Meta.MustMap()
            for k := range meta {
@ -366,7 +379,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) {
    var histogram *BucketAgg
    for _, bucketAgg := range target.BucketAggs {
        if bucketAgg.Type == "date_histogram" {
        if bucketAgg.Type == dateHistType {
            histogram = bucketAgg
            break
        }
@ -75,15 +75,15 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) {
        // iterate backwards to create aggregations bottom-down
        for _, bucketAgg := range q.BucketAggs {
            switch bucketAgg.Type {
            case "date_histogram":
            case dateHistType:
                aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to)
            case "histogram":
            case histogramType:
                aggBuilder = addHistogramAgg(aggBuilder, bucketAgg)
            case "filters":
            case filtersType:
                aggBuilder = addFiltersAgg(aggBuilder, bucketAgg)
            case "terms":
            case termsType:
                aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics)
            case "geohash_grid":
            case geohashGridType:
                aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg)
            }
        }
120 pkg/tsdb/stackdriver/annotation_query.go Normal file
@ -0,0 +1,120 @@
package stackdriver

import (
    "context"
    "strconv"
    "strings"
    "time"

    "github.com/grafana/grafana/pkg/tsdb"
)

func (e *StackdriverExecutor) executeAnnotationQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
    result := &tsdb.Response{
        Results: make(map[string]*tsdb.QueryResult),
    }

    firstQuery := tsdbQuery.Queries[0]

    queries, err := e.buildQueries(tsdbQuery)
    if err != nil {
        return nil, err
    }

    queryRes, resp, err := e.executeQuery(ctx, queries[0], tsdbQuery)
    if err != nil {
        return nil, err
    }
    title := firstQuery.Model.Get("title").MustString()
    text := firstQuery.Model.Get("text").MustString()
    tags := firstQuery.Model.Get("tags").MustString()
    err = e.parseToAnnotations(queryRes, resp, queries[0], title, text, tags)
    result.Results[firstQuery.RefId] = queryRes

    return result, err
}

func (e *StackdriverExecutor) parseToAnnotations(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery, title string, text string, tags string) error {
    annotations := make([]map[string]string, 0)

    for _, series := range data.TimeSeries {
        // reverse the order to be ascending
        for i := len(series.Points) - 1; i >= 0; i-- {
            point := series.Points[i]
            value := strconv.FormatFloat(point.Value.DoubleValue, 'f', 6, 64)
            if series.ValueType == "STRING" {
                value = point.Value.StringValue
            }
            annotation := make(map[string]string)
            annotation["time"] = point.Interval.EndTime.UTC().Format(time.RFC3339)
            annotation["title"] = formatAnnotationText(title, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels)
            annotation["tags"] = tags
            annotation["text"] = formatAnnotationText(text, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels)
            annotations = append(annotations, annotation)
        }
    }

    transformAnnotationToTable(annotations, queryRes)
    return nil
}

func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) {
    table := &tsdb.Table{
        Columns: make([]tsdb.TableColumn, 4),
        Rows:    make([]tsdb.RowValues, 0),
    }
    table.Columns[0].Text = "time"
    table.Columns[1].Text = "title"
    table.Columns[2].Text = "tags"
    table.Columns[3].Text = "text"

    for _, r := range data {
        values := make([]interface{}, 4)
        values[0] = r["time"]
        values[1] = r["title"]
        values[2] = r["tags"]
        values[3] = r["text"]
        table.Rows = append(table.Rows, values)
    }
    result.Tables = append(result.Tables, table)
    result.Meta.Set("rowCount", len(data))
    slog.Info("anno", "len", len(data))
}

func formatAnnotationText(annotationText string, pointValue string, metricType string, metricLabels map[string]string, resourceLabels map[string]string) string {
    result := legendKeyFormat.ReplaceAllFunc([]byte(annotationText), func(in []byte) []byte {
        metaPartName := strings.Replace(string(in), "{{", "", 1)
        metaPartName = strings.Replace(metaPartName, "}}", "", 1)
        metaPartName = strings.TrimSpace(metaPartName)

        if metaPartName == "metric.type" {
            return []byte(metricType)
        }

        metricPart := replaceWithMetricPart(metaPartName, metricType)

        if metricPart != nil {
            return metricPart
        }

        if metaPartName == "metric.value" {
            return []byte(pointValue)
        }

        metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1)

        if val, exists := metricLabels[metaPartName]; exists {
            return []byte(val)
        }

        metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1)

        if val, exists := resourceLabels[metaPartName]; exists {
            return []byte(val)
        }

        return in
    })

    return string(result)
}
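A minimal sketch of the {{ ... }} substitution that formatAnnotationText performs; the format helper and label map below are illustrative stand-ins, not the plugin's API:

package main

import (
    "fmt"
    "regexp"
    "strings"
)

// Same pattern as the plugin's legendKeyFormat: captures the key between braces.
var legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// format replaces each {{key}} with the matching label value and leaves
// unknown keys untouched, mirroring the fallthrough behavior above.
func format(text string, labels map[string]string) string {
    out := legendKeyFormat.ReplaceAllFunc([]byte(text), func(in []byte) []byte {
        key := strings.TrimSpace(strings.Trim(string(in), "{}"))
        if val, ok := labels[key]; ok {
            return []byte(val)
        }
        return in
    })
    return string(out)
}

func main() {
    labels := map[string]string{"metric.label.instance_name": "collector-asia-east-1"}
    fmt.Println(format("atitle {{metric.label.instance_name}}", labels))
}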
33 pkg/tsdb/stackdriver/annotation_query_test.go Normal file
@ -0,0 +1,33 @@
package stackdriver

import (
    "testing"

    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"

    . "github.com/smartystreets/goconvey/convey"
)

func TestStackdriverAnnotationQuery(t *testing.T) {
    Convey("Stackdriver Annotation Query Executor", t, func() {
        executor := &StackdriverExecutor{}
        Convey("When parsing the stackdriver api response", func() {
            data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
            So(err, ShouldBeNil)
            So(len(data.TimeSeries), ShouldEqual, 3)

            res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"}
            query := &StackdriverQuery{}
            err = executor.parseToAnnotations(res, data, query, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag")
            So(err, ShouldBeNil)

            Convey("Should return annotations table", func() {
                So(len(res.Tables), ShouldEqual, 1)
                So(len(res.Tables[0].Rows), ShouldEqual, 9)
                So(res.Tables[0].Rows[0][1], ShouldEqual, "atitle collector-asia-east-1 9.856650")
                So(res.Tables[0].Rows[0][3], ShouldEqual, "atext asia-east1-a")
            })
        })
    })
}
460 pkg/tsdb/stackdriver/stackdriver.go Normal file
@ -0,0 +1,460 @@
package stackdriver

import (
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "io/ioutil"
    "math"
    "net/http"
    "net/url"
    "path"
    "regexp"
    "strconv"
    "strings"
    "time"

    "golang.org/x/net/context/ctxhttp"

    "github.com/grafana/grafana/pkg/api/pluginproxy"
    "github.com/grafana/grafana/pkg/components/null"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/log"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"
    "github.com/grafana/grafana/pkg/setting"
    "github.com/grafana/grafana/pkg/tsdb"
    "github.com/opentracing/opentracing-go"
)

var (
    slog             log.Logger
    legendKeyFormat  *regexp.Regexp
    metricNameFormat *regexp.Regexp
)

// StackdriverExecutor executes queries for the Stackdriver datasource
type StackdriverExecutor struct {
    httpClient *http.Client
    dsInfo     *models.DataSource
}

// NewStackdriverExecutor initializes an HTTP client
func NewStackdriverExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
    httpClient, err := dsInfo.GetHttpClient()
    if err != nil {
        return nil, err
    }

    return &StackdriverExecutor{
        httpClient: httpClient,
        dsInfo:     dsInfo,
    }, nil
}

func init() {
    slog = log.New("tsdb.stackdriver")
    tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewStackdriverExecutor)
    legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
    metricNameFormat = regexp.MustCompile(`([\w\d_]+)\.googleapis\.com/(.+)`)
}

// Query takes in the frontend queries, parses them into the Stackdriver query format,
// executes the queries against the Stackdriver API and parses the response into
// the time series or table format
func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
    var result *tsdb.Response
    var err error
    queryType := tsdbQuery.Queries[0].Model.Get("type").MustString("")

    switch queryType {
    case "annotationQuery":
        result, err = e.executeAnnotationQuery(ctx, tsdbQuery)
    case "timeSeriesQuery":
        fallthrough
    default:
        result, err = e.executeTimeSeriesQuery(ctx, tsdbQuery)
    }

    return result, err
}

func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
    result := &tsdb.Response{
        Results: make(map[string]*tsdb.QueryResult),
    }

    queries, err := e.buildQueries(tsdbQuery)
    if err != nil {
        return nil, err
    }

    for _, query := range queries {
        queryRes, resp, err := e.executeQuery(ctx, query, tsdbQuery)
        if err != nil {
            return nil, err
        }
        err = e.parseResponse(queryRes, resp, query)
        if err != nil {
            queryRes.Error = err
        }
        result.Results[query.RefID] = queryRes
    }

    return result, nil
}

func (e *StackdriverExecutor) buildQueries(tsdbQuery *tsdb.TsdbQuery) ([]*StackdriverQuery, error) {
    stackdriverQueries := []*StackdriverQuery{}

    startTime, err := tsdbQuery.TimeRange.ParseFrom()
    if err != nil {
        return nil, err
    }

    endTime, err := tsdbQuery.TimeRange.ParseTo()
    if err != nil {
        return nil, err
    }

    durationSeconds := int(endTime.Sub(startTime).Seconds())

    for _, query := range tsdbQuery.Queries {
        var target string

        metricType := query.Model.Get("metricType").MustString()
        filterParts := query.Model.Get("filters").MustArray()

        params := url.Values{}
        params.Add("interval.startTime", startTime.UTC().Format(time.RFC3339))
        params.Add("interval.endTime", endTime.UTC().Format(time.RFC3339))
        params.Add("filter", buildFilterString(metricType, filterParts))
        params.Add("view", query.Model.Get("view").MustString("FULL"))
        setAggParams(&params, query, durationSeconds)

        target = params.Encode()

        if setting.Env == setting.DEV {
            slog.Debug("Stackdriver request", "params", params)
        }

        groupBys := query.Model.Get("groupBys").MustArray()
        groupBysAsStrings := make([]string, 0)
        for _, groupBy := range groupBys {
            groupBysAsStrings = append(groupBysAsStrings, groupBy.(string))
        }

        aliasBy := query.Model.Get("aliasBy").MustString()

        stackdriverQueries = append(stackdriverQueries, &StackdriverQuery{
            Target:   target,
            Params:   params,
            RefID:    query.RefId,
            GroupBys: groupBysAsStrings,
            AliasBy:  aliasBy,
        })
    }

    return stackdriverQueries, nil
}

func buildFilterString(metricType string, filterParts []interface{}) string {
    filterString := ""
    for i, part := range filterParts {
        mod := i % 4
        if part == "AND" {
            filterString += " "
        } else if mod == 2 {
            filterString += fmt.Sprintf(`"%s"`, part)
        } else {
            filterString += part.(string)
        }
    }
    return strings.Trim(fmt.Sprintf(`metric.type="%s" %s`, metricType, filterString), " ")
}

func setAggParams(params *url.Values, query *tsdb.Query, durationSeconds int) {
    primaryAggregation := query.Model.Get("primaryAggregation").MustString()
    perSeriesAligner := query.Model.Get("perSeriesAligner").MustString()
    alignmentPeriod := query.Model.Get("alignmentPeriod").MustString()

    if primaryAggregation == "" {
        primaryAggregation = "REDUCE_NONE"
    }

    if perSeriesAligner == "" {
        perSeriesAligner = "ALIGN_MEAN"
    }

    if alignmentPeriod == "grafana-auto" || alignmentPeriod == "" {
        alignmentPeriodValue := int(math.Max(float64(query.IntervalMs)/1000, 60.0))
        alignmentPeriod = "+" + strconv.Itoa(alignmentPeriodValue) + "s"
    }

    if alignmentPeriod == "stackdriver-auto" {
        alignmentPeriodValue := int(math.Max(float64(durationSeconds), 60.0))
        if alignmentPeriodValue < 60*60*23 {
            alignmentPeriod = "+60s"
        } else if alignmentPeriodValue < 60*60*24*6 {
            alignmentPeriod = "+300s"
        } else {
            alignmentPeriod = "+3600s"
        }
    }

    re := regexp.MustCompile("[0-9]+")
    seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod), 10, 64)
    if err != nil || seconds > 3600 {
        alignmentPeriod = "+3600s"
    }

    params.Add("aggregation.crossSeriesReducer", primaryAggregation)
    params.Add("aggregation.perSeriesAligner", perSeriesAligner)
    params.Add("aggregation.alignmentPeriod", alignmentPeriod)

    groupBys := query.Model.Get("groupBys").MustArray()
    if len(groupBys) > 0 {
        for i := 0; i < len(groupBys); i++ {
            params.Add("aggregation.groupByFields", groupBys[i].(string))
        }
    }
}

func (e *StackdriverExecutor) executeQuery(ctx context.Context, query *StackdriverQuery, tsdbQuery *tsdb.TsdbQuery) (*tsdb.QueryResult, StackdriverResponse, error) {
    queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

    req, err := e.createRequest(ctx, e.dsInfo)
    if err != nil {
        queryResult.Error = err
        return queryResult, StackdriverResponse{}, nil
    }

    req.URL.RawQuery = query.Params.Encode()
    queryResult.Meta.Set("rawQuery", req.URL.RawQuery)
    alignmentPeriod, ok := req.URL.Query()["aggregation.alignmentPeriod"]

    if ok {
        re := regexp.MustCompile("[0-9]+")
        seconds, err := strconv.ParseInt(re.FindString(alignmentPeriod[0]), 10, 64)
        if err == nil {
            queryResult.Meta.Set("alignmentPeriod", seconds)
        }
    }

    span, ctx := opentracing.StartSpanFromContext(ctx, "stackdriver query")
    span.SetTag("target", query.Target)
    span.SetTag("from", tsdbQuery.TimeRange.From)
    span.SetTag("until", tsdbQuery.TimeRange.To)
    span.SetTag("datasource_id", e.dsInfo.Id)
    span.SetTag("org_id", e.dsInfo.OrgId)

    defer span.Finish()

    opentracing.GlobalTracer().Inject(
        span.Context(),
        opentracing.HTTPHeaders,
        opentracing.HTTPHeadersCarrier(req.Header))

    res, err := ctxhttp.Do(ctx, e.httpClient, req)
    if err != nil {
        queryResult.Error = err
        return queryResult, StackdriverResponse{}, nil
    }

    data, err := e.unmarshalResponse(res)
    if err != nil {
        queryResult.Error = err
        return queryResult, StackdriverResponse{}, nil
    }

    return queryResult, data, nil
}

func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (StackdriverResponse, error) {
    body, err := ioutil.ReadAll(res.Body)
    defer res.Body.Close()
    if err != nil {
        return StackdriverResponse{}, err
    }

    if res.StatusCode/100 != 2 {
        slog.Error("Request failed", "status", res.Status, "body", string(body))
        return StackdriverResponse{}, fmt.Errorf(string(body))
    }

    var data StackdriverResponse
    err = json.Unmarshal(body, &data)
    if err != nil {
        slog.Error("Failed to unmarshal Stackdriver response", "error", err, "status", res.Status, "body", string(body))
        return StackdriverResponse{}, err
    }

    return data, nil
}

func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error {
    metricLabels := make(map[string][]string)
    resourceLabels := make(map[string][]string)

    for _, series := range data.TimeSeries {
        points := make([]tsdb.TimePoint, 0)

        // reverse the order to be ascending
        for i := len(series.Points) - 1; i >= 0; i-- {
            point := series.Points[i]
            value := point.Value.DoubleValue

            if series.ValueType == "INT64" {
                parsedValue, err := strconv.ParseFloat(point.Value.IntValue, 64)
                if err == nil {
                    value = parsedValue
                }
            }

            if series.ValueType == "BOOL" {
                if point.Value.BoolValue {
                    value = 1
                } else {
                    value = 0
                }
            }

            points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
        }

        defaultMetricName := series.Metric.Type

        for key, value := range series.Metric.Labels {
            if !containsLabel(metricLabels[key], value) {
                metricLabels[key] = append(metricLabels[key], value)
            }
            if len(query.GroupBys) == 0 || containsLabel(query.GroupBys, "metric.label."+key) {
                defaultMetricName += " " + value
            }
        }

        for key, value := range series.Resource.Labels {
            if !containsLabel(resourceLabels[key], value) {
                resourceLabels[key] = append(resourceLabels[key], value)
            }

            if containsLabel(query.GroupBys, "resource.label."+key) {
                defaultMetricName += " " + value
            }
        }

        metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, query)

        queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
            Name:   metricName,
            Points: points,
        })
    }

    queryRes.Meta.Set("resourceLabels", resourceLabels)
    queryRes.Meta.Set("metricLabels", metricLabels)
    queryRes.Meta.Set("groupBys", query.GroupBys)

    return nil
}

func containsLabel(labels []string, newLabel string) bool {
    for _, val := range labels {
        if val == newLabel {
            return true
        }
    }
    return false
}

func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, query *StackdriverQuery) string {
    if query.AliasBy == "" {
        return defaultMetricName
    }

    result := legendKeyFormat.ReplaceAllFunc([]byte(query.AliasBy), func(in []byte) []byte {
        metaPartName := strings.Replace(string(in), "{{", "", 1)
        metaPartName = strings.Replace(metaPartName, "}}", "", 1)
        metaPartName = strings.TrimSpace(metaPartName)

        if metaPartName == "metric.type" {
            return []byte(metricType)
        }

        metricPart := replaceWithMetricPart(metaPartName, metricType)

        if metricPart != nil {
            return metricPart
        }

        metaPartName = strings.Replace(metaPartName, "metric.label.", "", 1)

        if val, exists := metricLabels[metaPartName]; exists {
            return []byte(val)
        }

        metaPartName = strings.Replace(metaPartName, "resource.label.", "", 1)

        if val, exists := resourceLabels[metaPartName]; exists {
            return []byte(val)
        }

        return in
    })

    return string(result)
}

func replaceWithMetricPart(metaPartName string, metricType string) []byte {
    // https://cloud.google.com/monitoring/api/v3/metrics-details#label_names
    shortMatches := metricNameFormat.FindStringSubmatch(metricType)

    if metaPartName == "metric.name" {
        if len(shortMatches) > 0 {
            return []byte(shortMatches[2])
        }
    }

    if metaPartName == "metric.service" {
        if len(shortMatches) > 0 {
            return []byte(shortMatches[1])
        }
    }

    return nil
}

func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
    u, _ := url.Parse(dsInfo.Url)
    u.Path = path.Join(u.Path, "render")

    req, err := http.NewRequest(http.MethodGet, "https://monitoring.googleapis.com/", nil)
    if err != nil {
        slog.Error("Failed to create request", "error", err)
        return nil, fmt.Errorf("Failed to create request. error: %v", err)
    }

    req.Header.Set("Content-Type", "application/json")
    req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))

    // find plugin
    plugin, ok := plugins.DataSources[dsInfo.Type]
    if !ok {
        return nil, errors.New("Unable to find datasource plugin Stackdriver")
    }
    projectName := dsInfo.JsonData.Get("defaultProject").MustString()
    proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries")

    var stackdriverRoute *plugins.AppPluginRoute
    for _, route := range plugin.Routes {
        if route.Path == "stackdriver" {
            stackdriverRoute = route
            break
        }
    }

    pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo)

    return req, nil
}
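For reference, the "stackdriver-auto" branch of setAggParams above picks the alignment period purely from the queried time range, and the final guard caps any period above 3600s. A minimal standalone sketch of that selection with the same thresholds as the committed code (the name autoAlignmentPeriod is illustrative, not part of this change):

package main

import "fmt"

// autoAlignmentPeriod mirrors the "stackdriver-auto" branch of setAggParams:
// ranges under 23 hours align at 60s, under 6 days at 300s, otherwise 3600s.
// Ranges shorter than a minute are clamped up to 60s first.
func autoAlignmentPeriod(durationSeconds int) string {
    if durationSeconds < 60 {
        durationSeconds = 60
    }
    switch {
    case durationSeconds < 60*60*23:
        return "+60s"
    case durationSeconds < 60*60*24*6:
        return "+300s"
    default:
        return "+3600s"
    }
}

func main() {
    fmt.Println(autoAlignmentPeriod(2 * 60 * 60))      // +60s (two hours)
    fmt.Println(autoAlignmentPeriod(23 * 60 * 60))     // +300s (23 hours)
    fmt.Println(autoAlignmentPeriod(7 * 24 * 60 * 60)) // +3600s (7 days)
}

These three cases line up with the "stackdriver-auto" assertions in the test file that follows.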
357
pkg/tsdb/stackdriver/stackdriver_test.go
Normal file
@@ -0,0 +1,357 @@
package stackdriver

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "testing"
    "time"

    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb"

    . "github.com/smartystreets/goconvey/convey"
)

func TestStackdriver(t *testing.T) {
    Convey("Stackdriver", t, func() {
        executor := &StackdriverExecutor{}

        Convey("Parse queries from frontend and build Stackdriver API queries", func() {
            fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
            tsdbQuery := &tsdb.TsdbQuery{
                TimeRange: &tsdb.TimeRange{
                    From: fmt.Sprintf("%v", fromStart.Unix()*1000),
                    To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
                },
                Queries: []*tsdb.Query{
                    {
                        Model: simplejson.NewFromAny(map[string]interface{}{
                            "metricType": "a/metric/type",
                            "view":       "FULL",
                            "aliasBy":    "testalias",
                            "type":       "timeSeriesQuery",
                        }),
                        RefId: "A",
                    },
                },
            }

            Convey("and query has no aggregation set", func() {
                queries, err := executor.buildQueries(tsdbQuery)
                So(err, ShouldBeNil)

                So(len(queries), ShouldEqual, 1)
                So(queries[0].RefID, ShouldEqual, "A")
                So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
                So(len(queries[0].Params), ShouldEqual, 7)
                So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
                So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
                So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
                So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
                So(queries[0].Params["view"][0], ShouldEqual, "FULL")
                So(queries[0].AliasBy, ShouldEqual, "testalias")
            })

            Convey("and query has filters", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "metricType": "a/metric/type",
                    "filters":    []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"},
                })

                queries, err := executor.buildQueries(tsdbQuery)
                So(err, ShouldBeNil)
                So(len(queries), ShouldEqual, 1)
                So(queries[0].Params["filter"][0], ShouldEqual, `metric.type="a/metric/type" key="value" key2="value2"`)
            })

            Convey("and alignmentPeriod is set to grafana-auto", func() {
                Convey("and IntervalMs is larger than 60000", func() {
                    tsdbQuery.Queries[0].IntervalMs = 1000000
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "alignmentPeriod": "grafana-auto",
                        "filters":         []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"},
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+1000s`)
                })
                Convey("and IntervalMs is less than 60000", func() {
                    tsdbQuery.Queries[0].IntervalMs = 30000
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "alignmentPeriod": "grafana-auto",
                        "filters":         []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"},
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
                })
            })

            Convey("and alignmentPeriod is set to stackdriver-auto", func() {
                Convey("and range is two hours", func() {
                    tsdbQuery.TimeRange.From = "1538033322461"
                    tsdbQuery.TimeRange.To = "1538040522461"
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "target":          "target",
                        "alignmentPeriod": "stackdriver-auto",
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
                })

                Convey("and range is 22 hours", func() {
                    tsdbQuery.TimeRange.From = "1538034524922"
                    tsdbQuery.TimeRange.To = "1538113724922"
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "target":          "target",
                        "alignmentPeriod": "stackdriver-auto",
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`)
                })

                Convey("and range is 23 hours", func() {
                    tsdbQuery.TimeRange.From = "1538034567985"
                    tsdbQuery.TimeRange.To = "1538117367985"
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "target":          "target",
                        "alignmentPeriod": "stackdriver-auto",
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`)
                })

                Convey("and range is 7 days", func() {
                    tsdbQuery.TimeRange.From = "1538036324073"
                    tsdbQuery.TimeRange.To = "1538641124073"
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "target":          "target",
                        "alignmentPeriod": "stackdriver-auto",
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`)
                })
            })

            Convey("and alignmentPeriod is set in frontend", func() {
                Convey("and alignment period is too big", func() {
                    tsdbQuery.Queries[0].IntervalMs = 1000
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "alignmentPeriod": "+360000s",
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`)
                })

                Convey("and alignment period is within accepted range", func() {
                    tsdbQuery.Queries[0].IntervalMs = 1000
                    tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                        "alignmentPeriod": "+600s",
                    })

                    queries, err := executor.buildQueries(tsdbQuery)
                    So(err, ShouldBeNil)
                    So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+600s`)
                })
            })

            Convey("and query has aggregation mean set", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "metricType":         "a/metric/type",
                    "primaryAggregation": "REDUCE_MEAN",
                    "view":               "FULL",
                })

                queries, err := executor.buildQueries(tsdbQuery)
                So(err, ShouldBeNil)

                So(len(queries), ShouldEqual, 1)
                So(queries[0].RefID, ShouldEqual, "A")
                So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_MEAN&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
                So(len(queries[0].Params), ShouldEqual, 7)
                So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
                So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
                So(queries[0].Params["aggregation.crossSeriesReducer"][0], ShouldEqual, "REDUCE_MEAN")
                So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
                So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, "+60s")
                So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
                So(queries[0].Params["view"][0], ShouldEqual, "FULL")
            })

            Convey("and query has group bys", func() {
                tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{
                    "metricType":         "a/metric/type",
                    "primaryAggregation": "REDUCE_NONE",
                    "groupBys":           []interface{}{"metric.label.group1", "metric.label.group2"},
                    "view":               "FULL",
                })

                queries, err := executor.buildQueries(tsdbQuery)
                So(err, ShouldBeNil)

                So(len(queries), ShouldEqual, 1)
                So(queries[0].RefID, ShouldEqual, "A")
                So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL")
                So(len(queries[0].Params), ShouldEqual, 8)
                So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z")
                So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z")
                So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN")
                So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1")
                So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2")
                So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"")
                So(queries[0].Params["view"][0], ShouldEqual, "FULL")
            })

        })

        Convey("Parse stackdriver response in the time series format", func() {
            Convey("when data from query aggregated to one time series", func() {
                data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json")
                So(err, ShouldBeNil)
                So(len(data.TimeSeries), ShouldEqual, 1)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &StackdriverQuery{}
                err = executor.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                So(len(res.Series), ShouldEqual, 1)
                So(res.Series[0].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count")
                So(len(res.Series[0].Points), ShouldEqual, 3)

                Convey("timestamps should be in ascending order", func() {
                    So(res.Series[0].Points[0][0].Float64, ShouldEqual, 0.05)
                    So(res.Series[0].Points[0][1].Float64, ShouldEqual, 1536670020000)

                    So(res.Series[0].Points[1][0].Float64, ShouldEqual, 1.05)
                    So(res.Series[0].Points[1][1].Float64, ShouldEqual, 1536670080000)

                    So(res.Series[0].Points[2][0].Float64, ShouldEqual, 1.0666666666667)
                    So(res.Series[0].Points[2][1].Float64, ShouldEqual, 1536670260000)
                })
            })

            Convey("when data from query with no aggregation", func() {
                data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
                So(err, ShouldBeNil)
                So(len(data.TimeSeries), ShouldEqual, 3)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &StackdriverQuery{}
                err = executor.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                Convey("Should add labels to metric name", func() {
                    So(len(res.Series), ShouldEqual, 3)
                    So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1")
                    So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1")
                    So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1")
                })

                Convey("Should parse to time series", func() {
                    So(len(res.Series[0].Points), ShouldEqual, 3)
                    So(res.Series[0].Points[0][0].Float64, ShouldEqual, 9.8566497180145)
                    So(res.Series[0].Points[1][0].Float64, ShouldEqual, 9.7323568146676)
                    So(res.Series[0].Points[2][0].Float64, ShouldEqual, 9.7730520330369)
                })

                Convey("Should add meta for labels to the response", func() {
                    metricLabels := res.Meta.Get("metricLabels").Interface().(map[string][]string)
                    So(metricLabels, ShouldNotBeNil)
                    So(len(metricLabels["instance_name"]), ShouldEqual, 3)
                    So(metricLabels["instance_name"][0], ShouldEqual, "collector-asia-east-1")
                    So(metricLabels["instance_name"][1], ShouldEqual, "collector-europe-west-1")
                    So(metricLabels["instance_name"][2], ShouldEqual, "collector-us-east-1")

                    resourceLabels := res.Meta.Get("resourceLabels").Interface().(map[string][]string)
                    So(resourceLabels, ShouldNotBeNil)
                    So(len(resourceLabels["zone"]), ShouldEqual, 3)
                    So(resourceLabels["zone"][0], ShouldEqual, "asia-east1-a")
                    So(resourceLabels["zone"][1], ShouldEqual, "europe-west1-b")
                    So(resourceLabels["zone"][2], ShouldEqual, "us-east1-b")

                    So(len(resourceLabels["project_id"]), ShouldEqual, 1)
                    So(resourceLabels["project_id"][0], ShouldEqual, "grafana-prod")
                })
            })

            Convey("when data from query with no aggregation and group bys", func() {
                data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
                So(err, ShouldBeNil)
                So(len(data.TimeSeries), ShouldEqual, 3)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}
                query := &StackdriverQuery{GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
                err = executor.parseResponse(res, data, query)
                So(err, ShouldBeNil)

                Convey("Should add instance name and zone labels to metric name", func() {
                    So(len(res.Series), ShouldEqual, 3)
                    So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a")
                    So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b")
                    So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b")
                })
            })

            Convey("when data from query with no aggregation and alias by", func() {
                data, err := loadTestFile("./test-data/2-series-response-no-agg.json")
                So(err, ShouldBeNil)
                So(len(data.TimeSeries), ShouldEqual, 3)

                res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"}

                Convey("and the alias pattern is for metric type, a metric label and a resource label", func() {

                    query := &StackdriverQuery{AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
                    err = executor.parseResponse(res, data, query)
                    So(err, ShouldBeNil)

                    Convey("Should use alias by formatting and only show instance name", func() {
                        So(len(res.Series), ShouldEqual, 3)
                        So(res.Series[0].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a")
                        So(res.Series[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b")
                        So(res.Series[2].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b")
                    })
                })

                Convey("and the alias pattern is for metric name", func() {

                    query := &StackdriverQuery{AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}}
                    err = executor.parseResponse(res, data, query)
                    So(err, ShouldBeNil)

                    Convey("Should use alias by formatting and only show instance name", func() {
                        So(len(res.Series), ShouldEqual, 3)
                        So(res.Series[0].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
                        So(res.Series[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
                        So(res.Series[2].Name, ShouldEqual, "metric instance/cpu/usage_time service compute")
                    })
                })
            })
        })
    })
}

func loadTestFile(path string) (StackdriverResponse, error) {
    var data StackdriverResponse

    jsonBody, err := ioutil.ReadFile(path)
    if err != nil {
        return data, err
    }
    err = json.Unmarshal(jsonBody, &data)
    return data, err
}
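The Target assertions above compare whole URL-encoded query strings. If the encoding makes them hard to read, the standard library's url.ParseQuery decodes such a string back into its parameters; a small illustrative snippet (not part of the commit, target copied from the "no aggregation" case):

package main

import (
    "fmt"
    "net/url"
)

func main() {
    // The expected Target from the "and query has no aggregation set" case.
    target := "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL"

    params, err := url.ParseQuery(target)
    if err != nil {
        panic(err)
    }
    // Prints, in map order, e.g.: filter => metric.type="a/metric/type"
    for key, values := range params {
        fmt.Printf("%s => %s\n", key, values[0])
    }
}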
46
pkg/tsdb/stackdriver/test-data/1-series-response-agg-one-metric.json
Normal file
@@ -0,0 +1,46 @@
{
  "timeSeries": [
    {
      "metric": {
        "type": "serviceruntime.googleapis.com\/api\/request_count"
      },
      "resource": {
        "type": "consumed_api",
        "labels": {
          "project_id": "grafana-prod"
        }
      },
      "metricKind": "GAUGE",
      "valueType": "DOUBLE",
      "points": [
        {
          "interval": {
            "startTime": "2018-09-11T12:51:00Z",
            "endTime": "2018-09-11T12:51:00Z"
          },
          "value": {
            "doubleValue": 1.0666666666667
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:48:00Z",
            "endTime": "2018-09-11T12:48:00Z"
          },
          "value": {
            "doubleValue": 1.05
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:47:00Z",
            "endTime": "2018-09-11T12:47:00Z"
          },
          "value": {
            "doubleValue": 0.05
          }
        }
      ]
    }
  ]
}
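Worth noting: this fixture returns points newest-first (12:51, 12:48, 12:47), and parseResponse walks them back-to-front so the series Grafana receives is oldest-first, which the "timestamps should be in ascending order" assertions above depend on. The reversal in isolation, using a plain float slice purely for illustration:

package main

import "fmt"

// ascending returns a copy of points in oldest-first order, mirroring the
// back-to-front loop in parseResponse (Stackdriver returns newest-first).
func ascending(points []float64) []float64 {
    out := make([]float64, 0, len(points))
    for i := len(points) - 1; i >= 0; i-- {
        out = append(out, points[i])
    }
    return out
}

func main() {
    fmt.Println(ascending([]float64{1.0666666666667, 1.05, 0.05})) // [0.05 1.05 1.0666666666667]
}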
145
pkg/tsdb/stackdriver/test-data/2-series-response-no-agg.json
Normal file
@@ -0,0 +1,145 @@
{
  "timeSeries": [
    {
      "metric": {
        "labels": {
          "instance_name": "collector-asia-east-1"
        },
        "type": "compute.googleapis.com\/instance\/cpu\/usage_time"
      },
      "resource": {
        "type": "gce_instance",
        "labels": {
          "instance_id": "1119268429530133111",
          "zone": "asia-east1-a",
          "project_id": "grafana-prod"
        }
      },
      "metricKind": "DELTA",
      "valueType": "DOUBLE",
      "points": [
        {
          "interval": {
            "startTime": "2018-09-11T12:30:00Z",
            "endTime": "2018-09-11T12:31:00Z"
          },
          "value": {
            "doubleValue": 9.7730520330369
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:29:00Z",
            "endTime": "2018-09-11T12:30:00Z"
          },
          "value": {
            "doubleValue": 9.7323568146676
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:28:00Z",
            "endTime": "2018-09-11T12:29:00Z"
          },
          "value": {
            "doubleValue": 9.8566497180145
          }
        }
      ]
    },
    {
      "metric": {
        "labels": {
          "instance_name": "collector-europe-west-1"
        },
        "type": "compute.googleapis.com\/instance\/cpu\/usage_time"
      },
      "resource": {
        "type": "gce_instance",
        "labels": {
          "instance_id": "22241654114540837222",
          "zone": "europe-west1-b",
          "project_id": "grafana-prod"
        }
      },
      "metricKind": "DELTA",
      "valueType": "DOUBLE",
      "points": [
        {
          "interval": {
            "startTime": "2018-09-11T12:30:00Z",
            "endTime": "2018-09-11T12:31:00Z"
          },
          "value": {
            "doubleValue": 8.8210971239023
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:29:00Z",
            "endTime": "2018-09-11T12:30:00Z"
          },
          "value": {
            "doubleValue": 8.9689492364414
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:28:00Z",
            "endTime": "2018-09-11T12:29:00Z"
          },
          "value": {
            "doubleValue": 9.0238475054502
          }
        }
      ]
    },
    {
      "metric": {
        "labels": {
          "instance_name": "collector-us-east-1"
        },
        "type": "compute.googleapis.com\/instance\/cpu\/usage_time"
      },
      "resource": {
        "type": "gce_instance",
        "labels": {
          "instance_id": "3332264424035095333",
          "zone": "us-east1-b",
          "project_id": "grafana-prod"
        }
      },
      "metricKind": "DELTA",
      "valueType": "DOUBLE",
      "points": [
        {
          "interval": {
            "startTime": "2018-09-11T12:30:00Z",
            "endTime": "2018-09-11T12:31:00Z"
          },
          "value": {
            "doubleValue": 30.807846801355
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:29:00Z",
            "endTime": "2018-09-11T12:30:00Z"
          },
          "value": {
            "doubleValue": 30.903974115849
          }
        },
        {
          "interval": {
            "startTime": "2018-09-11T12:28:00Z",
            "endTime": "2018-09-11T12:29:00Z"
          },
          "value": {
            "doubleValue": 30.829426143318
          }
        }
      ]
    }
  ]
}
43
pkg/tsdb/stackdriver/types.go
Normal file
@@ -0,0 +1,43 @@
package stackdriver

import (
    "net/url"
    "time"
)

// StackdriverQuery is the query that Grafana sends from the frontend
type StackdriverQuery struct {
    Target   string
    Params   url.Values
    RefID    string
    GroupBys []string
    AliasBy  string
}

// StackdriverResponse is the data returned from the external Google Stackdriver API
type StackdriverResponse struct {
    TimeSeries []struct {
        Metric struct {
            Labels map[string]string `json:"labels"`
            Type   string            `json:"type"`
        } `json:"metric"`
        Resource struct {
            Type   string            `json:"type"`
            Labels map[string]string `json:"labels"`
        } `json:"resource"`
        MetricKind string `json:"metricKind"`
        ValueType  string `json:"valueType"`
        Points     []struct {
            Interval struct {
                StartTime time.Time `json:"startTime"`
                EndTime   time.Time `json:"endTime"`
            } `json:"interval"`
            Value struct {
                DoubleValue float64 `json:"doubleValue"`
                StringValue string  `json:"stringValue"`
                BoolValue   bool    `json:"boolValue"`
                IntValue    string  `json:"int64Value"`
            } `json:"value"`
        } `json:"points"`
    } `json:"timeSeries"`
}
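Since StackdriverResponse models the payload with anonymous nested structs, a raw response shaped like the test-data files above unmarshals into it directly. A short illustrative test-style sketch, assuming it lives in the stackdriver package; it is not part of this commit, and the values are taken from the fixture data:

package stackdriver

import (
    "encoding/json"
    "testing"
)

// TestUnmarshalSampleResponse is an illustrative sketch (not part of this
// commit) showing how a raw API payload maps onto StackdriverResponse.
func TestUnmarshalSampleResponse(t *testing.T) {
    payload := []byte(`{
      "timeSeries": [{
        "metric": {"type": "compute.googleapis.com/instance/cpu/usage_time", "labels": {"instance_name": "collector-us-east-1"}},
        "resource": {"type": "gce_instance", "labels": {"zone": "us-east1-b"}},
        "metricKind": "DELTA",
        "valueType": "DOUBLE",
        "points": [{
          "interval": {"startTime": "2018-09-11T12:30:00Z", "endTime": "2018-09-11T12:31:00Z"},
          "value": {"doubleValue": 30.807846801355}
        }]
      }]
    }`)

    var data StackdriverResponse
    if err := json.Unmarshal(payload, &data); err != nil {
        t.Fatal(err)
    }

    series := data.TimeSeries[0]
    if series.ValueType != "DOUBLE" || series.Points[0].Value.DoubleValue != 30.807846801355 {
        t.Errorf("unexpected series: %+v", series)
    }
}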
@@ -1,7 +1,8 @@
 import React, { Component } from 'react';
-import { UserPicker, User } from 'app/core/components/Picker/UserPicker';
+import { UserPicker } from 'app/core/components/Picker/UserPicker';
 import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker';
 import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker';
+import { User } from 'app/types';
 import {
   dashboardPermissionLevels,
   dashboardAclTargets,
@@ -3,6 +3,7 @@ import Select from 'react-select';
 import PickerOption from './PickerOption';
 import { debounce } from 'lodash';
 import { getBackendSrv } from 'app/core/services/backend_srv';
+import { User } from 'app/types';

 export interface Props {
   onSelected: (user: User) => void;
@@ -14,13 +15,6 @@ export interface State {
   isLoading: boolean;
 }

-export interface User {
-  id: number;
-  label: string;
-  avatarUrl: string;
-  login: string;
-}
-
 export class UserPicker extends Component<Props, State> {
   debouncedSearch: any;

@@ -245,6 +245,9 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScope
         return;
       }

+      // ensure dropdown menu doesn't impact on z-index
+      body.find('.dropdown-menu-open').removeClass('dropdown-menu-open');
+
       // for stuff that animates, slides out etc, clicking it needs to
       // hide it right away
       const clickAutoHide = target.closest('[data-click-hide]');
@@ -1,6 +1,6 @@
 import { Action } from 'app/core/actions/location';
-import { LocationState, UrlQueryMap } from 'app/types';
-import { toUrlParams } from 'app/core/utils/url';
+import { LocationState } from 'app/types';
+import { renderUrl } from 'app/core/utils/url';

 export const initialState: LocationState = {
   url: '',
@@ -9,13 +9,6 @@ export const initialState: LocationState = {
   routeParams: {},
 };

-function renderUrl(path: string, query: UrlQueryMap | undefined): string {
-  if (query && Object.keys(query).length > 0) {
-    path += '?' + toUrlParams(query);
-  }
-  return path;
-}
-
 export const locationReducer = (state = initialState, action: Action): LocationState => {
   switch (action.type) {
     case 'UPDATE_LOCATION': {
@@ -4,7 +4,7 @@ import _ from 'lodash';
 import config from 'app/core/config';
 import coreModule from 'app/core/core_module';
 import appEvents from 'app/core/app_events';
-import { encodePathComponent } from 'app/core/utils/location_util';
+import { renderUrl } from 'app/core/utils/url';

 import Mousetrap from 'mousetrap';
 import 'mousetrap-global-bind';
@@ -200,8 +200,8 @@ export class KeybindingSrv {
         ...datasource.getExploreState(panel),
         range,
       };
-      const exploreState = encodePathComponent(JSON.stringify(state));
-      this.$location.url(`/explore?state=${exploreState}`);
+      const exploreState = JSON.stringify(state);
+      this.$location.url(renderUrl('/explore', { state: exploreState }));
     }
   }
 });
@@ -2,6 +2,15 @@
  * @preserve jquery-param (c) 2015 KNOWLEDGECODE | MIT
  */

+import { UrlQueryMap } from 'app/types';
+
+export function renderUrl(path: string, query: UrlQueryMap | undefined): string {
+  if (query && Object.keys(query).length > 0) {
+    path += '?' + toUrlParams(query);
+  }
+  return path;
+}
+
 export function toUrlParams(a) {
   const s = [];
   const rbracket = /\[\]$/;
25
public/app/features/api-keys/ApiKeysAddedModal.test.tsx
Normal file
@@ -0,0 +1,25 @@
import React from 'react';
import { shallow } from 'enzyme';
import { ApiKeysAddedModal, Props } from './ApiKeysAddedModal';

const setup = (propOverrides?: object) => {
  const props: Props = {
    apiKey: 'api key test',
    rootPath: 'test/path',
  };

  Object.assign(props, propOverrides);

  const wrapper = shallow(<ApiKeysAddedModal {...props} />);

  return {
    wrapper,
  };
};

describe('Render', () => {
  it('should render component', () => {
    const { wrapper } = setup();
    expect(wrapper).toMatchSnapshot();
  });
});
46
public/app/features/api-keys/ApiKeysAddedModal.tsx
Normal file
@@ -0,0 +1,46 @@
import React from 'react';

export interface Props {
  apiKey: string;
  rootPath: string;
}

export const ApiKeysAddedModal = (props: Props) => {
  return (
    <div className="modal-body">
      <div className="modal-header">
        <h2 className="modal-header-title">
          <i className="fa fa-key" />
          <span className="p-l-1">API Key Created</span>
        </h2>

        <a className="modal-header-close" ng-click="dismiss();">
          <i className="fa fa-remove" />
        </a>
      </div>

      <div className="modal-content">
        <div className="gf-form-group">
          <div className="gf-form">
            <span className="gf-form-label">Key</span>
            <span className="gf-form-label">{props.apiKey}</span>
          </div>
        </div>

        <div className="grafana-info-box" style={{ border: 0 }}>
          You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now.
          <br />
          <br />
          You can authenticate request using the Authorization HTTP header, example:
          <br />
          <br />
          <pre className="small">
            curl -H "Authorization: Bearer {props.apiKey}" {props.rootPath}/api/dashboards/home
          </pre>
        </div>
      </div>
    </div>
  );
};

export default ApiKeysAddedModal;
73
public/app/features/api-keys/ApiKeysPage.test.tsx
Normal file
@@ -0,0 +1,73 @@
import React from 'react';
import { shallow } from 'enzyme';
import { Props, ApiKeysPage } from './ApiKeysPage';
import { NavModel, ApiKey } from 'app/types';
import { getMultipleMockKeys, getMockKey } from './__mocks__/apiKeysMock';

const setup = (propOverrides?: object) => {
  const props: Props = {
    navModel: {} as NavModel,
    apiKeys: [] as ApiKey[],
    searchQuery: '',
    loadApiKeys: jest.fn(),
    deleteApiKey: jest.fn(),
    setSearchQuery: jest.fn(),
    addApiKey: jest.fn(),
  };

  Object.assign(props, propOverrides);

  const wrapper = shallow(<ApiKeysPage {...props} />);
  const instance = wrapper.instance() as ApiKeysPage;

  return {
    wrapper,
    instance,
  };
};

describe('Render', () => {
  it('should render component', () => {
    const { wrapper } = setup();
    expect(wrapper).toMatchSnapshot();
  });

  it('should render API keys table', () => {
    const { wrapper } = setup({
      apiKeys: getMultipleMockKeys(5),
    });

    expect(wrapper).toMatchSnapshot();
  });
});

describe('Life cycle', () => {
  it('should call loadApiKeys', () => {
    const { instance } = setup();

    instance.componentDidMount();

    expect(instance.props.loadApiKeys).toHaveBeenCalled();
  });
});

describe('Functions', () => {
  describe('Delete team', () => {
    it('should call delete team', () => {
      const { instance } = setup();
      instance.onDeleteApiKey(getMockKey());
      expect(instance.props.deleteApiKey).toHaveBeenCalledWith(1);
    });
  });

  describe('on search query change', () => {
    it('should call setSearchQuery', () => {
      const { instance } = setup();
      const mockEvent = { target: { value: 'test' } };

      instance.onSearchQueryChange(mockEvent);

      expect(instance.props.setSearchQuery).toHaveBeenCalledWith('test');
    });
  });
});
222
public/app/features/api-keys/ApiKeysPage.tsx
Normal file
@@ -0,0 +1,222 @@
import React, { PureComponent } from 'react';
import ReactDOMServer from 'react-dom/server';
import { connect } from 'react-redux';
import { hot } from 'react-hot-loader';
import { NavModel, ApiKey, NewApiKey, OrgRole } from 'app/types';
import { getNavModel } from 'app/core/selectors/navModel';
import { getApiKeys } from './state/selectors';
import { loadApiKeys, deleteApiKey, setSearchQuery, addApiKey } from './state/actions';
import PageHeader from 'app/core/components/PageHeader/PageHeader';
import SlideDown from 'app/core/components/Animations/SlideDown';
import ApiKeysAddedModal from './ApiKeysAddedModal';
import config from 'app/core/config';
import appEvents from 'app/core/app_events';

export interface Props {
  navModel: NavModel;
  apiKeys: ApiKey[];
  searchQuery: string;
  loadApiKeys: typeof loadApiKeys;
  deleteApiKey: typeof deleteApiKey;
  setSearchQuery: typeof setSearchQuery;
  addApiKey: typeof addApiKey;
}

export interface State {
  isAdding: boolean;
  newApiKey: NewApiKey;
}

enum ApiKeyStateProps {
  Name = 'name',
  Role = 'role',
}

const initialApiKeyState = {
  name: '',
  role: OrgRole.Viewer,
};

export class ApiKeysPage extends PureComponent<Props, any> {
  constructor(props) {
    super(props);
    this.state = { isAdding: false, newApiKey: initialApiKeyState };
  }

  componentDidMount() {
    this.fetchApiKeys();
  }

  async fetchApiKeys() {
    await this.props.loadApiKeys();
  }

  onDeleteApiKey(key: ApiKey) {
    this.props.deleteApiKey(key.id);
  }

  onSearchQueryChange = evt => {
    this.props.setSearchQuery(evt.target.value);
  };

  onToggleAdding = () => {
    this.setState({ isAdding: !this.state.isAdding });
  };

  onAddApiKey = async evt => {
    evt.preventDefault();

    const openModal = (apiKey: string) => {
      const rootPath = window.location.origin + config.appSubUrl;
      const modalTemplate = ReactDOMServer.renderToString(<ApiKeysAddedModal apiKey={apiKey} rootPath={rootPath} />);

      appEvents.emit('show-modal', {
        templateHtml: modalTemplate,
      });
    };

    this.props.addApiKey(this.state.newApiKey, openModal);
    this.setState((prevState: State) => {
      return {
        ...prevState,
        newApiKey: initialApiKeyState,
      };
    });
  };

  onApiKeyStateUpdate = (evt, prop: string) => {
    const value = evt.currentTarget.value;
    this.setState((prevState: State) => {
      const newApiKey = {
        ...prevState.newApiKey,
      };
      newApiKey[prop] = value;

      return {
        ...prevState,
        newApiKey: newApiKey,
      };
    });
  };

  render() {
    const { newApiKey, isAdding } = this.state;
    const { navModel, apiKeys, searchQuery } = this.props;

    return (
      <div>
        <PageHeader model={navModel} />
        <div className="page-container page-body">
          <div className="page-action-bar">
            <div className="gf-form gf-form--grow">
              <label className="gf-form--has-input-icon gf-form--grow">
                <input
                  type="text"
                  className="gf-form-input"
                  placeholder="Search keys"
                  value={searchQuery}
                  onChange={this.onSearchQueryChange}
                />
                <i className="gf-form-input-icon fa fa-search" />
              </label>
            </div>

            <div className="page-action-bar__spacer" />
            <button className="btn btn-success pull-right" onClick={this.onToggleAdding} disabled={isAdding}>
              <i className="fa fa-plus" /> Add API Key
            </button>
          </div>

          <SlideDown in={isAdding}>
            <div className="cta-form">
              <button className="cta-form__close btn btn-transparent" onClick={this.onToggleAdding}>
                <i className="fa fa-close" />
              </button>
              <h5>Add API Key</h5>
              <form className="gf-form-group" onSubmit={this.onAddApiKey}>
                <div className="gf-form-inline">
                  <div className="gf-form max-width-21">
                    <span className="gf-form-label">Key name</span>
                    <input
                      type="text"
                      className="gf-form-input"
                      value={newApiKey.name}
                      placeholder="Name"
                      onChange={evt => this.onApiKeyStateUpdate(evt, ApiKeyStateProps.Name)}
                    />
                  </div>
                  <div className="gf-form">
                    <span className="gf-form-label">Role</span>
                    <span className="gf-form-select-wrapper">
                      <select
                        className="gf-form-input gf-size-auto"
                        value={newApiKey.role}
                        onChange={evt => this.onApiKeyStateUpdate(evt, ApiKeyStateProps.Role)}
                      >
                        {Object.keys(OrgRole).map(role => {
                          return (
                            <option key={role} label={role} value={role}>
                              {role}
                            </option>
                          );
                        })}
                      </select>
                    </span>
                  </div>
                  <div className="gf-form">
                    <button className="btn gf-form-btn btn-success">Add</button>
                  </div>
                </div>
              </form>
            </div>
          </SlideDown>

          <h3 className="page-heading">Existing Keys</h3>
          <table className="filter-table">
            <thead>
              <tr>
                <th>Name</th>
                <th>Role</th>
                <th style={{ width: '34px' }} />
              </tr>
            </thead>
            {apiKeys.length > 0 ? (
              <tbody>
                {apiKeys.map(key => {
                  return (
                    <tr key={key.id}>
                      <td>{key.name}</td>
                      <td>{key.role}</td>
                      <td>
                        <a onClick={() => this.onDeleteApiKey(key)} className="btn btn-danger btn-mini">
                          <i className="fa fa-remove" />
                        </a>
                      </td>
                    </tr>
                  );
                })}
              </tbody>
            ) : null}
          </table>
        </div>
      </div>
    );
  }
}

function mapStateToProps(state) {
  return {
    navModel: getNavModel(state.navIndex, 'apikeys'),
    apiKeys: getApiKeys(state.apiKeys),
    searchQuery: state.apiKeys.searchQuery,
  };
}

const mapDispatchToProps = {
  loadApiKeys,
  deleteApiKey,
  setSearchQuery,
  addApiKey,
};

export default hot(module)(connect(mapStateToProps, mapDispatchToProps)(ApiKeysPage));
22
public/app/features/api-keys/__mocks__/apiKeysMock.ts
Normal file
@@ -0,0 +1,22 @@
import { ApiKey, OrgRole } from 'app/types';

export const getMultipleMockKeys = (numberOfKeys: number): ApiKey[] => {
  const keys: ApiKey[] = [];
  for (let i = 1; i <= numberOfKeys; i++) {
    keys.push({
      id: i,
      name: `test-${i}`,
      role: OrgRole.Viewer,
    });
  }

  return keys;
};

export const getMockKey = (): ApiKey => {
  return {
    id: 1,
    name: 'test',
    role: OrgRole.Admin,
  };
};
78
public/app/features/api-keys/__snapshots__/ApiKeysAddedModal.test.tsx.snap
Normal file
@@ -0,0 +1,78 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Render should render component 1`] = `
<div
  className="modal-body"
>
  <div
    className="modal-header"
  >
    <h2
      className="modal-header-title"
    >
      <i
        className="fa fa-key"
      />
      <span
        className="p-l-1"
      >
        API Key Created
      </span>
    </h2>
    <a
      className="modal-header-close"
      ng-click="dismiss();"
    >
      <i
        className="fa fa-remove"
      />
    </a>
  </div>
  <div
    className="modal-content"
  >
    <div
      className="gf-form-group"
    >
      <div
        className="gf-form"
      >
        <span
          className="gf-form-label"
        >
          Key
        </span>
        <span
          className="gf-form-label"
        >
          api key test
        </span>
      </div>
    </div>
    <div
      className="grafana-info-box"
      style={
        Object {
          "border": 0,
        }
      }
    >
      You will only be able to view this key here once! It is not stored in this form. So be sure to copy it now.
      <br />
      <br />
      You can authenticate request using the Authorization HTTP header, example:
      <br />
      <br />
      <pre
        className="small"
      >
        curl -H "Authorization: Bearer
        api key test
        "
        test/path
        /api/dashboards/home
      </pre>
    </div>
  </div>
</div>
`;
@ -0,0 +1,435 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`Render should render API keys table 1`] = `
|
||||
<div>
|
||||
<PageHeader
|
||||
model={Object {}}
|
||||
/>
|
||||
<div
|
||||
className="page-container page-body"
|
||||
>
|
||||
<div
|
||||
className="page-action-bar"
|
||||
>
|
||||
<div
|
||||
className="gf-form gf-form--grow"
|
||||
>
|
||||
<label
|
||||
className="gf-form--has-input-icon gf-form--grow"
|
||||
>
|
||||
<input
|
||||
className="gf-form-input"
|
||||
onChange={[Function]}
|
||||
placeholder="Search keys"
|
||||
type="text"
|
||||
value=""
|
||||
/>
|
||||
<i
|
||||
className="gf-form-input-icon fa fa-search"
|
||||
/>
|
||||
</label>
|
||||
</div>
|
||||
<div
|
||||
className="page-action-bar__spacer"
|
||||
/>
|
||||
<button
|
||||
className="btn btn-success pull-right"
|
||||
disabled={false}
|
||||
onClick={[Function]}
|
||||
>
|
||||
<i
|
||||
className="fa fa-plus"
|
||||
/>
|
||||
Add API Key
|
||||
</button>
|
||||
</div>
|
||||
<Component
|
||||
in={false}
|
||||
>
|
||||
<div
|
||||
        className="cta-form"
      >
        <button
          className="cta-form__close btn btn-transparent"
          onClick={[Function]}
        >
          <i
            className="fa fa-close"
          />
        </button>
        <h5>
          Add API Key
        </h5>
        <form
          className="gf-form-group"
          onSubmit={[Function]}
        >
          <div
            className="gf-form-inline"
          >
            <div
              className="gf-form max-width-21"
            >
              <span
                className="gf-form-label"
              >
                Key name
              </span>
              <input
                className="gf-form-input"
                onChange={[Function]}
                placeholder="Name"
                type="text"
                value=""
              />
            </div>
            <div
              className="gf-form"
            >
              <span
                className="gf-form-label"
              >
                Role
              </span>
              <span
                className="gf-form-select-wrapper"
              >
                <select
                  className="gf-form-input gf-size-auto"
                  onChange={[Function]}
                  value="Viewer"
                >
                  <option
                    key="Viewer"
                    label="Viewer"
                    value="Viewer"
                  >
                    Viewer
                  </option>
                  <option
                    key="Editor"
                    label="Editor"
                    value="Editor"
                  >
                    Editor
                  </option>
                  <option
                    key="Admin"
                    label="Admin"
                    value="Admin"
                  >
                    Admin
                  </option>
                </select>
              </span>
            </div>
            <div
              className="gf-form"
            >
              <button
                className="btn gf-form-btn btn-success"
              >
                Add
              </button>
            </div>
          </div>
        </form>
      </div>
    </Component>
    <h3
      className="page-heading"
    >
      Existing Keys
    </h3>
    <table
      className="filter-table"
    >
      <thead>
        <tr>
          <th>
            Name
          </th>
          <th>
            Role
          </th>
          <th
            style={
              Object {
                "width": "34px",
              }
            }
          />
        </tr>
      </thead>
      <tbody>
        <tr
          key="1"
        >
          <td>
            test-1
          </td>
          <td>
            Viewer
          </td>
          <td>
            <a
              className="btn btn-danger btn-mini"
              onClick={[Function]}
            >
              <i
                className="fa fa-remove"
              />
            </a>
          </td>
        </tr>
        <tr
          key="2"
        >
          <td>
            test-2
          </td>
          <td>
            Viewer
          </td>
          <td>
            <a
              className="btn btn-danger btn-mini"
              onClick={[Function]}
            >
              <i
                className="fa fa-remove"
              />
            </a>
          </td>
        </tr>
        <tr
          key="3"
        >
          <td>
            test-3
          </td>
          <td>
            Viewer
          </td>
          <td>
            <a
              className="btn btn-danger btn-mini"
              onClick={[Function]}
            >
              <i
                className="fa fa-remove"
              />
            </a>
          </td>
        </tr>
        <tr
          key="4"
        >
          <td>
            test-4
          </td>
          <td>
            Viewer
          </td>
          <td>
            <a
              className="btn btn-danger btn-mini"
              onClick={[Function]}
            >
              <i
                className="fa fa-remove"
              />
            </a>
          </td>
        </tr>
        <tr
          key="5"
        >
          <td>
            test-5
          </td>
          <td>
            Viewer
          </td>
          <td>
            <a
              className="btn btn-danger btn-mini"
              onClick={[Function]}
            >
              <i
                className="fa fa-remove"
              />
            </a>
          </td>
        </tr>
      </tbody>
    </table>
  </div>
</div>
`;

exports[`Render should render component 1`] = `
<div>
  <PageHeader
    model={Object {}}
  />
  <div
    className="page-container page-body"
  >
    <div
      className="page-action-bar"
    >
      <div
        className="gf-form gf-form--grow"
      >
        <label
          className="gf-form--has-input-icon gf-form--grow"
        >
          <input
            className="gf-form-input"
            onChange={[Function]}
            placeholder="Search keys"
            type="text"
            value=""
          />
          <i
            className="gf-form-input-icon fa fa-search"
          />
        </label>
      </div>
      <div
        className="page-action-bar__spacer"
      />
      <button
        className="btn btn-success pull-right"
        disabled={false}
        onClick={[Function]}
      >
        <i
          className="fa fa-plus"
        />
        Add API Key
      </button>
    </div>
    <Component
      in={false}
    >
      <div
        className="cta-form"
      >
        <button
          className="cta-form__close btn btn-transparent"
          onClick={[Function]}
        >
          <i
            className="fa fa-close"
          />
        </button>
        <h5>
          Add API Key
        </h5>
        <form
          className="gf-form-group"
          onSubmit={[Function]}
        >
          <div
            className="gf-form-inline"
          >
            <div
              className="gf-form max-width-21"
            >
              <span
                className="gf-form-label"
              >
                Key name
              </span>
              <input
                className="gf-form-input"
                onChange={[Function]}
                placeholder="Name"
                type="text"
                value=""
              />
            </div>
            <div
              className="gf-form"
            >
              <span
                className="gf-form-label"
              >
                Role
              </span>
              <span
                className="gf-form-select-wrapper"
              >
                <select
                  className="gf-form-input gf-size-auto"
                  onChange={[Function]}
                  value="Viewer"
                >
                  <option
                    key="Viewer"
                    label="Viewer"
                    value="Viewer"
                  >
                    Viewer
                  </option>
                  <option
                    key="Editor"
                    label="Editor"
                    value="Editor"
                  >
                    Editor
                  </option>
                  <option
                    key="Admin"
                    label="Admin"
                    value="Admin"
                  >
                    Admin
                  </option>
                </select>
              </span>
            </div>
            <div
              className="gf-form"
            >
              <button
                className="btn gf-form-btn btn-success"
              >
                Add
              </button>
            </div>
          </div>
        </form>
      </div>
    </Component>
    <h3
      className="page-heading"
    >
      Existing Keys
    </h3>
    <table
      className="filter-table"
    >
      <thead>
        <tr>
          <th>
            Name
          </th>
          <th>
            Role
          </th>
          <th
            style={
              Object {
                "width": "34px",
              }
            }
          />
        </tr>
      </thead>
    </table>
  </div>
</div>
`;

56
public/app/features/api-keys/state/actions.ts
Normal file
@ -0,0 +1,56 @@
import { ThunkAction } from 'redux-thunk';
import { getBackendSrv } from 'app/core/services/backend_srv';
import { StoreState, ApiKey } from 'app/types';

export enum ActionTypes {
  LoadApiKeys = 'LOAD_API_KEYS',
  SetApiKeysSearchQuery = 'SET_API_KEYS_SEARCH_QUERY',
}

export interface LoadApiKeysAction {
  type: ActionTypes.LoadApiKeys;
  payload: ApiKey[];
}

export interface SetSearchQueryAction {
  type: ActionTypes.SetApiKeysSearchQuery;
  payload: string;
}

export type Action = LoadApiKeysAction | SetSearchQueryAction;

type ThunkResult<R> = ThunkAction<R, StoreState, undefined, Action>;

const apiKeysLoaded = (apiKeys: ApiKey[]): LoadApiKeysAction => ({
  type: ActionTypes.LoadApiKeys,
  payload: apiKeys,
});

export function addApiKey(apiKey: ApiKey, openModal: (key: string) => void): ThunkResult<void> {
  return async dispatch => {
    const result = await getBackendSrv().post('/api/auth/keys', apiKey);
    dispatch(setSearchQuery(''));
    dispatch(loadApiKeys());
    openModal(result.key);
  };
}

export function loadApiKeys(): ThunkResult<void> {
  return async dispatch => {
    const response = await getBackendSrv().get('/api/auth/keys');
    dispatch(apiKeysLoaded(response));
  };
}

export function deleteApiKey(id: number): ThunkResult<void> {
  return async dispatch => {
    // wait for the DELETE to resolve before re-fetching the key list
    await getBackendSrv().delete('/api/auth/keys/' + id);
    dispatch(loadApiKeys());
  };
}

export const setSearchQuery = (searchQuery: string): SetSearchQueryAction => ({
  type: ActionTypes.SetApiKeysSearchQuery,
  payload: searchQuery,
});
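The thunks above are standard redux-thunk action creators, so they can be exercised against any store that has the middleware applied. A minimal sketch of how a page might drive them follows; the standalone store wiring is an illustrative assumption, not part of this change:

import { createStore, applyMiddleware } from 'redux';
import thunk from 'redux-thunk';
import { apiKeysReducer } from './reducers';
import { loadApiKeys, setSearchQuery, deleteApiKey } from './actions';

// Hypothetical standalone store; in Grafana these reducers are combined
// into the app-wide store instead.
const store = createStore(apiKeysReducer, applyMiddleware(thunk));

// Fetch the key list once, then narrow the table via the search query.
// (Casts are needed because the base redux Dispatch type doesn't know thunks.)
store.dispatch(loadApiKeys() as any);
store.dispatch(setSearchQuery('viewer'));

// deleteApiKey re-fetches the list once the DELETE call resolves.
store.dispatch(deleteApiKey(3) as any);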
31
public/app/features/api-keys/state/reducers.test.ts
Normal file
@ -0,0 +1,31 @@
import { Action, ActionTypes } from './actions';
import { initialApiKeysState, apiKeysReducer } from './reducers';
import { getMultipleMockKeys } from '../__mocks__/apiKeysMock';

describe('API Keys reducer', () => {
  it('should set keys', () => {
    const payload = getMultipleMockKeys(4);

    const action: Action = {
      type: ActionTypes.LoadApiKeys,
      payload,
    };

    const result = apiKeysReducer(initialApiKeysState, action);

    expect(result.keys).toEqual(payload);
  });

  it('should set search query', () => {
    const payload = 'test query';

    const action: Action = {
      type: ActionTypes.SetApiKeysSearchQuery,
      payload,
    };

    const result = apiKeysReducer(initialApiKeysState, action);

    expect(result.searchQuery).toEqual('test query');
  });
});
21
public/app/features/api-keys/state/reducers.ts
Normal file
@ -0,0 +1,21 @@
import { ApiKeysState } from 'app/types';
import { Action, ActionTypes } from './actions';

export const initialApiKeysState: ApiKeysState = {
  keys: [],
  searchQuery: '',
};

export const apiKeysReducer = (state = initialApiKeysState, action: Action): ApiKeysState => {
  switch (action.type) {
    case ActionTypes.LoadApiKeys:
      return { ...state, keys: action.payload };
    case ActionTypes.SetApiKeysSearchQuery:
      return { ...state, searchQuery: action.payload };
  }
  return state;
};

export default {
  apiKeys: apiKeysReducer,
};
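Since the reducer is a pure function, its behavior can be checked without any store at all. A small illustrative snippet, with a made-up payload value:

import { initialApiKeysState, apiKeysReducer } from './reducers';
import { ActionTypes } from './actions';

// Running an action through the reducer yields a new state object;
// the input state is never mutated.
const next = apiKeysReducer(initialApiKeysState, {
  type: ActionTypes.SetApiKeysSearchQuery,
  payload: 'prod',
});

console.log(next.searchQuery);                // 'prod'
console.log(initialApiKeysState.searchQuery); // '' - untouched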
25
public/app/features/api-keys/state/selectors.test.ts
Normal file
@ -0,0 +1,25 @@
import { getApiKeys } from './selectors';
import { getMultipleMockKeys } from '../__mocks__/apiKeysMock';
import { ApiKeysState } from 'app/types';

describe('API Keys selectors', () => {
  describe('Get API Keys', () => {
    const mockKeys = getMultipleMockKeys(5);

    it('should return all keys if no search query', () => {
      const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '' };

      const keys = getApiKeys(mockState);

      expect(keys).toEqual(mockKeys);
    });

    it('should filter keys if search query exists', () => {
      const mockState: ApiKeysState = { keys: mockKeys, searchQuery: '5' };

      const keys = getApiKeys(mockState);

      expect(keys.length).toEqual(1);
    });
  });
});
9
public/app/features/api-keys/state/selectors.ts
Normal file
@ -0,0 +1,9 @@
import { ApiKeysState } from 'app/types';

export const getApiKeys = (state: ApiKeysState) => {
  const regex = RegExp(state.searchQuery, 'i');

  return state.keys.filter(key => {
    return regex.test(key.name) || regex.test(key.role);
  });
};
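Note that the selector compiles the search query into a case-insensitive RegExp and matches it against both name and role, so regex metacharacters in the query are interpreted rather than escaped. A quick illustration with made-up keys (typed loosely here because the real ApiKey type carries more fields, such as id):

import { getApiKeys } from './selectors';

// Illustrative fixtures only.
const state: any = {
  searchQuery: 'edit',
  keys: [
    { name: 'ci-deploy', role: 'Editor' },
    { name: 'read-only', role: 'Viewer' },
  ],
};

// 'edit' matches the role "Editor" case-insensitively.
console.log(getApiKeys(state).map(k => k.name)); // ['ci-deploy']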
@ -29,6 +29,7 @@ describe('timeSrv', () => {
  beforeEach(() => {
    timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
    timeSrv.init(_dashboard);
    _dashboard.refresh = false;
  });

  describe('timeRange', () => {
@ -79,6 +80,23 @@ describe('timeSrv', () => {
      expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime());
    });

    it('should ignore refresh if time absolute', () => {
      location = {
        search: jest.fn(() => ({
          from: '20140410T052010',
          to: '20140520T031022',
        })),
      };

      timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });

      // dashboard saved with refresh on
      _dashboard.refresh = true;
      timeSrv.init(_dashboard);

      expect(timeSrv.refresh).toBe(false);
    });

    it('should handle formatted dates without time', () => {
      location = {
        search: jest.fn(() => ({
@ -4,7 +4,8 @@
  <label class="gf-form-label template-variable" ng-hide="variable.hide === 1">
    {{variable.label || variable.name}}
  </label>
  <value-select-dropdown ng-if="variable.type !== 'adhoc'" variable="variable" on-updated="ctrl.variableUpdated(variable)"></value-select-dropdown>
  <value-select-dropdown ng-if="variable.type !== 'adhoc' && variable.type !== 'textbox'" variable="variable" on-updated="ctrl.variableUpdated(variable)"></value-select-dropdown>
  <input type="text" ng-if="variable.type === 'textbox'" ng-model="variable.query" class="gf-form-input width-12" ng-blur="variable.current.value != variable.query && variable.updateOptions() && ctrl.variableUpdated(variable);" ng-keydown="$event.keyCode === 13 && variable.current.value != variable.query && variable.updateOptions() && ctrl.variableUpdated(variable);" />
</div>
<ad-hoc-filters ng-if="variable.type === 'adhoc'" variable="variable"></ad-hoc-filters>
@ -85,6 +85,12 @@ export class TimeSrv {
    if (params.to) {
      this.time.to = this.parseUrlParam(params.to) || this.time.to;
    }
    // if absolute ignore refresh option saved to dashboard
    if (params.to && params.to.indexOf('now') === -1) {
      this.refresh = false;
      this.dashboard.refresh = false;
    }
    // but if refresh explicitly set then use that
    if (params.refresh) {
      this.refresh = params.refresh || this.refresh;
    }
@ -107,7 +113,7 @@ export class TimeSrv {
  }

  private timeHasChangedSinceLoad() {
    return this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to;
    return this.timeAtLoad && (this.timeAtLoad.from !== this.time.from || this.timeAtLoad.to !== this.time.to);
  }

  setAutoRefresh(interval) {
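The rule this TimeSrv change encodes — an absolute from/to in the URL disables the dashboard's saved auto refresh, unless the URL sets refresh explicitly — can be restated as a small standalone function. The sketch below is illustrative only; the parameter names mirror the diff, but the function itself is not part of Grafana:

// Illustrative restatement of the refresh logic above.
function resolveRefresh(params: { to?: string; refresh?: string }, savedRefresh: string | false): string | false {
  let refresh = savedRefresh;
  // An absolute `to` (one that doesn't contain 'now') pins the range,
  // so the refresh interval saved on the dashboard is dropped.
  if (params.to && params.to.indexOf('now') === -1) {
    refresh = false;
  }
  // ...but an explicit refresh param in the URL always wins.
  if (params.refresh) {
    refresh = params.refresh;
  }
  return refresh;
}

console.log(resolveRefresh({ to: '20140520T031022' }, '10s'));                 // false
console.log(resolveRefresh({ to: 'now-6h' }, '10s'));                          // '10s'
console.log(resolveRefresh({ to: '20140520T031022', refresh: '30s' }, '10s')); // '30s'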
@ -1,10 +1,12 @@
import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
import angular from 'angular';

const template = `
<input type="file" id="dashupload" name="dashupload" class="hide"/>
<input type="file" id="dashupload" name="dashupload" class="hide" onchange="angular.element(this).scope().file_selected"/>
<label class="btn btn-success" for="dashupload">
  <i class="fa fa-upload"></i>
  Upload .json File
  {{btnText}}
</label>
`;

@ -15,8 +17,11 @@ function uploadDashboardDirective(timer, alertSrv, $location) {
    template: template,
    scope: {
      onUpload: '&',
      btnText: '@?',
    },
    link: scope => {
    link: (scope, elem) => {
      scope.btnText = angular.isDefined(scope.btnText) ? scope.btnText : 'Upload .json File';

      function file_selected(evt) {
        const files = evt.target.files; // FileList object
        const readerOnload = () => {
@ -26,7 +31,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) {
            dash = JSON.parse(e.target.result);
          } catch (err) {
            console.log(err);
            scope.appEvent('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]);
            appEvents.emit('alert-error', ['Import failed', 'JSON -> JS Serialization failed: ' + err.message]);
            return;
          }

@ -52,7 +57,7 @@ function uploadDashboardDirective(timer, alertSrv, $location) {
      // Check for the various File API support.
      if (wnd.File && wnd.FileReader && wnd.FileList && wnd.Blob) {
        // Something
        document.getElementById('dashupload').addEventListener('change', file_selected, false);
        elem[0].addEventListener('change', file_selected, false);
      } else {
        alertSrv.set('Oops', 'Sorry, the HTML5 File APIs are not fully supported in this browser.', 'error');
      }
Some files were not shown because too many files have changed in this diff