https://github.com/grafana/grafana
Commit 5aeded8cf3: Merge branch 'master' into react-panels
@@ -8,7 +8,6 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 120
insert_final_newline = true

[*.go]
indent_style = tab
.gitignore (vendored, 5 lines changed)

@@ -10,8 +8,8 @@ awsconfig
/public_gen
/public/vendor/npm
/tmp
vendor/phantomjs/phantomjs
vendor/phantomjs/phantomjs.exe
tools/phantomjs/phantomjs
tools/phantomjs/phantomjs.exe
profile.out
coverage.txt

@@ -60,3 +60,4 @@ debug.test
/vendor/**/*_test.go
/vendor/**/.editorconfig
/vendor/**/appengine*
*.orig
CHANGELOG.md (35 lines changed)

@@ -2,7 +2,7 @@

Grafana v5.0 is going to be the biggest and most foundational release Grafana has ever had, coming with a ton of UX improvements, a new dashboard grid engine, dashboard folders, user teams and permissions. Check out this [video preview](https://www.youtube.com/watch?v=BC_YRNpqj5k) of Grafana v5.

### New Features
### New Major Features
- **Dashboards** Dashboard folders, [#1611](https://github.com/grafana/grafana/issues/1611)
- **Teams** User groups (teams) implemented. Can be used in folder & dashboard permission list.
- **Dashboard grid**: Panels are now laid out in a two dimensional grid (with x, y, w, h). [#9093](https://github.com/grafana/grafana/issues/9093).

@@ -10,24 +10,22 @@ Grafana v5.0 is going to be the biggest and most foundational release Grafana ha
- **UX**: Major update to page header and navigation
- **Dashboard settings**: Combine dashboard settings views into one with side menu, [#9750](https://github.com/grafana/grafana/issues/9750)

## New Dashboard Grid

The new grid engine is major upgrade for how you can position and move panels. It enables new layouts and a much easier dashboard building experience. The change is backwards compatible. Grafana will automatically upgrade your dashboards to the new schema and position panels to match your existing layout. There might be minor differences in panel height.

Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: 24, h: 5}`. Units are in grid dimensions (24 columns, 1 height unit 30px). Rows and Panels objects exist (together) in a flat array directly on the dashboard root object. Rows are not needed for layouts anymore and are mainly there for backward compatibility. Some panel plugins that do not respect their panel height might require an update.

# 4.7.0 (unreleased / v4.7.x branch)

## Breaking changes

`[dashboard.json]` have been replaced with [dashboard provisioning](http://docs.grafana.org/administration/provisioning/).

* **[dashboard.json]** have been replaced with [dashboard provisioning](http://docs.grafana.org/administration/provisioning/).
Config files for provisioning datasources as configuration have changed from `/conf/datasources` to `/conf/provisioning/datasources`.
From `/etc/grafana/datasources` to `/etc/grafana/provisioning/datasources` when installed with deb/rpm packages.

The pagerduty notifier now defaults to not auto resolve incidents. More details at [#10222](https://github.com/grafana/grafana/issues/10222)
* **Pagerduty** The notifier now defaults to not auto resolve incidents. More details at [#10222](https://github.com/grafana/grafana/issues/10222)

## New Dashboard Grid

The new grid engine is a major upgrade for how you can position and move panels. It enables new layouts and a much easier dashboard building experience. The change is backward compatible. So you can upgrade your current version to 5.0 without breaking dashboards, but you cannot downgrade from 5.0 to previous versions. Grafana will automatically upgrade your dashboards to the new schema and position panels to match your existing layout. There might be minor differences in panel height. If you upgrade to 5.0 and for some reason want to rollback to the previous version you can restore dashboards to previous versions using dashboard history. But that should only be seen as an emergency solution.

Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: 24, h: 5}`. Units are in grid dimensions (24 columns, 1 height unit 30px). Rows and Panels objects exist (together) in a flat array directly on the dashboard root object. Rows are not needed for layouts anymore and are mainly there for backward compatibility. Some panel plugins that do not respect their panel height might require an update.
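As a rough illustration of the schema described above (this snippet is not part of the changelog; the panel ids, titles and panel types are made up for the example), a v5 dashboard fragment with a flat panel array and gridPos could look like this:

```json
{
  "title": "Example dashboard",
  "panels": [
    { "id": 1, "type": "graph",      "title": "Requests",   "gridPos": { "x": 0,  "y": 0, "w": 12, "h": 8 } },
    { "id": 2, "type": "singlestat", "title": "Error rate", "gridPos": { "x": 12, "y": 0, "w": 12, "h": 8 } }
  ]
}
```

Here `w` is measured against the 24-column grid and `h` in 30px units, so each panel above is half the dashboard width and 240px tall.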
## New Features
* **Alerting**: Add support for internal image store [#6922](https://github.com/grafana/grafana/issues/6922), thx [@FunkyM](https://github.com/FunkyM)
* **Data Source Proxy**: Add support for whitelisting specified cookies that will be passed through to the data source when proxying data source requests [#5457](https://github.com/grafana/grafana/issues/5457), thanks [@robingustafsson](https://github.com/robingustafsson)
* **Postgres/MySQL**: add __timeGroup macro for mysql [#9596](https://github.com/grafana/grafana/pull/9596), thanks [@svenklemm](https://github.com/svenklemm)
* **Text**: Text panel are now edited in the ace editor. [#9698](https://github.com/grafana/grafana/pull/9698), thx [@mtanda](https://github.com/mtanda)

@@ -38,7 +36,11 @@ The pagerduty notifier now defaults to not auto resolve incidents. More details
* **Dashboard as cfg**: Load dashboards from file into Grafana on startup/change [#9654](https://github.com/grafana/grafana/issues/9654) [#5269](https://github.com/grafana/grafana/issues/5269)
* **Prometheus**: Grafana can now send alerts to Prometheus Alertmanager while firing [#7481](https://github.com/grafana/grafana/issues/7481), thx [@Thib17](https://github.com/Thib17) and [@mtanda](https://github.com/mtanda)
* **Table**: Support multiple table formatted queries in table panel [#9170](https://github.com/grafana/grafana/issues/9170), thx [@davkal](https://github.com/davkal)
* **Security**: Protect against brute force (frequent) login attempts [#7616](https://github.com/grafana/grafana/issues/7616)

## Minor
* **Graph**: Don't hide graph display options (Lines/Points) when draw mode is unchecked [#9770](https://github.com/grafana/grafana/issues/9770), thx [@Jonnymcc](https://github.com/Jonnymcc)
* **Prometheus**: Show label name in paren after by/without/on/ignoring/group_left/group_right [#9664](https://github.com/grafana/grafana/pull/9664), thx [@mtanda](https://github.com/mtanda)
* **Alert panel**: Adds placeholder text when no alerts are within the time range [#9624](https://github.com/grafana/grafana/issues/9624), thx [@straend](https://github.com/straend)
* **Mysql**: MySQL enable MaxOpenCon and MaxIdleCon regards how constring is configured. [#9784](https://github.com/grafana/grafana/issues/9784), thx [@dfredell](https://github.com/dfredell)
* **Cloudwatch**: Fixes broken query inspector for cloudwatch [#9661](https://github.com/grafana/grafana/issues/9661), thx [@mtanda](https://github.com/mtanda)

@@ -48,10 +50,8 @@ The pagerduty notifier now defaults to not auto resolve incidents. More details
* **Github**: Use organizations_url provided from github to verify user belongs in org. [#10111](https://github.com/grafana/grafana/issues/10111), thx
[@adiletmaratov](https://github.com/adiletmaratov)
* **Backend**: Fixed bug where Grafana exited before all sub routines were finished [#10131](https://github.com/grafana/grafana/issues/10131)

## Tech
* **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645)

* **Azure**: Adds support for Azure blob storage as external image store [#8955](https://github.com/grafana/grafana/issues/8955), thx [@saada](https://github.com/saada)
* **Telegram**: Add support for inline image uploads to telegram notifier plugin [#9967](https://github.com/grafana/grafana/pull/9967), thx [@rburchell](https://github.com/rburchell)

## Fixes
* **Sensu**: Send alert message to sensu output [#9551](https://github.com/grafana/grafana/issues/9551), thx [@cjchand](https://github.com/cjchand)

@@ -59,6 +59,9 @@ The pagerduty notifier now defaults to not auto resolve incidents. More details
* **Postgres/MySQL**: Control quoting in SQL-queries when using template variables [#9030](https://github.com/grafana/grafana/issues/9030), thanks [@svenklemm](https://github.com/svenklemm)
* **Pagerduty**: Pagerduty doesn't auto resolve incidents by default anymore. [#10222](https://github.com/grafana/grafana/issues/10222)

## Tech
* **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645)

# 4.6.3 (2017-12-14)

## Fixes
Gopkg.lock (generated, new file, 636 lines)

@@ -0,0 +1,636 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.

[The new dep lock file contains one [[projects]] block per dependency, pinning each to an exact revision (and version or branch where applicable): cloud.google.com/go, github.com/BurntSushi/toml, github.com/Unknwon/com, github.com/apache/thrift, github.com/aws/aws-sdk-go, github.com/benbjohnson/clock, github.com/beorn7/perks, github.com/bmizerany/assert, github.com/bradfitz/gomemcache, github.com/codahale/hdrhistogram, github.com/codegangsta/cli, github.com/davecgh/go-spew, github.com/fatih/color, github.com/go-ini/ini, github.com/go-ldap/ldap, github.com/go-macaron/binding, github.com/go-macaron/gzip, github.com/go-macaron/inject, github.com/go-macaron/session, github.com/go-sql-driver/mysql, github.com/go-stack/stack, github.com/go-xorm/builder, github.com/go-xorm/core, github.com/go-xorm/xorm, github.com/golang/protobuf, github.com/gopherjs/gopherjs, github.com/gorilla/websocket, github.com/gosimple/slug, github.com/grafana/grafana_plugin_model, github.com/hashicorp/go-hclog, github.com/hashicorp/go-plugin, github.com/hashicorp/go-version, github.com/hashicorp/yamux, github.com/inconshreveable/log15, github.com/jmespath/go-jmespath, github.com/jtolds/gls, github.com/klauspost/compress, github.com/klauspost/cpuid, github.com/klauspost/crc32, github.com/kr/pretty, github.com/kr/text, github.com/lib/pq, github.com/mattn/go-colorable, github.com/mattn/go-isatty, github.com/mattn/go-sqlite3, github.com/matttproud/golang_protobuf_extensions, github.com/mitchellh/go-testing-interface, github.com/opentracing/opentracing-go, github.com/patrickmn/go-cache, github.com/prometheus/client_golang, github.com/prometheus/client_model, github.com/prometheus/common, github.com/prometheus/procfs, github.com/rainycape/unidecode, github.com/sergi/go-diff, github.com/smartystreets/assertions, github.com/smartystreets/goconvey, github.com/teris-io/shortid, github.com/uber/jaeger-client-go, github.com/uber/jaeger-lib, github.com/yudai/gojsondiff, github.com/yudai/golcs, golang.org/x/crypto, golang.org/x/net, golang.org/x/oauth2, golang.org/x/sync, golang.org/x/sys, golang.org/x/text, google.golang.org/appengine, google.golang.org/genproto, google.golang.org/grpc, gopkg.in/alexcesaro/quotedprintable.v3, gopkg.in/asn1-ber.v1, gopkg.in/bufio.v1, gopkg.in/gomail.v2, gopkg.in/ini.v1, gopkg.in/macaron.v1, gopkg.in/redis.v2 and gopkg.in/yaml.v2. The file ends with a [solve-meta] block (analyzer-name = "dep", analyzer-version = 1, solver-name = "gps-cdcl", solver-version = 1, inputs-digest = "4de68f1342ba98a637ec8ca7496aeeae2021bf9e4c7c80db7924e14709151a62"). The individual revision hashes of the generated listing are omitted here.]
Gopkg.toml (new file, 199 lines)

@@ -0,0 +1,199 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
(The file opens with dep's standard commented-out example header.)

ignored = [
  "github.com/grafana/grafana/data/*",
  "github.com/grafana/grafana/public/*",
  "github.com/grafana/grafana/node_modules/*"
]

One [[constraint]] block follows per direct dependency (condensed here to a table):

| Package | Constraint |
| ---- | ---- |
| github.com/BurntSushi/toml | version 0.3.0 |
| github.com/Unknwon/com | branch master |
| github.com/aws/aws-sdk-go | version 1.12.65 |
| github.com/benbjohnson/clock | branch master |
| github.com/bmizerany/assert | branch master |
| github.com/codegangsta/cli | version 1.20.0 |
| github.com/davecgh/go-spew | version 1.1.0 |
| github.com/fatih/color | version 1.5.0 |
| github.com/go-ldap/ldap | version 2.5.1 |
| github.com/go-macaron/binding | branch master |
| github.com/go-macaron/gzip | branch master |
| github.com/go-macaron/session | branch master |
| github.com/go-sql-driver/mysql | revision 2cc627ac8defc45d65066ae98f898166f580f9a4 (commented note: would rather depend on version 1.3.0 than a commit) |
| github.com/go-stack/stack | version 1.7.0 |
| github.com/go-xorm/core | revision e8409d73255791843585964791443dbad877058c (commented note: would rather depend on version 0.5.7 than a commit) |
| github.com/go-xorm/xorm | revision 6687a2b4e824f4d87f2d65060ec5cb0d896dff1e (commented note: would rather depend on version 0.6.4 than a commit) |
| github.com/gorilla/websocket | version 1.2.0 |
| github.com/gosimple/slug | version 1.1.1 |
| github.com/grafana/grafana_plugin_model | branch master |
| github.com/hashicorp/go-hclog | branch master |
| github.com/hashicorp/go-version | branch master |
| github.com/inconshreveable/log15 | version 2.13.0 |
| github.com/lib/pq | branch master |
| github.com/mattn/go-isatty | version 0.0.3 |
| github.com/mattn/go-sqlite3 | version 1.6.0 |
| github.com/opentracing/opentracing-go | version 1.0.2 |
| github.com/patrickmn/go-cache | version 2.1.0 |
| github.com/prometheus/client_golang | version 0.9.0-pre1 |
| github.com/prometheus/client_model | branch master |
| github.com/prometheus/common | branch master |
| github.com/smartystreets/goconvey | version 1.6.3 |
| github.com/teris-io/shortid | branch master |
| github.com/uber/jaeger-client-go | version 2.11.2 |
| github.com/yudai/gojsondiff | version 1.0.0 |
| golang.org/x/net | branch master |
| golang.org/x/oauth2 | branch master |
| golang.org/x/sync | branch master |
| gopkg.in/gomail.v2 | branch v2 |
| gopkg.in/ini.v1 | version 1.32.0 |
| gopkg.in/macaron.v1 | version 1.2.4 |
| gopkg.in/yaml.v2 | branch v2 |

[prune]
non-go = true
go-tests = true
unused-packages = true
Makefile (3 lines changed)

@@ -26,3 +26,6 @@ test: test-go test-js

run:
	./bin/grafana-server

protoc:
	protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.
README.md (14 lines changed)

@@ -45,23 +45,17 @@ For this you need nodejs (v.6+).
```bash
npm install -g yarn
yarn install --pure-lockfile
npm run build
```

To rebuild frontend assets (typescript, sass etc) as you change them, start the watcher via:

```bash
npm run watch
```

Run tests
```bash
npm run test
npm run jest
```

Run tests in watch mode
Run karma tests
```bash
npm run watch-test
npm run karma
```

### Recompile backend on source change
build.go (12 lines changed)

@@ -347,11 +347,11 @@ func ChangeWorkingDir(dir string) {
}

func grunt(params ...string) {
	if runtime.GOOS == "windows" {
		runPrint(`.\node_modules\.bin\grunt`, params...)
	} else {
		runPrint("./node_modules/.bin/grunt", params...)
	}
}

func gruntBuildArg(task string) []string {

@@ -371,7 +371,7 @@ func gruntBuildArg(task string) []string {
}

func setup() {
	runPrint("go", "get", "-v", "github.com/kardianos/govendor")
	runPrint("go", "get", "-v", "github.com/golang/dep")
	runPrint("go", "install", "-v", "./pkg/cmd/grafana-server")
}
@@ -9,7 +9,7 @@ machine:
  GOPATH: "/home/ubuntu/.go_workspace"
  ORG_PATH: "github.com/grafana"
  REPO_PATH: "${ORG_PATH}/grafana"
  GODIST: "go1.9.2.linux-amd64.tar.gz"
  GODIST: "go1.9.3.linux-amd64.tar.gz"
  post:
    - mkdir -p ~/download
    - mkdir -p ~/docker
@@ -174,6 +174,9 @@ disable_gravatar = false
# data source proxy whitelist (ip_or_domain:port separated by spaces)
data_source_proxy_whitelist =

# disable protection against brute force login attempts
disable_brute_force_login_protection = false

#################################### Snapshots ###########################
[snapshots]
# snapshot sharing options

@@ -473,7 +476,7 @@ sampler_param = 1

#################################### External Image Storage ##############
[external_image_storage]
# You can choose between (s3, webdav, gcs)
# You can choose between (s3, webdav, gcs, azure_blob, local)
provider =

[external_image_storage.s3]

@@ -493,4 +496,12 @@ public_url =
[external_image_storage.gcs]
key_file =
bucket =
path =

[external_image_storage.azure_blob]
account_name =
account_key =
container_name =

[external_image_storage.local]
# does not require any configuration
@@ -162,6 +162,9 @@ log_queries =
# data source proxy whitelist (ip_or_domain:port separated by spaces)
;data_source_proxy_whitelist =

# disable protection against brute force login attempts
;disable_brute_force_login_protection = false

#################################### Snapshots ###########################
[snapshots]
# snapshot sharing options

@@ -417,7 +420,7 @@ log_queries =
#################################### External image storage ##########################
[external_image_storage]
# Used for uploading images to public servers so they can be included in slack/email messages.
# you can choose between (s3, webdav, gcs)
# you can choose between (s3, webdav, gcs, azure_blob, local)
;provider =

[external_image_storage.s3]

@@ -436,4 +439,12 @@ log_queries =
[external_image_storage.gcs]
;key_file =
;bucket =
;path =

[external_image_storage.azure_blob]
;account_name =
;account_key =
;container_name =

[external_image_storage.local]
# does not require any configuration
docker/blocks/apache_proxy/Dockerfile (new file, 4 lines)

@@ -0,0 +1,4 @@
FROM jmferrer/apache2-reverse-proxy:latest

COPY ports.conf /etc/apache2/sites-enabled
COPY proxy.conf /etc/apache2/sites-enabled
docker/blocks/apache_proxy/docker-compose.yaml (new file, 9 lines)

@@ -0,0 +1,9 @@
# This will proxy all requests for http://localhost:10081/grafana/ to
# http://localhost:3000 (Grafana running locally)
#
# Please note that you'll need to change the root_url in the Grafana configuration:
# root_url = %(protocol)s://%(domain)s:/grafana/

apacheproxy:
  build: blocks/apache_proxy
  network_mode: host
docker/blocks/apache_proxy/ports.conf (new file, 1 line)

@@ -0,0 +1 @@
Listen 10081
docker/blocks/apache_proxy/proxy.conf (new file, 4 lines)

@@ -0,0 +1,4 @@
<VirtualHost *:10081>
  ProxyPass /grafana/ http://localhost:3000/
  ProxyPassReverse /grafana/ http://localhost:3000/
</VirtualHost>
@@ -12,3 +12,10 @@
    - /etc/timezone:/etc/timezone:ro
  command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --innodb_monitor_enable=all]

fake-mysql-data:
  image: grafana/fake-data-gen
  network_mode: bridge
  environment:
    FD_DATASOURCE: mysql
    FD_PORT: 3306
@@ -7,4 +7,7 @@
    MYSQL_PASSWORD: password
  ports:
    - "3306:3306"
  volumes:
    - /etc/localtime:/etc/localtime:ro
    - /etc/timezone:/etc/timezone:ro
  tmpfs: /var/lib/mysql:rw
docker/blocks/nginx_proxy/Dockerfile (new file, 3 lines)

@@ -0,0 +1,3 @@
FROM nginx:alpine

COPY nginx.conf /etc/nginx/nginx.conf
docker/blocks/nginx_proxy/docker-compose.yaml (new file, 9 lines)

@@ -0,0 +1,9 @@
# This will proxy all requests for http://localhost:10080/grafana/ to
# http://localhost:3000 (Grafana running locally)
#
# Please note that you'll need to change the root_url in the Grafana configuration:
# root_url = %(protocol)s://%(domain)s:/grafana/

nginxproxy:
  build: blocks/nginx_proxy
  network_mode: host
docker/blocks/nginx_proxy/nginx.conf (new file, 19 lines)

@@ -0,0 +1,19 @@
events { worker_connections 1024; }

http {
  sendfile on;

  proxy_redirect off;
  proxy_set_header Host $host;
  proxy_set_header X-Real-IP $remote_addr;
  proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
  proxy_set_header X-Forwarded-Host $server_name;

  server {
    listen 10080;

    location /grafana/ {
      proxy_pass http://localhost:3000/;
    }
  }
}
@@ -7,3 +7,10 @@
  ports:
    - "5432:5432"
  command: postgres -c log_connections=on -c logging_collector=on -c log_destination=stderr -c log_directory=/var/log/postgresql

fake-postgres-data:
  image: grafana/fake-data-gen
  network_mode: bridge
  environment:
    FD_DATASOURCE: postgres
    FD_PORT: 5432
@@ -1,3 +1,3 @@
FROM prom/prometheus
FROM prom/prometheus:v1.8.2
ADD prometheus.yml /etc/prometheus/
ADD alert.rules /etc/prometheus/
@@ -9,5 +9,6 @@ FROM grafana/docs-base:latest

COPY config.toml /site
COPY awsconfig /site
COPY versions.json /site/static/js

VOLUME ["/site/content"]
@@ -10,17 +10,17 @@ weight = 8

# Grafana CLI

Grafana cli is a small executable that is bundled with grafana server and is suppose to be executed on the same machine as grafana runs.
Grafana cli is a small executable that is bundled with Grafana-server and is supposed to be executed on the same machine Grafana-server is running on.

## Plugins

The CLI helps you install, upgrade and manage your plugins on the same machine it CLI is running.
You can find more information about how to install and manage your plugins at the
[plugin page]({{< relref "plugins/installation.md" >}}).
The CLI allows you to install, upgrade and manage your plugins on the machine it is running on.
You can find more information about how to install and manage your plugins in the
[plugins page]({{< relref "plugins/installation.md" >}}).

## Admin

> This feature is only available in grafana 4.1 and above.
> This feature is only available in Grafana 4.1 and above.

To show all admin commands:
`grafana-cli admin`

@@ -39,7 +39,7 @@ then there are two flags that can be used to set homepath and the config file pa

`grafana-cli admin reset-admin-password --homepath "/usr/share/grafana" newpass`

If you have not lost the admin password then it is better to set in the Grafana UI. If you need to set the password in a script then the [Grafana API](http://docs.grafana.org/http_api/user/#change-password) can be used. Here is an example with curl using basic auth:
If you have not lost the admin password then it is better to set in the Grafana UI. If you need to set the password in a script then the [Grafana API](http://docs.grafana.org/http_api/user/#change-password) can be used. Here is an example using curl with basic auth:

```bash
curl -X PUT -H "Content-Type: application/json" -d '{
@@ -10,6 +10,6 @@ weight = 8

# Internal metrics

Grafana collects some metrics about it self internally. Currently Grafana supports pushing metrics to graphite and exposing them to be scraped by Prometheus.
Grafana collects some metrics about itself internally. Currently, Grafana supports pushing metrics to Graphite or exposing them to be scraped by Prometheus.

To enabled internal metrics you have to enable it under the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file.If you want to push metrics to graphite you have also have to configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section.
To emit internal metrics you have to enable the option under the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file. If you want to push metrics to Graphite, you must also configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section.
docs/sources/administration/permissions.md (new file, 76 lines)

@@ -0,0 +1,76 @@
+++
title = "Permissions"
description = "Grafana user permissions"
keywords = ["grafana", "configuration", "documentation", "admin", "users", "permissions"]
type = "docs"
aliases = ["/reference/admin"]
[menu.docs]
name = "Permissions"
parent = "admin"
weight = 3
+++

# Permissions

Grafana users have permissions that are determined by their:

- **Organization Role** (Admin, Editor, Viewer)
- Via **Team** memberships where the **Team** has been assigned specific permissions.
- Via permissions assigned directly to user (on folders or dashboards)
- The Grafana Admin (i.e. Super Admin) user flag.

## Organization Roles

Users can belong to one or more organizations. A user's organization membership is tied to a role that defines what the user is allowed to do
in that organization.

### Admin Role

Can do everything scoped to the organization. For example:

- Add & Edit data sources.
- Add & Edit organization users & teams.
- Configure App plugins & set org settings.

### Editor Role

- Can create and modify dashboards & alert rules. This can be disabled on specific folders and dashboards.
- **Cannot** create or edit data sources nor invite new users.

### Viewer Role

- View any dashboard. This can be disabled on specific folders and dashboards.
- **Cannot** create or edit dashboards nor data sources.

This role can be tweaked via Grafana server setting [viewers_can_edit]({{< relref "installation/configuration.md#viewers-can-edit" >}}). If you set this to true users
with **Viewer** can also make transient dashboard edits, meaning they can modify panels & queries but not save the changes (nor create new dashboards).
Useful for public Grafana installations where you want anonymous users to be able to edit panels & queries but not save or create new dashboards.

## Grafana Admin

This admin flag makes a user a `Super Admin`. This means they can access the `Server Admin` views where all users and organizations can be administrated.

### Dashboard & Folder Permissions

> Introduced in Grafana v5.0

{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}}

For dashboards and dashboard folders there is a **Permissions** page that makes it possible to
remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**.

You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**.

Permission levels:

- **Admin**: Can edit & create dashboards and edit permissions.
- **Edit**: Can edit & create dashboards. **Cannot** edit folder/dashboard permissions.
- **View**: Can only view existing dashboards/folders.

#### Restricting access

The highest permission always wins so if you for example want to hide a folder or dashboard from others you need to remove the **Organization Role** based permission from the
Access Control List (ACL).

- You cannot override permissions for users with **Org Admin Role**
- A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule.
@@ -3,6 +3,7 @@ title = "Provisioning"
description = ""
keywords = ["grafana", "provisioning"]
type = "docs"
aliases = ["/installation/provisioning"]
[menu.docs]
parent = "admin"
weight = 8

@@ -12,7 +13,7 @@ weight = 8

## Config file

Checkout the [configuration](/installation/configuration) page for more information about what you can configure in `grafana.ini`
Check out the [configuration](/installation/configuration) page for more information on what you can configure in `grafana.ini`

### Config file locations

@@ -35,7 +36,7 @@ GF_<SectionName>_<KeyName>
```

Where the section name is the text within the brackets. Everything
should be upper case, `.` should be replaced by `_`. For example, given these configuration settings:
should be upper case and `.` should be replaced by `_`. For example, given these configuration settings:

```bash
# default section

@@ -48,7 +49,7 @@ admin_user = admin
client_secret = 0ldS3cretKey
```

Then you can override them using:
Overriding will be done like so:

```bash
export GF_DEFAULT_INSTANCE_NAME=my-instance

@@ -60,24 +61,23 @@ export GF_AUTH_GOOGLE_CLIENT_SECRET=newS3cretKey

## Configuration management tools

Currently we do not provide any scripts/manifests for configuring Grafana. Rather then spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefor, we heavily relay on the expertise of he community.
Currently we do not provide any scripts/manifests for configuring Grafana. Rather than spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefore, we heavily rely on the expertise of the community.

Tool | Project
-----|------------
Puppet | [https://forge.puppet.com/puppet/grafana](https://forge.puppet.com/puppet/grafana)
Ansible | [https://github.com/cloudalchemy/ansible-grafana](https://github.com/cloudalchemy/ansible-grafana)
Ansible | [https://github.com/picotrading/ansible-grafana](https://github.com/picotrading/ansible-grafana)
Chef | [https://github.com/JonathanTron/chef-grafana](https://github.com/JonathanTron/chef-grafana)
Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://github.com/salt-formulas/salt-formula-grafana)

## Datasources

> This feature is available from v5.0

It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `delete_datasources`. Grafana will delete datasources listed in `delete_datasources` before inserting/updating those in the `datasource` list.

### Running multiple grafana instances.
If you are running multiple instances of Grafana you might run into problems if they have different versions of the datasource.yaml configuration file. The best way to solve this problem is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than specified in the config. That way old configs cannot overwrite newer configs if they restart at the same time.
### Running multiple Grafana instances.
If you are running multiple instances of Grafana you might run into problems if they have different versions of the `datasource.yaml` configuration file. The best way to solve this problem is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than specified in the config. That way, old configs cannot overwrite newer configs if they restart at the same time.

### Example datasource config file
```yaml

@@ -86,7 +86,7 @@ delete_datasources:
  - name: Graphite
    org_id: 1

# list of datasources to insert/update depending
# what's available in the database
datasources:
  # <string, required> name of the datasource. Required

@@ -116,7 +116,7 @@ datasources:
  # <bool> mark as default datasource. Max one per org
  is_default:
  # <map> fields that will be converted to json and stored in json_data
  json_data:
    graphiteVersion: "1.1"
    tlsAuth: true
    tlsAuthWithCACert: true

@@ -132,45 +132,46 @@ datasources:

#### Json data

Since all datasources dont have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use.
Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use.

| Name | Type | Datasource | Description |
| ---- | ---- | ---- | --- |
| tlsAuth | boolean | *All* | Enable TLS authentication using client cert configured in secure json data |
| tlsAuthWithCACert | boolean | *All* | Enable TLS authentication using CA cert |
| tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. |
| graphiteVersion | string | Graphite | Graphite version |
| timeInterval | string | Elastic, Influxdb & Prometheus | Lowest interval/step value that should be used for this data source |
| esVersion | string | Elastic | Elasticsearch version |
| timeField | string | Elastic | Which field that should be used as timestamp |
| interval | string | Elastic | Index date time format |
| authType | string | Cloudwatch | Auth provider. keys/credentials/arn |
| assumeRoleArn | string | Cloudwatch | ARN of Assume Role |
| defaultRegion | string | Cloudwatch | AWS region |
| customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics |
| tsdbVersion | string | OpenTsdb | Version |
| tsdbResolution | string | OpenTsdb | Resolution |
| sslmode | string | Postgre | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |
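To make the table above concrete, here is a sketch (not part of the diff; the values are invented for illustration) of what the `json_data` map for an Elasticsearch datasource could contain, expressed as JSON:

```json
{
  "esVersion": "5",
  "timeField": "@timestamp",
  "interval": "Daily",
  "tlsSkipVerify": true
}
```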
#### Secure Json data

{"authType":"keys","defaultRegion":"us-west-2","timeField":"@timestamp"}

Secure json data is a map of settings that will be encrypted with the [secret key](/installation/configuration/#secret-key) from the Grafana config. The purpose of this is only to hide content from the users of the application. This should be used for storing the TLS cert and password that Grafana will append to the request on the server side. All of these settings are optional.

| Name | Type | Datasource | Description |
| ---- | ---- | ---- | --- |
| tlsCACert | string | *All* | CA cert for outgoing requests |
| tlsClientCert | string | *All* | TLS client cert for outgoing requests |
| tlsClientKey | string | *All* | TLS client key for outgoing requests |
| password | string | PostgreSQL | password |
| user | string | PostgreSQL | user |
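To make the two tables above concrete, here is a rough sketch of how a couple of these settings could sit inside a provisioned datasource entry. This is an illustration only: the exact top-level layout and the key casing of the json data and secure json data blocks should be taken from the datasource provisioning example earlier in this guide, and every value below is a placeholder.

```yaml
# Hypothetical provisioned datasource entry; key names and casing for the
# json data / secure json data blocks are assumptions, not confirmed by this page.
- name: elastic-logs
  type: elasticsearch
  url: http://localhost:9200
  json_data:
    esVersion: "5"
    timeField: "@timestamp"
    timeInterval: "10s"
  secure_json_data:
    tlsClientCert: "<client cert PEM>"
    tlsClientKey: "<client key PEM>"
```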
### Dashboards

It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into Grafana. Currently we only support reading dashboards from file, but we will add more providers in the future.

The dashboard provider config file looks somewhat like this (a fuller sketch follows below):

```yaml
- name: 'default'
@ -181,4 +182,4 @@ The dashboard provider config file looks like this
    folder: /var/lib/grafana/dashboards
```

When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated.
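The hunk above elides the middle of that example. For orientation, a plausible fully expanded provider entry is sketched below; apart from `name` and the `options.folder` path that the diff does show, the remaining keys are assumptions, so check the provisioning reference for the authoritative layout.

```yaml
# Hypothetical, fully expanded dashboard provider entry (keys other than
# 'name' and 'options.folder' are assumptions, not confirmed by this diff).
- name: 'default'
  org_id: 1
  folder: ''
  type: file
  options:
    folder: /var/lib/grafana/dashboards
```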
@ -13,7 +13,7 @@ weight = 2

> Alerting is only available in Grafana v4.0 and above.

The alert engine publishes some internal metrics about itself. You can read more about how Grafana publishes [internal metrics](/installation/configuration/#metrics).

Description | Type | Metric name
---------- | ----------- | ----------
@ -14,9 +14,9 @@ weight = 2

> Alerting is only available in Grafana v4.0 and above.

When an alert changes state, it sends out notifications. Each alert rule can have
multiple notifications. In order to add a notification to an alert rule you first need
to add and configure a `notification` channel (can be email, PagerDuty or other integration). This is done from the Notification Channels page.

## Notification Channel Setup

@ -25,12 +25,12 @@ to add and configure a `notification` channel (can be email, Pagerduty or other

On the Notification Channels page hit the `New Channel` button to go to the page where you
can configure and set up a new Notification Channel.

You specify a name and a type, and type specific options. You can also test the notification to make
sure it's set up correctly.

### Send on all alerts

When checked, this option will notify for all alert rules - existing and new.
## Supported Notification Types

@ -38,39 +38,39 @@ Grafana ships with the following set of notification types:

### Email

To enable email notifications you have to set up [SMTP settings](/installation/configuration/#smtp)
in the Grafana config. Email notifications will upload an image of the alert graph to an
external image destination if available, or fall back to attaching the image to the email.

### Slack

{{< imgbox max-width="40%" img="/img/docs/v4/slack_notification.png" caption="Alerting Slack Notification" >}}

To set up Slack you need to configure an incoming webhook url in Slack. You can follow their guide on how
to do that [here](https://api.slack.com/incoming-webhooks). If you want to include screenshots of the firing alerts
in the Slack messages you have to configure either the [external image destination](#external-image-store) in Grafana,
or a bot integration via Slack Apps. Follow Slack's guide to set up a bot integration and use the token provided
(https://api.slack.com/bot-users), which starts with "xoxb".

Setting | Description
---------- | -----------
Recipient | Allows you to override the Slack recipient.
Mention | Makes it possible to include a mention in the Slack notification sent by Grafana, e.g. @here or @channel.
Token | If provided, Grafana will upload the generated image via Slack's file.upload API method, not the external image destination.
### PagerDuty

To set up PagerDuty, all you have to do is to provide an API key.

Setting | Description
---------- | -----------
Integration Key | Integration key for PagerDuty.
Auto resolve incidents | Resolve incidents in PagerDuty once the alert goes back to ok

### Webhook

The webhook notification is a simple way to send information about a state change over HTTP to a custom endpoint.
Using this notification you could integrate Grafana into a system of your choosing.

Example json body:
@ -117,19 +117,19 @@ Dingtalk supports the following "message type": `text`, `link` and `markdown`. O

### Kafka

Notifications can be sent to a Kafka topic from Grafana using the [Kafka REST Proxy](https://docs.confluent.io/1.0/kafka-rest/docs/index.html).
There are a couple of configuration options which need to be set up in Grafana UI under Kafka Settings:

1. Kafka REST Proxy endpoint.

2. Kafka Topic.

Once these two properties are set, you can send the alerts to Kafka for further processing or throttling.
### All supported notifiers

Name | Type | Support images
-----|------------ | ------
Slack | `slack` | yes
PagerDuty | `pagerduty` | yes
Email | `email` | yes
@ -149,14 +149,16 @@ Prometheus Alertmanager | `prometheus-alertmanager` | no

# Enable images in notifications {#external-image-store}

Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessible (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports
Amazon S3, Webdav, Google Cloud Storage and Azure Blob Storage. So to set that up you need to configure the [external image uploader](/installation/configuration/#external-image-storage) in your grafana-server ini config file.

Be aware that some notifiers require public access to the image to be able to include it in the notification, so make sure to enable public access to the images. If you are using the local image uploader, your Grafana instance needs to be accessible from the internet.

Currently only the Email Channel attaches images if no external image store is specified. To include images in alert notifications for other channels you need to set up an external image store.

This is an optional requirement. You can get Slack and email notifications without setting this up.
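The uploader itself is configured in the `[external_image_storage]` section of the grafana-server ini file linked above. As a rough, hedged sketch only (the provider-specific keys below are assumptions and depend on which provider you pick):

```bash
[external_image_storage]
# one of: s3, webdav, gcs, azure_blob, local
provider = s3

[external_image_storage.s3]
# placeholder values; see the configuration reference for the exact keys
bucket_url = https://<your bucket>.s3.amazonaws.com/
access_key = <access key>
secret_key = <secret key>
```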
# Configure the link back to Grafana from alert notifications

All alert notifications contain a link back to the triggered alert in the Grafana instance.
This url is based on the [domain](/installation/configuration/#domain) setting in Grafana.
@ -5,7 +5,7 @@ type = "docs"
[menu.docs]
name = "Features"
identifier = "features"
weight = 4
+++

@ -7,6 +7,7 @@ type = "docs"
name = "Basic Concepts"
identifier = "basic_concepts"
parent = "guides"
weight = 2
+++

# Basic Concepts

@ -8,6 +8,7 @@ aliases = ["/guides/gettingstarted"]
name = "Getting Started"
identifier = "getting_started_guide"
parent = "guides"
weight = 1
+++

# Getting started

@ -24,7 +25,7 @@ Read the [Basic Concepts](/guides/basic_concepts) document to get a crash course

### Top header

Let's start with creating a new Dashboard. You can find the new Dashboard link on the right side of the Dashboard picker. You now have a blank Dashboard.

<img class="no-shadow" src="/img/docs/v45/top_nav_annotated.png">

@ -4,6 +4,6 @@ type = "docs"
[menu.docs]
name = "Getting Started"
identifier = "guides"
weight = 3
+++
@ -55,7 +55,7 @@ of another alert in your conditions, and `Time Of Day`.

Alerting would not be very useful if there was no way to send notifications when rules trigger and change state. You
can set up notifications of different types. We currently have `Slack`, `PagerDuty`, `Email` and `Webhook`, with more in the
pipeline to be added during the beta period. The notifications can then be added to your alert rules.
If you have configured an external image store in the grafana.ini config file (s3, webdav, and azure_blob options available),
you can get very rich notifications with an image of the graph and the metric
values all included in the notification.
120
docs/sources/guides/whats-new-in-v5.md
Normal file

@ -0,0 +1,120 @@
+++
title = "What's New in Grafana v5.0"
description = "Feature & improvement highlights for Grafana v5.0"
keywords = ["grafana", "new", "documentation", "5.0"]
type = "docs"
[menu.docs]
name = "Version 5.0"
identifier = "v5.0"
parent = "whatsnew"
weight = -6
+++

# What's New in Grafana v5.0

This is the most substantial update that Grafana has ever seen. This article will detail the major new features and enhancements.

- [New Dashboard Layout Engine]({{< relref "#new-dashboard-layout-engine" >}}) enables a much easier drag, drop and resize experience and new types of layouts.
- [New UX]({{< relref "#new-ux-layout-engine" >}}). The UI has big improvements in both look and function.
- [New Light Theme]({{< relref "#new-light-theme" >}}) is now looking really nice.
- [Dashboard Folders]({{< relref "#dashboard-folders" >}}) help you keep your dashboards organized.
- [Permissions]({{< relref "#dashboard-folders" >}}) on folders and dashboards help manage larger Grafana installations.
- [Group users into teams]({{< relref "#teams" >}}) and use them in the new permission system.
- [Datasource provisioning]({{< relref "#data-sources" >}}) makes it possible to set up datasources via config files.
- [Dashboard provisioning]({{< relref "#dashboards" >}}) makes it possible to set up dashboards via config files.

### Video showing new features

<iframe height="215" src="https://www.youtube.com/embed/BC_YRNpqj5k?rel=0&showinfo=0" frameborder="0" allow="autoplay; encrypted-media" allowfullscreen></iframe>
<br />

## New Dashboard Layout Engine

{{< docs-imagebox img="/img/docs/v50/new_grid.png" max-width="1000px" class="docs-image--right">}}

The new dashboard layout engine allows for much easier movement and sizing of panels, as other panels now move out of the way in
a very intuitive way. Panels are sized independently, so rows are no longer necessary to create layouts. This opens
up many new types of layouts where panels of different heights can be aligned easily. Check out the new grid in the video
above or on the [play site](http://play.grafana.org). All your existing dashboards will automatically migrate to the
new position system and look close to identical. The new panel positioning makes dashboards saved in v5.0 incompatible
with older versions of Grafana.

<div class="clearfix"></div>

## New UX

{{< docs-imagebox img="/img/docs/v50/new_ux_nav.png" max-width="1000px" class="docs-image--right" >}}

Almost every page has seen significant UX improvements. All pages (except dashboard pages) have a new tab-based layout that improves navigation between pages. The side menu has also changed quite a bit. You can still hide the side menu completely if you click on the Grafana logo.

<div class="clearfix"></div>

### Dashboard Settings

{{< docs-imagebox img="/img/docs/v50/dashboard_settings.png" max-width="1000px" class="docs-image--right" >}}
Dashboard pages have a new header toolbar where buttons and actions are now all moved to the right. All the dashboard
settings views have been combined into one with a side nav which allows you to easily move between different setting categories.

<div class="clearfix"></div>

## New Light Theme

{{< docs-imagebox img="/img/docs/v50/new_white_theme.png" max-width="1000px" class="docs-image--right" >}}

This theme has not seen a lot of love in recent years and we felt it was time to rework it and give it a major overhaul. We are very happy with the result.

<div class="clearfix"></div>

## Dashboard Folders

{{< docs-imagebox img="/img/docs/v50/new_search.png" max-width="1000px" class="docs-image--right" >}}

The big new feature that comes with Grafana v5.0 is dashboard folders. Now you can organize your dashboards in folders,
which is very useful if you have a lot of dashboards or multiple teams.

- The new search design adds expandable sections for each folder, starred and recently viewed dashboards.
- The new manage dashboards pages enable batch actions and views for folder settings and permissions.
- Set permissions on folders and have dashboards inherit the permissions.

## Teams

A team is a new concept in Grafana v5. Teams are simply groups of users that can then be used in the new permission system for dashboards and folders. Only an admin can create teams.
We hope to do more with teams in future releases, like integration with LDAP and a team landing page.

## Permissions

{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="1000px" class="docs-image--right" >}}

You can assign permissions to folders and dashboards. The default role-based permissions can be removed and replaced with specific teams or users, enabling more control over what a user can see and edit.

<div class="clearfix"></div>

# Provisioning from configuration

In previous versions of Grafana, you could only use the API for provisioning data sources and dashboards.
But that required the service to be running before you started creating dashboards and you also needed to
set up credentials for the HTTP API. In 5.0 we decided to improve this experience by adding a new active
provisioning system that uses config files. This will make GitOps more natural, as data sources and dashboards can
be defined in files that can be version controlled. We hope to later extend this system to add support for users, orgs
and alerts as well.

### Data sources

Data sources can now be set up using config files. These data sources are by default not editable from the Grafana GUI.
It's also possible to update and delete data sources from the config file. More info in the [data source provisioning docs](/administration/provisioning/#datasources).

### Dashboards

We also deprecated `[dashboard.json]` in favor of our new dashboard provisioner that keeps dashboards on disk
in sync with dashboards in Grafana's database. The dashboard provisioner has multiple advantages over the old
`[dashboard.json]` feature. Instead of storing the dashboard in memory we now insert the dashboard into the database,
which makes it possible to star them, use one as the home dashboard, set permissions and use other features in Grafana that
expect the dashboards to exist in the database. More info in the [dashboard provisioning docs](/administration/provisioning/#dashboards)

# Dashboard model & API

We are introducing a new identifier (`uid`) in the dashboard JSON model. The new identifier will be a 9-12 character long unique id.
We are also changing the route for getting dashboards to use this `uid` instead of the slug that the current route and API are using.
We will keep supporting the old route for backward compatibility. This will make it possible to change the title on dashboards without breaking links.
Sharing dashboards between instances becomes much easier since the uid is unique (unique enough). This might seem like a small change,
but we are incredibly excited about it since it will make it much easier to manage, collaborate and navigate between dashboards.
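For example, once a dashboard has a `uid`, fetching it by that identifier instead of the slug could look like the request below. The exact route shape is an assumption based on the description above, so treat it as an illustration rather than API documentation.

```bash
# Hypothetical request against the new uid-based route (route shape assumed)
curl -H "Authorization: Bearer <api key>" \
  http://localhost:3000/api/dashboards/uid/<dashboard uid>
```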
@ -90,7 +90,7 @@ Content-Type: application/json

## Get a single data source by Name

`GET /api/datasources/name/:name`
`GET /api/datasources/:name`

**Example Request**:
@ -4,10 +4,6 @@ description = "Install guide for Grafana"
keywords = ["grafana", "installation", "documentation"]
type = "docs"
aliases = ["v1.1", "guides/reference/admin"]
[menu.docs]
name = "Welcome to the Docs"
identifier = "root"
weight = -1
+++

# Welcome to the Grafana Documentation

@ -22,7 +18,7 @@ other domains including industrial sensors, home automation, weather, and proces

- [Installing on Mac OS X](installation/mac)
- [Installing on Windows](installation/windows)
- [Installing on Docker](installation/docker)
- [Installing using Provisioning (Chef, Puppet, Salt, Ansible, etc)](administration/provisioning#configuration-management-tools)
- [Nightly Builds](https://grafana.com/grafana/download)

For other platforms, read the [build from source]({{< relref "project/building_from_source.md" >}})
@ -69,4 +69,57 @@ server {
}
```

#### HAProxy configuration with sub path

```bash
frontend http-in
  bind *:80
  use_backend grafana_backend if { path /grafana } or { path_beg /grafana/ }

backend grafana_backend
  # Requires haproxy >= 1.6
  http-request set-path %[path,regsub(^/grafana/?,/)]

  # Works for haproxy < 1.6
  # reqrep ^([^\ ]*\ /)grafana[/]?(.*) \1\2

  server grafana localhost:3000
```

### IIS URL Rewrite Rule (Windows) with Subpath

IIS requires that the URL Rewrite module is installed.

Given:

- subpath `grafana`
- Grafana installed on `http://localhost:3000`
- server config:

```bash
[server]
domain = localhost:8080
root_url = %(protocol)s://%(domain)s:/grafana
```

Create an Inbound Rule for the parent website (localhost:8080 in this example) in IIS Manager with the following settings:

- pattern: `grafana(/)?(.*)`
- check the `Ignore case` checkbox
- rewrite url set to `http://localhost:3000/{R:2}`
- check the `Append query string` checkbox
- check the `Stop processing of subsequent rules` checkbox

This is the rewrite rule that is generated in the `web.config`:

```xml
<rewrite>
    <rules>
        <rule name="Grafana" enabled="true" stopProcessing="true">
            <match url="grafana(/)?(.*)" />
            <action type="Rewrite" url="http://localhost:3000/{R:2}" logRewrittenUrl="false" />
        </rule>
    </rules>
</rewrite>
```

See the [tutorial on IIS Url Rewrites](http://docs.grafana.org/tutorials/iis/) for more in-depth instructions.
@ -496,7 +496,7 @@ name = BitBucket
enabled = true
allow_sign_up = true
client_id = <client id>
client_secret = <client secret>
scopes = account email
auth_url = https://bitbucket.org/site/oauth2/authorize
token_url = https://bitbucket.org/site/oauth2/access_token
@ -505,6 +505,105 @@ team_ids =
allowed_organizations =
```
### Set up oauth2 with OneLogin

1. Create a new Custom Connector with the following settings:
   - Name: Grafana
   - Sign On Method: OpenID Connect
   - Redirect URI: `https://<grafana domain>/login/generic_oauth`
   - Signing Algorithm: RS256
   - Login URL: `https://<grafana domain>/login/generic_oauth`

2. Then add an App to the Grafana Connector:
   - Display Name: Grafana

3. Then, under the SSO tab on the Grafana App details page, you'll find the Client ID and Client Secret.

Your OneLogin Domain will match the url you use to access OneLogin.

Configure Grafana as follows:

```bash
[auth.generic_oauth]
name = OneLogin
enabled = true
allow_sign_up = true
client_id = <client id>
client_secret = <client secret>
scopes = openid email name
auth_url = https://<onelogin domain>.onelogin.com/oidc/auth
token_url = https://<onelogin domain>.onelogin.com/oidc/token
api_url = https://<onelogin domain>.onelogin.com/oidc/me
team_ids =
allowed_organizations =
```
### Set up oauth2 with Auth0

1. Create a new Client in Auth0
   - Name: Grafana
   - Type: Regular Web Application

2. Go to the Settings tab and set:
   - Allowed Callback URLs: `https://<grafana domain>/login/generic_oauth`

3. Click Save Changes, then use the values at the top of the page to configure Grafana:

```bash
[auth.generic_oauth]
enabled = true
allow_sign_up = true
team_ids =
allowed_organizations =
name = Auth0
client_id = <client id>
client_secret = <client secret>
scopes = openid profile email
auth_url = https://<domain>/authorize
token_url = https://<domain>/oauth/token
api_url = https://<domain>/userinfo
```
### Set up oauth2 with Azure Active Directory

1. Log in to portal.azure.com and click "Azure Active Directory" in the side menu, then click the "Properties" sub-menu item.

2. Copy the "Directory ID", this is needed for setting URLs later.

3. Click "App Registrations" and add a new application registration:
   - Name: Grafana
   - Application type: Web app / API
   - Sign-on URL: `https://<grafana domain>/login/generic_oauth`

4. Click the name of the new application to open the application details page.

5. Note down the "Application ID", this will be the OAuth client id.

6. Click "Settings", then click "Keys" and add a new entry under Passwords:
   - Key Description: Grafana OAuth
   - Duration: Never Expires

7. Click Save then copy the key value, this will be the OAuth client secret.

8. Configure Grafana as follows:

```bash
[auth.generic_oauth]
name = Azure AD
enabled = true
allow_sign_up = true
client_id = <application id>
client_secret = <key value>
scopes = openid email name
auth_url = https://login.microsoftonline.com/<directory id>/oauth2/authorize
token_url = https://login.microsoftonline.com/<directory id>/oauth2/token
api_url =
team_ids =
allowed_organizations =
```

<hr>
## [auth.basic]

@ -572,31 +671,6 @@ session provider you have configured.

- **memcache:** ex: 127.0.0.1:11211
- **redis:** ex: `addr=127.0.0.1:6379,pool_size=100,prefix=grafana`

If you use MySQL or Postgres as the session store you need to create the
session table manually.

MySQL example:

```sql
CREATE TABLE `session` (
    `key`    CHAR(16) NOT NULL,
    `data`   BLOB,
    `expiry` INT(11) UNSIGNED NOT NULL,
    PRIMARY KEY (`key`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
```

Postgres example:

```sql
CREATE TABLE session (
    key    CHAR(16) NOT NULL,
    data   BYTEA,
    expiry INTEGER NOT NULL,
    PRIMARY KEY (key)
);
```

Valid Postgres `sslmode` values are `disable`, `require`, `verify-ca`, and `verify-full` (default).

### cookie_name
@ -731,7 +805,7 @@ Time to live for snapshots.

These options control how images should be made public so they can be shared on services like Slack.

### provider

You can choose between (s3, webdav, gcs, azure_blob, local). If left empty Grafana will ignore the upload action.

## [external_image_storage.s3]

@ -786,6 +860,17 @@ Bucket Name on Google Cloud Storage.

### path

Optional extra path inside the bucket.

## [external_image_storage.azure_blob]

### account_name

Storage account name.

### account_key

Storage account key.

### container_name

Container name where to store "Blob" images with random names. Creating the blob container beforehand is required. Only public containers are supported.
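Putting the three settings above together, an `[external_image_storage.azure_blob]` block might look like the sketch below; the values are placeholders, and as noted above the container must already exist and be public.

```bash
[external_image_storage.azure_blob]
# placeholder values
account_name = <storage account name>
account_key = <storage account key>
container_name = grafana-alert-images
```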
## [alerting]

### enabled
|
||||
|
@ -7,6 +7,7 @@ aliases = ["installation/installation/", "v2.1/installation/install/"]
|
||||
[menu.docs]
|
||||
name = "Installation"
|
||||
identifier = "installation"
|
||||
weight = 1
|
||||
+++
|
||||
|
||||
## Installing Grafana
|
||||
|
@ -57,7 +57,7 @@ For this you need nodejs (v.6+).
|
||||
```bash
|
||||
npm install -g yarn
|
||||
yarn install --pure-lockfile
|
||||
npm run build
|
||||
npm run watch
|
||||
```
|
||||
|
||||
## Running Grafana Locally
|
||||
|
@ -1,42 +0,0 @@
|
||||
+++
|
||||
title = "Admin Roles"
|
||||
description = "Users & Organization permission and administration"
|
||||
keywords = ["grafana", "configuration", "documentation", "admin", "users", "permissions"]
|
||||
type = "docs"
|
||||
[menu.docs]
|
||||
name = "Admin Roles"
|
||||
parent = "admin"
|
||||
weight = 3
|
||||
+++
|
||||
|
||||
# Administration
|
||||
|
||||
Grafana has two levels of administrators:
|
||||
|
||||
* Organizational administrators: These admins can manage users within specific organizations in a particular Grafana installation
|
||||
* Grafana administrators: These super admins can manage users across all organizations in a Grafana installation. They can also change and access system-wide settings.
|
||||
|
||||
## Organizational Administrators
|
||||
|
||||
As an Organizational administrator, you can add `Data Sources`, add Users to your Organization and
|
||||
modify Organization details and options.
|
||||
|
||||
> *Note*: If Grafana is configured with `users.allow_org_create = true`, any User of any Organization will be able to
|
||||
> start their own Organization and become the administrator of that Organization.
|
||||
|
||||
|
||||
## Grafana Administrators
|
||||
|
||||
<img src="/img/v2/admin_sidenav.png" class="pull-right" style="margin-left: 15px">
|
||||
As a Grafana Administrator, you have complete access to any Organization or User in that instance of Grafana.
|
||||
When performing actions as a Grafana admin, the sidebar will change it's appearance as below to indicate you are performing global server administration.
|
||||
|
||||
From the Grafana Server Admin page, you can access the System Info page which summarizes all of the backend configuration settings of the Grafana server.
|
||||
|
||||
## Why would I have multiple Organizations?
|
||||
|
||||
Organizations in Grafana are best suited for a **multi-tenant deployment**. In a multi-tenant deployment,
|
||||
Organizations can be used to provide a full Grafana experience to different sets of users from a single Grafana instance,
|
||||
at the convenience of the Grafana Administrator.
|
||||
|
||||
In most cases, a Grafana installation will only have **one** Organization. Since dashboards, data sources and other configuration items are not shared between organizations, there's no need to create multiple Organizations if you want all your users to have access to the same set of dashboards and data.
|
@ -71,8 +71,8 @@ Each field in the dashboard JSON is explained below with its usage:
|
||||
| **timepicker** | timepicker metadata, see [timepicker section](#timepicker) for details |
|
||||
| **templating** | templating metadata, see [templating section](#templating) for details |
|
||||
| **annotations** | annotations metadata, see [annotations section](#annotations) for details |
|
||||
| **schemaVersion** | TODO |
|
||||
| **version** | TODO |
|
||||
| **schemaVersion** | version of the JSON schema (integer), incremented each time a Grafana update brings changes to the said schema |
|
||||
| **version** | version of the dashboard (integer), incremented each time the dashboard is updated |
|
||||
| **links** | TODO |
|
||||
|
||||
### rows
|
||||
|
@ -39,7 +39,7 @@ Click a panel title to open the panel menu, then click share in the panel menu t
|
||||
|
||||
### Direct Link Rendered Image
|
||||
|
||||
You also get a link to service side rendered PNG of the panel. Useful if you want to share an image of the panel. Please note that for OSX and Windows, you will need to ensure that a `phantomjs` binary is available under `vendor/phantomjs/phantomjs`. For Linux, a `phantomjs` binary is included - however, you should ensure that any requisite libraries (e.g. libfontconfig) are available.
|
||||
You also get a link to service side rendered PNG of the panel. Useful if you want to share an image of the panel. Please note that for OSX and Windows, you will need to ensure that a `phantomjs` binary is available under `tools/phantomjs/phantomjs`. For Linux, a `phantomjs` binary is included - however, you should ensure that any requisite libraries (e.g. libfontconfig) are available.
|
||||
|
||||
Example of a link to a server-side rendered PNG:
|
||||
|
||||
|
89
docs/sources/tutorials/iis.md
Normal file
89
docs/sources/tutorials/iis.md
Normal file
@ -0,0 +1,89 @@
|
||||
+++
|
||||
title = "Grafana with IIS Reverse Proxy on Windows"
|
||||
type = "docs"
|
||||
keywords = ["grafana", "tutorials", "proxy", "IIS", "windows"]
|
||||
[menu.docs]
|
||||
parent = "tutorials"
|
||||
weight = 10
|
||||
+++
|
||||
|
||||
# How to Use IIS with URL Rewrite as a Reverse Proxy for Grafana on Windows
|
||||
|
||||
If you want Grafana to be a subpath or subfolder under a website in IIS then the URL Rewrite module for ISS can be used to support this.
|
||||
|
||||
Example:
|
||||
|
||||
- Parent site: http://localhost:8080
|
||||
- Grafana: http://localhost:3000
|
||||
|
||||
Grafana as a subpath: http://localhost:8080/grafana
|
||||
|
||||
## Setup
|
||||
|
||||
If you have not already done it, then a requirement is to install URL Rewrite module for IIS.
|
||||
|
||||
Download and install the URL Rewrite module for IIS: https://www.iis.net/downloads/microsoft/url-rewrite
|
||||
|
||||
## Grafana Config
|
||||
|
||||
The Grafana config can be set by creating a file named `custom.ini` in the `conf` subdirectory of your Grafana installation. See the [installation instructions](http://docs.grafana.org/installation/windows/#configure) for more details.
|
||||
|
||||
Given that the subpath should be `grafana` and the parent site is `localhost:8080` then add this to the `custom.ini` config file:
|
||||
|
||||
```bash
|
||||
[server]
|
||||
domain = localhost:8080
|
||||
root_url = %(protocol)s://%(domain)s:/grafana
|
||||
```
|
||||
|
||||
Restart the Grafana server after changing the config file.
|
||||
|
||||
## IIS Config
|
||||
|
||||
1. Open the IIS Manager and click on the parent website
|
||||
2. In the admin console for this website, double click on the Url Rewrite option:
|
||||
{{< docs-imagebox img="/img/docs/tutorials/IIS_admin_console.png" max-width= "800px" >}}
|
||||
|
||||
3. Click on the `Add Rule(s)...` action
|
||||
4. Choose the Blank Rule template for an Inbound Rule
|
||||
{{< docs-imagebox img="/img/docs/tutorials/IIS_add_inbound_rule.png" max-width= "800px" >}}
|
||||
|
||||
5. Create an Inbound Rule for the parent website (localhost:8080 in this example) with the following settings:
|
||||
- pattern: `grafana(/)?(.*)`
|
||||
- check the `Ignore case` checkbox
|
||||
- rewrite url set to `http://localhost:3000/{R:2}`
|
||||
- check the `Append query string` checkbox
|
||||
- check the `Stop processing of subsequent rules` checkbox
|
||||
|
||||
{{< docs-imagebox img="/img/docs/tutorials/IIS_url_rewrite.png" max-width= "800px" >}}
|
||||
|
||||
Finally, navigate to `http://localhost:8080/grafana` (replace `http://localhost:8080` with your parent domain) and you should come to the Grafana login page.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### 404 error
|
||||
|
||||
When navigating to the grafana url (`http://localhost:8080/grafana` in the example above) and a `HTTP Error 404.0 - Not Found` error is returned then either:
|
||||
|
||||
- the pattern for the Inbound Rule is incorrect. Edit the rule, click on the `Test pattern...` button, test the part of the url after `http://localhost:8080/` and make sure it matches. For `grafana/login` the test should return 3 capture groups: {R:0}: `grafana` {R:1}: `/` and {R:2}: `login`.
|
||||
- The `root_url` setting in the Grafana config file does not match the parent url with subpath.
|
||||
|
||||
### Grafana Website only shows text with no images or css
|
||||
|
||||
{{< docs-imagebox img="/img/docs/tutorials/IIS_proxy_error.png" max-width= "800px" >}}
|
||||
|
||||
1. The `root_url` setting in the Grafana config file does not match the parent url with subpath. This could happen if the root_url is commented out by mistake (`;` is used for commenting out a line in .ini files):
|
||||
|
||||
`; root_url = %(protocol)s://%(domain)s:/grafana`
|
||||
|
||||
2. or if the subpath in the `root_url` setting does not match the subpath used in the pattern in the Inbound Rule in IIS:
|
||||
|
||||
`root_url = %(protocol)s://%(domain)s:/grafana`
|
||||
|
||||
pattern in Inbound Rule: `wrongsubpath(/)?(.*)`
|
||||
|
||||
3. or if the Rewrite Url in the Inbound Rule is incorrect.
|
||||
|
||||
The Rewrite Url should not include the subpath.
|
||||
|
||||
The Rewrite Url should contain the capture group from the pattern matching that returns the part of the url after the subpath. The pattern used above returns 3 capture groups and the third one {R:2} returns the part of the url after `http://localhost:8080/grafana/`.
|
@ -3,7 +3,7 @@ title = "What's New in Grafana"
|
||||
[menu.docs]
|
||||
name = "What's New In Grafana"
|
||||
identifier = "whatsnew"
|
||||
weight = 2
|
||||
weight = 3
|
||||
+++
|
||||
|
||||
|
9
docs/versions.json
Normal file
9
docs/versions.json
Normal file
@ -0,0 +1,9 @@
|
||||
[
|
||||
{ "version": "v5.0", "path": "/v5.0", "archived": false },
|
||||
{ "version": "v4.6", "path": "/", "archived": false, "current": true },
|
||||
{ "version": "v4.5", "path": "/v4.5", "archived": true },
|
||||
{ "version": "v4.4", "path": "/v4.4", "archived": true },
|
||||
{ "version": "v4.3", "path": "/v4.3", "archived": true },
|
||||
{ "version": "v4.1", "path": "/v4.1", "archived": true },
|
||||
{ "version": "v3.1", "path": "/v3.1", "archived": true }
|
||||
]
|
15
package.json
15
package.json
@ -68,6 +68,7 @@
|
||||
"karma-webpack": "^2.0.4",
|
||||
"lint-staged": "^6.0.0",
|
||||
"load-grunt-tasks": "3.5.2",
|
||||
"mobx-react-devtools": "^4.2.15",
|
||||
"mocha": "^4.0.1",
|
||||
"ng-annotate-loader": "^0.6.1",
|
||||
"ng-annotate-webpack-plugin": "^0.2.1-pre",
|
||||
@ -114,6 +115,10 @@
|
||||
"*.scss": [
|
||||
"prettier --write",
|
||||
"git add"
|
||||
],
|
||||
"*.go": [
|
||||
"gofmt -w -s",
|
||||
"git add"
|
||||
]
|
||||
},
|
||||
"prettier": {
|
||||
@ -125,7 +130,6 @@
|
||||
"dependencies": {
|
||||
"angular": "^1.6.6",
|
||||
"angular-bindonce": "^0.3.1",
|
||||
"angular-mocks": "^1.6.6",
|
||||
"angular-native-dragdrop": "^1.2.2",
|
||||
"angular-route": "^1.6.6",
|
||||
"angular-sanitize": "^1.6.6",
|
||||
@ -135,7 +139,7 @@
|
||||
"clipboard": "^1.7.1",
|
||||
"d3": "^4.11.0",
|
||||
"d3-scale-chromatic": "^1.1.1",
|
||||
"eventemitter3": "^2.0.2",
|
||||
"eventemitter3": "^2.0.3",
|
||||
"file-saver": "^1.3.3",
|
||||
"jquery": "^3.2.1",
|
||||
"lodash": "^4.17.4",
|
||||
@ -148,9 +152,14 @@
|
||||
"prop-types": "^15.6.0",
|
||||
"react": "^16.2.0",
|
||||
"react-dom": "^16.2.0",
|
||||
"react-grid-layout": "^0.16.1",
|
||||
"react-grid-layout": "^0.16.2",
|
||||
"react-highlight-words": "^0.10.0",
|
||||
"react-popper": "^0.7.5",
|
||||
"react-select": "^1.1.0",
|
||||
"react-sizeme": "^2.3.6",
|
||||
"react-transition-group": "^2.2.1",
|
||||
"remarkable": "^1.7.1",
|
||||
"rst2html": "github:thoward/rst2html#990cb89",
|
||||
"rxjs": "^5.4.3",
|
||||
"tether": "^1.4.0",
|
||||
"tether-drop": "https://github.com/torkelo/drop",
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/alerting"
|
||||
"github.com/grafana/grafana/pkg/services/guardian"
|
||||
)
|
||||
|
||||
func ValidateOrgAlert(c *middleware.Context) {
|
||||
@ -62,9 +63,22 @@ func GetAlerts(c *middleware.Context) Response {
|
||||
return ApiError(500, "List alerts failed", err)
|
||||
}
|
||||
|
||||
alertDTOs, resp := transformToDTOs(query.Result, c)
|
||||
if resp != nil {
|
||||
return resp
|
||||
}
|
||||
|
||||
return Json(200, alertDTOs)
|
||||
}
|
||||
|
||||
func transformToDTOs(alerts []*models.Alert, c *middleware.Context) ([]*dtos.AlertRule, Response) {
|
||||
if len(alerts) == 0 {
|
||||
return []*dtos.AlertRule{}, nil
|
||||
}
|
||||
|
||||
dashboardIds := make([]int64, 0)
|
||||
alertDTOs := make([]*dtos.AlertRule, 0)
|
||||
for _, alert := range query.Result {
|
||||
for _, alert := range alerts {
|
||||
dashboardIds = append(dashboardIds, alert.DashboardId)
|
||||
alertDTOs = append(alertDTOs, &dtos.AlertRule{
|
||||
Id: alert.Id,
|
||||
@ -83,22 +97,40 @@ func GetAlerts(c *middleware.Context) Response {
|
||||
DashboardIds: dashboardIds,
|
||||
}
|
||||
|
||||
if len(alertDTOs) > 0 {
|
||||
if err := bus.Dispatch(&dashboardsQuery); err != nil {
|
||||
return ApiError(500, "List alerts failed", err)
|
||||
}
|
||||
if err := bus.Dispatch(&dashboardsQuery); err != nil {
|
||||
return nil, ApiError(500, "List alerts failed", err)
|
||||
}
|
||||
|
||||
//TODO: should be possible to speed this up with lookup table
|
||||
for _, alert := range alertDTOs {
|
||||
for _, dash := range dashboardsQuery.Result {
|
||||
if alert.DashboardId == dash.Id {
|
||||
alert.DashbboardUri = "db/" + dash.Slug
|
||||
alert.DashbboardUri = dash.GenerateUrl()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Json(200, alertDTOs)
|
||||
permissionsQuery := models.GetDashboardPermissionsForUserQuery{
|
||||
DashboardIds: dashboardIds,
|
||||
OrgId: c.OrgId,
|
||||
UserId: c.SignedInUser.UserId,
|
||||
OrgRole: c.SignedInUser.OrgRole,
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(&permissionsQuery); err != nil {
|
||||
return nil, ApiError(500, "List alerts failed", err)
|
||||
}
|
||||
|
||||
for _, alert := range alertDTOs {
|
||||
for _, perm := range permissionsQuery.Result {
|
||||
if alert.DashboardId == perm.DashboardId {
|
||||
alert.CanEdit = perm.Permission > 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return alertDTOs, nil
|
||||
}
|
||||
|
||||
// POST /api/alerts/test
|
||||
@ -155,24 +187,6 @@ func GetAlert(c *middleware.Context) Response {
|
||||
return Json(200, &query.Result)
|
||||
}
|
||||
|
||||
// DEL /api/alerts/:id
|
||||
func DelAlert(c *middleware.Context) Response {
|
||||
alertId := c.ParamsInt64(":alertId")
|
||||
|
||||
if alertId == 0 {
|
||||
return ApiError(401, "Failed to parse alertid", nil)
|
||||
}
|
||||
|
||||
cmd := models.DeleteAlertCommand{AlertId: alertId}
|
||||
|
||||
if err := bus.Dispatch(&cmd); err != nil {
|
||||
return ApiError(500, "Failed to delete alert", err)
|
||||
}
|
||||
|
||||
var resp = map[string]interface{}{"alertId": alertId}
|
||||
return Json(200, resp)
|
||||
}
|
||||
|
||||
func GetAlertNotifiers(c *middleware.Context) Response {
|
||||
return Json(200, alerting.GetNotifiers())
|
||||
}
|
||||
@ -267,6 +281,22 @@ func NotificationTest(c *middleware.Context, dto dtos.NotificationTestCommand) R
|
||||
//POST /api/alerts/:alertId/pause
|
||||
func PauseAlert(c *middleware.Context, dto dtos.PauseAlertCommand) Response {
|
||||
alertId := c.ParamsInt64("alertId")
|
||||
|
||||
query := models.GetAlertByIdQuery{Id: alertId}
|
||||
|
||||
if err := bus.Dispatch(&query); err != nil {
|
||||
return ApiError(500, "Get Alert failed", err)
|
||||
}
|
||||
|
||||
guardian := guardian.NewDashboardGuardian(query.Result.DashboardId, c.OrgId, c.SignedInUser)
|
||||
if canEdit, err := guardian.CanEdit(); err != nil || !canEdit {
|
||||
if err != nil {
|
||||
return ApiError(500, "Error while checking permissions for Alert", err)
|
||||
}
|
||||
|
||||
return ApiError(403, "Access denied to this dashboard and alert", nil)
|
||||
}
|
||||
|
||||
cmd := models.PauseAlertCommand{
|
||||
OrgId: c.OrgId,
|
||||
AlertIds: []int64{alertId},
|
||||
|
97
pkg/api/alerting_test.go
Normal file
97
pkg/api/alerting_test.go
Normal file
@ -0,0 +1,97 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestAlertingApiEndpoint(t *testing.T) {
|
||||
Convey("Given an alert in a dashboard with an acl", t, func() {
|
||||
|
||||
singleAlert := &m.Alert{Id: 1, DashboardId: 1, Name: "singlealert"}
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetAlertByIdQuery) error {
|
||||
query.Result = singleAlert
|
||||
return nil
|
||||
})
|
||||
|
||||
viewerRole := m.ROLE_VIEWER
|
||||
editorRole := m.ROLE_EDITOR
|
||||
|
||||
aclMockResp := []*m.DashboardAclInfoDTO{}
|
||||
bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error {
|
||||
query.Result = aclMockResp
|
||||
return nil
|
||||
})
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error {
|
||||
query.Result = []*m.Team{}
|
||||
return nil
|
||||
})
|
||||
|
||||
Convey("When user is editor and not in the ACL", func() {
|
||||
Convey("Should not be able to pause the alert", func() {
|
||||
cmd := dtos.PauseAlertCommand{
|
||||
AlertId: 1,
|
||||
Paused: true,
|
||||
}
|
||||
postAlertScenario("When calling POST on", "/api/alerts/1/pause", "/api/alerts/:alertId/pause", m.ROLE_EDITOR, cmd, func(sc *scenarioContext) {
|
||||
CallPauseAlert(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user is editor and dashboard has default ACL", func() {
|
||||
aclMockResp = []*m.DashboardAclInfoDTO{
|
||||
{Role: &viewerRole, Permission: m.PERMISSION_VIEW},
|
||||
{Role: &editorRole, Permission: m.PERMISSION_EDIT},
|
||||
}
|
||||
|
||||
Convey("Should be able to pause the alert", func() {
|
||||
cmd := dtos.PauseAlertCommand{
|
||||
AlertId: 1,
|
||||
Paused: true,
|
||||
}
|
||||
postAlertScenario("When calling POST on", "/api/alerts/1/pause", "/api/alerts/:alertId/pause", m.ROLE_EDITOR, cmd, func(sc *scenarioContext) {
|
||||
CallPauseAlert(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func CallPauseAlert(sc *scenarioContext) {
|
||||
bus.AddHandler("test", func(cmd *m.PauseAlertCommand) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
|
||||
}
|
||||
|
||||
func postAlertScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.PauseAlertCommand, fn scenarioFunc) {
|
||||
Convey(desc+" "+url, func() {
|
||||
defer bus.ClearBusHandlers()
|
||||
|
||||
sc := setupScenarioContext(url)
|
||||
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
|
||||
sc.context = c
|
||||
sc.context.UserId = TestUserID
|
||||
sc.context.OrgId = TestOrgID
|
||||
sc.context.OrgRole = role
|
||||
|
||||
return PauseAlert(c, cmd)
|
||||
})
|
||||
|
||||
sc.m.Post(routePattern, sc.defaultHandler)
|
||||
|
||||
fn(sc)
|
||||
})
|
||||
}
|
@ -7,7 +7,9 @@ import (
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/annotations"
|
||||
"github.com/grafana/grafana/pkg/services/guardian"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
)
|
||||
|
||||
@ -51,6 +53,10 @@ func (e *CreateAnnotationError) Error() string {
|
||||
}
|
||||
|
||||
func PostAnnotation(c *middleware.Context, cmd dtos.PostAnnotationsCmd) Response {
|
||||
if canSave, err := canSaveByDashboardId(c, cmd.DashboardId); err != nil || !canSave {
|
||||
return dashboardGuardianResponse(err)
|
||||
}
|
||||
|
||||
repo := annotations.GetRepository()
|
||||
|
||||
if cmd.Text == "" {
|
||||
@ -178,6 +184,10 @@ func UpdateAnnotation(c *middleware.Context, cmd dtos.UpdateAnnotationsCmd) Resp
|
||||
|
||||
repo := annotations.GetRepository()
|
||||
|
||||
if resp := canSave(c, repo, annotationId); resp != nil {
|
||||
return resp
|
||||
}
|
||||
|
||||
item := annotations.Item{
|
||||
OrgId: c.OrgId,
|
||||
UserId: c.UserId,
|
||||
@ -228,6 +238,10 @@ func DeleteAnnotationById(c *middleware.Context) Response {
|
||||
repo := annotations.GetRepository()
|
||||
annotationId := c.ParamsInt64(":annotationId")
|
||||
|
||||
if resp := canSave(c, repo, annotationId); resp != nil {
|
||||
return resp
|
||||
}
|
||||
|
||||
err := repo.Delete(&annotations.DeleteParams{
|
||||
Id: annotationId,
|
||||
})
|
||||
@ -243,6 +257,10 @@ func DeleteAnnotationRegion(c *middleware.Context) Response {
|
||||
repo := annotations.GetRepository()
|
||||
regionId := c.ParamsInt64(":regionId")
|
||||
|
||||
if resp := canSave(c, repo, regionId); resp != nil {
|
||||
return resp
|
||||
}
|
||||
|
||||
err := repo.Delete(&annotations.DeleteParams{
|
||||
RegionId: regionId,
|
||||
})
|
||||
@ -253,3 +271,50 @@ func DeleteAnnotationRegion(c *middleware.Context) Response {
|
||||
|
||||
return ApiSuccess("Annotation region deleted")
|
||||
}
|
||||
|
||||
func canSaveByDashboardId(c *middleware.Context, dashboardId int64) (bool, error) {
|
||||
if dashboardId == 0 && !c.SignedInUser.HasRole(m.ROLE_EDITOR) {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
if dashboardId > 0 {
|
||||
guardian := guardian.NewDashboardGuardian(dashboardId, c.OrgId, c.SignedInUser)
|
||||
if canEdit, err := guardian.CanEdit(); err != nil || !canEdit {
|
||||
return false, err
|
||||
}
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func canSave(c *middleware.Context, repo annotations.Repository, annotationId int64) Response {
|
||||
items, err := repo.Find(&annotations.ItemQuery{AnnotationId: annotationId, OrgId: c.OrgId})
|
||||
|
||||
if err != nil || len(items) == 0 {
|
||||
return ApiError(500, "Could not find annotation to update", err)
|
||||
}
|
||||
|
||||
dashboardId := items[0].DashboardId
|
||||
|
||||
if canSave, err := canSaveByDashboardId(c, dashboardId); err != nil || !canSave {
|
||||
return dashboardGuardianResponse(err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func canSaveByRegionId(c *middleware.Context, repo annotations.Repository, regionId int64) Response {
|
||||
items, err := repo.Find(&annotations.ItemQuery{RegionId: regionId, OrgId: c.OrgId})
|
||||
|
||||
if err != nil || len(items) == 0 {
|
||||
return ApiError(500, "Could not find annotation to update", err)
|
||||
}
|
||||
|
||||
dashboardId := items[0].DashboardId
|
||||
|
||||
if canSave, err := canSaveByDashboardId(c, dashboardId); err != nil || !canSave {
|
||||
return dashboardGuardianResponse(err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
242
pkg/api/annotations_test.go
Normal file
242
pkg/api/annotations_test.go
Normal file
@ -0,0 +1,242 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/annotations"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestAnnotationsApiEndpoint(t *testing.T) {
|
||||
Convey("Given an annotation without a dashboard id", t, func() {
|
cmd := dtos.PostAnnotationsCmd{
Time: 1000,
Text: "annotation text",
Tags: []string{"tag1", "tag2"},
IsRegion: false,
}

updateCmd := dtos.UpdateAnnotationsCmd{
Time: 1000,
Text: "annotation text",
Tags: []string{"tag1", "tag2"},
IsRegion: false,
}

Convey("When user is an Org Viewer", func() {
role := m.ROLE_VIEWER
Convey("Should not be allowed to save an annotation", func() {
postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})

putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationById
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationRegion
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})
})
})

Convey("When user is an Org Editor", func() {
role := m.ROLE_EDITOR
Convey("Should be able to save an annotation", func() {
postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})

putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationById
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationRegion
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})
})
})
})

Convey("Given an annotation with a dashboard id and the dashboard does not have an acl", t, func() {
cmd := dtos.PostAnnotationsCmd{
Time: 1000,
Text: "annotation text",
Tags: []string{"tag1", "tag2"},
IsRegion: false,
DashboardId: 1,
PanelId: 1,
}

updateCmd := dtos.UpdateAnnotationsCmd{
Time: 1000,
Text: "annotation text",
Tags: []string{"tag1", "tag2"},
IsRegion: false,
Id: 1,
}

viewerRole := m.ROLE_VIEWER
editorRole := m.ROLE_EDITOR

aclMockResp := []*m.DashboardAclInfoDTO{
{Role: &viewerRole, Permission: m.PERMISSION_VIEW},
{Role: &editorRole, Permission: m.PERMISSION_EDIT},
}

bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error {
query.Result = aclMockResp
return nil
})

bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error {
query.Result = []*m.Team{}
return nil
})

Convey("When user is an Org Viewer", func() {
role := m.ROLE_VIEWER
Convey("Should not be allowed to save an annotation", func() {
postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})

putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationById
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationRegion
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 403)
})
})
})

Convey("When user is an Org Editor", func() {
role := m.ROLE_EDITOR
Convey("Should be able to save an annotation", func() {
postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})

putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) {
sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationById
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})

loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) {
sc.handlerFunc = DeleteAnnotationRegion
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
So(sc.resp.Code, ShouldEqual, 200)
})
})
})
})
}

type fakeAnnotationsRepo struct {
}

func (repo *fakeAnnotationsRepo) Delete(params *annotations.DeleteParams) error {
return nil
}
func (repo *fakeAnnotationsRepo) Save(item *annotations.Item) error {
item.Id = 1
return nil
}
func (repo *fakeAnnotationsRepo) Update(item *annotations.Item) error {
return nil
}
func (repo *fakeAnnotationsRepo) Find(query *annotations.ItemQuery) ([]*annotations.ItemDTO, error) {
annotations := []*annotations.ItemDTO{{Id: 1}}
return annotations, nil
}

var fakeAnnoRepo *fakeAnnotationsRepo

func postAnnotationScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.PostAnnotationsCmd, fn scenarioFunc) {
Convey(desc+" "+url, func() {
defer bus.ClearBusHandlers()

sc := setupScenarioContext(url)
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
sc.context.OrgRole = role

return PostAnnotation(c, cmd)
})

fakeAnnoRepo = &fakeAnnotationsRepo{}
annotations.SetRepository(fakeAnnoRepo)

sc.m.Post(routePattern, sc.defaultHandler)

fn(sc)
})
}

func putAnnotationScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.UpdateAnnotationsCmd, fn scenarioFunc) {
Convey(desc+" "+url, func() {
defer bus.ClearBusHandlers()

sc := setupScenarioContext(url)
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
sc.context.OrgRole = role

return UpdateAnnotation(c, cmd)
})

fakeAnnoRepo = &fakeAnnotationsRepo{}
annotations.SetRepository(fakeAnnoRepo)

sc.m.Put(routePattern, sc.defaultHandler)

fn(sc)
})
}
@ -15,6 +15,8 @@ func (hs *HttpServer) registerRoutes() {
reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
reqOrgAdmin := middleware.RoleAuth(m.ROLE_ADMIN)
redirectFromLegacyDashboardUrl := middleware.RedirectFromLegacyDashboardUrl()
redirectFromLegacyDashboardSoloUrl := middleware.RedirectFromLegacyDashboardSoloUrl()
quota := middleware.Quota
bind := binding.Bind

@ -63,9 +65,13 @@ func (hs *HttpServer) registerRoutes() {
r.Get("/plugins/:id/edit", reqSignedIn, Index)
r.Get("/plugins/:id/page/:page", reqSignedIn, Index)

r.Get("/dashboard/*", reqSignedIn, Index)
r.Get("/d/:uid/:slug", reqSignedIn, Index)
r.Get("/dashboard/db/:slug", reqSignedIn, redirectFromLegacyDashboardUrl, Index)
r.Get("/dashboard/script/*", reqSignedIn, Index)
r.Get("/dashboard-solo/snapshot/*", Index)
r.Get("/dashboard-solo/*", reqSignedIn, Index)
r.Get("/d-solo/:uid/:slug", reqSignedIn, Index)
r.Get("/dashboard-solo/db/:slug", reqSignedIn, redirectFromLegacyDashboardSoloUrl, Index)
r.Get("/dashboard-solo/script/*", reqSignedIn, Index)
r.Get("/import/dashboard", reqSignedIn, Index)
r.Get("/dashboards/", reqSignedIn, Index)
r.Get("/dashboards/*", reqSignedIn, Index)
@ -144,13 +150,13 @@ func (hs *HttpServer) registerRoutes() {
apiRoute.Group("/teams", func(teamsRoute RouteRegister) {
teamsRoute.Get("/:teamId", wrap(GetTeamById))
teamsRoute.Get("/search", wrap(SearchTeams))
teamsRoute.Post("/", quota("teams"), bind(m.CreateTeamCommand{}), wrap(CreateTeam))
teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam))
teamsRoute.Delete("/:teamId", wrap(DeleteTeamById))
teamsRoute.Get("/:teamId/members", wrap(GetTeamMembers))
teamsRoute.Post("/:teamId/members", quota("teams"), bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember))
teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember))
}, reqOrgAdmin)
teamsRoute.Post("/", quota("teams"), reqOrgAdmin, bind(m.CreateTeamCommand{}), wrap(CreateTeam))
teamsRoute.Put("/:teamId", reqOrgAdmin, bind(m.UpdateTeamCommand{}), wrap(UpdateTeam))
teamsRoute.Delete("/:teamId", reqOrgAdmin, wrap(DeleteTeamById))
teamsRoute.Get("/:teamId/members", reqOrgAdmin, wrap(GetTeamMembers))
teamsRoute.Post("/:teamId/members", reqOrgAdmin, quota("teams"), bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember))
teamsRoute.Delete("/:teamId/members/:userId", reqOrgAdmin, wrap(RemoveTeamMember))
})

// org information available to all users.
apiRoute.Group("/org", func(orgRoute RouteRegister) {
@ -242,20 +248,25 @@ func (hs *HttpServer) registerRoutes() {

// Dashboard
apiRoute.Group("/dashboards", func(dashboardRoute RouteRegister) {
dashboardRoute.Get("/uid/:uid", wrap(GetDashboard))
dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUid))

dashboardRoute.Get("/db/:slug", wrap(GetDashboard))
dashboardRoute.Delete("/db/:slug", reqEditorRole, wrap(DeleteDashboard))
dashboardRoute.Delete("/db/:slug", wrap(DeleteDashboard))

dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff))

dashboardRoute.Post("/db", reqEditorRole, bind(m.SaveDashboardCommand{}), wrap(PostDashboard))
dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), wrap(PostDashboard))
dashboardRoute.Get("/home", wrap(GetHomeDashboard))
dashboardRoute.Get("/tags", GetDashboardTags)
dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard))

dashboardRoute.Get("/folders", wrap(GetFoldersForSignedInUser))

dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute RouteRegister) {
dashIdRoute.Get("/versions", wrap(GetDashboardVersions))
dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion))
dashIdRoute.Post("/restore", reqEditorRole, bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion))
dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion))

dashIdRoute.Group("/acl", func(aclRoute RouteRegister) {
aclRoute.Get("/", wrap(GetDashboardAclList))
@ -317,8 +328,8 @@ func (hs *HttpServer) registerRoutes() {
annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationById))
annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation))
annotationsRoute.Delete("/region/:regionId", wrap(DeleteAnnotationRegion))
annotationsRoute.Post("/graphite", bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation))
}, reqEditorRole)
annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation))
})

// error test
r.Get("/metrics/error", wrap(GenerateError))
@ -157,11 +157,11 @@ func NewCacheServer() *CacheServer {
func newNotFound() *Avatar {
avatar := &Avatar{notFound: true}

// load transparent png into buffer
path := filepath.Join(setting.StaticRootPath, "img", "transparent.png")
// load user_profile png into buffer
path := filepath.Join(setting.StaticRootPath, "img", "user_profile.png")

if data, err := ioutil.ReadFile(path); err != nil {
log.Error(3, "Failed to read transparent.png, %v", path)
log.Error(3, "Failed to read user_profile.png, %v", path)
} else {
avatar.data = bytes.NewBuffer(data)
}
105
pkg/api/common_test.go
Normal file
@ -0,0 +1,105 @@
package api

import (
"net/http"
"net/http/httptest"
"path/filepath"

"github.com/go-macaron/session"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/models"
macaron "gopkg.in/macaron.v1"

. "github.com/smartystreets/goconvey/convey"
)

func loggedInUserScenario(desc string, url string, fn scenarioFunc) {
loggedInUserScenarioWithRole(desc, "GET", url, url, models.ROLE_EDITOR, fn)
}

func loggedInUserScenarioWithRole(desc string, method string, url string, routePattern string, role models.RoleType, fn scenarioFunc) {
Convey(desc+" "+url, func() {
defer bus.ClearBusHandlers()

sc := setupScenarioContext(url)
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
sc.context.OrgRole = role
if sc.handlerFunc != nil {
return sc.handlerFunc(sc.context)
}

return nil
})

switch method {
case "GET":
sc.m.Get(routePattern, sc.defaultHandler)
case "DELETE":
sc.m.Delete(routePattern, sc.defaultHandler)
}

fn(sc)
})
}

func (sc *scenarioContext) fakeReq(method, url string) *scenarioContext {
sc.resp = httptest.NewRecorder()
req, err := http.NewRequest(method, url, nil)
So(err, ShouldBeNil)
sc.req = req

return sc
}

func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext {
sc.resp = httptest.NewRecorder()
req, err := http.NewRequest(method, url, nil)
q := req.URL.Query()
for k, v := range queryParams {
q.Add(k, v)
}
req.URL.RawQuery = q.Encode()
So(err, ShouldBeNil)
sc.req = req

return sc
}

type scenarioContext struct {
m *macaron.Macaron
context *middleware.Context
resp *httptest.ResponseRecorder
handlerFunc handlerFunc
defaultHandler macaron.Handler
req *http.Request
url string
}

func (sc *scenarioContext) exec() {
sc.m.ServeHTTP(sc.resp, sc.req)
}

type scenarioFunc func(c *scenarioContext)
type handlerFunc func(c *middleware.Context) Response

func setupScenarioContext(url string) *scenarioContext {
sc := &scenarioContext{
url: url,
}
viewsPath, _ := filepath.Abs("../../public/views")

sc.m = macaron.New()
sc.m.Use(macaron.Renderer(macaron.RenderOptions{
Directory: viewsPath,
Delims: macaron.Delims{Left: "[[", Right: "]]"},
}))

sc.m.Use(middleware.GetContextHandler())
sc.m.Use(middleware.Sessioner(&session.Options{}))

return sc
}
@ -38,20 +38,19 @@ func isDashboardStarredByUser(c *middleware.Context, dashId int64) (bool, error)
func dashboardGuardianResponse(err error) Response {
if err != nil {
return ApiError(500, "Error while checking dashboard permissions", err)
} else {
return ApiError(403, "Access denied to this dashboard", nil)
}

return ApiError(403, "Access denied to this dashboard", nil)
}

func GetDashboard(c *middleware.Context) Response {
dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0)
dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0, c.Params(":uid"))
if rsp != nil {
return rsp
}

guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser)
if canView, err := guardian.CanView(); err != nil || !canView {
fmt.Printf("%v", err)
return dashboardGuardianResponse(err)
}

@ -89,7 +88,8 @@ func GetDashboard(c *middleware.Context) Response {
HasAcl: dash.HasAcl,
IsFolder: dash.IsFolder,
FolderId: dash.FolderId,
FolderTitle: "Root",
Url: dash.GetUrl(),
FolderTitle: "General",
}

// lookup folder title
@ -99,6 +99,7 @@ func GetDashboard(c *middleware.Context) Response {
return ApiError(500, "Dashboard folder could not be read", err)
}
meta.FolderTitle = query.Result.Title
meta.FolderSlug = query.Result.Slug
}

// make sure db version is in sync with json model version
@ -124,8 +125,15 @@ func getUserLogin(userId int64) string {
}
}

func getDashboardHelper(orgId int64, slug string, id int64) (*m.Dashboard, Response) {
query := m.GetDashboardQuery{Slug: slug, Id: id, OrgId: orgId}
func getDashboardHelper(orgId int64, slug string, id int64, uid string) (*m.Dashboard, Response) {
var query m.GetDashboardQuery

if len(uid) > 0 {
query = m.GetDashboardQuery{Uid: uid, Id: id, OrgId: orgId}
} else {
query = m.GetDashboardQuery{Slug: slug, Id: id, OrgId: orgId}
}

if err := bus.Dispatch(&query); err != nil {
return nil, ApiError(404, "Dashboard not found", err)
}
@ -133,7 +141,37 @@ func getDashboardHelper(orgId int64, slug string, id int64) (*m.Dashboard, Respo
}

func DeleteDashboard(c *middleware.Context) Response {
dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0)
query := m.GetDashboardsBySlugQuery{OrgId: c.OrgId, Slug: c.Params(":slug")}

if err := bus.Dispatch(&query); err != nil {
return ApiError(500, "Failed to retrieve dashboards by slug", err)
}

if len(query.Result) > 1 {
return Json(412, util.DynMap{"status": "multiple-slugs-exists", "message": m.ErrDashboardsWithSameSlugExists.Error()})
}

dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0, "")
if rsp != nil {
return rsp
}

guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser)
if canSave, err := guardian.CanSave(); err != nil || !canSave {
return dashboardGuardianResponse(err)
}

cmd := m.DeleteDashboardCommand{OrgId: c.OrgId, Id: dash.Id}
if err := bus.Dispatch(&cmd); err != nil {
return ApiError(500, "Failed to delete dashboard", err)
}

var resp = map[string]interface{}{"title": dash.Title}
return Json(200, resp)
}

func DeleteDashboardByUid(c *middleware.Context) Response {
dash, rsp := getDashboardHelper(c.OrgId, "", 0, c.Params(":uid"))
if rsp != nil {
return rsp
}
@ -158,7 +196,14 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response {

dash := cmd.GetDashboardModel()

guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser)
dashId := dash.Id

// if new dashboard, use parent folder permissions instead
if dashId == 0 {
dashId = cmd.FolderId
}

guardian := guardian.NewDashboardGuardian(dashId, c.OrgId, c.SignedInUser)
if canSave, err := guardian.CanSave(); err != nil || !canSave {
return dashboardGuardianResponse(err)
}
@ -201,7 +246,10 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response {
}

if err != nil {
if err == m.ErrDashboardWithSameNameExists {
if err == m.ErrDashboardWithSameUIDExists {
return Json(412, util.DynMap{"status": "name-exists", "message": err.Error()})
}
if err == m.ErrDashboardWithSameNameInFolderExists {
return Json(412, util.DynMap{"status": "name-exists", "message": err.Error()})
}
if err == m.ErrDashboardVersionMismatch {
@ -225,8 +273,17 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response {
return ApiError(500, "Invalid alert data. Cannot save dashboard", err)
}

dashboard.IsFolder = dash.IsFolder

c.TimeRequest(metrics.M_Api_Dashboard_Save)
return Json(200, util.DynMap{"status": "success", "slug": dashboard.Slug, "version": dashboard.Version, "id": dashboard.Id})
return Json(200, util.DynMap{
"status": "success",
"slug": dashboard.Slug,
"version": dashboard.Version,
"id": dashboard.Id,
"uid": dashboard.Uid,
"url": dashboard.GetUrl(),
})
}

func GetHomeDashboard(c *middleware.Context) Response {
@ -255,7 +312,7 @@ func GetHomeDashboard(c *middleware.Context) Response {
dash := dtos.DashboardFullWithMeta{}
dash.Meta.IsHome = true
dash.Meta.CanEdit = c.SignedInUser.HasRole(m.ROLE_EDITOR)
dash.Meta.FolderTitle = "Root"
dash.Meta.FolderTitle = "General"

jsonParser := json.NewDecoder(file)
if err := jsonParser.Decode(&dash.Dashboard); err != nil {
@ -393,7 +450,7 @@ func CalculateDashboardDiff(c *middleware.Context, apiOptions dtos.CalculateDiff

// RestoreDashboardVersion restores a dashboard to the given version.
func RestoreDashboardVersion(c *middleware.Context, apiCmd dtos.RestoreDashboardVersionCommand) Response {
dash, rsp := getDashboardHelper(c.OrgId, "", c.ParamsInt64(":dashboardId"))
dash, rsp := getDashboardHelper(c.OrgId, "", c.ParamsInt64(":dashboardId"), "")
if rsp != nil {
return rsp
}
@ -416,6 +473,7 @@ func RestoreDashboardVersion(c *middleware.Context, apiCmd dtos.RestoreDashboard
saveCmd.UserId = c.UserId
saveCmd.Dashboard = version.Data
saveCmd.Dashboard.Set("version", dash.Version)
saveCmd.Dashboard.Set("uid", dash.Uid)
saveCmd.Message = fmt.Sprintf("Restored from version %d", version.Version)

return PostDashboard(c, saveCmd)
@ -431,3 +489,19 @@ func GetDashboardTags(c *middleware.Context) {

c.JSON(200, query.Result)
}

func GetFoldersForSignedInUser(c *middleware.Context) Response {
title := c.Query("query")
query := m.GetFoldersForSignedInUserQuery{
OrgId: c.OrgId,
SignedInUser: c.SignedInUser,
Title: title,
}

err := bus.Dispatch(&query)
if err != nil {
return ApiError(500, "Failed to get folders from database", err)
}

return Json(200, query.Result)
}
|
||||
})
|
||||
}
|
||||
|
||||
if okToUpdate, err := guardian.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, cmd.Items); err != nil || !okToUpdate {
|
||||
if err != nil {
|
||||
return ApiError(500, "Error while checking dashboard permissions", err)
|
||||
}
|
||||
|
||||
return ApiError(403, "Cannot remove own admin permission for a folder", nil)
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(&cmd); err != nil {
|
||||
if err == m.ErrDashboardAclInfoMissing || err == m.ErrDashboardPermissionDashboardEmpty {
|
||||
return ApiError(409, err.Error(), err)
|
||||
@ -70,6 +78,14 @@ func DeleteDashboardAcl(c *middleware.Context) Response {
|
||||
return dashboardGuardianResponse(err)
|
||||
}
|
||||
|
||||
if okToDelete, err := guardian.CheckPermissionBeforeRemove(m.PERMISSION_ADMIN, aclId); err != nil || !okToDelete {
|
||||
if err != nil {
|
||||
return ApiError(500, "Error while checking dashboard permissions", err)
|
||||
}
|
||||
|
||||
return ApiError(403, "Cannot remove own admin permission for a folder", nil)
|
||||
}
|
||||
|
||||
cmd := m.RemoveDashboardAclCommand{OrgId: c.OrgId, AclId: aclId}
|
||||
if err := bus.Dispatch(&cmd); err != nil {
|
||||
return ApiError(500, "Failed to delete permission for user", err)
|
||||
|
@ -3,8 +3,10 @@ package api
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
@ -37,6 +39,12 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
// This tests four scenarios:
|
||||
// 1. user is an org admin
|
||||
// 2. user is an org editor AND has been granted admin permission for the dashboard
|
||||
// 3. user is an org viewer AND has been granted edit permission for the dashboard
|
||||
// 4. user is an org editor AND has no permissions for the dashboard
|
||||
|
||||
Convey("When user is org admin", func() {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardsId/acl", m.ROLE_ADMIN, func(sc *scenarioContext) {
|
||||
Convey("Should be able to access ACL", func() {
|
||||
@ -54,9 +62,9 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user is editor and has admin permission in the ACL", func() {
|
||||
Convey("When user is org editor and has admin permission in the ACL", func() {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 6, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
|
||||
Convey("Should be able to access ACL", func() {
|
||||
sc.handlerFunc = GetDashboardAclList
|
||||
@ -67,7 +75,7 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 6, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
|
||||
bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error {
|
||||
return nil
|
||||
@ -81,6 +89,52 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/6", "/api/dashboards/id/:dashboardId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 6, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
|
||||
bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
Convey("Should not be able to delete their own Admin permission", func() {
|
||||
sc.handlerFunc = DeleteDashboardAcl
|
||||
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
|
||||
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Should not be able to downgrade their own Admin permission", func() {
|
||||
cmd := dtos.UpdateDashboardAclCommand{
|
||||
Items: []dtos.DashboardAclUpdateItem{
|
||||
{UserId: TestUserID, Permission: m.PERMISSION_EDIT},
|
||||
},
|
||||
}
|
||||
|
||||
postAclScenario("When calling POST on", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_EDITOR, cmd, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 6, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
|
||||
CallPostAcl(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Should be able to update permissions", func() {
|
||||
cmd := dtos.UpdateDashboardAclCommand{
|
||||
Items: []dtos.DashboardAclUpdateItem{
|
||||
{UserId: TestUserID, Permission: m.PERMISSION_ADMIN},
|
||||
{UserId: 2, Permission: m.PERMISSION_EDIT},
|
||||
},
|
||||
}
|
||||
|
||||
postAclScenario("When calling POST on", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_EDITOR, cmd, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 6, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN})
|
||||
|
||||
CallPostAcl(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user is a member of a team in the ACL with admin permission", func() {
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardsId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
teamResp = append(teamResp, &m.Team{Id: 2, OrgId: 1, Name: "UG2"})
|
||||
@ -99,11 +153,12 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user is editor and has edit permission in the ACL", func() {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
Convey("When user is org viewer and has edit permission in the ACL", func() {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_VIEWER, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_EDIT})
|
||||
|
||||
Convey("Should not be able to access ACL", func() {
|
||||
// Getting the permissions is an Admin permission
|
||||
Convey("Should not be able to get list of permissions from ACL", func() {
|
||||
sc.handlerFunc = GetDashboardAclList
|
||||
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
|
||||
|
||||
@ -111,7 +166,7 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardId/acl/:aclId", m.ROLE_VIEWER, func(sc *scenarioContext) {
|
||||
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_EDIT})
|
||||
|
||||
bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error {
|
||||
@ -127,7 +182,7 @@ func TestDashboardAclApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user is editor and not in the ACL", func() {
|
||||
Convey("When user is org editor and not in the ACL", func() {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardsId/acl", m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||
|
||||
Convey("Should not be able to access ACL", func() {
|
||||
@ -172,3 +227,32 @@ func transformDashboardAclsToDTOs(acls []*m.DashboardAclInfoDTO) []*m.DashboardA
|
||||
|
||||
return dtos
|
||||
}
|
||||
|
||||
func CallPostAcl(sc *scenarioContext) {
|
||||
bus.AddHandler("test", func(cmd *m.UpdateDashboardAclCommand) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
|
||||
}
|
||||
|
||||
func postAclScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.UpdateDashboardAclCommand, fn scenarioFunc) {
|
||||
Convey(desc+" "+url, func() {
|
||||
defer bus.ClearBusHandlers()
|
||||
|
||||
sc := setupScenarioContext(url)
|
||||
|
||||
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
|
||||
sc.context = c
|
||||
sc.context.UserId = TestUserID
|
||||
sc.context.OrgId = TestOrgID
|
||||
sc.context.OrgRole = role
|
||||
|
||||
return UpdateDashboardAcl(c, cmd)
|
||||
})
|
||||
|
||||
sc.m.Post(routePattern, sc.defaultHandler)
|
||||
|
||||
fn(sc)
|
||||
})
|
||||
}
|
||||
|
@ -2,12 +2,8 @@ package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
macaron "gopkg.in/macaron.v1"
|
||||
|
||||
"github.com/go-macaron/session"
|
||||
"github.com/grafana/grafana/pkg/api/dtos"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
@ -32,6 +28,10 @@ func (repo *fakeDashboardRepo) SaveDashboard(json *dashboards.SaveDashboardItem)
|
||||
|
||||
var fakeRepo *fakeDashboardRepo
|
||||
|
||||
// This tests two main scenarios. If a user has access to execute an action on a dashboard:
|
||||
// 1. and the dashboard is in a folder which does not have an acl
|
||||
// 2. and the dashboard is in a folder which does have an acl
|
||||
|
||||
func TestDashboardApiEndpoint(t *testing.T) {
|
||||
Convey("Given a dashboard with a parent folder which does not have an acl", t, func() {
|
||||
fakeDash := m.NewDashboard("Child dash")
|
||||
@ -39,8 +39,17 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
fakeDash.FolderId = 1
|
||||
fakeDash.HasAcl = false
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error {
|
||||
dashboards := []*m.Dashboard{fakeDash}
|
||||
query.Result = dashboards
|
||||
return nil
|
||||
})
|
||||
|
||||
var getDashboardQueries []*m.GetDashboardQuery
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetDashboardQuery) error {
|
||||
query.Result = fakeDash
|
||||
getDashboardQueries = append(getDashboardQueries, query)
|
||||
return nil
|
||||
})
|
||||
|
||||
@ -70,12 +79,20 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
}),
|
||||
}
|
||||
|
||||
// This tests two scenarios:
|
||||
// 1. user is an org viewer
|
||||
// 2. user is an org editor
|
||||
|
||||
Convey("When user is an Org Viewer", func() {
|
||||
role := m.ROLE_VIEWER
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should not be able to edit or save dashboard", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeFalse)
|
||||
So(dash.Meta.CanSave, ShouldBeFalse)
|
||||
@ -83,9 +100,36 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should not be able to edit or save dashboard", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeFalse)
|
||||
So(dash.Meta.CanSave, ShouldBeFalse)
|
||||
So(dash.Meta.CanAdmin, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -107,9 +151,13 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
Convey("When user is an Org Editor", func() {
|
||||
role := m.ROLE_EDITOR
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should be able to edit or save dashboard", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeTrue)
|
||||
@ -117,9 +165,36 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should be able to edit or save dashboard", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeTrue)
|
||||
So(dash.Meta.CanAdmin, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -133,8 +208,7 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
|
||||
postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) {
|
||||
CallPostDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
CallPostDashboardShouldReturnSuccess(sc)
|
||||
})
|
||||
|
||||
Convey("When saving a dashboard folder in another folder", func() {
|
||||
@ -168,6 +242,12 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
fakeDash.HasAcl = true
|
||||
setting.ViewersCanEdit = false
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error {
|
||||
dashboards := []*m.Dashboard{fakeDash}
|
||||
query.Result = dashboards
|
||||
return nil
|
||||
})
|
||||
|
||||
aclMockResp := []*m.DashboardAclInfoDTO{
|
||||
{
|
||||
DashboardId: 1,
|
||||
@ -181,8 +261,11 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
var getDashboardQueries []*m.GetDashboardQuery
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetDashboardQuery) error {
|
||||
query.Result = fakeDash
|
||||
getDashboardQueries = append(getDashboardQueries, query)
|
||||
return nil
|
||||
})
|
||||
|
||||
@ -200,21 +283,59 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
}),
|
||||
}
|
||||
|
||||
// This tests six scenarios:
|
||||
// 1. user is an org viewer AND has no permissions for this dashboard
|
||||
// 2. user is an org editor AND has no permissions for this dashboard
|
||||
// 3. user is an org viewer AND has been granted edit permission for the dashboard
|
||||
// 4. user is an org viewer AND all viewers have edit permission for this dashboard
|
||||
// 5. user is an org viewer AND has been granted an admin permission
|
||||
// 6. user is an org editor AND has been granted a view permission
|
||||
|
||||
Convey("When user is an Org Viewer and has no permissions for this dashboard", func() {
|
||||
role := m.ROLE_VIEWER
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
sc.handlerFunc = GetDashboard
|
||||
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should be denied access", func() {
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
sc.handlerFunc = GetDashboard
|
||||
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should be denied access", func() {
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -236,18 +357,48 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
Convey("When user is an Org Editor and has no permissions for this dashboard", func() {
|
||||
role := m.ROLE_EDITOR
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
sc.handlerFunc = GetDashboard
|
||||
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should be denied access", func() {
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
sc.handlerFunc = GetDashboard
|
||||
sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should be denied access", func() {
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -278,9 +429,13 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should be able to get dashboard with edit rights", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeTrue)
|
||||
@ -288,9 +443,36 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should be able to get dashboard with edit rights", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeTrue)
|
||||
So(dash.Meta.CanAdmin, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -304,8 +486,7 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
|
||||
postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) {
|
||||
CallPostDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
CallPostDashboardShouldReturnSuccess(sc)
|
||||
})
|
||||
})
|
||||
|
||||
@ -322,9 +503,13 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should be able to get dashboard with edit rights but can save should be false", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeFalse)
|
||||
@ -332,9 +517,36 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should be able to get dashboard with edit rights but can save should be false", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeFalse)
|
||||
So(dash.Meta.CanAdmin, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -350,9 +562,13 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should be able to get dashboard with edit rights", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeTrue)
|
||||
@ -360,9 +576,36 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should be able to get dashboard with edit rights", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeTrue)
|
||||
So(dash.Meta.CanSave, ShouldBeTrue)
|
||||
So(dash.Meta.CanAdmin, ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -376,8 +619,7 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
|
||||
postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) {
|
||||
CallPostDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
CallPostDashboardShouldReturnSuccess(sc)
|
||||
})
|
||||
})
|
||||
|
||||
@ -393,18 +635,48 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
return nil
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
|
||||
Convey("Should not be able to edit or save dashboard", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeFalse)
|
||||
So(dash.Meta.CanSave, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) {
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
dash := GetDashboardShouldReturn200(sc)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
|
||||
Convey("Should not be able to edit or save dashboard", func() {
|
||||
So(dash.Meta.CanEdit, ShouldBeFalse)
|
||||
So(dash.Meta.CanSave, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by slug", func() {
|
||||
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboardByUid(sc)
|
||||
So(sc.resp.Code, ShouldEqual, 403)
|
||||
|
||||
Convey("Should lookup dashboard by uid", func() {
|
||||
So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi")
|
||||
})
|
||||
})
|
||||
|
||||
loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) {
|
||||
@ -423,6 +695,37 @@ func TestDashboardApiEndpoint(t *testing.T) {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Given two dashboards with the same title in different folders", t, func() {
|
||||
dashOne := m.NewDashboard("dash")
|
||||
dashOne.Id = 2
|
||||
dashOne.FolderId = 1
|
||||
dashOne.HasAcl = false
|
||||
|
||||
dashTwo := m.NewDashboard("dash")
|
||||
dashTwo.Id = 4
|
||||
dashTwo.FolderId = 3
|
||||
dashTwo.HasAcl = false
|
||||
|
||||
bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error {
|
||||
dashboards := []*m.Dashboard{dashOne, dashTwo}
|
||||
query.Result = dashboards
|
||||
return nil
|
||||
})
|
||||
|
||||
role := m.ROLE_EDITOR
|
||||
|
||||
loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) {
|
||||
CallDeleteDashboard(sc)
|
||||
|
||||
Convey("Should result in 412 Precondition failed", func() {
|
||||
So(sc.resp.Code, ShouldEqual, 412)
|
||||
result := sc.ToJson()
|
||||
So(result.Get("status").MustString(), ShouldEqual, "multiple-slugs-exists")
|
||||
So(result.Get("message").MustString(), ShouldEqual, m.ErrDashboardsWithSameSlugExists.Error())
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func GetDashboardShouldReturn200(sc *scenarioContext) dtos.DashboardFullWithMeta {
|
||||
@ -467,6 +770,15 @@ func CallDeleteDashboard(sc *scenarioContext) {
|
||||
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
|
||||
}
|
||||
|
||||
func CallDeleteDashboardByUid(sc *scenarioContext) {
|
||||
bus.AddHandler("test", func(cmd *m.DeleteDashboardCommand) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
sc.handlerFunc = DeleteDashboardByUid
|
||||
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
|
||||
}
|
||||
|
||||
func CallPostDashboard(sc *scenarioContext) {
|
||||
bus.AddHandler("test", func(cmd *alerting.ValidateDashboardAlertsCommand) error {
|
||||
return nil
|
||||
@ -484,24 +796,23 @@ func CallPostDashboard(sc *scenarioContext) {
|
||||
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
|
||||
}
|
||||
|
||||
func CallPostDashboardShouldReturnSuccess(sc *scenarioContext) {
|
||||
CallPostDashboard(sc)
|
||||
|
||||
So(sc.resp.Code, ShouldEqual, 200)
|
||||
result := sc.ToJson()
|
||||
So(result.Get("status").MustString(), ShouldEqual, "success")
|
||||
So(result.Get("id").MustInt64(), ShouldBeGreaterThan, 0)
|
||||
So(result.Get("uid").MustString(), ShouldNotBeNil)
|
||||
So(result.Get("slug").MustString(), ShouldNotBeNil)
|
||||
So(result.Get("url").MustString(), ShouldNotBeNil)
|
||||
}
|
||||
|
||||
func postDashboardScenario(desc string, url string, routePattern string, role m.RoleType, cmd m.SaveDashboardCommand, fn scenarioFunc) {
|
||||
Convey(desc+" "+url, func() {
|
||||
defer bus.ClearBusHandlers()
|
||||
|
||||
sc := &scenarioContext{
|
||||
url: url,
|
||||
}
|
||||
viewsPath, _ := filepath.Abs("../../public/views")
|
||||
|
||||
sc.m = macaron.New()
|
||||
sc.m.Use(macaron.Renderer(macaron.RenderOptions{
|
||||
Directory: viewsPath,
|
||||
Delims: macaron.Delims{Left: "[[", Right: "]]"},
|
||||
}))
|
||||
|
||||
sc.m.Use(middleware.GetContextHandler())
|
||||
sc.m.Use(middleware.Sessioner(&session.Options{}))
|
||||
|
||||
sc := setupScenarioContext(url)
|
||||
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
|
||||
sc.context = c
|
||||
sc.context.UserId = TestUserID
|
||||
@ -519,3 +830,10 @@ func postDashboardScenario(desc string, url string, routePattern string, role m.
|
||||
fn(sc)
|
||||
})
|
||||
}
|
||||
|
||||
func (sc *scenarioContext) ToJson() *simplejson.Json {
|
||||
var result *simplejson.Json
|
||||
err := json.NewDecoder(sc.resp.Body).Decode(&result)
|
||||
So(err, ShouldBeNil)
|
||||
return result
|
||||
}
|
||||
|
@ -2,17 +2,11 @@ package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
macaron "gopkg.in/macaron.v1"
|
||||
|
||||
"github.com/go-macaron/session"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
@ -54,88 +48,3 @@ func TestDataSourcesProxy(t *testing.T) {
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func loggedInUserScenario(desc string, url string, fn scenarioFunc) {
|
||||
loggedInUserScenarioWithRole(desc, "GET", url, url, models.ROLE_EDITOR, fn)
|
||||
}
|
||||
|
||||
func loggedInUserScenarioWithRole(desc string, method string, url string, routePattern string, role models.RoleType, fn scenarioFunc) {
|
||||
Convey(desc+" "+url, func() {
|
||||
defer bus.ClearBusHandlers()
|
||||
|
||||
sc := &scenarioContext{
|
||||
url: url,
|
||||
}
|
||||
viewsPath, _ := filepath.Abs("../../public/views")
|
||||
|
||||
sc.m = macaron.New()
|
||||
sc.m.Use(macaron.Renderer(macaron.RenderOptions{
|
||||
Directory: viewsPath,
|
||||
Delims: macaron.Delims{Left: "[[", Right: "]]"},
|
||||
}))
|
||||
|
||||
sc.m.Use(middleware.GetContextHandler())
|
||||
sc.m.Use(middleware.Sessioner(&session.Options{}))
|
||||
|
||||
sc.defaultHandler = wrap(func(c *middleware.Context) Response {
|
||||
sc.context = c
|
||||
sc.context.UserId = TestUserID
|
||||
sc.context.OrgId = TestOrgID
|
||||
sc.context.OrgRole = role
|
||||
if sc.handlerFunc != nil {
|
||||
return sc.handlerFunc(sc.context)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
switch method {
|
||||
case "GET":
|
||||
sc.m.Get(routePattern, sc.defaultHandler)
|
||||
case "DELETE":
|
||||
sc.m.Delete(routePattern, sc.defaultHandler)
|
||||
}
|
||||
|
||||
fn(sc)
|
||||
})
|
||||
}
|
||||
|
||||
func (sc *scenarioContext) fakeReq(method, url string) *scenarioContext {
|
||||
sc.resp = httptest.NewRecorder()
|
||||
req, err := http.NewRequest(method, url, nil)
|
||||
So(err, ShouldBeNil)
|
||||
sc.req = req
|
||||
|
||||
return sc
|
||||
}
|
||||
|
||||
func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext {
|
||||
sc.resp = httptest.NewRecorder()
|
||||
req, err := http.NewRequest(method, url, nil)
|
||||
q := req.URL.Query()
|
||||
for k, v := range queryParams {
|
||||
q.Add(k, v)
|
||||
}
|
||||
req.URL.RawQuery = q.Encode()
|
||||
So(err, ShouldBeNil)
|
||||
sc.req = req
|
||||
|
||||
return sc
|
||||
}
|
||||
|
||||
type scenarioContext struct {
|
||||
m *macaron.Macaron
|
||||
context *middleware.Context
|
||||
resp *httptest.ResponseRecorder
|
||||
handlerFunc handlerFunc
|
||||
defaultHandler macaron.Handler
|
||||
req *http.Request
|
||||
url string
|
||||
}
|
||||
|
||||
func (sc *scenarioContext) exec() {
|
||||
sc.m.ServeHTTP(sc.resp, sc.req)
|
||||
}
|
||||
|
||||
type scenarioFunc func(c *scenarioContext)
|
||||
type handlerFunc func(c *middleware.Context) Response
|
||||
|
@ -20,6 +20,7 @@ type AlertRule struct {
|
||||
EvalData *simplejson.Json `json:"evalData"`
|
||||
ExecutionError string `json:"executionError"`
|
||||
DashbboardUri string `json:"dashboardUri"`
|
||||
CanEdit bool `json:"canEdit"`
|
||||
}
|
||||
|
||||
type AlertNotification struct {
|
||||
|
@ -16,6 +16,7 @@ type DashboardMeta struct {
|
||||
CanAdmin bool `json:"canAdmin"`
|
||||
CanStar bool `json:"canStar"`
|
||||
Slug string `json:"slug"`
|
||||
Url string `json:"url"`
|
||||
Expires time.Time `json:"expires"`
|
||||
Created time.Time `json:"created"`
|
||||
Updated time.Time `json:"updated"`
|
||||
@ -26,6 +27,7 @@ type DashboardMeta struct {
|
||||
IsFolder bool `json:"isFolder"`
|
||||
FolderId int64 `json:"folderId"`
|
||||
FolderTitle string `json:"folderTitle"`
|
||||
FolderSlug string `json:"folderSlug"`
|
||||
}
|
||||
|
||||
type DashboardFullWithMeta struct {
|
||||
|
@ -162,6 +162,10 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron {
|
||||
hs.mapStatic(m, setting.StaticRootPath, "", "public")
|
||||
hs.mapStatic(m, setting.StaticRootPath, "robots.txt", "robots.txt")
|
||||
|
||||
if setting.ImageUploadProvider == "local" {
|
||||
hs.mapStatic(m, setting.ImagesDir, "", "/public/img/attachments")
|
||||
}
|
||||
|
||||
m.Use(macaron.Renderer(macaron.RenderOptions{
|
||||
Directory: path.Join(setting.StaticRootPath, "views"),
|
||||
IndentJSON: macaron.Env != macaron.PROD,
|
||||
|
@ -102,8 +102,8 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
|
||||
}
|
||||
|
||||
dashboardChildNavs := []*dtos.NavLink{
|
||||
{Text: "Home", Url: setting.AppSubUrl + "/", Icon: "gicon gicon-home", HideFromTabs: true},
|
||||
{Divider: true, HideFromTabs: true},
|
||||
{Text: "Home", Id: "home", Url: setting.AppSubUrl + "/", Icon: "gicon gicon-home", HideFromTabs: true},
|
||||
{Text: "Divider", Divider: true, Id: "divider", HideFromTabs: true},
|
||||
{Text: "Manage", Id: "manage-dashboards", Url: setting.AppSubUrl + "/dashboards", Icon: "gicon gicon-manage"},
|
||||
{Text: "Playlists", Id: "playlists", Url: setting.AppSubUrl + "/playlists", Icon: "gicon gicon-playlists"},
|
||||
{Text: "Snapshots", Id: "snapshots", Url: setting.AppSubUrl + "/dashboard/snapshots", Icon: "gicon gicon-snapshots"},
|
||||
@ -261,7 +261,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
|
||||
|
||||
if c.IsGrafanaAdmin {
|
||||
cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{
|
||||
Divider: true, HideFromTabs: true,
|
||||
Divider: true, HideFromTabs: true, Id: "admin-divider", Text: "Text",
|
||||
})
|
||||
cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{
|
||||
Text: "Server Admin",
|
||||
|
@ -102,12 +102,13 @@ func LoginPost(c *middleware.Context, cmd dtos.LoginCommand) Response {
	}

	authQuery := login.LoginUserQuery{
		Username: cmd.User,
		Password: cmd.Password,
		Username:  cmd.User,
		Password:  cmd.Password,
		IpAddress: c.Req.RemoteAddr,
	}

	if err := bus.Dispatch(&authQuery); err != nil {
		if err == login.ErrInvalidCredentials {
		if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts {
			return ApiError(401, "Invalid username or password", err)
		}

@ -1,6 +1,7 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/rand"
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
@ -11,7 +12,6 @@ import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"golang.org/x/net/context"
|
||||
"golang.org/x/oauth2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
@ -29,13 +29,13 @@ var (
|
||||
ErrSignUpNotAllowed = errors.New("Signup is not allowed for this adapter")
|
||||
ErrUsersQuotaReached = errors.New("Users quota reached")
|
||||
ErrNoEmail = errors.New("Login provider didn't return an email address")
|
||||
oauthLogger = log.New("oauth.login")
|
||||
oauthLogger = log.New("oauth")
|
||||
)
|
||||
|
||||
func GenStateString() string {
	rnd := make([]byte, 32)
	rand.Read(rnd)
	return base64.StdEncoding.EncodeToString(rnd)
	return base64.URLEncoding.EncodeToString(rnd)
}
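
The change from StdEncoding to URLEncoding matters because the OAuth state value travels as a query-string parameter. A minimal illustration, not part of the patch, of why the URL-safe alphabet is preferable:

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// Standard base64 can emit '+' and '/', which need percent-encoding in URLs;
	// the URL-safe alphabet substitutes '-' and '_'.
	raw := []byte{0xfb, 0xff, 0xfe}
	fmt.Println(base64.StdEncoding.EncodeToString(raw)) // +//+
	fmt.Println(base64.URLEncoding.EncodeToString(raw)) // -__-
}
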
func OAuthLogin(ctx *middleware.Context) {
|
||||
@ -96,7 +96,9 @@ func OAuthLogin(ctx *middleware.Context) {
|
||||
if setting.OAuthService.OAuthInfos[name].TlsClientCert != "" || setting.OAuthService.OAuthInfos[name].TlsClientKey != "" {
|
||||
cert, err := tls.LoadX509KeyPair(setting.OAuthService.OAuthInfos[name].TlsClientCert, setting.OAuthService.OAuthInfos[name].TlsClientKey)
|
||||
if err != nil {
|
||||
log.Fatal(1, "Failed to setup TlsClientCert", "oauth provider", name, "error", err)
|
||||
ctx.Logger.Error("Failed to setup TlsClientCert", "oauth", name, "error", err)
|
||||
ctx.Handle(500, "login.OAuthLogin(Failed to setup TlsClientCert)", nil)
|
||||
return
|
||||
}
|
||||
|
||||
tr.TLSClientConfig.Certificates = append(tr.TLSClientConfig.Certificates, cert)
|
||||
@ -105,7 +107,9 @@ func OAuthLogin(ctx *middleware.Context) {
|
||||
if setting.OAuthService.OAuthInfos[name].TlsClientCa != "" {
|
||||
caCert, err := ioutil.ReadFile(setting.OAuthService.OAuthInfos[name].TlsClientCa)
|
||||
if err != nil {
|
||||
log.Fatal(1, "Failed to setup TlsClientCa", "oauth provider", name, "error", err)
|
||||
ctx.Logger.Error("Failed to setup TlsClientCa", "oauth", name, "error", err)
|
||||
ctx.Handle(500, "login.OAuthLogin(Failed to setup TlsClientCa)", nil)
|
||||
return
|
||||
}
|
||||
caCertPool := x509.NewCertPool()
|
||||
caCertPool.AppendCertsFromPEM(caCert)
|
||||
@ -124,13 +128,13 @@ func OAuthLogin(ctx *middleware.Context) {
|
||||
// token.TokenType was defaulting to "bearer", which is out of spec, so we explicitly set to "Bearer"
|
||||
token.TokenType = "Bearer"
|
||||
|
||||
ctx.Logger.Debug("OAuthLogin Got token")
|
||||
oauthLogger.Debug("OAuthLogin Got token", "token", token)
|
||||
|
||||
// set up oauth2 client
|
||||
client := connect.Client(oauthCtx, token)
|
||||
|
||||
// get user info
|
||||
userInfo, err := connect.UserInfo(client)
|
||||
userInfo, err := connect.UserInfo(client, token)
|
||||
if err != nil {
|
||||
if sErr, ok := err.(*social.Error); ok {
|
||||
redirectWithError(ctx, sErr)
|
||||
@ -140,7 +144,7 @@ func OAuthLogin(ctx *middleware.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
ctx.Logger.Debug("OAuthLogin got user info", "userInfo", userInfo)
|
||||
oauthLogger.Debug("OAuthLogin got user info", "userInfo", userInfo)
|
||||
|
||||
// validate that we got at least an email address
|
||||
if userInfo.Email == "" {
|
||||
@ -205,8 +209,7 @@ func OAuthLogin(ctx *middleware.Context) {
|
||||
}
|
||||
|
||||
func redirectWithError(ctx *middleware.Context, err error, v ...interface{}) {
|
||||
ctx.Logger.Info(err.Error(), v...)
|
||||
// TODO: we can use the flash storage here once it's implemented
|
||||
ctx.Logger.Error(err.Error(), v...)
|
||||
ctx.Session.Set("loginError", err.Error())
|
||||
ctx.Redirect(setting.AppSubUrl + "/login")
|
||||
}
|
||||
|
@ -31,11 +31,12 @@ func RenderToPng(c *middleware.Context) {

	pngPath, err := renderer.RenderToPng(renderOpts)

	if err != nil {
		if err == renderer.ErrTimeout {
			c.Handle(500, err.Error(), err)
		}
	if err != nil && err == renderer.ErrTimeout {
		c.Handle(500, err.Error(), err)
		return
	}

	if err != nil {
		c.Handle(500, "Rendering failed.", err)
		return
	}

@ -62,17 +62,22 @@ func (g *GrafanaServerImpl) Start() error {
	search.Init()
	login.Init()
	social.NewOAuthService()
	plugins.Init()

	pluginManager, err := plugins.NewPluginManager(g.context)
	if err != nil {
		return fmt.Errorf("Failed to start plugins. error: %v", err)
	}
	g.childRoutines.Go(func() error { return pluginManager.Run(g.context) })

	if err := provisioning.Init(g.context, setting.HomePath, setting.Cfg); err != nil {
		return fmt.Errorf("Failed to provision Grafana from config. error: %v", err)
	}

	closer, err := tracing.Init(setting.Cfg)
	tracingCloser, err := tracing.Init(setting.Cfg)
	if err != nil {
		return fmt.Errorf("Tracing settings is not valid. error: %v", err)
	}
	defer closer.Close()
	defer tracingCloser.Close()

	// init alerting
	if setting.AlertingEnabled && setting.ExecuteAlerts {

320	pkg/components/imguploader/azureblobuploader.go	Normal file
@ -0,0 +1,320 @@
package imguploader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/hmac"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/xml"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"mime"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/log"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
)
|
||||
|
||||
type AzureBlobUploader struct {
|
||||
account_name string
|
||||
account_key string
|
||||
container_name string
|
||||
log log.Logger
|
||||
}
|
||||
|
||||
func NewAzureBlobUploader(account_name string, account_key string, container_name string) *AzureBlobUploader {
|
||||
return &AzureBlobUploader{
|
||||
account_name: account_name,
|
||||
account_key: account_key,
|
||||
container_name: container_name,
|
||||
log: log.New("azureBlobUploader"),
|
||||
}
|
||||
}
|
||||
|
||||
// Receive path of image on disk and return azure blob url
|
||||
func (az *AzureBlobUploader) Upload(ctx context.Context, imageDiskPath string) (string, error) {
|
||||
// setup client
|
||||
blob := NewStorageClient(az.account_name, az.account_key)
|
||||
|
||||
file, err := os.Open(imageDiskPath)
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
randomFileName := util.GetRandomString(30) + ".png"
|
||||
// upload image
|
||||
az.log.Debug("Uploading image to azure_blob", "conatiner_name", az.container_name, "blob_name", randomFileName)
|
||||
resp, err := blob.FileUpload(az.container_name, randomFileName, file)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if resp.StatusCode > 400 && resp.StatusCode < 600 {
|
||||
body, _ := ioutil.ReadAll(io.LimitReader(resp.Body, 1<<20))
|
||||
aerr := &Error{
|
||||
Code: resp.StatusCode,
|
||||
Status: resp.Status,
|
||||
Body: body,
|
||||
Header: resp.Header,
|
||||
}
|
||||
aerr.parseXML()
|
||||
resp.Body.Close()
|
||||
return "", aerr
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("https://%s.blob.core.windows.net/%s/%s", az.account_name, az.container_name, randomFileName)
|
||||
return url, nil
|
||||
}
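
A minimal usage sketch, not part of the patch, of driving the uploader directly; the account name, key, and container below are placeholders, and in Grafana they come from the external_image_storage.azure_blob config section:

package main

import (
	"context"
	"fmt"

	"github.com/grafana/grafana/pkg/components/imguploader"
)

func main() {
	// Placeholder credentials; Grafana reads these from its configuration.
	uploader := imguploader.NewAzureBlobUploader("myaccount", "bXktYWNjb3VudC1rZXk=", "grafana-images")

	url, err := uploader.Upload(context.Background(), "/tmp/panel.png")
	if err != nil {
		fmt.Println("upload failed:", err)
		return
	}

	// Prints https://myaccount.blob.core.windows.net/grafana-images/<random>.png
	fmt.Println(url)
}
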
|
||||
|
||||
// --- AZURE LIBRARY
|
||||
type Blobs struct {
|
||||
XMLName xml.Name `xml:"EnumerationResults"`
|
||||
Items []Blob `xml:"Blobs>Blob"`
|
||||
}
|
||||
|
||||
type Blob struct {
|
||||
Name string `xml:"Name"`
|
||||
Property Property `xml:"Properties"`
|
||||
}
|
||||
|
||||
type Property struct {
|
||||
LastModified string `xml:"Last-Modified"`
|
||||
Etag string `xml:"Etag"`
|
||||
ContentLength int `xml:"Content-Length"`
|
||||
ContentType string `xml:"Content-Type"`
|
||||
BlobType string `xml:"BlobType"`
|
||||
LeaseStatus string `xml:"LeaseStatus"`
|
||||
}
|
||||
|
||||
type Error struct {
|
||||
Code int
|
||||
Status string
|
||||
Body []byte
|
||||
Header http.Header
|
||||
|
||||
AzureCode string
|
||||
}
|
||||
|
||||
func (e *Error) Error() string {
|
||||
return fmt.Sprintf("status %d: %s", e.Code, e.Body)
|
||||
}
|
||||
|
||||
func (e *Error) parseXML() {
|
||||
var xe xmlError
|
||||
_ = xml.NewDecoder(bytes.NewReader(e.Body)).Decode(&xe)
|
||||
e.AzureCode = xe.Code
|
||||
}
|
||||
|
||||
type xmlError struct {
|
||||
XMLName xml.Name `xml:"Error"`
|
||||
Code string
|
||||
Message string
|
||||
}
|
||||
|
||||
const ms_date_layout = "Mon, 02 Jan 2006 15:04:05 GMT"
|
||||
const version = "2017-04-17"
|
||||
|
||||
var client = &http.Client{}
|
||||
|
||||
type StorageClient struct {
|
||||
Auth *Auth
|
||||
Transport http.RoundTripper
|
||||
}
|
||||
|
||||
func (c *StorageClient) transport() http.RoundTripper {
|
||||
if c.Transport != nil {
|
||||
return c.Transport
|
||||
}
|
||||
return http.DefaultTransport
|
||||
}
|
||||
|
||||
func NewStorageClient(account, accessKey string) *StorageClient {
|
||||
return &StorageClient{
|
||||
Auth: &Auth{
|
||||
account,
|
||||
accessKey,
|
||||
},
|
||||
Transport: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *StorageClient) absUrl(format string, a ...interface{}) string {
|
||||
part := fmt.Sprintf(format, a...)
|
||||
return fmt.Sprintf("https://%s.blob.core.windows.net/%s", c.Auth.Account, part)
|
||||
}
|
||||
|
||||
func copyHeadersToRequest(req *http.Request, headers map[string]string) {
|
||||
for k, v := range headers {
|
||||
req.Header[k] = []string{v}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *StorageClient) FileUpload(container, blobName string, body io.Reader) (*http.Response, error) {
|
||||
blobName = escape(blobName)
|
||||
extension := strings.ToLower(path.Ext(blobName))
|
||||
contentType := mime.TypeByExtension(extension)
|
||||
buf := new(bytes.Buffer)
|
||||
buf.ReadFrom(body)
|
||||
req, err := http.NewRequest(
|
||||
"PUT",
|
||||
c.absUrl("%s/%s", container, blobName),
|
||||
buf,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
copyHeadersToRequest(req, map[string]string{
|
||||
"x-ms-blob-type": "BlockBlob",
|
||||
"x-ms-date": time.Now().UTC().Format(ms_date_layout),
|
||||
"x-ms-version": version,
|
||||
"Accept-Charset": "UTF-8",
|
||||
"Content-Type": contentType,
|
||||
"Content-Length": strconv.Itoa(buf.Len()),
|
||||
})
|
||||
|
||||
c.Auth.SignRequest(req)
|
||||
|
||||
return c.transport().RoundTrip(req)
|
||||
}
|
||||
|
||||
func escape(content string) string {
|
||||
content = url.QueryEscape(content)
|
||||
// the Azure's behavior uses %20 to represent whitespace instead of + (plus)
|
||||
content = strings.Replace(content, "+", "%20", -1)
|
||||
// the Azure's behavior uses slash instead of + %2F
|
||||
content = strings.Replace(content, "%2F", "/", -1)
|
||||
|
||||
return content
|
||||
}
|
||||
|
||||
type Auth struct {
|
||||
Account string
|
||||
Key string
|
||||
}
|
||||
|
||||
func (a *Auth) SignRequest(req *http.Request) {
|
||||
strToSign := fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s",
|
||||
strings.ToUpper(req.Method),
|
||||
tryget(req.Header, "Content-Encoding"),
|
||||
tryget(req.Header, "Content-Language"),
|
||||
tryget(req.Header, "Content-Length"),
|
||||
tryget(req.Header, "Content-MD5"),
|
||||
tryget(req.Header, "Content-Type"),
|
||||
tryget(req.Header, "Date"),
|
||||
tryget(req.Header, "If-Modified-Since"),
|
||||
tryget(req.Header, "If-Match"),
|
||||
tryget(req.Header, "If-None-Match"),
|
||||
tryget(req.Header, "If-Unmodified-Since"),
|
||||
tryget(req.Header, "Range"),
|
||||
a.canonicalizedHeaders(req),
|
||||
a.canonicalizedResource(req),
|
||||
)
|
||||
decodedKey, _ := base64.StdEncoding.DecodeString(a.Key)
|
||||
|
||||
sha256 := hmac.New(sha256.New, []byte(decodedKey))
|
||||
sha256.Write([]byte(strToSign))
|
||||
|
||||
signature := base64.StdEncoding.EncodeToString(sha256.Sum(nil))
|
||||
|
||||
copyHeadersToRequest(req, map[string]string{
|
||||
"Authorization": fmt.Sprintf("SharedKey %s:%s", a.Account, signature),
|
||||
})
|
||||
}
|
||||
|
||||
func tryget(headers map[string][]string, key string) string {
|
||||
// We default to empty string for "0" values to match server side behavior when generating signatures.
|
||||
if len(headers[key]) > 0 { // && headers[key][0] != "0" { //&& key != "Content-Length" {
|
||||
return headers[key][0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
//
|
||||
// The following is copied ~95% verbatim from:
|
||||
// http://github.com/loldesign/azure/ -> core/core.go
|
||||
//
|
||||
|
||||
/*
|
||||
Based on Azure docs:
|
||||
Link: http://msdn.microsoft.com/en-us/library/windowsazure/dd179428.aspx#Constructing_Element
|
||||
|
||||
1) Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.
|
||||
2) Convert each HTTP header name to lowercase.
|
||||
3) Sort the headers lexicographically by header name, in ascending order. Note that each header may appear only once in the string.
|
||||
4) Unfold the string by replacing any breaking white space with a single space.
|
||||
5) Trim any white space around the colon in the header.
|
||||
6) Finally, append a new line character to each canonicalized header in the resulting list. Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.
|
||||
*/
|
||||
func (a *Auth) canonicalizedHeaders(req *http.Request) string {
|
||||
var buffer bytes.Buffer
|
||||
|
||||
for key, value := range req.Header {
|
||||
lowerKey := strings.ToLower(key)
|
||||
|
||||
if strings.HasPrefix(lowerKey, "x-ms-") {
|
||||
if buffer.Len() == 0 {
|
||||
buffer.WriteString(fmt.Sprintf("%s:%s", lowerKey, value[0]))
|
||||
} else {
|
||||
buffer.WriteString(fmt.Sprintf("\n%s:%s", lowerKey, value[0]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
splitted := strings.Split(buffer.String(), "\n")
|
||||
sort.Strings(splitted)
|
||||
|
||||
return strings.Join(splitted, "\n")
|
||||
}
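
To make the canonicalization rules above concrete, here is a hedged, in-package sketch (not part of the patch; it assumes fmt and net/http are imported) of what canonicalizedHeaders yields for a typical blob PUT:

func ExampleAuth_canonicalizedHeaders() {
	req, _ := http.NewRequest("PUT", "https://myaccount.blob.core.windows.net/grafana/img.png", nil)
	copyHeadersToRequest(req, map[string]string{
		"x-ms-blob-type": "BlockBlob",
		"x-ms-date":      "Mon, 02 Jan 2006 15:04:05 GMT",
		"x-ms-version":   version,
	})

	a := &Auth{Account: "myaccount"}
	fmt.Println(a.canonicalizedHeaders(req))
	// Output:
	// x-ms-blob-type:BlockBlob
	// x-ms-date:Mon, 02 Jan 2006 15:04:05 GMT
	// x-ms-version:2017-04-17
}
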
|
||||
|
||||
/*
|
||||
Based on Azure docs
|
||||
Link: http://msdn.microsoft.com/en-us/library/windowsazure/dd179428.aspx#Constructing_Element
|
||||
|
||||
1) Beginning with an empty string (""), append a forward slash (/), followed by the name of the account that owns the resource being accessed.
|
||||
2) Append the resource's encoded URI path, without any query parameters.
|
||||
3) Retrieve all query parameters on the resource URI, including the comp parameter if it exists.
|
||||
4) Convert all parameter names to lowercase.
|
||||
5) Sort the query parameters lexicographically by parameter name, in ascending order.
|
||||
6) URL-decode each query parameter name and value.
|
||||
7) Append each query parameter name and value to the string in the following format, making sure to include the colon (:) between the name and the value:
|
||||
parameter-name:parameter-value
|
||||
|
||||
8) If a query parameter has more than one value, sort all values lexicographically, then include them in a comma-separated list:
|
||||
parameter-name:parameter-value-1,parameter-value-2,parameter-value-n
|
||||
|
||||
9) Append a new line character (\n) after each name-value pair.
|
||||
|
||||
Rules:
|
||||
1) Avoid using the new line character (\n) in values for query parameters. If it must be used, ensure that it does not affect the format of the canonicalized resource string.
|
||||
2) Avoid using commas in query parameter values.
|
||||
*/
|
||||
func (a *Auth) canonicalizedResource(req *http.Request) string {
|
||||
var buffer bytes.Buffer
|
||||
|
||||
buffer.WriteString(fmt.Sprintf("/%s%s", a.Account, req.URL.Path))
|
||||
queries := req.URL.Query()
|
||||
|
||||
for key, values := range queries {
|
||||
sort.Strings(values)
|
||||
buffer.WriteString(fmt.Sprintf("\n%s:%s", key, strings.Join(values, ",")))
|
||||
}
|
||||
|
||||
splitted := strings.Split(buffer.String(), "\n")
|
||||
sort.Strings(splitted)
|
||||
|
||||
return strings.Join(splitted, "\n")
|
||||
}
|
24	pkg/components/imguploader/azureblobuploader_test.go	Normal file
@ -0,0 +1,24 @@
package imguploader

import (
	"context"
	"testing"

	"github.com/grafana/grafana/pkg/setting"
	. "github.com/smartystreets/goconvey/convey"
)

func TestUploadToAzureBlob(t *testing.T) {
	SkipConvey("[Integration test] for external_image_store.azure_blob", t, func() {
		err := setting.NewConfigContext(&setting.CommandLineArgs{
			HomePath: "../../../",
		})

		uploader, _ := NewImageUploader()

		path, err := uploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png")

		So(err, ShouldBeNil)
		So(path, ShouldNotEqual, "")
	})
}
@ -3,6 +3,7 @@ package imguploader
import (
	"context"
	"fmt"
	"github.com/grafana/grafana/pkg/log"
	"regexp"

	"github.com/grafana/grafana/pkg/setting"
@ -76,6 +77,23 @@ func NewImageUploader() (ImageUploader, error) {
		path := gcssec.Key("path").MustString("")

		return NewGCSUploader(keyFile, bucketName, path), nil
	case "azure_blob":
		azureBlobSec, err := setting.Cfg.GetSection("external_image_storage.azure_blob")
		if err != nil {
			return nil, err
		}

		account_name := azureBlobSec.Key("account_name").MustString("")
		account_key := azureBlobSec.Key("account_key").MustString("")
		container_name := azureBlobSec.Key("container_name").MustString("")

		return NewAzureBlobUploader(account_name, account_key, container_name), nil
	case "local":
		return NewLocalImageUploader()
	}

	if setting.ImageUploadProvider != "" {
		log.Error2("The external image storage configuration is invalid", "unsupported provider", setting.ImageUploadProvider)
	}

	return NopImageUploader{}, nil

@ -119,5 +119,47 @@ func TestImageUploaderFactory(t *testing.T) {
|
||||
So(original.keyFile, ShouldEqual, "/etc/secrets/project-79a52befa3f6.json")
|
||||
So(original.bucket, ShouldEqual, "project-grafana-east")
|
||||
})
|
||||
|
||||
Convey("AzureBlobUploader config", func() {
|
||||
setting.NewConfigContext(&setting.CommandLineArgs{
|
||||
HomePath: "../../../",
|
||||
})
|
||||
setting.ImageUploadProvider = "azure_blob"
|
||||
|
||||
Convey("with container name", func() {
|
||||
azureBlobSec, err := setting.Cfg.GetSection("external_image_storage.azure_blob")
|
||||
azureBlobSec.NewKey("account_name", "account_name")
|
||||
azureBlobSec.NewKey("account_key", "account_key")
|
||||
azureBlobSec.NewKey("container_name", "container_name")
|
||||
|
||||
uploader, err := NewImageUploader()
|
||||
|
||||
So(err, ShouldBeNil)
|
||||
original, ok := uploader.(*AzureBlobUploader)
|
||||
|
||||
So(ok, ShouldBeTrue)
|
||||
So(original.account_name, ShouldEqual, "account_name")
|
||||
So(original.account_key, ShouldEqual, "account_key")
|
||||
So(original.container_name, ShouldEqual, "container_name")
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Local uploader", func() {
|
||||
var err error
|
||||
|
||||
setting.NewConfigContext(&setting.CommandLineArgs{
|
||||
HomePath: "../../../",
|
||||
})
|
||||
|
||||
setting.ImageUploadProvider = "local"
|
||||
|
||||
uploader, err := NewImageUploader()
|
||||
|
||||
So(err, ShouldBeNil)
|
||||
original, ok := uploader.(*LocalUploader)
|
||||
|
||||
So(ok, ShouldBeTrue)
|
||||
So(original, ShouldNotBeNil)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
22	pkg/components/imguploader/localuploader.go	Normal file
@ -0,0 +1,22 @@
package imguploader

import (
	"context"
	"path"
	"path/filepath"

	"github.com/grafana/grafana/pkg/setting"
)

type LocalUploader struct {
}

func (u *LocalUploader) Upload(ctx context.Context, imageOnDiskPath string) (string, error) {
	filename := filepath.Base(imageOnDiskPath)
	image_url := setting.ToAbsUrl(path.Join("public/img/attachments", filename))
	return image_url, nil
}

func NewLocalImageUploader() (*LocalUploader, error) {
	return &LocalUploader{}, nil
}
18	pkg/components/imguploader/localuploader_test.go	Normal file
@ -0,0 +1,18 @@
package imguploader

import (
	"context"
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func TestUploadToLocal(t *testing.T) {
	Convey("[Integration test] for external_image_store.local", t, func() {
		localUploader, _ := NewLocalImageUploader()
		path, err := localUploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png")

		So(err, ShouldBeNil)
		So(path, ShouldContainSubstring, "/public/img/attachments")
	})
}
@ -91,9 +91,15 @@ func RenderToPng(params *RenderOpts) (string, error) {
		timeout = 15
	}

	phantomDebugArg := "--debug=false"
	if log.GetLogLevelFor("png-renderer") >= log.LvlDebug {
		phantomDebugArg = "--debug=true"
	}

	cmdArgs := []string{
		"--ignore-ssl-errors=true",
		"--web-security=false",
		phantomDebugArg,
		scriptPath,
		"url=" + url,
		"width=" + params.Width,
@ -109,15 +115,13 @@ func RenderToPng(params *RenderOpts) (string, error) {
	}

	cmd := exec.Command(binPath, cmdArgs...)
	stdout, err := cmd.StdoutPipe()
	output, err := cmd.StdoutPipe()

	if err != nil {
		rendererLog.Error("Could not acquire stdout pipe", err)
		return "", err
	}
	stderr, err := cmd.StderrPipe()
	if err != nil {
		return "", err
	}
	cmd.Stderr = cmd.Stdout

	if params.Timezone != "" {
		baseEnviron := os.Environ()
@ -126,11 +130,12 @@ func RenderToPng(params *RenderOpts) (string, error) {

	err = cmd.Start()
	if err != nil {
		rendererLog.Error("Could not start command", err)
		return "", err
	}

	go io.Copy(os.Stdout, stdout)
	go io.Copy(os.Stdout, stderr)
	logWriter := log.NewLogWriter(rendererLog, log.LvlDebug, "[phantom] ")
	go io.Copy(logWriter, output)

	done := make(chan error)
	go func() {

@ -21,6 +21,7 @@ import (

var Root log15.Logger
var loggersToClose []DisposableHandler
var filters map[string]log15.Lvl

func init() {
	loggersToClose = make([]DisposableHandler, 0)
@ -114,6 +115,25 @@ func Close() {
	loggersToClose = make([]DisposableHandler, 0)
}

func GetLogLevelFor(name string) Lvl {
	if level, ok := filters[name]; ok {
		switch level {
		case log15.LvlWarn:
			return LvlWarn
		case log15.LvlInfo:
			return LvlInfo
		case log15.LvlError:
			return LvlError
		case log15.LvlCrit:
			return LvlCrit
		default:
			return LvlDebug
		}
	}

	return LvlInfo
}

var logLevels = map[string]log15.Lvl{
	"trace": log15.LvlDebug,
	"debug": log15.LvlDebug,
@ -187,7 +207,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) {
|
||||
|
||||
// Log level.
|
||||
_, level := getLogLevelFromConfig("log."+mode, defaultLevelName, cfg)
|
||||
modeFilters := getFilters(util.SplitString(sec.Key("filters").String()))
|
||||
filters := getFilters(util.SplitString(sec.Key("filters").String()))
|
||||
format := getLogFormat(sec.Key("format").MustString(""))
|
||||
|
||||
var handler log15.Handler
|
||||
@ -219,12 +239,12 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) {
|
||||
}
|
||||
|
||||
for key, value := range defaultFilters {
|
||||
if _, exist := modeFilters[key]; !exist {
|
||||
modeFilters[key] = value
|
||||
if _, exist := filters[key]; !exist {
|
||||
filters[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
handler = LogFilterHandler(level, modeFilters, handler)
|
||||
handler = LogFilterHandler(level, filters, handler)
|
||||
handlers = append(handlers, handler)
|
||||
}
|
||||
|
||||
@ -236,8 +256,8 @@ func LogFilterHandler(maxLevel log15.Lvl, filters map[string]log15.Lvl, h log15.
|
||||
|
||||
if len(filters) > 0 {
|
||||
for i := 0; i < len(r.Ctx); i += 2 {
|
||||
key := r.Ctx[i].(string)
|
||||
if key == "logger" {
|
||||
key, ok := r.Ctx[i].(string)
|
||||
if ok && key == "logger" {
|
||||
loggerName, strOk := r.Ctx[i+1].(string)
|
||||
if strOk {
|
||||
if filterLevel, ok := filters[loggerName]; ok {
|
||||
|
39	pkg/log/log_writer.go	Normal file
@ -0,0 +1,39 @@
package log

import (
	"io"
	"strings"
)

type logWriterImpl struct {
	log    Logger
	level  Lvl
	prefix string
}

func NewLogWriter(log Logger, level Lvl, prefix string) io.Writer {
	return &logWriterImpl{
		log:    log,
		level:  level,
		prefix: prefix,
	}
}

func (l *logWriterImpl) Write(p []byte) (n int, err error) {
	message := l.prefix + strings.TrimSpace(string(p))

	switch l.level {
	case LvlCrit:
		l.log.Crit(message)
	case LvlError:
		l.log.Error(message)
	case LvlWarn:
		l.log.Warn(message)
	case LvlInfo:
		l.log.Info(message)
	default:
		l.log.Debug(message)
	}

	return len(p), nil
}
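
NewLogWriter adapts a Grafana logger to io.Writer, which is what lets render.go above forward PhantomJS output into the structured log. A small stand-alone sketch of the same pattern (the echo command is only a stand-in for the real binary):

package main

import (
	"io"
	"os/exec"

	"github.com/grafana/grafana/pkg/log"
)

func main() {
	// Stand-in child process; the renderer runs the phantomjs binary here.
	cmd := exec.Command("echo", "hello from a child process")
	output, _ := cmd.StdoutPipe()
	cmd.Stderr = cmd.Stdout // merge stderr into the same stream, as render.go does

	// Everything the child prints is forwarded at debug level with a prefix.
	logWriter := log.NewLogWriter(log.New("png-renderer"), log.LvlDebug, "[phantom] ")
	go io.Copy(logWriter, output)

	cmd.Start()
	cmd.Wait()
}
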
116	pkg/log/log_writer_test.go	Normal file
@ -0,0 +1,116 @@
|
||||
package log
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/inconshreveable/log15"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
type FakeLogger struct {
|
||||
debug string
|
||||
info string
|
||||
warn string
|
||||
err string
|
||||
crit string
|
||||
}
|
||||
|
||||
func (f *FakeLogger) New(ctx ...interface{}) log15.Logger {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *FakeLogger) Debug(msg string, ctx ...interface{}) {
|
||||
f.debug = msg
|
||||
}
|
||||
|
||||
func (f *FakeLogger) Info(msg string, ctx ...interface{}) {
|
||||
f.info = msg
|
||||
}
|
||||
|
||||
func (f *FakeLogger) Warn(msg string, ctx ...interface{}) {
|
||||
f.warn = msg
|
||||
}
|
||||
|
||||
func (f *FakeLogger) Error(msg string, ctx ...interface{}) {
|
||||
f.err = msg
|
||||
}
|
||||
|
||||
func (f *FakeLogger) Crit(msg string, ctx ...interface{}) {
|
||||
f.crit = msg
|
||||
}
|
||||
|
||||
func (f *FakeLogger) GetHandler() log15.Handler {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *FakeLogger) SetHandler(l log15.Handler) {}
|
||||
|
||||
func TestLogWriter(t *testing.T) {
|
||||
Convey("When writing to a LogWriter", t, func() {
|
||||
Convey("Should write using the correct level [crit]", func() {
|
||||
fake := &FakeLogger{}
|
||||
|
||||
crit := NewLogWriter(fake, LvlCrit, "")
|
||||
n, err := crit.Write([]byte("crit"))
|
||||
|
||||
So(n, ShouldEqual, 4)
|
||||
So(err, ShouldBeNil)
|
||||
So(fake.crit, ShouldEqual, "crit")
|
||||
})
|
||||
|
||||
Convey("Should write using the correct level [error]", func() {
|
||||
fake := &FakeLogger{}
|
||||
|
||||
crit := NewLogWriter(fake, LvlError, "")
|
||||
n, err := crit.Write([]byte("error"))
|
||||
|
||||
So(n, ShouldEqual, 5)
|
||||
So(err, ShouldBeNil)
|
||||
So(fake.err, ShouldEqual, "error")
|
||||
})
|
||||
|
||||
Convey("Should write using the correct level [warn]", func() {
|
||||
fake := &FakeLogger{}
|
||||
|
||||
crit := NewLogWriter(fake, LvlWarn, "")
|
||||
n, err := crit.Write([]byte("warn"))
|
||||
|
||||
So(n, ShouldEqual, 4)
|
||||
So(err, ShouldBeNil)
|
||||
So(fake.warn, ShouldEqual, "warn")
|
||||
})
|
||||
|
||||
Convey("Should write using the correct level [info]", func() {
|
||||
fake := &FakeLogger{}
|
||||
|
||||
crit := NewLogWriter(fake, LvlInfo, "")
|
||||
n, err := crit.Write([]byte("info"))
|
||||
|
||||
So(n, ShouldEqual, 4)
|
||||
So(err, ShouldBeNil)
|
||||
So(fake.info, ShouldEqual, "info")
|
||||
})
|
||||
|
||||
Convey("Should write using the correct level [debug]", func() {
|
||||
fake := &FakeLogger{}
|
||||
|
||||
crit := NewLogWriter(fake, LvlDebug, "")
|
||||
n, err := crit.Write([]byte("debug"))
|
||||
|
||||
So(n, ShouldEqual, 5)
|
||||
So(err, ShouldBeNil)
|
||||
So(fake.debug, ShouldEqual, "debug")
|
||||
})
|
||||
|
||||
Convey("Should prefix the output with the prefix", func() {
|
||||
fake := &FakeLogger{}
|
||||
|
||||
crit := NewLogWriter(fake, LvlDebug, "prefix")
|
||||
n, err := crit.Write([]byte("debug"))
|
||||
|
||||
So(n, ShouldEqual, 5) // n is how much of input consumed
|
||||
So(err, ShouldBeNil)
|
||||
So(fake.debug, ShouldEqual, "prefixdebug")
|
||||
})
|
||||
})
|
||||
}
|
@ -3,21 +3,20 @@ package login
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"crypto/subtle"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrInvalidCredentials = errors.New("Invalid Username or Password")
|
||||
ErrInvalidCredentials = errors.New("Invalid Username or Password")
|
||||
ErrTooManyLoginAttempts = errors.New("Too many consecutive incorrect login attempts for user. Login for user temporarily blocked")
|
||||
)
|
||||
|
||||
type LoginUserQuery struct {
|
||||
Username string
|
||||
Password string
|
||||
User *m.User
|
||||
Username string
|
||||
Password string
|
||||
User *m.User
|
||||
IpAddress string
|
||||
}
|
||||
|
||||
func Init() {
|
||||
@ -26,41 +25,31 @@ func Init() {
|
||||
}
|
||||
|
||||
func AuthenticateUser(query *LoginUserQuery) error {
|
||||
err := loginUsingGrafanaDB(query)
|
||||
if err == nil || err != ErrInvalidCredentials {
|
||||
if err := validateLoginAttempts(query.Username); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if setting.LdapEnabled {
|
||||
for _, server := range LdapCfg.Servers {
|
||||
author := NewLdapAuthenticator(server)
|
||||
err = author.Login(query)
|
||||
if err == nil || err != ErrInvalidCredentials {
|
||||
return err
|
||||
}
|
||||
err := loginUsingGrafanaDB(query)
|
||||
if err == nil || (err != m.ErrUserNotFound && err != ErrInvalidCredentials) {
|
||||
return err
|
||||
}
|
||||
|
||||
ldapEnabled, ldapErr := loginUsingLdap(query)
|
||||
if ldapEnabled {
|
||||
if ldapErr == nil || ldapErr != ErrInvalidCredentials {
|
||||
return ldapErr
|
||||
}
|
||||
|
||||
err = ldapErr
|
||||
}
|
||||
|
||||
if err == ErrInvalidCredentials {
|
||||
saveInvalidLoginAttempt(query)
|
||||
}
|
||||
|
||||
if err == m.ErrUserNotFound {
|
||||
return ErrInvalidCredentials
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func loginUsingGrafanaDB(query *LoginUserQuery) error {
|
||||
userQuery := m.GetUserByLoginQuery{LoginOrEmail: query.Username}
|
||||
|
||||
if err := bus.Dispatch(&userQuery); err != nil {
|
||||
if err == m.ErrUserNotFound {
|
||||
return ErrInvalidCredentials
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
user := userQuery.Result
|
||||
|
||||
passwordHashed := util.EncodePassword(query.Password, user.Salt)
|
||||
if subtle.ConstantTimeCompare([]byte(passwordHashed), []byte(user.Password)) != 1 {
|
||||
return ErrInvalidCredentials
|
||||
}
|
||||
|
||||
query.User = user
|
||||
return nil
|
||||
}
|
||||
|
214	pkg/login/auth_test.go	Normal file
@ -0,0 +1,214 @@
|
||||
package login
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestAuthenticateUser(t *testing.T) {
|
||||
Convey("Authenticate user", t, func() {
|
||||
authScenario("When a user authenticates having too many login attempts", func(sc *authScenarioContext) {
|
||||
mockLoginAttemptValidation(ErrTooManyLoginAttempts, sc)
|
||||
mockLoginUsingGrafanaDB(nil, sc)
|
||||
mockLoginUsingLdap(true, nil, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, ErrTooManyLoginAttempts)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeFalse)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeFalse)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When grafana user authenticate with valid credentials", func(sc *authScenarioContext) {
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(nil, sc)
|
||||
mockLoginUsingLdap(true, ErrInvalidCredentials, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, nil)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeFalse)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When grafana user authenticate and unexpected error occurs", func(sc *authScenarioContext) {
|
||||
customErr := errors.New("custom")
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(customErr, sc)
|
||||
mockLoginUsingLdap(true, ErrInvalidCredentials, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, customErr)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeFalse)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When a non-existing grafana user authenticate and ldap disabled", func(sc *authScenarioContext) {
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc)
|
||||
mockLoginUsingLdap(false, nil, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, ErrInvalidCredentials)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When a non-existing grafana user authenticate and invalid ldap credentials", func(sc *authScenarioContext) {
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc)
|
||||
mockLoginUsingLdap(true, ErrInvalidCredentials, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, ErrInvalidCredentials)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When a non-existing grafana user authenticate and valid ldap credentials", func(sc *authScenarioContext) {
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc)
|
||||
mockLoginUsingLdap(true, nil, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldBeNil)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When a non-existing grafana user authenticate and ldap returns unexpected error", func(sc *authScenarioContext) {
|
||||
customErr := errors.New("custom")
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc)
|
||||
mockLoginUsingLdap(true, customErr, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, customErr)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse)
|
||||
})
|
||||
})
|
||||
|
||||
authScenario("When grafana user authenticate with invalid credentials and invalid ldap credentials", func(sc *authScenarioContext) {
|
||||
mockLoginAttemptValidation(nil, sc)
|
||||
mockLoginUsingGrafanaDB(ErrInvalidCredentials, sc)
|
||||
mockLoginUsingLdap(true, ErrInvalidCredentials, sc)
|
||||
mockSaveInvalidLoginAttempt(sc)
|
||||
|
||||
err := AuthenticateUser(sc.loginUserQuery)
|
||||
|
||||
Convey("it should result in", func() {
|
||||
So(err, ShouldEqual, ErrInvalidCredentials)
|
||||
So(sc.loginAttemptValidationWasCalled, ShouldBeTrue)
|
||||
So(sc.grafanaLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.ldapLoginWasCalled, ShouldBeTrue)
|
||||
So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
type authScenarioContext struct {
|
||||
loginUserQuery *LoginUserQuery
|
||||
grafanaLoginWasCalled bool
|
||||
ldapLoginWasCalled bool
|
||||
loginAttemptValidationWasCalled bool
|
||||
saveInvalidLoginAttemptWasCalled bool
|
||||
}
|
||||
|
||||
type authScenarioFunc func(sc *authScenarioContext)
|
||||
|
||||
func mockLoginUsingGrafanaDB(err error, sc *authScenarioContext) {
|
||||
loginUsingGrafanaDB = func(query *LoginUserQuery) error {
|
||||
sc.grafanaLoginWasCalled = true
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
func mockLoginUsingLdap(enabled bool, err error, sc *authScenarioContext) {
|
||||
loginUsingLdap = func(query *LoginUserQuery) (bool, error) {
|
||||
sc.ldapLoginWasCalled = true
|
||||
return enabled, err
|
||||
}
|
||||
}
|
||||
|
||||
func mockLoginAttemptValidation(err error, sc *authScenarioContext) {
|
||||
validateLoginAttempts = func(username string) error {
|
||||
sc.loginAttemptValidationWasCalled = true
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
func mockSaveInvalidLoginAttempt(sc *authScenarioContext) {
|
||||
saveInvalidLoginAttempt = func(query *LoginUserQuery) {
|
||||
sc.saveInvalidLoginAttemptWasCalled = true
|
||||
}
|
||||
}
|
||||
|
||||
func authScenario(desc string, fn authScenarioFunc) {
|
||||
Convey(desc, func() {
|
||||
origLoginUsingGrafanaDB := loginUsingGrafanaDB
|
||||
origLoginUsingLdap := loginUsingLdap
|
||||
origValidateLoginAttempts := validateLoginAttempts
|
||||
origSaveInvalidLoginAttempt := saveInvalidLoginAttempt
|
||||
|
||||
sc := &authScenarioContext{
|
||||
loginUserQuery: &LoginUserQuery{
|
||||
Username: "user",
|
||||
Password: "pwd",
|
||||
IpAddress: "192.168.1.1:56433",
|
||||
},
|
||||
}
|
||||
|
||||
defer func() {
|
||||
loginUsingGrafanaDB = origLoginUsingGrafanaDB
|
||||
loginUsingLdap = origLoginUsingLdap
|
||||
validateLoginAttempts = origValidateLoginAttempts
|
||||
saveInvalidLoginAttempt = origSaveInvalidLoginAttempt
|
||||
}()
|
||||
|
||||
fn(sc)
|
||||
})
|
||||
}
|
48	pkg/login/brute_force_login_protection.go	Normal file
@ -0,0 +1,48 @@
package login

import (
	"time"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/setting"
)

var (
	maxInvalidLoginAttempts int64         = 5
	loginAttemptsWindow     time.Duration = time.Minute * 5
)

var validateLoginAttempts = func(username string) error {
	if setting.DisableBruteForceLoginProtection {
		return nil
	}

	loginAttemptCountQuery := m.GetUserLoginAttemptCountQuery{
		Username: username,
		Since:    time.Now().Add(-loginAttemptsWindow),
	}

	if err := bus.Dispatch(&loginAttemptCountQuery); err != nil {
		return err
	}

	if loginAttemptCountQuery.Result >= maxInvalidLoginAttempts {
		return ErrTooManyLoginAttempts
	}

	return nil
}

var saveInvalidLoginAttempt = func(query *LoginUserQuery) {
	if setting.DisableBruteForceLoginProtection {
		return
	}

	loginAttemptCommand := m.CreateLoginAttemptCommand{
		Username:  query.Username,
		IpAddress: query.IpAddress,
	}

	bus.Dispatch(&loginAttemptCommand)
}
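
The two model types dispatched here are defined in pkg/models and are not part of this hunk; the following is only an inferred sketch of their shape, based on how the fields are used above:

// Inferred sketch, not the actual pkg/models definitions.
type GetUserLoginAttemptCountQuery struct {
	Username string
	Since    time.Time
	Result   int64 // filled in by the query handler, compared against maxInvalidLoginAttempts
}

type CreateLoginAttemptCommand struct {
	Username  string
	IpAddress string
}
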
125	pkg/login/brute_force_login_protection_test.go	Normal file
@ -0,0 +1,125 @@
|
||||
package login
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
m "github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestLoginAttemptsValidation(t *testing.T) {
|
||||
Convey("Validate login attempts", t, func() {
|
||||
Convey("Given brute force login protection enabled", func() {
|
||||
setting.DisableBruteForceLoginProtection = false
|
||||
|
||||
Convey("When user login attempt count equals max-1 ", func() {
|
||||
withLoginAttempts(maxInvalidLoginAttempts - 1)
|
||||
err := validateLoginAttempts("user")
|
||||
|
||||
Convey("it should not result in error", func() {
|
||||
So(err, ShouldBeNil)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user login attempt count equals max ", func() {
|
||||
withLoginAttempts(maxInvalidLoginAttempts)
|
||||
err := validateLoginAttempts("user")
|
||||
|
||||
Convey("it should result in too many login attempts error", func() {
|
||||
So(err, ShouldEqual, ErrTooManyLoginAttempts)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user login attempt count is greater than max ", func() {
|
||||
withLoginAttempts(maxInvalidLoginAttempts + 5)
|
||||
err := validateLoginAttempts("user")
|
||||
|
||||
Convey("it should result in too many login attempts error", func() {
|
||||
So(err, ShouldEqual, ErrTooManyLoginAttempts)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When saving invalid login attempt", func() {
|
||||
defer bus.ClearBusHandlers()
|
||||
createLoginAttemptCmd := &m.CreateLoginAttemptCommand{}
|
||||
|
||||
bus.AddHandler("test", func(cmd *m.CreateLoginAttemptCommand) error {
|
||||
createLoginAttemptCmd = cmd
|
||||
return nil
|
||||
})
|
||||
|
||||
saveInvalidLoginAttempt(&LoginUserQuery{
|
||||
Username: "user",
|
||||
Password: "pwd",
|
||||
IpAddress: "192.168.1.1:56433",
|
||||
})
|
||||
|
||||
Convey("it should dispatch command", func() {
|
||||
So(createLoginAttemptCmd, ShouldNotBeNil)
|
||||
So(createLoginAttemptCmd.Username, ShouldEqual, "user")
|
||||
So(createLoginAttemptCmd.IpAddress, ShouldEqual, "192.168.1.1:56433")
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Convey("Given brute force login protection disabled", func() {
|
||||
setting.DisableBruteForceLoginProtection = true
|
||||
|
||||
Convey("When user login attempt count equals max-1 ", func() {
|
||||
withLoginAttempts(maxInvalidLoginAttempts - 1)
|
||||
err := validateLoginAttempts("user")
|
||||
|
||||
Convey("it should not result in error", func() {
|
||||
So(err, ShouldBeNil)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user login attempt count equals max ", func() {
|
||||
withLoginAttempts(maxInvalidLoginAttempts)
|
||||
err := validateLoginAttempts("user")
|
||||
|
||||
Convey("it should not result in error", func() {
|
||||
So(err, ShouldBeNil)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When user login attempt count is greater than max ", func() {
|
||||
withLoginAttempts(maxInvalidLoginAttempts + 5)
|
||||
err := validateLoginAttempts("user")
|
||||
|
||||
Convey("it should not result in error", func() {
|
||||
So(err, ShouldBeNil)
|
||||
})
|
||||
})
|
||||
|
||||
Convey("When saving invalid login attempt", func() {
|
||||
defer bus.ClearBusHandlers()
|
||||
createLoginAttemptCmd := (*m.CreateLoginAttemptCommand)(nil)
|
||||
|
||||
bus.AddHandler("test", func(cmd *m.CreateLoginAttemptCommand) error {
|
||||
createLoginAttemptCmd = cmd
|
||||
return nil
|
||||
})
|
||||
|
||||
saveInvalidLoginAttempt(&LoginUserQuery{
|
||||
Username: "user",
|
||||
Password: "pwd",
|
||||
IpAddress: "192.168.1.1:56433",
|
||||
})
|
||||
|
||||
Convey("it should not dispatch command", func() {
|
||||
So(createLoginAttemptCmd, ShouldBeNil)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func withLoginAttempts(loginAttempts int64) {
|
||||
bus.AddHandler("test", func(query *m.GetUserLoginAttemptCountQuery) error {
|
||||
query.Result = loginAttempts
|
||||
return nil
|
||||
})
|
||||
}
|
35
pkg/login/grafana_login.go
Normal file
35
pkg/login/grafana_login.go
Normal file
@ -0,0 +1,35 @@
|
||||
package login

import (
	"crypto/subtle"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/util"
)

var validatePassword = func(providedPassword string, userPassword string, userSalt string) error {
	passwordHashed := util.EncodePassword(providedPassword, userSalt)
	if subtle.ConstantTimeCompare([]byte(passwordHashed), []byte(userPassword)) != 1 {
		return ErrInvalidCredentials
	}

	return nil
}

var loginUsingGrafanaDB = func(query *LoginUserQuery) error {
	userQuery := m.GetUserByLoginQuery{LoginOrEmail: query.Username}

	if err := bus.Dispatch(&userQuery); err != nil {
		return err
	}

	user := userQuery.Result

	if err := validatePassword(query.Password, user.Password, user.Salt); err != nil {
		return err
	}

	query.User = user
	return nil
}
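Both entry points above are package-level function variables rather than plain functions, which is what lets the tests below swap in stubs and restore the originals afterwards. A small self-contained sketch of that pattern, using hypothetical names (hashFn, Authenticate, withStubbedHash) instead of the real Grafana symbols:

package stubexample

import "errors"

var errBadCredentials = errors.New("invalid credentials")

// hashFn is a package-level function variable: production code keeps the real
// implementation, tests may temporarily replace it.
var hashFn = func(password, salt string) string {
	return password + ":" + salt // placeholder; a real implementation would hash
}

// Authenticate compares a stored hash against the hash of the provided password.
func Authenticate(stored, password, salt string) error {
	if hashFn(password, salt) != stored {
		return errBadCredentials
	}
	return nil
}

// withStubbedHash shows how a test would swap the variable and restore it.
func withStubbedHash(fn func(password, salt string) string, test func()) {
	orig := hashFn
	hashFn = fn
	defer func() { hashFn = orig }()
	test()
}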
139 pkg/login/grafana_login_test.go Normal file
@ -0,0 +1,139 @@
package login

import (
	"testing"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
	. "github.com/smartystreets/goconvey/convey"
)

func TestGrafanaLogin(t *testing.T) {
	Convey("Login using Grafana DB", t, func() {
		grafanaLoginScenario("When login with non-existing user", func(sc *grafanaLoginScenarioContext) {
			sc.withNonExistingUser()
			err := loginUsingGrafanaDB(sc.loginUserQuery)

			Convey("it should result in user not found error", func() {
				So(err, ShouldEqual, m.ErrUserNotFound)
			})

			Convey("it should not call password validation", func() {
				So(sc.validatePasswordCalled, ShouldBeFalse)
			})

			Convey("it should not populate user object", func() {
				So(sc.loginUserQuery.User, ShouldBeNil)
			})
		})

		grafanaLoginScenario("When login with invalid credentials", func(sc *grafanaLoginScenarioContext) {
			sc.withInvalidPassword()
			err := loginUsingGrafanaDB(sc.loginUserQuery)

			Convey("it should result in invalid credentials error", func() {
				So(err, ShouldEqual, ErrInvalidCredentials)
			})

			Convey("it should call password validation", func() {
				So(sc.validatePasswordCalled, ShouldBeTrue)
			})

			Convey("it should not populate user object", func() {
				So(sc.loginUserQuery.User, ShouldBeNil)
			})
		})

		grafanaLoginScenario("When login with valid credentials", func(sc *grafanaLoginScenarioContext) {
			sc.withValidCredentials()
			err := loginUsingGrafanaDB(sc.loginUserQuery)

			Convey("it should not result in error", func() {
				So(err, ShouldBeNil)
			})

			Convey("it should call password validation", func() {
				So(sc.validatePasswordCalled, ShouldBeTrue)
			})

			Convey("it should populate user object", func() {
				So(sc.loginUserQuery.User, ShouldNotBeNil)
				So(sc.loginUserQuery.User.Login, ShouldEqual, sc.loginUserQuery.Username)
				So(sc.loginUserQuery.User.Password, ShouldEqual, sc.loginUserQuery.Password)
			})
		})
	})
}

type grafanaLoginScenarioContext struct {
	loginUserQuery         *LoginUserQuery
	validatePasswordCalled bool
}

type grafanaLoginScenarioFunc func(c *grafanaLoginScenarioContext)

func grafanaLoginScenario(desc string, fn grafanaLoginScenarioFunc) {
	Convey(desc, func() {
		origValidatePassword := validatePassword

		sc := &grafanaLoginScenarioContext{
			loginUserQuery: &LoginUserQuery{
				Username:  "user",
				Password:  "pwd",
				IpAddress: "192.168.1.1:56433",
			},
			validatePasswordCalled: false,
		}

		defer func() {
			validatePassword = origValidatePassword
		}()

		fn(sc)
	})
}

func mockPasswordValidation(valid bool, sc *grafanaLoginScenarioContext) {
	validatePassword = func(providedPassword string, userPassword string, userSalt string) error {
		sc.validatePasswordCalled = true

		if !valid {
			return ErrInvalidCredentials
		}

		return nil
	}
}

func (sc *grafanaLoginScenarioContext) getUserByLoginQueryReturns(user *m.User) {
	bus.AddHandler("test", func(query *m.GetUserByLoginQuery) error {
		if user == nil {
			return m.ErrUserNotFound
		}

		query.Result = user
		return nil
	})
}

func (sc *grafanaLoginScenarioContext) withValidCredentials() {
	sc.getUserByLoginQueryReturns(&m.User{
		Id:       1,
		Login:    sc.loginUserQuery.Username,
		Password: sc.loginUserQuery.Password,
		Salt:     "salt",
	})
	mockPasswordValidation(true, sc)
}

func (sc *grafanaLoginScenarioContext) withNonExistingUser() {
	sc.getUserByLoginQueryReturns(nil)
}

func (sc *grafanaLoginScenarioContext) withInvalidPassword() {
	sc.getUserByLoginQueryReturns(&m.User{
		Password: sc.loginUserQuery.Password,
		Salt:     "salt",
	})
	mockPasswordValidation(false, sc)
}
21 pkg/login/ldap_login.go Normal file
@ -0,0 +1,21 @@
package login

import (
	"github.com/grafana/grafana/pkg/setting"
)

var loginUsingLdap = func(query *LoginUserQuery) (bool, error) {
	if !setting.LdapEnabled {
		return false, nil
	}

	for _, server := range LdapCfg.Servers {
		author := NewLdapAuthenticator(server)
		err := author.Login(query)
		if err == nil || err != ErrInvalidCredentials {
			return true, err
		}
	}

	return true, ErrInvalidCredentials
}
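The loop above tries each configured LDAP server in turn and only falls through to the next server when the failure is ErrInvalidCredentials; a nil error or any other error (a connection problem, for example) returns immediately, and the boolean tells the caller whether LDAP handled the request at all. A rough self-contained sketch of that control flow, with a hypothetical authFunc standing in for ILdapAuther.Login:

package fallthroughexample

import "errors"

var errInvalidCredentials = errors.New("invalid credentials")

type authFunc func(username, password string) error

// tryServers mirrors the fall-through logic: stop on success or on an
// unexpected error, continue only when the credentials were rejected.
func tryServers(enabled bool, servers []authFunc, username, password string) (bool, error) {
	if !enabled {
		return false, nil
	}

	for _, login := range servers {
		err := login(username, password)
		if err == nil || !errors.Is(err, errInvalidCredentials) {
			return true, err
		}
	}

	// Enabled but either no servers configured or every server rejected the credentials.
	return true, errInvalidCredentials
}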
172 pkg/login/ldap_login_test.go Normal file
@ -0,0 +1,172 @@
package login

import (
	"testing"

	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/setting"
	. "github.com/smartystreets/goconvey/convey"
)

func TestLdapLogin(t *testing.T) {
	Convey("Login using ldap", t, func() {
		Convey("Given ldap enabled and a server configured", func() {
			setting.LdapEnabled = true
			LdapCfg.Servers = append(LdapCfg.Servers,
				&LdapServerConf{
					Host: "",
				})

			ldapLoginScenario("When login with invalid credentials", func(sc *ldapLoginScenarioContext) {
				sc.withLoginResult(false)
				enabled, err := loginUsingLdap(sc.loginUserQuery)

				Convey("it should return true", func() {
					So(enabled, ShouldBeTrue)
				})

				Convey("it should return invalid credentials error", func() {
					So(err, ShouldEqual, ErrInvalidCredentials)
				})

				Convey("it should call ldap login", func() {
					So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeTrue)
				})
			})

			ldapLoginScenario("When login with valid credentials", func(sc *ldapLoginScenarioContext) {
				sc.withLoginResult(true)
				enabled, err := loginUsingLdap(sc.loginUserQuery)

				Convey("it should return true", func() {
					So(enabled, ShouldBeTrue)
				})

				Convey("it should not return error", func() {
					So(err, ShouldBeNil)
				})

				Convey("it should call ldap login", func() {
					So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeTrue)
				})
			})
		})

		Convey("Given ldap enabled and no server configured", func() {
			setting.LdapEnabled = true
			LdapCfg.Servers = make([]*LdapServerConf, 0)

			ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) {
				sc.withLoginResult(true)
				enabled, err := loginUsingLdap(sc.loginUserQuery)

				Convey("it should return true", func() {
					So(enabled, ShouldBeTrue)
				})

				Convey("it should return invalid credentials error", func() {
					So(err, ShouldEqual, ErrInvalidCredentials)
				})

				Convey("it should not call ldap login", func() {
					So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeFalse)
				})
			})
		})

		Convey("Given ldap disabled", func() {
			setting.LdapEnabled = false

			ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) {
				sc.withLoginResult(false)
				enabled, err := loginUsingLdap(&LoginUserQuery{
					Username: "user",
					Password: "pwd",
				})

				Convey("it should return false", func() {
					So(enabled, ShouldBeFalse)
				})

				Convey("it should not return error", func() {
					So(err, ShouldBeNil)
				})

				Convey("it should not call ldap login", func() {
					So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeFalse)
				})
			})
		})
	})
}

func mockLdapAuthenticator(valid bool) *mockLdapAuther {
	mock := &mockLdapAuther{
		validLogin: valid,
	}

	NewLdapAuthenticator = func(server *LdapServerConf) ILdapAuther {
		return mock
	}

	return mock
}

type mockLdapAuther struct {
	validLogin  bool
	loginCalled bool
}

func (a *mockLdapAuther) Login(query *LoginUserQuery) error {
	a.loginCalled = true

	if !a.validLogin {
		return ErrInvalidCredentials
	}

	return nil
}

func (a *mockLdapAuther) SyncSignedInUser(signedInUser *m.SignedInUser) error {
	return nil
}

func (a *mockLdapAuther) GetGrafanaUserFor(ldapUser *LdapUserInfo) (*m.User, error) {
	return nil, nil
}

func (a *mockLdapAuther) SyncOrgRoles(user *m.User, ldapUser *LdapUserInfo) error {
	return nil
}

type ldapLoginScenarioContext struct {
	loginUserQuery        *LoginUserQuery
	ldapAuthenticatorMock *mockLdapAuther
}

type ldapLoginScenarioFunc func(c *ldapLoginScenarioContext)

func ldapLoginScenario(desc string, fn ldapLoginScenarioFunc) {
	Convey(desc, func() {
		origNewLdapAuthenticator := NewLdapAuthenticator

		sc := &ldapLoginScenarioContext{
			loginUserQuery: &LoginUserQuery{
				Username:  "user",
				Password:  "pwd",
				IpAddress: "192.168.1.1:56433",
			},
			ldapAuthenticatorMock: &mockLdapAuther{},
		}

		defer func() {
			NewLdapAuthenticator = origNewLdapAuthenticator
		}()

		fn(sc)
	})
}

func (sc *ldapLoginScenarioContext) withLoginResult(valid bool) {
	sc.ldapAuthenticatorMock = mockLdapAuthenticator(valid)
}
@ -26,9 +26,10 @@ import (
	"strings"
	"time"

	"context"

	"github.com/prometheus/common/expfmt"
	"github.com/prometheus/common/model"
	"golang.org/x/net/context"

	dto "github.com/prometheus/client_model/go"

@ -379,6 +379,7 @@ func sendUsageStats() {
	metrics["stats.alerts.count"] = statsQuery.Result.Alerts
	metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers
	metrics["stats.datasources.count"] = statsQuery.Result.Datasources
	metrics["stats.stars.count"] = statsQuery.Result.Stars

	dsStats := models.GetDataSourceStatsQuery{}
	if err := bus.Dispatch(&dsStats); err != nil {
46 pkg/middleware/dashboard_redirect.go Normal file
@ -0,0 +1,46 @@
package middleware

import (
	"strings"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
	"gopkg.in/macaron.v1"
)

func getDashboardUrlBySlug(orgId int64, slug string) (string, error) {
	query := m.GetDashboardQuery{Slug: slug, OrgId: orgId}

	if err := bus.Dispatch(&query); err != nil {
		return "", m.ErrDashboardNotFound
	}

	return m.GetDashboardUrl(query.Result.Uid, query.Result.Slug), nil
}

func RedirectFromLegacyDashboardUrl() macaron.Handler {
	return func(c *Context) {
		slug := c.Params("slug")

		if slug != "" {
			if url, err := getDashboardUrlBySlug(c.OrgId, slug); err == nil {
				c.Redirect(url, 301)
				return
			}
		}
	}
}

func RedirectFromLegacyDashboardSoloUrl() macaron.Handler {
	return func(c *Context) {
		slug := c.Params("slug")

		if slug != "" {
			if url, err := getDashboardUrlBySlug(c.OrgId, slug); err == nil {
				url = strings.Replace(url, "/d/", "/d-solo/", 1)
				c.Redirect(url, 301)
				return
			}
		}
	}
}
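Both handlers resolve the legacy slug to the new uid-based URL and answer with a 301 so existing bookmarks keep working; the solo variant additionally rewrites /d/ to /d-solo/. A self-contained net/http sketch of the same idea, where lookupUID is a hypothetical stand-in for the GetDashboardQuery dispatch:

package redirectexample

import (
	"fmt"
	"net/http"
	"path"
	"strings"
)

// lookupUID is a hypothetical stand-in for the bus query; it would map a
// legacy slug to the dashboard's uid.
var lookupUID = func(slug string) (string, bool) {
	return "", false
}

// legacyRedirect answers a permanent redirect from the legacy slug URL to the
// new /d/:uid/:slug form; when solo is true it targets /d-solo/ instead.
func legacyRedirect(solo bool) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		slug := path.Base(r.URL.Path)
		uid, ok := lookupUID(slug)
		if !ok {
			http.NotFound(w, r)
			return
		}

		target := fmt.Sprintf("/d/%s/%s", uid, slug)
		if solo {
			target = strings.Replace(target, "/d/", "/d-solo/", 1)
		}
		http.Redirect(w, r, target, http.StatusMovedPermanently)
	}
}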
56 pkg/middleware/dashboard_redirect_test.go Normal file
@ -0,0 +1,56 @@
package middleware

import (
	"strings"
	"testing"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/util"
	. "github.com/smartystreets/goconvey/convey"
)

func TestMiddlewareDashboardRedirect(t *testing.T) {
	Convey("Given the dashboard redirect middleware", t, func() {
		bus.ClearBusHandlers()
		redirectFromLegacyDashboardUrl := RedirectFromLegacyDashboardUrl()
		redirectFromLegacyDashboardSoloUrl := RedirectFromLegacyDashboardSoloUrl()

		fakeDash := m.NewDashboard("Child dash")
		fakeDash.Id = 1
		fakeDash.FolderId = 1
		fakeDash.HasAcl = false
		fakeDash.Uid = util.GenerateShortUid()

		bus.AddHandler("test", func(query *m.GetDashboardQuery) error {
			query.Result = fakeDash
			return nil
		})

		middlewareScenario("GET dashboard by legacy url", func(sc *scenarioContext) {
			sc.m.Get("/dashboard/db/:slug", redirectFromLegacyDashboardUrl, sc.defaultHandler)

			sc.fakeReqWithParams("GET", "/dashboard/db/dash", map[string]string{}).exec()

			Convey("Should redirect to new dashboard url with a 301 Moved Permanently", func() {
				So(sc.resp.Code, ShouldEqual, 301)
				redirectUrl, _ := sc.resp.Result().Location()
				So(redirectUrl.Path, ShouldEqual, m.GetDashboardUrl(fakeDash.Uid, fakeDash.Slug))
			})
		})

		middlewareScenario("GET dashboard solo by legacy url", func(sc *scenarioContext) {
			sc.m.Get("/dashboard-solo/db/:slug", redirectFromLegacyDashboardSoloUrl, sc.defaultHandler)

			sc.fakeReqWithParams("GET", "/dashboard-solo/db/dash", map[string]string{}).exec()

			Convey("Should redirect to new dashboard url with a 301 Moved Permanently", func() {
				So(sc.resp.Code, ShouldEqual, 301)
				redirectUrl, _ := sc.resp.Result().Location()
				expectedUrl := m.GetDashboardUrl(fakeDash.Uid, fakeDash.Slug)
				expectedUrl = strings.Replace(expectedUrl, "/d/", "/d-solo/", 1)
				So(redirectUrl.Path, ShouldEqual, expectedUrl)
			})
		})
	})
}
@ -399,6 +399,20 @@ func (sc *scenarioContext) fakeReq(method, url string) *scenarioContext {
	return sc
}

func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext {
	sc.resp = httptest.NewRecorder()
	req, err := http.NewRequest(method, url, nil)
	q := req.URL.Query()
	for k, v := range queryParams {
		q.Add(k, v)
	}
	req.URL.RawQuery = q.Encode()
	So(err, ShouldBeNil)
	sc.req = req

	return sc
}

func (sc *scenarioContext) handler(fn handlerFunc) *scenarioContext {
	sc.handlerFunc = fn
	return sc
@ -159,10 +159,6 @@ type SetAlertStateCommand struct {
	Timestamp time.Time
}

type DeleteAlertCommand struct {
	AlertId int64
}

//Queries
type GetAlertsQuery struct {
	OrgId int64
@ -2,23 +2,28 @@ package models

import (
	"errors"
	"fmt"
	"strings"
	"time"

	"github.com/gosimple/slug"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/setting"
)

// Typed errors
var (
	ErrDashboardNotFound                 = errors.New("Dashboard not found")
	ErrDashboardSnapshotNotFound         = errors.New("Dashboard snapshot not found")
	ErrDashboardWithSameNameExists       = errors.New("A dashboard with the same name already exists")
	ErrDashboardVersionMismatch          = errors.New("The dashboard has been changed by someone else")
	ErrDashboardTitleEmpty               = errors.New("Dashboard title cannot be empty")
	ErrDashboardFolderCannotHaveParent   = errors.New("A Dashboard Folder cannot be added to another folder")
	ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. Cannot save dashboard")
	ErrDashboardFailedToUpdateAlertData  = errors.New("Failed to save alert data")
	ErrDashboardNotFound                   = errors.New("Dashboard not found")
	ErrDashboardSnapshotNotFound           = errors.New("Dashboard snapshot not found")
	ErrDashboardWithSameUIDExists          = errors.New("A dashboard with the same uid already exists")
	ErrDashboardWithSameNameInFolderExists = errors.New("A dashboard with the same name in the folder already exists")
	ErrDashboardVersionMismatch            = errors.New("The dashboard has been changed by someone else")
	ErrDashboardTitleEmpty                 = errors.New("Dashboard title cannot be empty")
	ErrDashboardFolderCannotHaveParent     = errors.New("A Dashboard Folder cannot be added to another folder")
	ErrDashboardContainsInvalidAlertData   = errors.New("Invalid alert data. Cannot save dashboard")
	ErrDashboardFailedToUpdateAlertData    = errors.New("Failed to save alert data")
	ErrDashboardsWithSameSlugExists        = errors.New("Multiple dashboards with the same slug exists")
	ErrDashboardFailedGenerateUniqueUid    = errors.New("Failed to generate unique dashboard id")
)

type UpdatePluginDashboardError struct {
@ -39,6 +44,7 @@
// Dashboard model
type Dashboard struct {
	Id     int64
	Uid    string
	Slug   string
	OrgId  int64
	GnetId int64
@ -107,6 +113,10 @@ func NewDashboardFromJson(data *simplejson.Json) *Dashboard {
		dash.GnetId = int64(gnetId)
	}

	if uid, err := dash.Data.Get("uid").String(); err == nil {
		dash.Uid = uid
	}

	return dash
}
@ -139,8 +149,46 @@ func (dash *Dashboard) GetString(prop string, defaultValue string) string {

// UpdateSlug updates the slug
func (dash *Dashboard) UpdateSlug() {
	title := strings.ToLower(dash.Data.Get("title").MustString())
	dash.Slug = slug.Make(title)
	title := dash.Data.Get("title").MustString()
	dash.Slug = SlugifyTitle(title)
}

func SlugifyTitle(title string) string {
	return slug.Make(strings.ToLower(title))
}

// GetUrl returns the html url for a folder if it's a folder, otherwise for a dashboard
func (dash *Dashboard) GetUrl() string {
	return GetDashboardFolderUrl(dash.IsFolder, dash.Uid, dash.Slug)
}

// GenerateUrl returns the html url for a dashboard
func (dash *Dashboard) GenerateUrl() string {
	return GetDashboardUrl(dash.Uid, dash.Slug)
}

// GetDashboardFolderUrl returns the html url for a folder if it's a folder, otherwise for a dashboard
func GetDashboardFolderUrl(isFolder bool, uid string, slug string) string {
	if isFolder {
		return GetFolderUrl(uid, slug)
	}

	return GetDashboardUrl(uid, slug)
}

// GetDashboardUrl returns the html url for a dashboard
func GetDashboardUrl(uid string, slug string) string {
	return fmt.Sprintf("%s/d/%s/%s", setting.AppSubUrl, uid, slug)
}

// GetFullDashboardUrl returns the full url for a dashboard
func GetFullDashboardUrl(uid string, slug string) string {
	return fmt.Sprintf("%s%s", setting.AppUrl, GetDashboardUrl(uid, slug))
}

// GetFolderUrl returns the html url for a folder
func GetFolderUrl(folderUid string, slug string) string {
	return fmt.Sprintf("%s/dashboards/f/%s/%s", setting.AppSubUrl, folderUid, slug)
}

//
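Given the helpers above, a dashboard identified by uid and slug lives at <AppSubUrl>/d/<uid>/<slug> and a folder at <AppSubUrl>/dashboards/f/<uid>/<slug>. A tiny illustration of the resulting shapes; the uid and slug are made-up values and AppSubUrl is assumed to be empty:

package main

import "fmt"

func main() {
	appSubUrl := ""                                 // assumed empty sub path
	uid, slug := "cIBgcSjkk", "production-overview" // made-up example values

	fmt.Printf("%s/d/%s/%s\n", appSubUrl, uid, slug)            // /d/cIBgcSjkk/production-overview
	fmt.Printf("%s/dashboards/f/%s/%s\n", appSubUrl, uid, slug) // /dashboards/f/cIBgcSjkk/production-overview
}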
@ -173,8 +221,9 @@ type DeleteDashboardCommand struct {
//

type GetDashboardQuery struct {
	Slug  string // required if no Id is specified
	Slug  string // required if no Id or Uid is specified
	Id    int64  // optional if slug is set
	Uid   string // optional if slug is set
	OrgId int64

	Result *Dashboard
@ -195,6 +244,14 @@ type GetDashboardsQuery struct {
	Result []*Dashboard
}

type GetDashboardPermissionsForUserQuery struct {
	DashboardIds []int64
	OrgId        int64
	UserId       int64
	OrgRole      RoleType
	Result       []*DashboardPermissionForUser
}

type GetDashboardsByPluginIdQuery struct {
	OrgId    int64
	PluginId string
@ -205,3 +262,38 @@ type GetDashboardSlugByIdQuery struct {
	Id     int64
	Result string
}

type GetDashboardsBySlugQuery struct {
	OrgId int64
	Slug  string

	Result []*Dashboard
}

type GetFoldersForSignedInUserQuery struct {
	OrgId        int64
	SignedInUser *SignedInUser
	Title        string
	Result       []*DashboardFolder
}

type DashboardFolder struct {
	Id    int64  `json:"id"`
	Title string `json:"title"`
}

type DashboardPermissionForUser struct {
	DashboardId    int64          `json:"dashboardId"`
	Permission     PermissionType `json:"permission"`
	PermissionName string         `json:"permissionName"`
}

type DashboardRef struct {
	Uid  string
	Slug string
}

type GetDashboardUIDByIdQuery struct {
	Id     int64
	Result *DashboardRef
}
@ -16,6 +16,12 @@ func TestDashboardModel(t *testing.T) {
		So(dashboard.Slug, ShouldEqual, "grafana-play-home")
	})

	Convey("Can slugify title", t, func() {
		slug := SlugifyTitle("Grafana Play Home")

		So(slug, ShouldEqual, "grafana-play-home")
	})

	Convey("Given a dashboard json", t, func() {
		json := simplejson.New()
		json.Set("title", "test dash")
36 pkg/models/login_attempt.go Normal file
@ -0,0 +1,36 @@
package models

import (
	"time"
)

type LoginAttempt struct {
	Id        int64
	Username  string
	IpAddress string
	Created   time.Time
}

// ---------------------
// COMMANDS

type CreateLoginAttemptCommand struct {
	Username  string
	IpAddress string

	Result LoginAttempt
}

type DeleteOldLoginAttemptsCommand struct {
	OlderThan   time.Time
	DeletedRows int64
}

// ---------------------
// QUERIES

type GetUserLoginAttemptCountQuery struct {
	Username string
	Since    time.Time
	Result   int64
}
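These structs follow the command/query convention used across the codebase: a caller fills in the input fields, dispatches the struct on the bus, and reads Result afterwards, with persistence handled by whichever handler (normally the sqlstore) is registered. A rough sketch of a consumer of GetUserLoginAttemptCountQuery, assuming the pkg/bus API used elsewhere in this diff; countRecentLoginAttempts and the five-minute window are hypothetical:

package login

import (
	"time"

	"github.com/grafana/grafana/pkg/bus"
	m "github.com/grafana/grafana/pkg/models"
)

// countRecentLoginAttempts asks the registered handler how many attempts were
// recorded for the user in the last five minutes and returns the count.
func countRecentLoginAttempts(username string) (int64, error) {
	query := m.GetUserLoginAttemptCountQuery{
		Username: username,
		Since:    time.Now().Add(-5 * time.Minute),
	}

	if err := bus.Dispatch(&query); err != nil {
		return 0, err
	}

	return query.Result, nil
}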
@ -8,6 +8,7 @@ type SystemStats struct {
	Orgs      int64
	Playlists int64
	Alerts    int64
	Stars     int64
}

type DataSourceStats struct {
@ -1,6 +1,7 @@
package plugins

import (
	"context"
	"io/ioutil"
	"testing"

@ -91,7 +92,7 @@ func pluginScenario(desc string, t *testing.T, fn func()) {
		setting.Cfg = ini.Empty()
		sec, _ := setting.Cfg.NewSection("plugin.test-app")
		sec.NewKey("path", "../../tests/test-app")
		err := Init()
		err := initPlugins(context.Background())

		So(err, ShouldBeNil)

@ -1,6 +1,7 @@
package plugins

import (
	"context"
	"testing"

	"github.com/grafana/grafana/pkg/bus"
@ -17,7 +18,7 @@ func TestPluginDashboards(t *testing.T) {
		setting.Cfg = ini.Empty()
		sec, _ := setting.Cfg.NewSection("plugin.test-app")
		sec.NewKey("path", "../../tests/test-app")
		err := Init()
		err := initPlugins(context.Background())

		So(err, ShouldBeNil)

161 pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go Normal file
@ -0,0 +1,161 @@
package wrapper

import (
	"context"
	"errors"
	"fmt"

	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/log"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana_plugin_model/go/datasource"
)

func NewDatasourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper {
	return &DatasourcePluginWrapper{DatasourcePlugin: plugin, logger: log}
}

type DatasourcePluginWrapper struct {
	datasource.DatasourcePlugin
	logger log.Logger
}

func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
	jsonData, err := ds.JsonData.MarshalJSON()
	if err != nil {
		return nil, err
	}

	pbQuery := &datasource.DatasourceRequest{
		Datasource: &datasource.DatasourceInfo{
			Name:                    ds.Name,
			Type:                    ds.Type,
			Url:                     ds.Url,
			Id:                      ds.Id,
			OrgId:                   ds.OrgId,
			JsonData:                string(jsonData),
			DecryptedSecureJsonData: ds.SecureJsonData.Decrypt(),
		},
		TimeRange: &datasource.TimeRange{
			FromRaw:     query.TimeRange.From,
			ToRaw:       query.TimeRange.To,
			ToEpochMs:   query.TimeRange.GetToAsMsEpoch(),
			FromEpochMs: query.TimeRange.GetFromAsMsEpoch(),
		},
		Queries: []*datasource.Query{},
	}

	for _, q := range query.Queries {
		modelJson, _ := q.Model.MarshalJSON()

		pbQuery.Queries = append(pbQuery.Queries, &datasource.Query{
			ModelJson:     string(modelJson),
			IntervalMs:    q.IntervalMs,
			RefId:         q.RefId,
			MaxDataPoints: q.MaxDataPoints,
		})
	}

	pbres, err := tw.DatasourcePlugin.Query(ctx, pbQuery)

	if err != nil {
		return nil, err
	}

	res := &tsdb.Response{
		Results: map[string]*tsdb.QueryResult{},
	}

	for _, r := range pbres.Results {
		qr := &tsdb.QueryResult{
			RefId:  r.RefId,
			Series: []*tsdb.TimeSeries{},
			Tables: []*tsdb.Table{},
		}

		if r.Error != "" {
			qr.Error = errors.New(r.Error)
			qr.ErrorString = r.Error
		}

		for _, s := range r.GetSeries() {
			points := tsdb.TimeSeriesPoints{}

			for _, p := range s.Points {
				po := tsdb.NewTimePoint(null.FloatFrom(p.Value), float64(p.Timestamp))
				points = append(points, po)
			}

			qr.Series = append(qr.Series, &tsdb.TimeSeries{
				Name:   s.Name,
				Tags:   s.Tags,
				Points: points,
			})
		}

		mappedTables, err := tw.mapTables(r)
		if err != nil {
			return nil, err
		}
		qr.Tables = mappedTables

		res.Results[r.RefId] = qr
	}

	return res, nil
}
func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]*tsdb.Table, error) {
	var tables []*tsdb.Table
	for _, t := range r.GetTables() {
		mappedTable, err := tw.mapTable(t)
		if err != nil {
			return nil, err
		}
		tables = append(tables, mappedTable)
	}
	return tables, nil
}

func (tw *DatasourcePluginWrapper) mapTable(t *datasource.Table) (*tsdb.Table, error) {
	table := &tsdb.Table{}
	for _, c := range t.GetColumns() {
		table.Columns = append(table.Columns, tsdb.TableColumn{
			Text: c.Name,
		})
	}

	table.Rows = make([]tsdb.RowValues, 0)
	for _, r := range t.GetRows() {
		row := tsdb.RowValues{}
		for _, rv := range r.Values {
			mappedRw, err := tw.mapRowValue(rv)
			if err != nil {
				return nil, err
			}

			row = append(row, mappedRw)
		}
		table.Rows = append(table.Rows, row)
	}

	return table, nil
}
func (tw *DatasourcePluginWrapper) mapRowValue(rv *datasource.RowValue) (interface{}, error) {
	switch rv.Kind {
	case datasource.RowValue_TYPE_NULL:
		return nil, nil
	case datasource.RowValue_TYPE_INT64:
		return rv.Int64Value, nil
	case datasource.RowValue_TYPE_BOOL:
		return rv.BoolValue, nil
	case datasource.RowValue_TYPE_STRING:
		return rv.StringValue, nil
	case datasource.RowValue_TYPE_DOUBLE:
		return rv.DoubleValue, nil
	case datasource.RowValue_TYPE_BYTES:
		return rv.BytesValue, nil
	default:
		return nil, fmt.Errorf("Unsupported row value %v from plugin", rv.Kind)
	}
}
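The wrapper's job is translation: it flattens a tsdb query into the plugin's protobuf request and unpacks the typed RowValue union in the response into plain Go values, as mapRowValue does above. A self-contained sketch of that unpacking step, using a hypothetical local rowValue type in place of the generated protobuf struct:

package wrapperexample

import "fmt"

// rowValue mimics the shape of the generated protobuf union: a kind tag plus
// one populated field per supported type.
type rowValue struct {
	kind        string
	int64Value  int64
	boolValue   bool
	stringValue string
	doubleValue float64
	bytesValue  []byte
}

// unpack mirrors mapRowValue: pick the field named by the kind tag and reject
// anything unknown so malformed plugin responses surface as errors.
func unpack(rv rowValue) (interface{}, error) {
	switch rv.kind {
	case "null":
		return nil, nil
	case "int64":
		return rv.int64Value, nil
	case "bool":
		return rv.boolValue, nil
	case "string":
		return rv.stringValue, nil
	case "double":
		return rv.doubleValue, nil
	case "bytes":
		return rv.bytesValue, nil
	default:
		return nil, fmt.Errorf("unsupported row value kind %q from plugin", rv.kind)
	}
}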
Some files were not shown because too many files have changed in this diff.