Merge remote-tracking branch 'origin/master' into davkal/explore-facetting-filtered

This commit is contained in:
Marcus Efraimsson 2018-08-21 09:40:39 +02:00
commit 8f875951cb
No known key found for this signature in database
GPG Key ID: EBFE0FB04612DD4A
233 changed files with 3432 additions and 3461 deletions

View File

@ -9,7 +9,7 @@ watch_dirs = [
"$WORKDIR/public/views",
"$WORKDIR/conf",
]
watch_exts = [".go", ".ini", ".toml"]
watch_exts = [".go", ".ini", ".toml", ".template.html"]
build_delay = 1500
cmds = [
["go", "run", "build.go", "-dev", "build-server"],

View File

@ -104,6 +104,7 @@ jobs:
- run:
name: yarn install
command: 'yarn install --pure-lockfile --no-progress'
no_output_timeout: 15m
- save_cache:
key: dependency-cache-{{ checksum "yarn.lock" }}
paths:
@ -146,6 +147,12 @@ jobs:
- run:
name: sign packages
command: './scripts/build/sign_packages.sh'
- run:
name: verify signed packages
command: |
mkdir -p ~/.rpmdb/pubkeys
curl -s https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana > ~/.rpmdb/pubkeys/grafana.key
./scripts/build/verify_signed_packages.sh dist/*.rpm
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'

View File

@ -2,12 +2,12 @@ Follow the setup guide in README.md
### Rebuild frontend assets on source change
```
grunt && grunt watch
yarn watch
```
### Rerun tests on source change
```
grunt karma:dev
yarn jest
```
### Run tests for backend assets before commit
@ -17,6 +17,6 @@ test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)'
### Run tests for frontend assets before commit
```
npm test
yarn test
go test -v ./pkg/...
```

2
.gitignore vendored
View File

@ -71,4 +71,4 @@ debug.test
/vendor/**/appengine*
*.orig
/devenv/dashboards/bulk-testing/*.json
/devenv/bulk-dashboards/*.json

View File

@ -1,13 +0,0 @@
{
"disallowImplicitTypeConversion": ["string"],
"disallowKeywords": ["with"],
"disallowMultipleLineBreaks": true,
"disallowMixedSpacesAndTabs": true,
"disallowTrailingWhitespace": true,
"requireSpacesInFunctionExpression": {
"beforeOpeningCurlyBrace": true
},
"disallowSpacesInsideArrayBrackets": true,
"disallowSpacesInsideParentheses": true,
"validateIndentation": 2
}

View File

@ -1,37 +0,0 @@
{
"browser": true,
"esversion": 6,
"bitwise":false,
"curly": true,
"eqnull": true,
"strict": false,
"devel": true,
"eqeqeq": true,
"forin": false,
"immed": true,
"supernew": true,
"expr": true,
"indent": 2,
"latedef": false,
"newcap": true,
"noarg": true,
"noempty": true,
"undef": true,
"boss": true,
"trailing": true,
"laxbreak": true,
"laxcomma": true,
"sub": true,
"unused": true,
"maxdepth": 6,
"maxlen": 140,
"globals": {
"System": true,
"Promise": true,
"define": true,
"require": true,
"Chromath": false,
"setImmediate": true
}
}

View File

@ -1,21 +1,26 @@
# 5.3.0 (unreleased)
* **OAuth**: Gitlab OAuth with support for filter by groups [#5623](https://github.com/grafana/grafana/issues/5623), thx [@BenoitKnecht](https://github.com/BenoitKnecht)
* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
* **LDAP**: Define Grafana Admin permission in ldap group mappings [#2469](https://github.com/grafana/grafana/issues/2469), PR [#12622](https://github.com/grafana/grafana/issues/12622)
* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda)
* **Configuration**: Allow auto-assigning users to specific organization (other than Main Org.) [#1823](https://github.com/grafana/grafana/issues/1823) [#12801](https://github.com/grafana/grafana/issues/12801), thx [@gzzo](https://github.com/gzzo) and [@ofosos](https://github.com/ofosos)
* **Profile**: List teams that the user is a member of in the current/active organization [#12476](https://github.com/grafana/grafana/issues/12476)
* **LDAP**: Client certificates support [#12805](https://github.com/grafana/grafana/issues/12805), thx [@nyxi](https://github.com/nyxi)
* **Postgres**: TimescaleDB support, e.g. use `time_bucket` for grouping by time when option enabled [#12680](https://github.com/grafana/grafana/pull/12680), thx [svenklemm](https://github.com/svenklemm)
### Minor
* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
* **Dashboard**: Use uid when linking to dashboards internally in a dashboard [#10705](https://github.com/grafana/grafana/issues/10705)
* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
* **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
* **Prometheus**: Add $__interval, $__interval_ms, $__range, $__range_s & $__range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) [#12882](https://github.com/grafana/grafana/issues/12882), thx [@roidelapluie](https://github.com/roidelapluie)
* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
* **Postgres/MySQL/MSSQL**: New $__unixEpochGroup and $__unixEpochGroupAlias macros [#12892](https://github.com/grafana/grafana/issues/12892), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
@ -25,7 +30,7 @@
* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
* **Alerting**: Fix rendering timeout which could cause notifications to not be sent due to rendering timing out [#12151](https://github.com/grafana/grafana/issues/12151)
* **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
* **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
* **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
@ -33,13 +38,21 @@
* **Cloudwatch**: Add new Redshift metrics and dimensions [#12063](https://github.com/grafana/grafana/pulls/12063), thx [@A21z](https://github.com/A21z)
* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
* **Table**: Fix link color when using light theme and thresholds in use [#12766](https://github.com/grafana/grafana/issues/12766)
* **Table**: Fix for useless horizontal scrollbar for table panel [#9964](https://github.com/grafana/grafana/issues/9964)
* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
* **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
* **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)
* **Plugins**: Convert URL-like text to links in plugins readme [#12843](https://github.com/grafana/grafana/pull/12843), thx [pgiraud](https://github.com/pgiraud)
* **Docker**: Make it possible to set a specific plugin url [#12861](https://github.com/grafana/grafana/pull/12861), thx [ClementGautier](https://github.com/ClementGautier)
* **Graphite**: Fix for quoting of int function parameters (when using variables) [#11927](https://github.com/grafana/grafana/pull/11927)
* **InfluxDB**: Support timeFilter in query templating for InfluxDB [#12598](https://github.com/grafana/grafana/pull/12598), thx [kichristensen](https://github.com/kichristensen)
* **Provisioning**: Should allow one default datasource per organisation [#12229](https://github.com/grafana/grafana/issues/12229)
### Breaking changes
@ -51,6 +64,10 @@ These are new features that are still being worked on and are in an experimental phase.
* **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768)
### Tech
* **Frontend**: Convert all Frontend Karma tests to Jest tests [#12224](https://github.com/grafana/grafana/issues/12224)
# 5.2.2 (2018-07-25)
### Minor

View File

@ -1,4 +1,3 @@
/* jshint node:true */
'use strict';
module.exports = function (grunt) {
var os = require('os');

View File

@ -43,7 +43,7 @@ To build the assets, rebuild on file change, and serve them by Grafana's webserver (http://localhost:3000):
```bash
npm install -g yarn
yarn install --pure-lockfile
yarn run watch
yarn watch
```
Build the assets, rebuild on file change with Hot Module Replacement (HMR), and serve them by webpack-dev-server (http://localhost:3333):
@ -56,12 +56,7 @@ Note: HMR for Angular is not supported. If you edit files in the Angular part of
Run tests
```bash
yarn run jest
```
Run karma tests
```bash
yarn run karma
yarn jest
```
### Recompile backend on source change
@ -98,17 +93,13 @@ In your custom.ini, uncomment (remove the leading `;` sign) and set `app_mode = development`.
#### Frontend
Execute all frontend tests
```bash
yarn run test
yarn test
```
Writing & watching frontend tests (we have two test runners)
Writing & watching frontend tests
- jest for all new tests that do not require browser context (React+more)
- Start watcher: `yarn run jest`
- Jest will run all test files that end with the name ".jest.ts"
- karma + mocha is used for testing angularjs components. We do want to migrate these test to jest over time (if possible).
- Start watcher: `yarn run karma`
- Karma+Mocha runs all files that end with the name "_specs.ts".
- Start watcher: `yarn jest`
- Jest will run all test files that end with the name ".test.ts"
#### Backend
```bash

View File

@ -270,6 +270,18 @@ api_url = https://api.github.com/user
team_ids =
allowed_organizations =
#################################### GitLab Auth #########################
[auth.gitlab]
enabled = false
allow_sign_up = true
client_id = some_id
client_secret = some_secret
scopes = api
auth_url = https://gitlab.com/oauth/authorize
token_url = https://gitlab.com/oauth/token
api_url = https://gitlab.com/api/v4
allowed_groups =
#################################### Google Auth #########################
[auth.google]
enabled = false
@ -315,6 +327,9 @@ api_url =
team_ids =
allowed_organizations =
tls_skip_verify_insecure = false
tls_client_cert =
tls_client_key =
tls_client_ca =
#################################### Basic Auth ##########################
[auth.basic]

View File

@ -15,6 +15,9 @@ start_tls = false
ssl_skip_verify = false
# set to the path to your root CA certificate or leave unset to use system defaults
# root_ca_cert = "/path/to/certificate.crt"
# Authentication against LDAP servers requiring client certificates
# client_cert = "/path/to/client.crt"
# client_key = "/path/to/client.key"
# Search user bind dn
bind_dn = "cn=admin,dc=grafana,dc=org"

View File

@ -272,6 +272,10 @@ log_queries =
;api_url = https://foo.bar/user
;team_ids =
;allowed_organizations =
;tls_skip_verify_insecure = false
;tls_client_cert =
;tls_client_key =
;tls_client_ca =
#################################### Grafana.com Auth ####################
[auth.grafana_com]

View File

@ -5,5 +5,5 @@ providers:
folder: 'Bulk dashboards'
type: file
options:
path: devenv/dashboards/bulk-testing
path: devenv/bulk-dashboards

View File

@ -64,7 +64,7 @@
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"iteration": 1533713720618,
"iteration": 1534507501976,
"links": [],
"panels": [
{
@ -1197,6 +1197,196 @@
"x": 0,
"y": 27
},
"id": 38,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"hideEmpty": false,
"hideZero": false,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": true,
"values": true
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"percentage": false,
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__unixEpochGroupAlias(timeInt32, '$summarize'), \n measurement as metric, \n avg(valueOne) as valueOne,\n avg(valueTwo) as valueTwo\nFROM\n metric_values \nWHERE\n $__unixEpochFilter(timeInt32) AND\n ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n $__unixEpochGroup(timeInt32, '$summarize'), \n measurement \nORDER BY 1",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Multiple series with metric column using unixEpochGroup macro ($summarize)",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-mssql-ds-tests",
"fill": 2,
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 27
},
"id": 39,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": true,
"values": true
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"percentage": false,
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [
{
"alias": "MovingAverageValueOne",
"dashes": true,
"lines": false
},
{
"alias": "MovingAverageValueTwo",
"dashes": true,
"lines": false,
"yaxis": 1
}
],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__unixEpochGroupAlias(timeInt32, '$summarize'), \n avg(valueOne) as valueOne,\n avg(valueTwo) as valueTwo\nFROM\n metric_values \nWHERE\n $__unixEpochFilter(timeInt32) AND\n ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n $__unixEpochGroup(timeInt32, '$summarize')\nORDER BY 1",
"refId": "A"
},
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n time,\n avg(valueOne) OVER (ORDER BY time ROWS BETWEEN 6 PRECEDING AND 6 FOLLOWING) as MovingAverageValueOne,\n avg(valueTwo) OVER (ORDER BY time ROWS BETWEEN 6 PRECEDING AND 6 FOLLOWING) as MovingAverageValueTwo\nFROM\n metric_values \nWHERE \n $__timeFilter(time) AND \n ($metric = 'ALL' OR measurement = $metric)\nORDER BY 1",
"refId": "B"
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Multiple series without metric column using unixEpochGroup macro ($summarize)",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-mssql-ds-tests",
"fill": 2,
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 35
},
"id": 4,
"legend": {
"alignAsTable": true,
@ -1282,7 +1472,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 27
"y": 35
},
"id": 28,
"legend": {
@ -1367,7 +1557,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 35
"y": 43
},
"id": 19,
"legend": {
@ -1454,7 +1644,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 35
"y": 43
},
"id": 18,
"legend": {
@ -1539,7 +1729,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 43
"y": 51
},
"id": 17,
"legend": {
@ -1626,7 +1816,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 43
"y": 51
},
"id": 20,
"legend": {
@ -1711,7 +1901,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 51
"y": 59
},
"id": 29,
"legend": {
@ -1798,7 +1988,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 51
"y": 59
},
"id": 30,
"legend": {
@ -1885,7 +2075,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 59
"y": 67
},
"id": 14,
"legend": {
@ -1973,7 +2163,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 59
"y": 67
},
"id": 15,
"legend": {
@ -2060,7 +2250,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 67
"y": 75
},
"id": 25,
"legend": {
@ -2148,7 +2338,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 67
"y": 75
},
"id": 22,
"legend": {
@ -2235,7 +2425,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 75
"y": 83
},
"id": 21,
"legend": {
@ -2323,7 +2513,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 75
"y": 83
},
"id": 26,
"legend": {
@ -2410,7 +2600,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 83
"y": 91
},
"id": 23,
"legend": {
@ -2498,7 +2688,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 83
"y": 91
},
"id": 24,
"legend": {
@ -2708,5 +2898,5 @@
"timezone": "",
"title": "Datasource tests - MSSQL (unit test)",
"uid": "GlAqcPgmz",
"version": 10
"version": 2
}

View File

@ -64,7 +64,7 @@
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"iteration": 1533714324007,
"iteration": 1534508678095,
"links": [],
"panels": [
{
@ -1191,6 +1191,190 @@
"x": 0,
"y": 27
},
"id": 38,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"hideEmpty": false,
"hideZero": false,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": true,
"values": true
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"percentage": false,
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__unixEpochGroupAlias(timeInt32, '$summarize'), \n measurement, \n avg(valueOne) as valueOne,\n avg(valueTwo) as valueTwo\nFROM\n metric_values \nWHERE\n $__unixEpochFilter(timeInt32) AND\n measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1, 2",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Multiple series with metric column using unixEpochGroup macro ($summarize)",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-mysql-ds-tests",
"fill": 2,
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 27
},
"id": 39,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": true,
"values": true
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"percentage": false,
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [
{
"alias": "MovingAverageValueOne",
"dashes": true,
"lines": false
},
{
"alias": "MovingAverageValueTwo",
"dashes": true,
"lines": false,
"yaxis": 1
}
],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__unixEpochGroupAlias(timeInt32, '$summarize'), \n avg(valueOne) as valueOne,\n avg(valueTwo) as valueTwo\nFROM\n metric_values \nWHERE\n $__unixEpochFilter(timeInt32) AND\n measurement in($metric)\nGROUP BY 1\nORDER BY 1",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Multiple series without metric column using unixEpochGroup macro ($summarize)",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-mysql-ds-tests",
"fill": 2,
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 35
},
"id": 4,
"legend": {
"alignAsTable": true,
@ -1276,7 +1460,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 27
"y": 35
},
"id": 28,
"legend": {
@ -1361,7 +1545,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 35
"y": 43
},
"id": 19,
"legend": {
@ -1448,7 +1632,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 35
"y": 43
},
"id": 18,
"legend": {
@ -1533,7 +1717,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 43
"y": 51
},
"id": 17,
"legend": {
@ -1620,7 +1804,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 43
"y": 51
},
"id": 20,
"legend": {
@ -1705,7 +1889,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 51
"y": 59
},
"id": 14,
"legend": {
@ -1793,7 +1977,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 51
"y": 59
},
"id": 15,
"legend": {
@ -1880,7 +2064,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 59
"y": 67
},
"id": 25,
"legend": {
@ -1968,7 +2152,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 59
"y": 67
},
"id": 22,
"legend": {
@ -2055,7 +2239,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 67
"y": 75
},
"id": 21,
"legend": {
@ -2143,7 +2327,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 67
"y": 75
},
"id": 26,
"legend": {
@ -2230,7 +2414,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 75
"y": 83
},
"id": 23,
"legend": {
@ -2318,7 +2502,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 75
"y": 83
},
"id": 24,
"legend": {
@ -2526,5 +2710,5 @@
"timezone": "",
"title": "Datasource tests - MySQL (unittest)",
"uid": "Hmf8FDkmz",
"version": 9
"version": 2
}

View File

@ -64,7 +64,7 @@
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"iteration": 1533714184500,
"iteration": 1534507993194,
"links": [],
"panels": [
{
@ -1179,6 +1179,178 @@
"x": 0,
"y": 27
},
"id": 38,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"hideEmpty": false,
"hideZero": false,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": true,
"values": true
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"percentage": false,
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__unixEpochGroupAlias(\"timeInt32\", '$summarize'), \n measurement, \n avg(\"valueOne\") as \"valueOne\",\n avg(\"valueTwo\") as \"valueTwo\"\nFROM\n metric_values \nWHERE\n $__unixEpochFilter(\"timeInt32\") AND\n measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1, 2",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Multiple series with metric column using unixEpochGroup macro ($summarize)",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-postgres-ds-tests",
"fill": 2,
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 27
},
"id": 39,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": true,
"min": true,
"rightSide": true,
"show": true,
"total": true,
"values": true
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"percentage": false,
"pointradius": 3,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "",
"format": "time_series",
"rawSql": "SELECT \n $__unixEpochGroupAlias(\"timeInt32\", '$summarize'), \n avg(\"valueOne\") as \"valueOne\",\n avg(\"valueTwo\") as \"valueTwo\"\nFROM\n metric_values \nWHERE\n $__unixEpochFilter(\"timeInt32\") AND\n measurement in($metric)\nGROUP BY 1\nORDER BY 1",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Multiple series without metric column using timeGroup macro ($summarize)",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-postgres-ds-tests",
"fill": 2,
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 35
},
"id": 4,
"legend": {
"alignAsTable": true,
@ -1264,7 +1436,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 27
"y": 35
},
"id": 28,
"legend": {
@ -1349,7 +1521,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 35
"y": 43
},
"id": 19,
"legend": {
@ -1436,7 +1608,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 35
"y": 43
},
"id": 18,
"legend": {
@ -1521,7 +1693,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 43
"y": 51
},
"id": 17,
"legend": {
@ -1608,7 +1780,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 43
"y": 51
},
"id": 20,
"legend": {
@ -1693,7 +1865,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 51
"y": 59
},
"id": 14,
"legend": {
@ -1781,7 +1953,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 51
"y": 59
},
"id": 15,
"legend": {
@ -1868,7 +2040,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 59
"y": 67
},
"id": 25,
"legend": {
@ -1956,7 +2128,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 59
"y": 67
},
"id": 22,
"legend": {
@ -2043,7 +2215,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 67
"y": 75
},
"id": 21,
"legend": {
@ -2131,7 +2303,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 67
"y": 75
},
"id": 26,
"legend": {
@ -2218,7 +2390,7 @@
"h": 8,
"w": 12,
"x": 0,
"y": 75
"y": 83
},
"id": 23,
"legend": {
@ -2306,7 +2478,7 @@
"h": 8,
"w": 12,
"x": 12,
"y": 75
"y": 83
},
"id": 24,
"legend": {
@ -2518,5 +2690,5 @@
"timezone": "",
"title": "Datasource tests - Postgres (unittest)",
"uid": "vHQdlVziz",
"version": 9
"version": 1
}

View File

@ -7,11 +7,11 @@ bulkDashboard() {
COUNTER=0
MAX=400
while [ $COUNTER -lt $MAX ]; do
jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }"
jsonnet -o "bulk-dashboards/dashboard${COUNTER}.json" -e "local bulkDash = import 'bulk-dashboards/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }"
let COUNTER=COUNTER+1
done
ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
ln -s -f -r ./bulk-dashboards/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
}
requiresJsonnet() {

View File

@ -115,6 +115,8 @@ and `dimension keys/values`.
In place of `region` you can specify `default` to use the default region configured in the datasource for the query,
e.g. `metrics(AWS/DynamoDB, default)` or `dimension_values(default, ..., ..., ...)`.
Read more about the available dimensions in the [CloudWatch Metrics and Dimensions Reference](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CW_Support_For_AWS.html).
Name | Description
------- | --------
*regions()* | Returns a list of regions in which AWS provides their services.

View File

@ -58,8 +58,8 @@ a time pattern for the index name or a wildcard.
### Elasticsearch version
Be sure to specify your Elasticsearch version in the version selection dropdown. This is very important as there are differences how queries are composed. Currently only 2.x and 5.x
are supported.
Be sure to specify your Elasticsearch version in the version selection dropdown. This is very important as there are differences in how queries are composed.
Currently the available versions are 2.x, 5.x and 5.6+, where 5.6+ means a version of 5.6 or higher, for example 6.3.2.
### Min time interval
A lower limit for the auto group by time interval. Recommended to be set to write frequency, for example `1m` if your data is written every minute.

View File

@ -88,6 +88,8 @@ Macro example | Description
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
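A minimal sketch of how the two new macros combine in a query, borrowing the `metric_values` table and `timeInt32` epoch column from the gdev test dashboards in this commit; the `'5m'` interval is an example value:
```sql
SELECT
  $__unixEpochGroupAlias(timeInt32, '5m'), -- grouped epoch time, aliased as time
  avg(valueOne) as valueOne
FROM metric_values
WHERE $__unixEpochFilter(timeInt32)
GROUP BY $__unixEpochGroup(timeInt32, '5m')
ORDER BY 1
```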
We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.

View File

@ -71,6 +71,8 @@ Macro example | Description
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
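The same sketch for MySQL, again borrowing the `metric_values` test table from the gdev dashboards in this commit; MySQL allows grouping by column ordinal, so `GROUP BY 1` reuses the grouped expression:
```sql
SELECT
  $__unixEpochGroupAlias(timeInt32, '5m'), -- grouped epoch time, aliased as time
  avg(valueOne) as valueOne
FROM metric_values
WHERE $__unixEpochFilter(timeInt32)
GROUP BY 1
ORDER BY 1
```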
We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.

View File

@ -31,6 +31,7 @@ Name | Description
*User* | Database user's login/username
*Password* | Database user's password
*SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server.
*TimescaleDB* | With this option enabled Grafana will use TimescaleDB features, e.g. use `time_bucket` for grouping by time (only available in Grafana 5.3+).
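As a sketch of what the option changes, a `$__timeGroup` query like the one below (table and column names are hypothetical) has its time grouping expanded with TimescaleDB's `time_bucket` function instead of plain epoch arithmetic when the option is enabled:
```sql
SELECT
  $__timeGroup("time", '5m') AS time, -- grouping uses time_bucket when TimescaleDB is enabled
  avg(value) AS value
FROM metrics
WHERE $__timeFilter("time")
GROUP BY 1
ORDER BY 1
```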
### Database User Permissions (Important!)
@ -68,6 +69,8 @@ Macro example | Description
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
*$__unixEpochGroup(dateColumn,'5m', [fillmode])* | Same as $__timeGroup but for times stored as unix timestamp (only available in Grafana 5.3+).
*$__unixEpochGroupAlias(dateColumn,'5m', [fillmode])* | Same as above but also adds a column alias (only available in Grafana 5.3+).
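A corresponding sketch for PostgreSQL, using the quoted identifiers from the gdev test dashboards in this commit:
```sql
SELECT
  $__unixEpochGroupAlias("timeInt32", '5m'), -- grouped epoch time, aliased as time
  avg("valueOne") as "valueOne"
FROM metric_values
WHERE $__unixEpochFilter("timeInt32")
GROUP BY 1
ORDER BY 1
```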
We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
@ -289,4 +292,5 @@ datasources:
password: "Password!"
jsonData:
sslmode: "disable" # disable/require/verify-ca/verify-full
timescaledb: false
```

View File

@ -78,9 +78,9 @@ For details of *metric names*, *label names* and *label values* please refer
#### Using interval and range variables
> Support for `$__range` and `$__range_ms` only available from Grafana v5.3
> Support for `$__range`, `$__range_s` and `$__range_ms` only available from Grafana v5.3
It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since
It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range`, `$__range_s` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since
`label_values` function doesn't support queries.
Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard.
@ -94,10 +94,10 @@ Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instance)))
Regex: /"([^"]+)"/
```
Populate a variable with the instances having a certain state over the time range shown in the dashboard:
Populate a variable with the instances having a certain state over the time range shown in the dashboard, using the more precise `$__range_s`:
```
Query: query_result(max_over_time(<metric>[$__range]) != <state>)
Query: query_result(max_over_time(<metric>[${__range_s}s]) != <state>)
Regex:
```

View File

@ -54,7 +54,7 @@ We utilize a unit abstraction so that Grafana looks great on all screens both small and large.
> Note: With MaxDataPoint functionality, Grafana can show you the perfect amount of datapoints no matter your resolution or time-range.
Utilize the [Repeating Row functionality](/reference/templating/#utilizing-template-variables-with-repeating-panels-and-repeating-rows) to dynamically create or remove entire Rows (that can be filled with Panels), based on the Template variables selected.
Utilize the [Repeating Rows functionality](/reference/templating/#repeating-rows) to dynamically create or remove entire Rows (that can be filled with Panels), based on the Template variables selected.
Rows can be collapsed by clicking on the Row Title. If you save a Dashboard with a Row collapsed, it will save in that state and will not preload those graphs until the row is expanded.
@ -72,7 +72,7 @@ Panels like the [Graph](/reference/graph/) panel allow you to graph as many metrics and series as you want.
Panels can be made more dynamic by utilizing [Dashboard Templating](/reference/templating/) variable strings within the panel configuration (including queries to your Data Source configured via the Query Editor).
Utilize the [Repeating Panel](/reference/templating/#utilizing-template-variables-with-repeating-panels-and-repeating-rows) functionality to dynamically create or remove Panels based on the [Templating Variables](/reference/templating/#utilizing-template-variables-with-repeating-panels-and-repeating-rows) selected.
Utilize the [Repeating Panel](/reference/templating/#repeating-panels) functionality to dynamically create or remove Panels based on the [Templating Variables](/reference/templating/#repeating-panels) selected.
The time range on Panels is normally what is set in the [Dashboard time picker](/reference/timerange/) but this can be overridden by utilizing [Panel specific time overrides](/reference/timerange/#panel-time-overrides-timeshift).

View File

@ -59,7 +59,6 @@ Content-Type: application/json
"panelId": 1,
"name": "fire place sensor",
"state": "alerting",
"message": "Someone is trying to break in through the fire place",
"newStateDate": "2018-05-14T05:55:20+02:00",
"evalDate": "0001-01-01T00:00:00Z",
"evalData": null,

View File

@ -85,7 +85,7 @@ Status Codes:
- **403** Access denied
- **412** Precondition failed
The **412** status code is used for explaing that you cannot create the dashboard and why.
The **412** status code is used for explaining that you cannot create the dashboard and why.
There can be different reasons for this:
- The dashboard has been changed by someone else, `status=version-mismatch`

View File

@ -223,7 +223,7 @@ Status Codes:
- **404** Folder not found
- **412** Precondition failed
The **412** status code is used for explaing that you cannot update the folder and why.
The **412** status code is used for explaining that you cannot update the folder and why.
There can be different reasons for this:
- The folder has been changed by someone else, `status=version-mismatch`

View File

@ -363,6 +363,39 @@ Content-Type: application/json
]
```
## Teams that the current user is a member of
`GET /api/user/teams`
Return a list of all teams that the current user is a member of.
**Example Request**:
```http
GET /api/user/teams HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```
**Example Response**:
```http
HTTP/1.1 200
Content-Type: application/json
[
{
"id": 1,
"orgId": 1,
"name": "MyTestTeam",
"email": "",
"avatarUrl": "\/avatar\/3f49c15916554246daa714b9bd0ee398",
"memberCount": 1
}
]
```
## Star a dashboard
`POST /api/user/stars/dashboard/:dashboardId`

View File

@ -430,6 +430,108 @@ allowed_organizations = github google
<hr>
## [auth.gitlab]
> Only available in Grafana v5.3+.
You need to [create a GitLab OAuth
application](https://docs.gitlab.com/ce/integration/oauth_provider.html).
Choose a descriptive *Name*, and use the following *Redirect URI*:
```
https://grafana.example.com/login/gitlab
```
where `https://grafana.example.com` is the URL you use to connect to Grafana.
Adjust it as needed if you don't use HTTPS or if you use a different port; for
instance, if you access Grafana at `http://203.0.113.31:3000`, you should use
```
http://203.0.113.31:3000/login/gitlab
```
Finally, select *api* as the *Scope* and submit the form. Note that if you're
not going to use GitLab groups for authorization (i.e. not setting
`allowed_groups`, see below), you can select *read_user* instead of *api* as
the *Scope*, thus giving a more restricted access to your GitLab API.
You'll get an *Application Id* and a *Secret* in return; we'll call them
`GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this
section.
Add the following to your Grafana configuration file to enable GitLab
authentication:
```ini
[auth.gitlab]
enabled = true
allow_sign_up = false
client_id = GITLAB_APPLICATION_ID
client_secret = GITLAB_SECRET
scopes = api
auth_url = https://gitlab.com/oauth/authorize
token_url = https://gitlab.com/oauth/token
api_url = https://gitlab.com/api/v4
allowed_groups =
```
Restart the Grafana backend for your changes to take effect.
If you use your own instance of GitLab instead of `gitlab.com`, adjust
`auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com`
hostname with your own.
With `allow_sign_up` set to `false`, only existing users will be able to login
using their GitLab account, but with `allow_sign_up` set to `true`, *any* user
who can authenticate on GitLab will be able to login on your Grafana instance;
if you use the public `gitlab.com`, it means anyone in the world would be able
to login on your Grafana instance.
You can however limit access to only members of a given group or list of
groups by setting the `allowed_groups` option.
### allowed_groups
To limit access to authenticated users that are members of one or more [GitLab
groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups`
to a comma- or space-separated list of groups. For instance, if you want to
only give access to members of the `example` group, set
```ini
allowed_groups = example
```
If you want to also give access to members of the subgroup `bar`, which is in
the group `foo`, set
```ini
allowed_groups = example, foo/bar
```
Note that in GitLab, the group or subgroup name doesn't always match its
display name, especially if the display name contains spaces or special
characters. Make sure you always use the group or subgroup name as it appears
in the URL of the group or subgroup.
Here's a complete example with `allow_sign_up` enabled, and access limited to
the `example` and `foo/bar` groups:
```ini
[auth.gitlab]
enabled = true
allow_sign_up = true
client_id = GITLAB_APPLICATION_ID
client_secret = GITLAB_SECRET
scopes = api
auth_url = https://gitlab.com/oauth/authorize
token_url = https://gitlab.com/oauth/token
api_url = https://gitlab.com/api/v4
allowed_groups = example, foo/bar
```
<hr>
## [auth.google]
First, you need to create a Google OAuth Client:

View File

@ -38,6 +38,8 @@ The back-end web server has a number of configuration options. Go to the
[Configuration]({{< relref "configuration.md" >}}) page for details on all
those options.
> For any changes to `conf/grafana.ini` (or corresponding environment variables) to take effect you need to restart Grafana by restarting the Docker container.
## Running a Specific Version of Grafana
```bash
@ -49,10 +51,13 @@ $ docker run \
grafana/grafana:5.1.0
```
## Running of the master branch
## Running the master branch
For every successful commit we publish a Grafana container to [`grafana/grafana`](https://hub.docker.com/r/grafana/grafana/tags/) and [`grafana/grafana-dev`](https://hub.docker.com/r/grafana/grafana-dev/tags/). In `grafana/grafana` container we will always overwrite the `master` tag with the latest version. In `grafana/grafana-dev` we will include
the git commit in the tag. If you run Grafana master in production we **strongly** recommend that you use the later since different machines might run different version of grafana if they pull the master tag at different times.
For every successful build of the master branch we update the `grafana/grafana:master` tag and create a new tag `grafana/grafana-dev:master-<commit hash>` with the hash of the git commit that was built. This means you can always get the latest version of Grafana.
When running Grafana master in production we **strongly** recommend that you use the `grafana/grafana-dev:master-<commit hash>` tag as that will guarantee that you use a specific version of Grafana instead of whatever was the most recent commit at the time.
For a list of available tags, check out [grafana/grafana](https://hub.docker.com/r/grafana/grafana/tags/) and [grafana/grafana-dev](https://hub.docker.com/r/grafana/grafana-dev/tags/).
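For example, to pin a production deployment to one specific master build (substitute a real commit hash from the tag list):
```bash
docker run -d -p 3000:3000 grafana/grafana-dev:master-<commit hash>
```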
## Installing Plugins for Grafana

View File

@ -40,6 +40,9 @@ start_tls = false
ssl_skip_verify = false
# set to the path to your root CA certificate or leave unset to use system defaults
# root_ca_cert = "/path/to/certificate.crt"
# Authentication against LDAP servers requiring client certificates
# client_cert = "/path/to/client.crt"
# client_key = "/path/to/client.key"
# Search user bind dn
bind_dn = "cn=admin,dc=grafana,dc=org"

View File

@ -57,7 +57,7 @@ For this you need nodejs (v.6+).
```bash
npm install -g yarn
yarn install --pure-lockfile
npm run watch
yarn watch
```
## Running Grafana Locally
@ -83,21 +83,18 @@ go get github.com/Unknwon/bra
bra run
```
You'll also need to run `npm run watch` to watch for changes to the front-end (typescript, html, sass)
You'll also need to run `yarn watch` to watch for changes to the front-end (typescript, html, sass)
### Running tests
- You can run backend Golang tests using "go test ./pkg/...".
- Execute all frontend tests with "npm run test"
- You can run backend Golang tests using `go test ./pkg/...`.
- Execute all frontend tests with `yarn test`
Writing & watching frontend tests (we have two test runners)
Writing & watching frontend tests
- Start watcher: `yarn jest`
- Jest will run all test files that end with the name ".test.ts"
- jest for all new tests that do not require browser context (React+more)
- Start watcher: `npm run jest`
- Jest will run all test files that end with the name ".jest.ts"
- karma + mocha is used for testing angularjs components. We do want to migrate these test to jest over time (if possible).
- Start watcher: `npm run karma`
- Karma+Mocha runs all files that end with the name "_specs.ts".
## Creating optimized release packages

View File

@ -277,31 +277,45 @@ This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab.
> Only available in Grafana v5.3+
Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`.
Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has millisecond and second representations called `$__range_ms` and `$__range_s`.
## Repeating Panels
Template variables can be very useful to dynamically change your queries across a whole dashboard. If you want
Grafana to dynamically create new panels or rows based on what values you have selected you can use the *Repeat* feature.
If you have a variable with `Multi-value` or `Include all value` options enabled you can choose one panel or one row and have Grafana repeat that row
for every selected value. You find this option under the General tab in panel edit mode. Select the variable to repeat by, and a `min span`.
The `min span` controls how small Grafana will make the panels (if you have many values selected). Grafana will automatically adjust the width of
each repeated panel so that the whole row is filled. Currently, you cannot mix other panels on a row with a repeated panel.
If you have a variable with `Multi-value` or `Include all value` options enabled you can choose one panel and have Grafana repeat that panel
for every selected value. You find the *Repeat* feature under the *General tab* in panel edit mode.
The `direction` controls how the panels will be arranged.
By choosing `horizontal` the panels will be arranged side-by-side. Grafana will automatically adjust the width
of each repeated panel so that the whole row is filled. Currently, you cannot mix other panels on a row with a repeated
panel. Each panel will never be smaller than the provided `Min width` if you have many selected values.
By choosing `vertical` the panels will be arranged from top to bottom in a column. The `Min width` doesn't have any effect in this case. The width of the repeated panels will be the same as that of the first panel (the original template) being repeated.
Only make changes to the first panel (the original template). To have the changes take effect on all panels you need to trigger a dynamic dashboard re-build.
You can do this by either changing the variable value (that is the basis for the repeat) or by reloading the dashboard.
## Repeating Rows
This option requires you to open the row options view. Hover over the row left side to trigger the row menu, in this menu click `Row Options`. This
opens the row options view. Here you find a *Repeat* dropdown where you can select the variable to repeat by.
As seen above with the *Panels* you can also repeat *Rows* if you have variables set with `Multi-value` or
`Include all value` selection option.
### URL state
To enable this feature you need to first add a new *Row* using the *Add Panel* menu. Then, by hovering over the row title and
clicking on the cog button, you will access the `Row Options` configuration panel. You can then select the variable
you want to repeat the row for.
It may be a good idea to use a variable in the row title as well.
Example: [Repeated Rows Dashboard](http://play.grafana.org/dashboard/db/repeated-rows)
## URL state
Variable values are always synced to the URL using the syntax `var-<varname>=value`.
### Examples
## Examples
- [Graphite Templated Dashboard](http://play.grafana.org/dashboard/db/graphite-templated-nested)
- [Elasticsearch Templated Dashboard](http://play.grafana.org/dashboard/db/elasticsearch-templated)

View File

@ -13,7 +13,7 @@ module.exports = {
"roots": [
"<rootDir>/public"
],
"testRegex": "(\\.|/)(jest)\\.(jsx?|tsx?)$",
"testRegex": "(\\.|/)(test)\\.(jsx?|tsx?)$",
"moduleFileExtensions": [
"ts",
"tsx",

View File

@ -1,40 +0,0 @@
var webpack = require('webpack');
var path = require('path');
var webpackTestConfig = require('./scripts/webpack/webpack.test.js');
module.exports = function(config) {
'use strict';
config.set({
frameworks: ['mocha', 'expect', 'sinon'],
// list of files / patterns to load in the browser
files: [
{ pattern: 'public/test/index.ts', watched: false }
],
preprocessors: {
'public/test/index.ts': ['webpack', 'sourcemap'],
},
webpack: webpackTestConfig,
webpackMiddleware: {
stats: 'minimal',
},
// list of files to exclude
exclude: [],
reporters: ['dots'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['PhantomJS'],
captureTimeout: 20000,
singleRun: true,
// autoWatchBatchDelay: 1000,
// browserNoActivityTimeout: 60000,
});
};

View File

@ -32,7 +32,6 @@
"es6-shim": "^0.35.3",
"expect.js": "~0.2.0",
"expose-loader": "^0.7.3",
"extract-text-webpack-plugin": "^4.0.0-beta.0",
"file-loader": "^1.1.11",
"fork-ts-checker-webpack-plugin": "^0.4.2",
"gaze": "^1.1.2",
@ -45,10 +44,7 @@
"grunt-contrib-concat": "^1.0.1",
"grunt-contrib-copy": "~1.0.0",
"grunt-contrib-cssmin": "~1.0.2",
"grunt-contrib-jshint": "~1.1.0",
"grunt-exec": "^1.0.1",
"grunt-jscs": "3.0.1",
"grunt-karma": "~2.0.0",
"grunt-notify": "^0.4.5",
"grunt-postcss": "^0.8.0",
"grunt-sass": "^2.0.0",
@ -60,22 +56,13 @@
"html-webpack-plugin": "^3.2.0",
"husky": "^0.14.3",
"jest": "^22.0.4",
"jshint-stylish": "~2.2.1",
"karma": "1.7.0",
"karma-chrome-launcher": "~2.2.0",
"karma-expect": "~1.1.3",
"karma-mocha": "~1.3.0",
"karma-phantomjs-launcher": "1.0.4",
"karma-sinon": "^1.0.5",
"karma-sourcemap-loader": "^0.3.7",
"karma-webpack": "^3.0.0",
"lint-staged": "^6.0.0",
"load-grunt-tasks": "3.5.2",
"mini-css-extract-plugin": "^0.4.0",
"mobx-react-devtools": "^4.2.15",
"mocha": "^4.0.1",
"ng-annotate-loader": "^0.6.1",
"ng-annotate-webpack-plugin": "^0.2.1-pre",
"ng-annotate-webpack-plugin": "^0.3.0",
"ngtemplate-loader": "^2.0.1",
"npm": "^5.4.2",
"optimize-css-assets-webpack-plugin": "^4.0.2",
@ -115,7 +102,6 @@
"test": "grunt test",
"test:coverage": "grunt test --coverage=true",
"lint": "tslint -c tslint.json --project tsconfig.json --type-check",
"karma": "grunt karma:dev",
"jest": "jest --notify --watch",
"api-tests": "jest --notify --watch --config=tests/api/jest.js",
"precommit": "lint-staged && grunt precommit"

View File

@ -1,7 +1,5 @@
# Grafana Docker image
[![CircleCI](https://circleci.com/gh/grafana/grafana-docker.svg?style=svg)](https://circleci.com/gh/grafana/grafana-docker)
## Running your Grafana container
Start your container binding the external port `3000`.

View File

@ -15,10 +15,10 @@ fi
echo "pushing ${_docker_repo}:${_grafana_version}"
docker push "${_docker_repo}:${_grafana_version}"
if echo "$_grafana_tag" | grep -q "^v"; then
if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"; then
echo "pushing ${_docker_repo}:latest"
docker push "${_docker_repo}:latest"
else
elif echo "$_grafana_tag" | grep -q "master"; then
echo "pushing grafana/grafana:master"
docker push grafana/grafana:master
fi

View File

@ -67,7 +67,13 @@ if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then
IFS=','
for plugin in ${GF_INSTALL_PLUGINS}; do
IFS=$OLDIFS
if [[ $plugin =~ .*\;.* ]]; then
pluginUrl=$(echo "$plugin" | cut -d';' -f 1)
pluginWithoutUrl=$(echo "$plugin" | cut -d';' -f 2)
grafana-cli --pluginUrl "${pluginUrl}" --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${pluginWithoutUrl}
else
grafana-cli --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${plugin}
fi
done
fi
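A sketch of how the new `url;name` syntax could be used when starting the container; the URL and plugin name here are hypothetical, and plain comma-separated entries keep working as before:
```bash
docker run -d -p 3000:3000 \
  -e "GF_INSTALL_PLUGINS=https://example.com/my-plugin.zip;my-plugin,grafana-clock-panel" \
  grafana/grafana
```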

View File

@ -120,6 +120,7 @@ func (hs *HTTPServer) registerRoutes() {
userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser))
userRoute.Post("/using/:id", Wrap(UserSetUsingOrg))
userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList))
userRoute.Get("/teams", Wrap(GetSignedInUserTeamList))
userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard))
userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard))

View File

@ -203,6 +203,7 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
req.Header.Del("X-Forwarded-Host")
req.Header.Del("X-Forwarded-Port")
req.Header.Del("X-Forwarded-Proto")
req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
// set X-Forwarded-For header
if req.RemoteAddr != "" {
@ -319,9 +320,15 @@ func (proxy *DataSourceProxy) applyRoute(req *http.Request) {
SecureJsonData: proxy.ds.SecureJsonData.Decrypt(),
}
routeURL, err := url.Parse(proxy.route.Url)
interpolatedURL, err := interpolateString(proxy.route.Url, data)
if err != nil {
logger.Error("Error parsing plugin route url")
logger.Error("Error interpolating proxy url", "error", err)
return
}
routeURL, err := url.Parse(interpolatedURL)
if err != nil {
logger.Error("Error parsing plugin route url", "error", err)
return
}

View File

@ -49,6 +49,13 @@ func TestDSRouteRule(t *testing.T) {
{Name: "x-header", Content: "my secret {{.SecureJsonData.key}}"},
},
},
{
Path: "api/common",
Url: "{{.JsonData.dynamicUrl}}",
Headers: []plugins.AppPluginRouteHeader{
{Name: "x-header", Content: "my secret {{.SecureJsonData.key}}"},
},
},
},
}
@ -58,6 +65,7 @@ func TestDSRouteRule(t *testing.T) {
ds := &m.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"clientId": "asd",
"dynamicUrl": "https://dynamic.grafana.com",
}),
SecureJsonData: map[string][]byte{
"key": key,
@ -83,6 +91,17 @@ func TestDSRouteRule(t *testing.T) {
})
})
Convey("When matching route path and has dynamic url", func() {
proxy := NewDataSourceProxy(ds, plugin, ctx, "api/common/some/method")
proxy.route = plugin.Routes[3]
proxy.applyRoute(req)
Convey("should add headers and interpolate the url", func() {
So(req.URL.String(), ShouldEqual, "https://dynamic.grafana.com/some/method")
So(req.Header.Get("x-header"), ShouldEqual, "my secret 123")
})
})
Convey("Validating request", func() {
Convey("plugin route with valid role", func() {
proxy := NewDataSourceProxy(ds, plugin, ctx, "api/v4/some/method")
@ -212,20 +231,21 @@ func TestDSRouteRule(t *testing.T) {
})
Convey("When proxying graphite", func() {
setting.BuildVersion = "5.3.0"
plugin := &plugins.DataSourcePlugin{}
ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE}
ctx := &m.ReqContext{}
proxy := NewDataSourceProxy(ds, plugin, ctx, "/render")
req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
So(err, ShouldBeNil)
requestURL, _ := url.Parse("http://grafana.com/sub")
req := http.Request{URL: requestURL}
proxy.getDirector()(&req)
proxy.getDirector()(req)
Convey("Can translate request url and path", func() {
So(req.URL.Host, ShouldEqual, "graphite:8080")
So(req.URL.Path, ShouldEqual, "/render")
So(req.Header.Get("User-Agent"), ShouldEqual, "Grafana/5.3.0")
})
})
@ -243,10 +263,10 @@ func TestDSRouteRule(t *testing.T) {
ctx := &m.ReqContext{}
proxy := NewDataSourceProxy(ds, plugin, ctx, "")
requestURL, _ := url.Parse("http://grafana.com/sub")
req := http.Request{URL: requestURL}
req, err := http.NewRequest(http.MethodGet, "http://grafana.com/sub", nil)
So(err, ShouldBeNil)
proxy.getDirector()(&req)
proxy.getDirector()(req)
Convey("Should add db to url", func() {
So(req.URL.Path, ShouldEqual, "/db/site/")

View File

@ -111,6 +111,21 @@ func GetSignedInUserOrgList(c *m.ReqContext) Response {
return getUserOrgList(c.UserId)
}
// GET /api/user/teams
func GetSignedInUserTeamList(c *m.ReqContext) Response {
query := m.GetTeamsByUserQuery{OrgId: c.OrgId, UserId: c.UserId}
if err := bus.Dispatch(&query); err != nil {
return Error(500, "Failed to get user teams", err)
}
for _, team := range query.Result {
team.AvatarUrl = dtos.GetGravatarUrlWithDefault(team.Email, team.Name)
}
return JSON(200, query.Result)
}
// GET /api/user/:id/orgs
func GetUserOrgList(c *m.ReqContext) Response {
return getUserOrgList(c.ParamsInt64(":id"))

View File

@ -59,6 +59,13 @@ func (a *ldapAuther) Dial() error {
}
}
}
var clientCert tls.Certificate
if a.server.ClientCert != "" && a.server.ClientKey != "" {
clientCert, err = tls.LoadX509KeyPair(a.server.ClientCert, a.server.ClientKey)
if err != nil {
return err
}
}
for _, host := range strings.Split(a.server.Host, " ") {
address := fmt.Sprintf("%s:%d", host, a.server.Port)
if a.server.UseSSL {
@ -67,6 +74,9 @@ func (a *ldapAuther) Dial() error {
ServerName: host,
RootCAs: certPool,
}
if len(clientCert.Certificate) > 0 {
tlsCfg.Certificates = append(tlsCfg.Certificates, clientCert)
}
if a.server.StartTLS {
a.conn, err = ldap.Dial("tcp", address)
if err == nil {

View File

@ -21,6 +21,8 @@ type LdapServerConf struct {
StartTLS bool `toml:"start_tls"`
SkipVerifySSL bool `toml:"ssl_skip_verify"`
RootCACert string `toml:"root_ca_cert"`
ClientCert string `toml:"client_cert"`
ClientKey string `toml:"client_key"`
BindDN string `toml:"bind_dn"`
BindPassword string `toml:"bind_password"`
Attr LdapAttributeMap `toml:"attributes"`

View File

@ -8,4 +8,5 @@ const (
TWITTER
GENERIC
GRAFANA_COM
GITLAB
)

View File

@ -3,7 +3,6 @@ package alerting
import (
"errors"
"fmt"
"time"
"golang.org/x/sync/errgroup"
@ -81,7 +80,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
renderOpts := rendering.Opts{
Width: 1000,
Height: 500,
Timeout: time.Second * 30,
Timeout: alertTimeout / 2,
OrgId: context.Rule.OrgId,
OrgRole: m.ROLE_ADMIN,
}

View File

@ -58,7 +58,7 @@ func init() {
data-placement="right">
</input>
<info-popover mode="right-absolute">
Provide a bot token to use the Slack file.upload API (starts with "xoxb")
Provide a bot token to use the Slack file.upload API (starts with "xoxb"). Specify #channel-name or @username in Recipient for this to work
</info-popover>
</div>
`,

View File

@ -83,7 +83,7 @@ func (cr *configReader) parseDatasourceConfig(path string, file os.FileInfo) (*D
}
func validateDefaultUniqueness(datasources []*DatasourcesAsConfig) error {
defaultCount := 0
defaultCount := map[int64]int{}
for i := range datasources {
if datasources[i].Datasources == nil {
continue
@ -95,8 +95,8 @@ func validateDefaultUniqueness(datasources []*DatasourcesAsConfig) error {
}
if ds.IsDefault {
defaultCount++
if defaultCount > 1 {
defaultCount[ds.OrgId] = defaultCount[ds.OrgId] + 1
if defaultCount[ds.OrgId] > 1 {
return ErrInvalidConfigToManyDefault
}
}

View File

@ -19,6 +19,7 @@ var (
allProperties = "testdata/all-properties"
versionZero = "testdata/version-0"
brokenYaml = "testdata/broken-yaml"
multipleOrgsWithDefault = "testdata/multiple-org-default"
fakeRepo *fakeRepository
)
@ -73,6 +74,19 @@ func TestDatasourceAsConfig(t *testing.T) {
})
})
Convey("Multiple datasources in different organizations with isDefault in each organization", func() {
dc := newDatasourceProvisioner(logger)
err := dc.applyChanges(multipleOrgsWithDefault)
Convey("should not raise error", func() {
So(err, ShouldBeNil)
So(len(fakeRepo.inserted), ShouldEqual, 4)
So(fakeRepo.inserted[0].IsDefault, ShouldBeTrue)
So(fakeRepo.inserted[0].OrgId, ShouldEqual, 1)
So(fakeRepo.inserted[2].IsDefault, ShouldBeTrue)
So(fakeRepo.inserted[2].OrgId, ShouldEqual, 2)
})
})
Convey("Two configured datasource and purge others ", func() {
Convey("two other datasources in database", func() {
fakeRepo.loadAll = []*models.DataSource{

View File

@ -11,7 +11,7 @@ import (
)
var (
ErrInvalidConfigToManyDefault = errors.New("datasource.yaml config is invalid. Only one datasource can be marked as default")
ErrInvalidConfigToManyDefault = errors.New("datasource.yaml config is invalid. Only one datasource per organization can be marked as default")
)
func Provision(configDirectory string) error {

View File

@ -0,0 +1,25 @@
apiVersion: 1
datasources:
- orgId: 1
name: prometheus
type: prometheus
isDefault: True
access: proxy
url: http://prometheus.example.com:9090
- name: Graphite
type: graphite
access: proxy
url: http://localhost:8080
- orgId: 2
name: prometheus
type: prometheus
isDefault: True
access: proxy
url: http://prometheus.example.com:9090
- orgId: 2
name: Graphite
type: graphite
access: proxy
url: http://localhost:8080

132
pkg/social/gitlab_oauth.go Normal file
View File

@ -0,0 +1,132 @@
package social
import (
"encoding/json"
"fmt"
"net/http"
"regexp"
"github.com/grafana/grafana/pkg/models"
"golang.org/x/oauth2"
)
type SocialGitlab struct {
*SocialBase
allowedDomains []string
allowedGroups []string
apiUrl string
allowSignup bool
}
var (
ErrMissingGroupMembership = &Error{"User not a member of one of the required groups"}
)
func (s *SocialGitlab) Type() int {
return int(models.GITLAB)
}
func (s *SocialGitlab) IsEmailAllowed(email string) bool {
return isEmailAllowed(email, s.allowedDomains)
}
func (s *SocialGitlab) IsSignupAllowed() bool {
return s.allowSignup
}
func (s *SocialGitlab) IsGroupMember(client *http.Client) bool {
if len(s.allowedGroups) == 0 {
return true
}
for groups, url := s.GetGroups(client, s.apiUrl+"/groups"); groups != nil; groups, url = s.GetGroups(client, url) {
for _, allowedGroup := range s.allowedGroups {
for _, group := range groups {
if group == allowedGroup {
return true
}
}
}
}
return false
}
func (s *SocialGitlab) GetGroups(client *http.Client, url string) ([]string, string) {
type Group struct {
FullPath string `json:"full_path"`
}
var (
groups []Group
next string
)
if url == "" {
return nil, next
}
response, err := HttpGet(client, url)
if err != nil {
s.log.Error("Error getting groups from GitLab API", "err", err)
return nil, next
}
if err := json.Unmarshal(response.Body, &groups); err != nil {
s.log.Error("Error parsing JSON from GitLab API", "err", err)
return nil, next
}
fullPaths := make([]string, len(groups))
for i, group := range groups {
fullPaths[i] = group.FullPath
}
if link, ok := response.Headers["Link"]; ok {
pattern := regexp.MustCompile(`<([^>]+)>; rel="next"`)
if matches := pattern.FindStringSubmatch(link[0]); matches != nil {
next = matches[1]
}
}
return fullPaths, next
}
func (s *SocialGitlab) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) {
var data struct {
Id int
Username string
Email string
Name string
State string
}
response, err := HttpGet(client, s.apiUrl+"/user")
if err != nil {
return nil, fmt.Errorf("Error getting user info: %s", err)
}
err = json.Unmarshal(response.Body, &data)
if err != nil {
return nil, fmt.Errorf("Error getting user info: %s", err)
}
if data.State != "active" {
return nil, fmt.Errorf("User %s is inactive", data.Username)
}
userInfo := &BasicUserInfo{
Id: fmt.Sprintf("%d", data.Id),
Name: data.Name,
Login: data.Username,
Email: data.Email,
}
if !s.IsGroupMember(client) {
return nil, ErrMissingGroupMembership
}
return userInfo, nil
}

View File

@ -55,7 +55,7 @@ func NewOAuthService() {
setting.OAuthService = &setting.OAuther{}
setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
allOauthes := []string{"github", "google", "generic_oauth", "grafananet", "grafana_com"}
allOauthes := []string{"github", "gitlab", "google", "generic_oauth", "grafananet", "grafana_com"}
for _, name := range allOauthes {
sec := setting.Raw.Section("auth." + name)
@ -115,6 +115,20 @@ func NewOAuthService() {
}
}
// GitLab.
if name == "gitlab" {
SocialMap["gitlab"] = &SocialGitlab{
SocialBase: &SocialBase{
Config: &config,
log: logger,
},
allowedDomains: info.AllowedDomains,
apiUrl: info.ApiUrl,
allowSignup: info.AllowSignup,
allowedGroups: util.SplitString(sec.Key("allowed_groups").String()),
}
}
// Google.
if name == "google" {
SocialMap["google"] = &SocialGoogle{

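To exercise the new provider end to end, the `auth.gitlab` section read above can also be supplied through Grafana's `GF_<SECTION>_<KEY>` environment overrides. A hedged sketch, assuming gitlab.com endpoints (client id, secret, and group path are placeholders):

```
# all values below are placeholders; keys mirror the auth.gitlab ini options
docker run -d -p 3000:3000 \
  -e "GF_AUTH_GITLAB_ENABLED=true" \
  -e "GF_AUTH_GITLAB_CLIENT_ID=YOUR_APPLICATION_ID" \
  -e "GF_AUTH_GITLAB_CLIENT_SECRET=YOUR_SECRET" \
  -e "GF_AUTH_GITLAB_SCOPES=api" \
  -e "GF_AUTH_GITLAB_AUTH_URL=https://gitlab.com/oauth/authorize" \
  -e "GF_AUTH_GITLAB_TOKEN_URL=https://gitlab.com/oauth/token" \
  -e "GF_AUTH_GITLAB_API_URL=https://gitlab.com/api/v4" \
  -e "GF_AUTH_GITLAB_ALLOWED_GROUPS=example-group" \
  grafana/grafana
```

With `allowed_groups` set, `IsGroupMember` pages through the `/groups` endpoint and refuses sign-in with `ErrMissingGroupMembership` unless one of the user's group full paths matches an allowed group.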
View File

@ -6,8 +6,6 @@ import (
"strings"
"time"
"strconv"
"github.com/grafana/grafana/pkg/tsdb"
)
@ -97,20 +95,9 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return "", fmt.Errorf("error parsing interval %v", args[1])
}
if len(args) == 3 {
m.query.Model.Set("fill", true)
m.query.Model.Set("fillInterval", interval.Seconds())
switch args[2] {
case "NULL":
m.query.Model.Set("fillMode", "null")
case "previous":
m.query.Model.Set("fillMode", "previous")
default:
m.query.Model.Set("fillMode", "value")
floatVal, err := strconv.ParseFloat(args[2], 64)
err := tsdb.SetupFillmode(m.query, interval, args[2])
if err != nil {
return "", fmt.Errorf("error parsing fill value %v", args[2])
}
m.query.Model.Set("fillValue", floatVal)
return "", err
}
}
return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@ -129,6 +116,27 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
case "__unixEpochGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
}
interval, err := time.ParseDuration(strings.Trim(args[1], `'`))
if err != nil {
return "", fmt.Errorf("error parsing interval %v", args[1])
}
if len(args) == 3 {
err := tsdb.SetupFillmode(m.query, interval, args[2])
if err != nil {
return "", err
}
}
return fmt.Sprintf("FLOOR(%s/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochGroupAlias":
tg, err := m.evaluateMacro("__unixEpochGroup", args)
if err == nil {
return tg + " AS [time]", err
}
return "", err
default:
return "", fmt.Errorf("Unknown macro %v", name)
}

View File

@ -145,6 +145,18 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
})
Convey("interpolate __unixEpochGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
So(err, ShouldBeNil)
sql2, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroupAlias(time_column,'5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "SELECT FLOOR(time_column/300)*300")
So(sql2, ShouldEqual, sql+" AS [time]")
})
})
Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {

View File

@ -3,7 +3,6 @@ package mysql
import (
"fmt"
"regexp"
"strconv"
"strings"
"time"
@ -92,20 +91,9 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return "", fmt.Errorf("error parsing interval %v", args[1])
}
if len(args) == 3 {
m.query.Model.Set("fill", true)
m.query.Model.Set("fillInterval", interval.Seconds())
switch args[2] {
case "NULL":
m.query.Model.Set("fillMode", "null")
case "previous":
m.query.Model.Set("fillMode", "previous")
default:
m.query.Model.Set("fillMode", "value")
floatVal, err := strconv.ParseFloat(args[2], 64)
err := tsdb.SetupFillmode(m.query, interval, args[2])
if err != nil {
return "", fmt.Errorf("error parsing fill value %v", args[2])
}
m.query.Model.Set("fillValue", floatVal)
return "", err
}
}
return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
@ -124,6 +112,27 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
case "__unixEpochGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
}
interval, err := time.ParseDuration(strings.Trim(args[1], `'`))
if err != nil {
return "", fmt.Errorf("error parsing interval %v", args[1])
}
if len(args) == 3 {
err := tsdb.SetupFillmode(m.query, interval, args[2])
if err != nil {
return "", err
}
}
return fmt.Sprintf("%s DIV %v * %v", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochGroupAlias":
tg, err := m.evaluateMacro("__unixEpochGroup", args)
if err == nil {
return tg + " AS \"time\"", err
}
return "", err
default:
return "", fmt.Errorf("Unknown macro %v", name)
}

View File

@ -97,6 +97,18 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
})
Convey("interpolate __unixEpochGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
So(err, ShouldBeNil)
sql2, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroupAlias(time_column,'5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "SELECT time_column DIV 300 * 300")
So(sql2, ShouldEqual, sql+" AS \"time\"")
})
})
Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {

View File

@ -3,7 +3,6 @@ package postgres
import (
"fmt"
"regexp"
"strconv"
"strings"
"time"
@ -17,10 +16,11 @@ const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type postgresMacroEngine struct {
timeRange *tsdb.TimeRange
query *tsdb.Query
timescaledb bool
}
func newPostgresMacroEngine() tsdb.SqlMacroEngine {
return &postgresMacroEngine{}
func newPostgresMacroEngine(timescaledb bool) tsdb.SqlMacroEngine {
return &postgresMacroEngine{timescaledb: timescaledb}
}
func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
@ -114,23 +114,17 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
return "", fmt.Errorf("error parsing interval %v", args[1])
}
if len(args) == 3 {
m.query.Model.Set("fill", true)
m.query.Model.Set("fillInterval", interval.Seconds())
switch args[2] {
case "NULL":
m.query.Model.Set("fillMode", "null")
case "previous":
m.query.Model.Set("fillMode", "previous")
default:
m.query.Model.Set("fillMode", "value")
floatVal, err := strconv.ParseFloat(args[2], 64)
err := tsdb.SetupFillmode(m.query, interval, args[2])
if err != nil {
return "", fmt.Errorf("error parsing fill value %v", args[2])
}
m.query.Model.Set("fillValue", floatVal)
return "", err
}
}
if m.timescaledb {
return fmt.Sprintf("time_bucket('%vs',%s)", interval.Seconds(), args[0]), nil
} else {
return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
}
case "__timeGroupAlias":
tg, err := m.evaluateMacro("__timeGroup", args)
if err == nil {
@ -146,6 +140,27 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil
case "__unixEpochGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name)
}
interval, err := time.ParseDuration(strings.Trim(args[1], `'`))
if err != nil {
return "", fmt.Errorf("error parsing interval %v", args[1])
}
if len(args) == 3 {
err := tsdb.SetupFillmode(m.query, interval, args[2])
if err != nil {
return "", err
}
}
return fmt.Sprintf("floor(%s/%v)*%v", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochGroupAlias":
tg, err := m.evaluateMacro("__unixEpochGroup", args)
if err == nil {
return tg + " AS \"time\"", err
}
return "", err
default:
return "", fmt.Errorf("Unknown macro %v", name)
}

View File

@ -12,7 +12,10 @@ import (
func TestMacroEngine(t *testing.T) {
Convey("MacroEngine", t, func() {
engine := newPostgresMacroEngine()
timescaledbEnabled := false
engine := newPostgresMacroEngine(timescaledbEnabled)
timescaledbEnabled = true
engineTS := newPostgresMacroEngine(timescaledbEnabled)
query := &tsdb.Query{}
Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() {
@ -83,6 +86,22 @@ func TestMacroEngine(t *testing.T) {
So(sql2, ShouldEqual, sql+" AS \"time\"")
})
Convey("interpolate __timeGroup function with TimescaleDB enabled", func() {
sql, err := engineTS.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
})
Convey("interpolate __timeGroup function with spaces between args and TimescaleDB enabled", func() {
sql, err := engineTS.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "GROUP BY time_bucket('300s',time_column)")
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
@ -110,6 +129,18 @@ func TestMacroEngine(t *testing.T) {
So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix()))
})
Convey("interpolate __unixEpochGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroup(time_column,'5m')")
So(err, ShouldBeNil)
sql2, err := engine.Interpolate(query, timeRange, "SELECT $__unixEpochGroupAlias(time_column,'5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "SELECT floor(time_column/300)*300")
So(sql2, ShouldEqual, sql+" AS \"time\"")
})
})
Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() {

View File

@ -32,7 +32,9 @@ func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndp
log: logger,
}
return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger)
timescaledb := datasource.JsonData.Get("timescaledb").MustBool(false)
return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(timescaledb), logger)
}
func generateConnectionString(datasource *models.DataSource) string {

View File

@ -27,7 +27,7 @@ import (
// used to verify that the generated data are visualized as expected, see
// devenv/README.md for setup instructions.
func TestPostgres(t *testing.T) {
// change to true to run the MySQL tests
// change to true to run the PostgreSQL tests
runPostgresTests := false
// runPostgresTests := true

View File

@ -6,6 +6,7 @@ import (
"database/sql"
"fmt"
"math"
"strconv"
"strings"
"sync"
"time"
@ -568,3 +569,23 @@ func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (n
return value, nil
}
func SetupFillmode(query *Query, interval time.Duration, fillmode string) error {
query.Model.Set("fill", true)
query.Model.Set("fillInterval", interval.Seconds())
switch fillmode {
case "NULL":
query.Model.Set("fillMode", "null")
case "previous":
query.Model.Set("fillMode", "previous")
default:
query.Model.Set("fillMode", "value")
floatVal, err := strconv.ParseFloat(fillmode, 64)
if err != nil {
return fmt.Errorf("error parsing fill value %v", fillmode)
}
query.Model.Set("fillValue", floatVal)
}
return nil
}

View File

@ -207,6 +207,7 @@ export class Explore extends React.Component<any, IExploreState> {
datasourceError: null,
datasourceLoading: true,
graphResult: null,
latency: 0,
logsResult: null,
queryErrors: [],
queryHints: [],
@ -254,7 +255,10 @@ export class Explore extends React.Component<any, IExploreState> {
this.setState({
graphResult: null,
logsResult: null,
latency: 0,
queries: ensureQueries(),
queryErrors: [],
queryHints: [],
tableResult: null,
});
};
@ -276,8 +280,10 @@ export class Explore extends React.Component<any, IExploreState> {
onClickSplit = () => {
const { onChangeSplit } = this.props;
const state = { ...this.state };
state.queries = state.queries.map(({ edited, ...rest }) => rest);
if (onChangeSplit) {
onChangeSplit(true, this.state);
onChangeSplit(true, state);
}
};

View File

@ -331,7 +331,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
}
break;
}
case 'Enter':
case 'Tab': {
if (this.menuEl) {
// Don't blur input

View File

@ -69,8 +69,9 @@ export class TeamMembers extends React.Component<Props, State> {
render() {
const { newTeamMember, isAdding } = this.state;
const members = this.props.team.members.values();
const members = this.props.team.filteredMembers;
const newTeamMemberValue = newTeamMember && newTeamMember.id.toString();
const { team } = this.props;
return (
<div>
@ -81,7 +82,7 @@ export class TeamMembers extends React.Component<Props, State> {
type="text"
className="gf-form-input"
placeholder="Search members"
value={''}
value={team.search}
onChange={this.onSearchQueryChange}
/>
<i className="gf-form-input-icon fa fa-search" />

View File

@ -25,6 +25,7 @@ export class HelpCtrl {
{ keys: ['d', 'k'], description: 'Toggle kiosk mode (hides top nav)' },
{ keys: ['d', 'E'], description: 'Expand all rows' },
{ keys: ['d', 'C'], description: 'Collapse all rows' },
{ keys: ['d', 'a'], description: 'Toggle auto fit panels (experimental feature)' },
{ keys: ['mod+o'], description: 'Toggle shared graph crosshair' },
],
'Focused Panel': [

View File

@ -15,14 +15,7 @@ export class KeybindingSrv {
timepickerOpen = false;
/** @ngInject */
constructor(
private $rootScope,
private $location,
private datasourceSrv,
private timeSrv,
private contextSrv,
private $route
) {
constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) {
// clear out all shortcuts on route change
$rootScope.$on('$routeChangeSuccess', () => {
Mousetrap.reset();
@ -269,10 +262,8 @@ export class KeybindingSrv {
//Autofit panels
this.bind('d a', () => {
this.$location.search('autofitpanels', this.$location.search().autofitpanels ? null : true);
//Force reload
this.$route.reload();
// this has to be a full page reload
window.location.href = window.location.href + '&autofitpanels';
});
}
}

Some files were not shown because too many files have changed in this diff.