mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Merge branch 'master' into react-panels
This commit is contained in:
commit
1420147ccb
@ -88,7 +88,7 @@ jobs:
|
|||||||
|
|
||||||
test-frontend:
|
test-frontend:
|
||||||
docker:
|
docker:
|
||||||
- image: circleci/node:6.11.4
|
- image: circleci/node:8
|
||||||
steps:
|
steps:
|
||||||
- checkout
|
- checkout
|
||||||
- run:
|
- run:
|
||||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -33,6 +33,7 @@ public/css/*.min.css
|
|||||||
*.tmp
|
*.tmp
|
||||||
.DS_Store
|
.DS_Store
|
||||||
.vscode/
|
.vscode/
|
||||||
|
.vs/
|
||||||
|
|
||||||
/data/*
|
/data/*
|
||||||
/bin/*
|
/bin/*
|
||||||
|
34
CHANGELOG.md
34
CHANGELOG.md
@ -1,15 +1,36 @@
|
|||||||
# 5.3.0 (unreleased)
|
# 5.3.0 (unreleased)
|
||||||
|
|
||||||
|
* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano)
|
||||||
* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
|
* **Cleanup**: Make temp file time to live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon)
|
||||||
|
|
||||||
|
### Minor
|
||||||
|
|
||||||
|
* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
|
||||||
|
* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
|
||||||
|
|
||||||
# 5.2.0 (unreleased)
|
# 5.2.0 (unreleased)
|
||||||
|
|
||||||
|
### Minor
|
||||||
|
|
||||||
|
* **Plugins**: Handle errors correctly when loading datasource plugin [#12383](https://github.com/grafana/grafana/pull/12383) thx [@rozetko](https://github.com/rozetko)
|
||||||
|
* **Render**: Enhance error message if phantomjs executable is not found [#11868](https://github.com/grafana/grafana/issues/11868)
|
||||||
|
* **Dashboard**: Set correct text in drop down when variable is present in url [#11968](https://github.com/grafana/grafana/issues/11968)
|
||||||
|
|
||||||
|
# 5.2.0-beta3 (2018-06-21)
|
||||||
|
|
||||||
|
### Minor
|
||||||
|
|
||||||
|
* **Build**: All rpm packages should be signed [#12359](https://github.com/grafana/grafana/issues/12359)
|
||||||
|
|
||||||
|
# 5.2.0-beta2 (2018-06-20)
|
||||||
|
|
||||||
### New Features
|
### New Features
|
||||||
|
|
||||||
* **Dashboard**: Import dashboard to folder [#10796](https://github.com/grafana/grafana/issues/10796)
|
* **Dashboard**: Import dashboard to folder [#10796](https://github.com/grafana/grafana/issues/10796)
|
||||||
|
|
||||||
### Minor
|
### Minor
|
||||||
|
|
||||||
|
* **Permissions**: Important security fix for API keys with viewer role [#12343](https://github.com/grafana/grafana/issues/12343)
|
||||||
* **Dashboard**: Fix so panel titles doesn't wrap [#11074](https://github.com/grafana/grafana/issues/11074)
|
* **Dashboard**: Fix so panel titles doesn't wrap [#11074](https://github.com/grafana/grafana/issues/11074)
|
||||||
* **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963)
|
* **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963)
|
||||||
* **Dashboard**: AutoFocus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu)
|
* **Dashboard**: AutoFocus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu)
|
||||||
@ -21,6 +42,15 @@
|
|||||||
* **Auth Proxy**: Whitelist proxy IP address instead of client IP address [#10707](https://github.com/grafana/grafana/issues/10707)
|
* **Auth Proxy**: Whitelist proxy IP address instead of client IP address [#10707](https://github.com/grafana/grafana/issues/10707)
|
||||||
* **User Management**: Make sure that a user always has a current org assigned [#11076](https://github.com/grafana/grafana/issues/11076)
|
* **User Management**: Make sure that a user always has a current org assigned [#11076](https://github.com/grafana/grafana/issues/11076)
|
||||||
* **Snapshots**: Fix: annotations not properly extracted leading to incorrect rendering of annotations [#12278](https://github.com/grafana/grafana/issues/12278)
|
* **Snapshots**: Fix: annotations not properly extracted leading to incorrect rendering of annotations [#12278](https://github.com/grafana/grafana/issues/12278)
|
||||||
|
* **LDAP**: Allow use of DN in group_search_filter_user_attribute and member_of [#3132](https://github.com/grafana/grafana/issues/3132), thx [@mmolnar](https://github.com/mmolnar)
|
||||||
|
* **Graph**: Fix legend decimals precision calculation [#11792](https://github.com/grafana/grafana/issues/11792)
|
||||||
|
* **Dashboard**: Make sure to process panels in collapsed rows when exporting dashboard [#12256](https://github.com/grafana/grafana/issues/12256)
|
||||||
|
|
||||||
|
### 5.2.0-beta1 fixes
|
||||||
|
|
||||||
|
* **Dashboard**: Dashboard link doesn't work when "As dropdown" option is checked [#12315](https://github.com/grafana/grafana/issues/12315)
|
||||||
|
* **Dashboard**: Fix regressions after save modal changes, including adhoc template issues [#12240](https://github.com/grafana/grafana/issues/12240)
|
||||||
|
* **Docker**: Config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170)
|
||||||
|
|
||||||
# 5.2.0-beta1 (2018-06-05)
|
# 5.2.0-beta1 (2018-06-05)
|
||||||
|
|
||||||
@ -62,6 +92,10 @@
|
|||||||
* **Dashboard list panel**: Search dashboards by folder [#11525](https://github.com/grafana/grafana/issues/11525)
|
* **Dashboard list panel**: Search dashboards by folder [#11525](https://github.com/grafana/grafana/issues/11525)
|
||||||
* **Sidenav**: Always show server admin link in sidenav if grafana admin [#11657](https://github.com/grafana/grafana/issues/11657)
|
* **Sidenav**: Always show server admin link in sidenav if grafana admin [#11657](https://github.com/grafana/grafana/issues/11657)
|
||||||
|
|
||||||
|
# 5.1.4 (2018-06-19)
|
||||||
|
|
||||||
|
* **Permissions**: Important security fix for API keys with viewer role [#12343](https://github.com/grafana/grafana/issues/12343)
|
||||||
|
|
||||||
# 5.1.3 (2018-05-16)
|
# 5.1.3 (2018-05-16)
|
||||||
|
|
||||||
* **Scroll**: Graph panel / legend texts shifts on the left each time we move scrollbar on firefox [#11830](https://github.com/grafana/grafana/issues/11830)
|
* **Scroll**: Graph panel / legend texts shifts on the left each time we move scrollbar on firefox [#11830](https://github.com/grafana/grafana/issues/11830)
|
||||||
|
17
ROADMAP.md
17
ROADMAP.md
@ -1,28 +1,21 @@
|
|||||||
# Roadmap (2018-05-06)
|
# Roadmap (2018-06-26)
|
||||||
|
|
||||||
This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change.
|
This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change.
|
||||||
But it will give you an idea of our current vision and plan.
|
But it will give you an idea of our current vision and plan.
|
||||||
|
|
||||||
### Short term (1-2 months)
|
### Short term (1-2 months)
|
||||||
|
|
||||||
- Elasticsearch alerting
|
|
||||||
- Crossplatform builds
|
|
||||||
- Backend service refactorings
|
|
||||||
- Explore UI
|
|
||||||
- First login registration view
|
|
||||||
|
|
||||||
### Mid term (2-4 months)
|
|
||||||
- Multi-Stat panel
|
- Multi-Stat panel
|
||||||
|
- Metrics & Log Explore UI
|
||||||
|
|
||||||
|
### Mid term (2-4 months)
|
||||||
- React Panels
|
- React Panels
|
||||||
|
- Change visualization (panel type) on the fly.
|
||||||
- Templating Query Editor UI Plugin hook
|
- Templating Query Editor UI Plugin hook
|
||||||
|
|
||||||
### Long term (4 - 8 months)
|
### Long term (4 - 8 months)
|
||||||
|
|
||||||
- Alerting improvements (silence, per series tracking, etc)
|
- Alerting improvements (silence, per series tracking, etc)
|
||||||
- Progress on React migration
|
- Progress on React migration
|
||||||
- Change visualization (panel type) on the fly.
|
|
||||||
- Multi stat panel (vertical version of singlestat with bars/graph mode with big number etc)
|
|
||||||
- Repeat panel by query results
|
|
||||||
|
|
||||||
### In a distant future far far away
|
### In a distant future far far away
|
||||||
|
|
||||||
|
@ -40,11 +40,14 @@ apiVersion: 1
|
|||||||
# graphiteVersion: "1.1"
|
# graphiteVersion: "1.1"
|
||||||
# tlsAuth: true
|
# tlsAuth: true
|
||||||
# tlsAuthWithCACert: true
|
# tlsAuthWithCACert: true
|
||||||
|
# httpHeaderName1: "Authorization"
|
||||||
# # <string> json object of data that will be encrypted.
|
# # <string> json object of data that will be encrypted.
|
||||||
# secureJsonData:
|
# secureJsonData:
|
||||||
# tlsCACert: "..."
|
# tlsCACert: "..."
|
||||||
# tlsClientCert: "..."
|
# tlsClientCert: "..."
|
||||||
# tlsClientKey: "..."
|
# tlsClientKey: "..."
|
||||||
|
# # <openshift\kubernetes token example>
|
||||||
|
# httpHeaderValue1: "Bearer xf5yhfkpsnmgo"
|
||||||
# version: 1
|
# version: 1
|
||||||
# # <bool> allow users to edit datasources from the UI.
|
# # <bool> allow users to edit datasources from the UI.
|
||||||
# editable: false
|
# editable: false
|
||||||
|
592
devenv/dashboards/dev-dashboards/dashboard_with_rows.json
Normal file
592
devenv/dashboards/dev-dashboards/dashboard_with_rows.json
Normal file
@ -0,0 +1,592 @@
|
|||||||
|
{
|
||||||
|
"annotations": {
|
||||||
|
"list": [
|
||||||
|
{
|
||||||
|
"builtIn": 1,
|
||||||
|
"datasource": "-- Grafana --",
|
||||||
|
"enable": true,
|
||||||
|
"hide": true,
|
||||||
|
"iconColor": "rgba(0, 211, 255, 1)",
|
||||||
|
"name": "Annotations & Alerts",
|
||||||
|
"type": "dashboard"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"editable": true,
|
||||||
|
"gnetId": null,
|
||||||
|
"graphTooltip": 0,
|
||||||
|
"id": 59,
|
||||||
|
"links": [],
|
||||||
|
"panels": [
|
||||||
|
{
|
||||||
|
"collapsed": false,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 1,
|
||||||
|
"w": 24,
|
||||||
|
"x": 0,
|
||||||
|
"y": 0
|
||||||
|
},
|
||||||
|
"id": 9,
|
||||||
|
"panels": [],
|
||||||
|
"title": "Row title",
|
||||||
|
"type": "row"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": "Prometheus",
|
||||||
|
"fill": 1,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 4,
|
||||||
|
"w": 12,
|
||||||
|
"x": 0,
|
||||||
|
"y": 1
|
||||||
|
},
|
||||||
|
"id": 12,
|
||||||
|
"legend": {
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"percentage": false,
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"expr": "go_goroutines",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 1,
|
||||||
|
"refId": "A"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeFrom": null,
|
||||||
|
"timeShift": null,
|
||||||
|
"title": "Panel Title",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": true,
|
||||||
|
"sort": 0,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"buckets": null,
|
||||||
|
"mode": "time",
|
||||||
|
"name": null,
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false,
|
||||||
|
"alignLevel": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": "Prometheus",
|
||||||
|
"fill": 1,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 4,
|
||||||
|
"w": 12,
|
||||||
|
"x": 12,
|
||||||
|
"y": 1
|
||||||
|
},
|
||||||
|
"id": 5,
|
||||||
|
"legend": {
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"percentage": false,
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"expr": "go_goroutines",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 1,
|
||||||
|
"refId": "A"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeFrom": null,
|
||||||
|
"timeShift": null,
|
||||||
|
"title": "Panel Title",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": true,
|
||||||
|
"sort": 0,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"buckets": null,
|
||||||
|
"mode": "time",
|
||||||
|
"name": null,
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false,
|
||||||
|
"alignLevel": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"collapsed": false,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 1,
|
||||||
|
"w": 24,
|
||||||
|
"x": 0,
|
||||||
|
"y": 5
|
||||||
|
},
|
||||||
|
"id": 7,
|
||||||
|
"panels": [],
|
||||||
|
"title": "Row",
|
||||||
|
"type": "row"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": "Prometheus",
|
||||||
|
"fill": 1,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 4,
|
||||||
|
"w": 12,
|
||||||
|
"x": 0,
|
||||||
|
"y": 6
|
||||||
|
},
|
||||||
|
"id": 2,
|
||||||
|
"legend": {
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"percentage": false,
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"expr": "go_goroutines",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 1,
|
||||||
|
"refId": "A"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeFrom": null,
|
||||||
|
"timeShift": null,
|
||||||
|
"title": "Panel Title",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": true,
|
||||||
|
"sort": 0,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"buckets": null,
|
||||||
|
"mode": "time",
|
||||||
|
"name": null,
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false,
|
||||||
|
"alignLevel": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": "Prometheus",
|
||||||
|
"fill": 1,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 4,
|
||||||
|
"w": 12,
|
||||||
|
"x": 12,
|
||||||
|
"y": 6
|
||||||
|
},
|
||||||
|
"id": 13,
|
||||||
|
"legend": {
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"percentage": false,
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"expr": "go_goroutines",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 1,
|
||||||
|
"refId": "A"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeFrom": null,
|
||||||
|
"timeShift": null,
|
||||||
|
"title": "Panel Title",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": true,
|
||||||
|
"sort": 0,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"buckets": null,
|
||||||
|
"mode": "time",
|
||||||
|
"name": null,
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false,
|
||||||
|
"alignLevel": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"collapsed": false,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 1,
|
||||||
|
"w": 24,
|
||||||
|
"x": 0,
|
||||||
|
"y": 10
|
||||||
|
},
|
||||||
|
"id": 11,
|
||||||
|
"panels": [],
|
||||||
|
"title": "Row title",
|
||||||
|
"type": "row"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": "Prometheus",
|
||||||
|
"fill": 1,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 4,
|
||||||
|
"w": 12,
|
||||||
|
"x": 0,
|
||||||
|
"y": 11
|
||||||
|
},
|
||||||
|
"id": 4,
|
||||||
|
"legend": {
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"percentage": false,
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"expr": "go_goroutines",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 1,
|
||||||
|
"refId": "A"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeFrom": null,
|
||||||
|
"timeShift": null,
|
||||||
|
"title": "Panel Title",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": true,
|
||||||
|
"sort": 0,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"buckets": null,
|
||||||
|
"mode": "time",
|
||||||
|
"name": null,
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false,
|
||||||
|
"alignLevel": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"aliasColors": {},
|
||||||
|
"bars": false,
|
||||||
|
"dashLength": 10,
|
||||||
|
"dashes": false,
|
||||||
|
"datasource": "Prometheus",
|
||||||
|
"fill": 1,
|
||||||
|
"gridPos": {
|
||||||
|
"h": 4,
|
||||||
|
"w": 12,
|
||||||
|
"x": 12,
|
||||||
|
"y": 11
|
||||||
|
},
|
||||||
|
"id": 3,
|
||||||
|
"legend": {
|
||||||
|
"avg": false,
|
||||||
|
"current": false,
|
||||||
|
"max": false,
|
||||||
|
"min": false,
|
||||||
|
"show": true,
|
||||||
|
"total": false,
|
||||||
|
"values": false
|
||||||
|
},
|
||||||
|
"lines": true,
|
||||||
|
"linewidth": 1,
|
||||||
|
"nullPointMode": "null",
|
||||||
|
"percentage": false,
|
||||||
|
"pointradius": 5,
|
||||||
|
"points": false,
|
||||||
|
"renderer": "flot",
|
||||||
|
"seriesOverrides": [],
|
||||||
|
"spaceLength": 10,
|
||||||
|
"stack": false,
|
||||||
|
"steppedLine": false,
|
||||||
|
"targets": [
|
||||||
|
{
|
||||||
|
"expr": "go_goroutines",
|
||||||
|
"format": "time_series",
|
||||||
|
"intervalFactor": 1,
|
||||||
|
"refId": "A"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"thresholds": [],
|
||||||
|
"timeFrom": null,
|
||||||
|
"timeShift": null,
|
||||||
|
"title": "Panel Title",
|
||||||
|
"tooltip": {
|
||||||
|
"shared": true,
|
||||||
|
"sort": 0,
|
||||||
|
"value_type": "individual"
|
||||||
|
},
|
||||||
|
"type": "graph",
|
||||||
|
"xaxis": {
|
||||||
|
"buckets": null,
|
||||||
|
"mode": "time",
|
||||||
|
"name": null,
|
||||||
|
"show": true,
|
||||||
|
"values": []
|
||||||
|
},
|
||||||
|
"yaxes": [
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"format": "short",
|
||||||
|
"label": null,
|
||||||
|
"logBase": 1,
|
||||||
|
"max": null,
|
||||||
|
"min": null,
|
||||||
|
"show": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"yaxis": {
|
||||||
|
"align": false,
|
||||||
|
"alignLevel": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"schemaVersion": 16,
|
||||||
|
"style": "dark",
|
||||||
|
"tags": [],
|
||||||
|
"templating": {
|
||||||
|
"list": []
|
||||||
|
},
|
||||||
|
"time": {
|
||||||
|
"from": "now-30m",
|
||||||
|
"to": "now"
|
||||||
|
},
|
||||||
|
"timepicker": {
|
||||||
|
"refresh_intervals": [
|
||||||
|
"5s",
|
||||||
|
"10s",
|
||||||
|
"30s",
|
||||||
|
"1m",
|
||||||
|
"5m",
|
||||||
|
"15m",
|
||||||
|
"30m",
|
||||||
|
"1h",
|
||||||
|
"2h",
|
||||||
|
"1d"
|
||||||
|
],
|
||||||
|
"time_options": [
|
||||||
|
"5m",
|
||||||
|
"15m",
|
||||||
|
"1h",
|
||||||
|
"6h",
|
||||||
|
"12h",
|
||||||
|
"24h",
|
||||||
|
"2d",
|
||||||
|
"7d",
|
||||||
|
"30d"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"timezone": "",
|
||||||
|
"title": "Dashboard with rows",
|
||||||
|
"uid": "1DdOzBNmk",
|
||||||
|
"version": 5
|
||||||
|
}
|
9
devenv/dashboards/dev-dashboards/dev-dashboards.yaml
Normal file
9
devenv/dashboards/dev-dashboards/dev-dashboards.yaml
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
apiVersion: 1
|
||||||
|
|
||||||
|
providers:
|
||||||
|
- name: 'dev dashboards'
|
||||||
|
folder: 'dev dashboards'
|
||||||
|
type: file
|
||||||
|
options:
|
||||||
|
path: devenv/dashboards/dev-dashboards
|
||||||
|
|
@ -2,41 +2,43 @@
|
|||||||
|
|
||||||
bulkDashboard() {
|
bulkDashboard() {
|
||||||
|
|
||||||
requiresJsonnet
|
requiresJsonnet
|
||||||
|
|
||||||
COUNTER=0
|
COUNTER=0
|
||||||
MAX=400
|
MAX=400
|
||||||
while [ $COUNTER -lt $MAX ]; do
|
while [ $COUNTER -lt $MAX ]; do
|
||||||
jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }"
|
jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }"
|
||||||
let COUNTER=COUNTER+1
|
let COUNTER=COUNTER+1
|
||||||
done
|
done
|
||||||
|
|
||||||
ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
|
ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
|
||||||
}
|
}
|
||||||
|
|
||||||
requiresJsonnet() {
|
requiresJsonnet() {
|
||||||
if ! type "jsonnet" > /dev/null; then
|
if ! type "jsonnet" > /dev/null; then
|
||||||
echo "you need you install jsonnet to run this script"
|
echo "you need you install jsonnet to run this script"
|
||||||
echo "follow the instructions on https://github.com/google/jsonnet"
|
echo "follow the instructions on https://github.com/google/jsonnet"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
defaultDashboards() {
|
defaultDashboards() {
|
||||||
echo "not implemented yet"
|
requiresJsonnet
|
||||||
|
|
||||||
|
ln -s -f -r ./dashboards/dev-dashboards/dev-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
|
||||||
}
|
}
|
||||||
|
|
||||||
defaultDatasources() {
|
defaultDatasources() {
|
||||||
echo "setting up all default datasources using provisioning"
|
echo "setting up all default datasources using provisioning"
|
||||||
|
|
||||||
ln -s -f -r ./datasources/default/default.yaml ../conf/provisioning/datasources/custom.yaml
|
ln -s -f -r ./datasources/default/default.yaml ../conf/provisioning/datasources/custom.yaml
|
||||||
}
|
}
|
||||||
|
|
||||||
usage() {
|
usage() {
|
||||||
echo -e "install.sh\n\tThis script installs my basic setup for a debian laptop\n"
|
echo -e "install.sh\n\tThis script installs my basic setup for a debian laptop\n"
|
||||||
echo "Usage:"
|
echo "Usage:"
|
||||||
echo " bulk-dashboards - create and provisioning 400 dashboards"
|
echo " bulk-dashboards - create and provisioning 400 dashboards"
|
||||||
echo " default-datasources - provisiong all core datasources"
|
echo " default-datasources - provisiong all core datasources"
|
||||||
}
|
}
|
||||||
|
|
||||||
main() {
|
main() {
|
||||||
@ -49,10 +51,10 @@ main() {
|
|||||||
|
|
||||||
if [[ $cmd == "bulk-dashboards" ]]; then
|
if [[ $cmd == "bulk-dashboards" ]]; then
|
||||||
bulkDashboard
|
bulkDashboard
|
||||||
elif [[ $cmd == "default-datasources" ]]; then
|
elif [[ $cmd == "default-datasources" ]]; then
|
||||||
defaultDatasources
|
defaultDatasources
|
||||||
elif [[ $cmd == "default-dashboards" ]]; then
|
elif [[ $cmd == "default-dashboards" ]]; then
|
||||||
bulkDashboard
|
defaultDashboards
|
||||||
else
|
else
|
||||||
usage
|
usage
|
||||||
fi
|
fi
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
|
# Fork of https://github.com/dinkel/docker-openldap
|
||||||
|
|
||||||
FROM debian:jessie
|
FROM debian:jessie
|
||||||
|
|
||||||
LABEL maintainer="Christian Luginbühl <dinke@pimprecords.com>"
|
LABEL maintainer="Christian Luginbühl <dinke@pimprecords.com>"
|
||||||
|
@ -11,3 +11,14 @@ After adding ldif files to `prepopulate`:
|
|||||||
1. Remove your current docker image: `docker rm docker_openldap_1`
|
1. Remove your current docker image: `docker rm docker_openldap_1`
|
||||||
2. Build: `docker-compose build`
|
2. Build: `docker-compose build`
|
||||||
3. `docker-compose up`
|
3. `docker-compose up`
|
||||||
|
|
||||||
|
## Enabling LDAP in Grafana
|
||||||
|
|
||||||
|
The default `ldap.toml` file in `conf` has host set to `127.0.0.1` and port to set to 389 so all you need to do is enable it in the .ini file to get Grafana to use this block:
|
||||||
|
|
||||||
|
```ini
|
||||||
|
[auth.ldap]
|
||||||
|
enabled = true
|
||||||
|
config_file = conf/ldap.toml
|
||||||
|
; allow_sign_up = true
|
||||||
|
```
|
||||||
|
@ -188,7 +188,7 @@ queries via the Dashboard menu / Annotations view.
|
|||||||
An example query:
|
An example query:
|
||||||
|
|
||||||
```SQL
|
```SQL
|
||||||
SELECT title, description from events WHERE $timeFilter order asc
|
SELECT title, description from events WHERE $timeFilter ORDER BY time ASC
|
||||||
```
|
```
|
||||||
|
|
||||||
For InfluxDB you need to enter a query like in the above example. You need to have the ```where $timeFilter```
|
For InfluxDB you need to enter a query like in the above example. You need to have the ```where $timeFilter```
|
||||||
|
@ -17,9 +17,11 @@ Grafana v5.2 brings new features, many enhancements and bug fixes. This article
|
|||||||
* [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here!
|
* [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here!
|
||||||
* [Cross platform build support]({{< relref "#cross-platform-build-support" >}}) enables native builds of Grafana for many more platforms!
|
* [Cross platform build support]({{< relref "#cross-platform-build-support" >}}) enables native builds of Grafana for many more platforms!
|
||||||
* [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets
|
* [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets
|
||||||
|
* [Security]({{< relref "#security" >}}) make your Grafana instance more secure
|
||||||
* [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements
|
* [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements
|
||||||
|
* [InfluxDB]({{< relref "#influxdb" >}}) with support for a new function
|
||||||
* [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord
|
* [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord
|
||||||
* [Dashboards & Panels]({{< relref "#dashboards-panels" >}})
|
* [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) with save & import enhancements
|
||||||
|
|
||||||
## Elasticsearch alerting
|
## Elasticsearch alerting
|
||||||
|
|
||||||
@ -42,11 +44,24 @@ We've been longing for native ARM build support for a long time. With the help f
|
|||||||
The Grafana docker image now includes support for Docker secrets which enables you to supply Grafana with configuration through files. More
|
The Grafana docker image now includes support for Docker secrets which enables you to supply Grafana with configuration through files. More
|
||||||
information in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets).
|
information in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets).
|
||||||
|
|
||||||
|
## Security
|
||||||
|
|
||||||
|
{{< docs-imagebox img="/img/docs/v52/login_change_password.png" max-width="800px" class="docs-image--right" >}}
|
||||||
|
|
||||||
|
Starting from Grafana v5.2, when you login with the administrator account using the default password you'll be presented with a form to change the password.
|
||||||
|
By this we hope to encourage users to follow Grafana's best practices and change the default administrator password.
|
||||||
|
|
||||||
|
<div class="clearfix"></div>
|
||||||
|
|
||||||
## Prometheus
|
## Prometheus
|
||||||
|
|
||||||
The Prometheus datasource now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with *rate*
|
The Prometheus datasource now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with *rate*
|
||||||
functions get consistent results, and thus avoid graphs jumping around on reload.
|
functions get consistent results, and thus avoid graphs jumping around on reload.
|
||||||
|
|
||||||
|
## InfluxDB
|
||||||
|
|
||||||
|
The InfluxDB datasource now includes support for the *mode* function which allows to return the most frequent value in a list of field values.
|
||||||
|
|
||||||
## Alerting
|
## Alerting
|
||||||
|
|
||||||
By popular demand Grafana now includes support for an alert notification channel type for [Discord](https://discordapp.com/).
|
By popular demand Grafana now includes support for an alert notification channel type for [Discord](https://discordapp.com/).
|
||||||
@ -64,6 +79,20 @@ when you actually want to overwrite those settings.
|
|||||||
|
|
||||||
<div class="clearfix"></div>
|
<div class="clearfix"></div>
|
||||||
|
|
||||||
|
### Import dashboard enhancements
|
||||||
|
|
||||||
|
{{< docs-imagebox img="/img/docs/v52/dashboard_import.png" max-width="800px" class="docs-image--right" >}}
|
||||||
|
|
||||||
|
Grafana v5.2 adds support for specifying an existing folder or create a new one when importing a dashboard, a long awaited feature since
|
||||||
|
Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page have also got some general improvements
|
||||||
|
and should now make it more clear if a possible import will overwrite an existing dashboard, or not.
|
||||||
|
|
||||||
|
This release also adds some improvements for those users only having editor or admin permissions in certain folders. Now the links to
|
||||||
|
*Create Dashboard* and *Import Dashboard* is available in side navigation, dashboard search and manage dashboards/folder page for a
|
||||||
|
user that has editor role in an organization or edit permission in at least one folder.
|
||||||
|
|
||||||
|
<div class="clearfix"></div>
|
||||||
|
|
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
|
Checkout the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
|
||||||
|
@ -15,10 +15,9 @@ weight = 1
|
|||||||
|
|
||||||
Description | Download
|
Description | Download
|
||||||
------------ | -------------
|
------------ | -------------
|
||||||
Stable for Debian-based Linux | [grafana_5.1.3_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.3_amd64.deb)
|
Stable for Debian-based Linux | [x86-64](https://grafana.com/grafana/download?platform=linux)
|
||||||
<!--
|
Stable for Debian-based Linux | [ARM64](https://grafana.com/grafana/download?platform=arm)
|
||||||
Beta for Debian-based Linux | [grafana_5.1.0-beta1_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.0-beta1_amd64.deb)
|
Stable for Debian-based Linux | [ARMv7](https://grafana.com/grafana/download?platform=arm)
|
||||||
-->
|
|
||||||
|
|
||||||
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
|
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
|
||||||
installation.
|
installation.
|
||||||
@ -27,17 +26,18 @@ installation.
|
|||||||
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.3_amd64.deb
|
wget <debian package url>
|
||||||
sudo apt-get install -y adduser libfontconfig
|
sudo apt-get install -y adduser libfontconfig
|
||||||
sudo dpkg -i grafana_5.1.3_amd64.deb
|
sudo dpkg -i grafana_5.1.4_amd64.deb
|
||||||
```
|
```
|
||||||
|
|
||||||
<!-- ## Install Latest Beta
|
Example:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.0-beta1_amd64.deb
|
wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.4_amd64.deb
|
||||||
sudo apt-get install -y adduser libfontconfig
|
sudo apt-get install -y adduser libfontconfig
|
||||||
sudo dpkg -i grafana_5.1.0-beta1_amd64.deb
|
sudo dpkg -i grafana_5.1.4_amd64.deb
|
||||||
``` -->
|
```
|
||||||
|
|
||||||
## APT Repository
|
## APT Repository
|
||||||
|
|
||||||
|
@ -52,7 +52,7 @@ $ docker run \
|
|||||||
## Running of the master branch
|
## Running of the master branch
|
||||||
|
|
||||||
For every successful commit we publish a Grafana container to [`grafana/grafana`](https://hub.docker.com/r/grafana/grafana/tags/) and [`grafana/grafana-dev`](https://hub.docker.com/r/grafana/grafana-dev/tags/). In `grafana/grafana` container we will always overwrite the `master` tag with the latest version. In `grafana/grafana-dev` we will include
|
For every successful commit we publish a Grafana container to [`grafana/grafana`](https://hub.docker.com/r/grafana/grafana/tags/) and [`grafana/grafana-dev`](https://hub.docker.com/r/grafana/grafana-dev/tags/). In `grafana/grafana` container we will always overwrite the `master` tag with the latest version. In `grafana/grafana-dev` we will include
|
||||||
the git commit in the tag. If you run Grafana master in production we **strongly** recommend that you use the latter since different machines might run different versions of Grafana if they pull the master tag at different times.
|
the git commit in the tag. If you run Grafana master in production we **strongly** recommend that you use the latter since different machines might run different versions of Grafana if they pull the master tag at different times.
|
||||||
|
|
||||||
## Installing Plugins for Grafana
|
## Installing Plugins for Grafana
|
||||||
|
|
||||||
@ -137,16 +137,16 @@ docker run -d --user $ID --volume "$PWD/data:/var/lib/grafana" -p 3000:3000 graf
|
|||||||
|
|
||||||
## Reading secrets from files (support for Docker Secrets)
|
## Reading secrets from files (support for Docker Secrets)
|
||||||
|
|
||||||
> Available in v5.2.0 and later
|
> Only available in Grafana v5.2+.
|
||||||
|
|
||||||
It's possible to supply Grafana with configuration through files. This works well with [Docker Secrets](https://docs.docker.com/engine/swarm/secrets/) as the secrets by default gets mapped into `/run/secrets/<name of secret>` of the container.
|
It's possible to supply Grafana with configuration through files. This works well with [Docker Secrets](https://docs.docker.com/engine/swarm/secrets/) as the secrets by default gets mapped into `/run/secrets/<name of secret>` of the container.
|
||||||
|
|
||||||
You can do this with any of the configuration options in conf/grafana.ini by setting `GF_<SectionName>_<KeyName>_FILE` to the path of the file holding the secret.
|
You can do this with any of the configuration options in conf/grafana.ini by setting `GF_<SectionName>_<KeyName>__FILE` to the path of the file holding the secret.
|
||||||
|
|
||||||
Let's say you want to set the admin password this way.
|
Let's say you want to set the admin password this way.
|
||||||
|
|
||||||
- Admin password secret: `/run/secrets/admin_password`
|
- Admin password secret: `/run/secrets/admin_password`
|
||||||
- Environment variable: `GF_SECURITY_ADMIN_PASSWORD_FILE=/run/secrets/admin_password`
|
- Environment variable: `GF_SECURITY_ADMIN_PASSWORD__FILE=/run/secrets/admin_password`
|
||||||
|
|
||||||
|
|
||||||
## Migration from a previous version of the docker container to 5.1 or later
|
## Migration from a previous version of the docker container to 5.1 or later
|
||||||
@ -166,7 +166,7 @@ The docker container for Grafana has seen a major rewrite for 5.1.
|
|||||||
|
|
||||||
Previously `/var/lib/grafana`, `/etc/grafana` and `/var/log/grafana` were defined as volumes in the `Dockerfile`. This led to the creation of three volumes each time a new instance of the Grafana container started, whether you wanted it or not.
|
Previously `/var/lib/grafana`, `/etc/grafana` and `/var/log/grafana` were defined as volumes in the `Dockerfile`. This led to the creation of three volumes each time a new instance of the Grafana container started, whether you wanted it or not.
|
||||||
|
|
||||||
You should always be careful to define your own named volume for storage, but if you depended on these volumes you should be aware that an upgraded container will no longer have them.
|
You should always be careful to define your own named volume for storage, but if you depended on these volumes you should be aware that an upgraded container will no longer have them.
|
||||||
|
|
||||||
**Warning**: when migrating from an earlier version to 5.1 or later using docker compose and implicit volumes you need to use `docker inspect` to find out which volumes your container is mapped to so that you can map them to the upgraded container as well. You will also have to change file ownership (or user) as documented below.
|
**Warning**: when migrating from an earlier version to 5.1 or later using docker compose and implicit volumes you need to use `docker inspect` to find out which volumes your container is mapped to so that you can map them to the upgraded container as well. You will also have to change file ownership (or user) as documented below.
|
||||||
|
|
||||||
@ -201,7 +201,7 @@ services:
|
|||||||
|
|
||||||
#### Modifying permissions
|
#### Modifying permissions
|
||||||
|
|
||||||
The commands below will run bash inside the Grafana container with your volume mapped in. This makes it possible to modify the file ownership to match the new container. Always be careful when modifying permissions.
|
The commands below will run bash inside the Grafana container with your volume mapped in. This makes it possible to modify the file ownership to match the new container. Always be careful when modifying permissions.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ docker run -ti --user root --volume "<your volume mapping here>" --entrypoint bash grafana/grafana:5.1.0
|
$ docker run -ti --user root --volume "<your volume mapping here>" --entrypoint bash grafana/grafana:5.1.0
|
||||||
|
@ -11,6 +11,8 @@ weight = 4
|
|||||||
|
|
||||||
# Installing on Mac
|
# Installing on Mac
|
||||||
|
|
||||||
|
## Install using homebrew
|
||||||
|
|
||||||
Installation can be done using [homebrew](http://brew.sh/)
|
Installation can be done using [homebrew](http://brew.sh/)
|
||||||
|
|
||||||
Install latest stable:
|
Install latest stable:
|
||||||
@ -75,3 +77,18 @@ If you want to manually install a plugin place it here: `/usr/local/var/lib/graf
|
|||||||
|
|
||||||
The default sqlite database is located at `/usr/local/var/lib/grafana`
|
The default sqlite database is located at `/usr/local/var/lib/grafana`
|
||||||
|
|
||||||
|
## Installing from binary tar file
|
||||||
|
|
||||||
|
Download [the latest `.tar.gz` file](https://grafana.com/get) and
|
||||||
|
extract it. This will extract into a folder named after the version you
|
||||||
|
downloaded. This folder contains all files required to run Grafana. There are
|
||||||
|
no init scripts or install scripts in this package.
|
||||||
|
|
||||||
|
To configure Grafana add a configuration file named `custom.ini` to the
|
||||||
|
`conf` folder and override any of the settings defined in
|
||||||
|
`conf/defaults.ini`.
|
||||||
|
|
||||||
|
Start Grafana by executing `./bin/grafana-server web`. The `grafana-server`
|
||||||
|
binary needs the working directory to be the root install directory (where the
|
||||||
|
binary and the `public` folder is located).
|
||||||
|
|
||||||
|
@ -15,42 +15,49 @@ weight = 2
|
|||||||
|
|
||||||
Description | Download
|
Description | Download
|
||||||
------------ | -------------
|
------------ | -------------
|
||||||
Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [5.1.3 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.3-1.x86_64.rpm)
|
Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [x86-64](https://grafana.com/grafana/download?platform=linux)
|
||||||
<!--
|
Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [ARM64](https://grafana.com/grafana/download?platform=arm)
|
||||||
Latest Beta for CentOS / Fedora / OpenSuse / Redhat Linux | [5.1.0-beta1 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.0-beta1.x86_64.rpm)
|
Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [ARMv7](https://grafana.com/grafana/download?platform=arm)
|
||||||
-->
|
|
||||||
|
|
||||||
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
|
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation.
|
||||||
installation.
|
|
||||||
|
|
||||||
## Install Stable
|
## Install Stable
|
||||||
|
|
||||||
You can install Grafana using Yum directly.
|
You can install Grafana using Yum directly.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.3-1.x86_64.rpm
|
$ sudo yum install <rpm package url>
|
||||||
```
|
```
|
||||||
|
|
||||||
<!-- ## Install Beta
|
Example:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.0-beta1.x86_64.rpm
|
$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.4-1.x86_64.rpm
|
||||||
``` -->
|
```
|
||||||
|
|
||||||
Or install manually using `rpm`.
|
Or install manually using `rpm`. First execute
|
||||||
|
|
||||||
#### On CentOS / Fedora / Redhat:
|
```bash
|
||||||
|
$ wget <rpm package url>
|
||||||
|
```
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.4-1.x86_64.rpm
|
||||||
|
```
|
||||||
|
|
||||||
|
### On CentOS / Fedora / Redhat:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.3-1.x86_64.rpm
|
|
||||||
$ sudo yum install initscripts fontconfig
|
$ sudo yum install initscripts fontconfig
|
||||||
$ sudo rpm -Uvh grafana-5.1.3-1.x86_64.rpm
|
$ sudo rpm -Uvh <local rpm package>
|
||||||
```
|
```
|
||||||
|
|
||||||
#### On OpenSuse:
|
### On OpenSuse:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ sudo rpm -i --nodeps grafana-5.1.3-1.x86_64.rpm
|
$ sudo rpm -i --nodeps <local rpm package>
|
||||||
```
|
```
|
||||||
|
|
||||||
## Install via YUM Repository
|
## Install via YUM Repository
|
||||||
|
@ -21,7 +21,7 @@ the data source response.
|
|||||||
|
|
||||||
To check this you should use Query Inspector (new in Grafana v4.5). The query Inspector shows query requests and responses.
|
To check this you should use Query Inspector (new in Grafana v4.5). The query Inspector shows query requests and responses.
|
||||||
|
|
||||||
For more on the query insector read [this guide here](https://community.grafana.com/t/using-grafanas-query-inspector-to-troubleshoot-issues/2630). For
|
For more on the query inspector read [this guide here](https://community.grafana.com/t/using-grafanas-query-inspector-to-troubleshoot-issues/2630). For
|
||||||
older versions of Grafana read the [how troubleshoot metric query issue](https://community.grafana.com/t/how-to-troubleshoot-metric-query-issues/50/2) article.
|
older versions of Grafana read the [how troubleshoot metric query issue](https://community.grafana.com/t/how-to-troubleshoot-metric-query-issues/50/2) article.
|
||||||
|
|
||||||
## Logging
|
## Logging
|
||||||
|
@ -12,11 +12,7 @@ weight = 3
|
|||||||
|
|
||||||
Description | Download
|
Description | Download
|
||||||
------------ | -------------
|
------------ | -------------
|
||||||
Latest stable package for Windows | [grafana-5.1.3.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.3.windows-x64.zip)
|
Latest stable package for Windows | [x64](https://grafana.com/grafana/download?platform=windows)
|
||||||
|
|
||||||
<!--
|
|
||||||
Latest beta package for Windows | [grafana.5.1.0-beta1.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.0-beta5.windows-x64.zip)
|
|
||||||
-->
|
|
||||||
|
|
||||||
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
|
Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
|
||||||
installation.
|
installation.
|
||||||
|
99
docs/sources/plugins/developing/auth-for-datasources.md
Normal file
99
docs/sources/plugins/developing/auth-for-datasources.md
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
+++
|
||||||
|
title = "Authentication for Datasource Plugins"
|
||||||
|
type = "docs"
|
||||||
|
[menu.docs]
|
||||||
|
name = "Authentication for Datasource Plugins"
|
||||||
|
parent = "developing"
|
||||||
|
weight = 3
|
||||||
|
+++
|
||||||
|
|
||||||
|
# Authentication for Datasource Plugins
|
||||||
|
|
||||||
|
Grafana has a proxy feature that proxies all data requests through the Grafana backend. This is very useful when your datasource plugin calls an external/third-party API. The Grafana proxy adds CORS headers and can authenticate against the external API. This means that a datasource plugin that proxies all requests via Grafana can enable token authentication and the token will be renewed automatically for the user when it expires.
|
||||||
|
|
||||||
|
The plugin config page should save the API key/password to be encrypted (using the `secureJsonData` feature) and then when a request from the datasource is made, the Grafana Proxy will:
|
||||||
|
|
||||||
|
1. decrypt the API key/password on the backend.
|
||||||
|
2. carry out authentication and generate an OAuth token that will be added as an `Authorization` HTTP header to all requests (or it will add a HTTP header with the API key).
|
||||||
|
3. renew the token if it expires.
|
||||||
|
|
||||||
|
This means that users that access the datasource config page cannot access the API key or password after it is saved the first time and that no secret keys are sent in plain text through the browser where they can be spied on.
|
||||||
|
|
||||||
|
For backend authentication to work, the external/third-party API must either have an OAuth endpoint or accept an API key as an HTTP header for authentication.
|
||||||
|
|
||||||
|
## Plugin Routes
|
||||||
|
|
||||||
|
You can specify routes in the `plugin.json` file for your datasource plugin. [Here is an example](https://github.com/grafana/azure-monitor-datasource/blob/d74c82145c0a4af07a7e96cc8dde231bfd449bd9/src/plugin.json#L30-L95) with lots of routes (though most plugins will just have one route).
|
||||||
|
|
||||||
|
When you build your url to the third-party API in your datasource class, the url should start with the text specified in the path field for a route. The proxy will strip out the path text and replace it with the value in the url field.
|
||||||
|
|
||||||
|
For example, if my code makes a call to url `azuremonitor/foo/bar` with this code:
|
||||||
|
|
||||||
|
```js
|
||||||
|
this.backendSrv.datasourceRequest({
|
||||||
|
url: url,
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
and this route:
|
||||||
|
|
||||||
|
```json
|
||||||
|
"routes": [{
|
||||||
|
"path": "azuremonitor",
|
||||||
|
"method": "GET",
|
||||||
|
"url": "https://management.azure.com",
|
||||||
|
...
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
|
then the Grafana proxy will transform it into "https://management.azure.com/foo/bar" and add CORS headers.
|
||||||
|
|
||||||
|
The `method` parameter is optional. It can be set to any HTTP verb to provide more fine-grained control.
|
||||||
|
|
||||||
|
## Encrypting Sensitive Data
|
||||||
|
|
||||||
|
When a user saves a password or secret with your datasource plugin's Config page, then you can save data to a column in the datasource table called `secureJsonData` that is an encrypted blob. Any data saved in the blob is encrypted by Grafana and can only be decrypted by the Grafana server on the backend. This means once a password is saved, no sensitive data is sent to the browser. If the password is saved in the `jsonData` blob or the `password` field then it is unencrypted and anyone with Admin access (with the help of Chrome Developer Tools) can read it.
|
||||||
|
|
||||||
|
This is an example of using the `secureJsonData` blob to save a property called `password`:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<input type="password" class="gf-form-input" ng-model='ctrl.current.secureJsonData.password' placeholder="password"></input>
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Key/HTTP Header Authentication
|
||||||
|
|
||||||
|
Some third-party APIs accept an HTTP header for authentication. The [example](https://github.com/grafana/azure-monitor-datasource/blob/d74c82145c0a4af07a7e96cc8dde231bfd449bd9/src/plugin.json#L91-L93) below has a `headers` section that defines the name of the HTTP Header that the API expects and it uses the `SecureJSONData` blob to fetch an encrypted API key. The Grafana server proxy will decrypt the key, add the `X-API-Key` header to the request and forward it to the third-party API.
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"path": "appinsights",
|
||||||
|
"method": "GET",
|
||||||
|
"url": "https://api.applicationinsights.io",
|
||||||
|
"headers": [
|
||||||
|
{"name": "X-API-Key", "content": "{{.SecureJsonData.appInsightsApiKey}}"}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## How Token Authentication Works
|
||||||
|
|
||||||
|
The token auth section in the `plugin.json` file looks like this:
|
||||||
|
|
||||||
|
```json
|
||||||
|
"tokenAuth": {
|
||||||
|
"url": "https://login.microsoftonline.com/{{.JsonData.tenantId}}/oauth2/token",
|
||||||
|
"params": {
|
||||||
|
"grant_type": "client_credentials",
|
||||||
|
"client_id": "{{.JsonData.clientId}}",
|
||||||
|
"client_secret": "{{.SecureJsonData.clientSecret}}",
|
||||||
|
"resource": "https://management.azure.com/"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This interpolates in data from both `jsonData` and `secureJsonData` to generate the token request to the third-party API. It is common for tokens to have a short expiry period (30 minutes). The proxy in Grafana server will automatically renew the token if it has expired.
|
||||||
|
|
||||||
|
## Always Restart the Grafana Server After Route Changes
|
||||||
|
|
||||||
|
The plugin.json files are only loaded when the Grafana server starts so when a route is added or changed then the Grafana server has to be restarted for the changes to take effect.
|
175
docs/sources/plugins/developing/plugin-review-guidelines.md
Normal file
175
docs/sources/plugins/developing/plugin-review-guidelines.md
Normal file
@ -0,0 +1,175 @@
|
|||||||
|
+++
|
||||||
|
title = "Plugin Review Guidelines"
|
||||||
|
type = "docs"
|
||||||
|
[menu.docs]
|
||||||
|
name = "Plugin Review Guidelines"
|
||||||
|
parent = "developing"
|
||||||
|
weight = 2
|
||||||
|
+++
|
||||||
|
|
||||||
|
# Plugin Review Guidelines
|
||||||
|
|
||||||
|
The Grafana team reviews all plugins that are published on Grafana.com. There are two areas we review, the metadata for the plugin and the plugin functionality.
|
||||||
|
|
||||||
|
## Metadata
|
||||||
|
|
||||||
|
The plugin metadata consists of a `plugin.json` file and the README.md file. The `plugin.json` file is used by Grafana to load the plugin and the README.md file is shown in the plugins section of Grafana and the plugins section of Grafana.com.
|
||||||
|
|
||||||
|
### README.md
|
||||||
|
|
||||||
|
The README.md file is shown on the plugins page in Grafana and the plugin page on Grafana.com. There are some differences between the GitHub markdown and the markdown allowed in Grafana/Grafana.com:
|
||||||
|
|
||||||
|
- Cannot contain inline HTML.
|
||||||
|
- Any image links should be absolute links. For example: https://raw.githubusercontent.com/grafana/azure-monitor-datasource/master/dist/img/grafana_cloud_install.png
|
||||||
|
|
||||||
|
The README should:
|
||||||
|
|
||||||
|
- describe the purpose of the plugin.
|
||||||
|
- contain steps on how to get started.
|
||||||
|
|
||||||
|
### Plugin.json
|
||||||
|
|
||||||
|
The `plugin.json` file is the same concept as the `package.json` file for an npm package. When the Grafana server starts it will scan the plugin folders (all folders in the data/plugins subfolder) and load every folder that contains a `plugin.json` file unless the folder contains a subfolder named `dist`. In that case, the Grafana server will load the `dist` folder instead.
|
||||||
|
|
||||||
|
A minimal `plugin.json` file:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"type": "panel",
|
||||||
|
"name": "Clock",
|
||||||
|
"id": "yourorg-clock-panel",
|
||||||
|
|
||||||
|
"info": {
|
||||||
|
"description": "Clock panel for grafana",
|
||||||
|
"author": {
|
||||||
|
"name": "Author Name",
|
||||||
|
"url": "http://yourwebsite.com"
|
||||||
|
},
|
||||||
|
"keywords": ["clock", "panel"],
|
||||||
|
"version": "1.0.0",
|
||||||
|
"updated": "2018-03-24"
|
||||||
|
},
|
||||||
|
|
||||||
|
"dependencies": {
|
||||||
|
"grafanaVersion": "3.x.x",
|
||||||
|
"plugins": [ ]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- The convention for the plugin id is [github username/org]-[plugin name]-[datasource|app|panel] and it has to be unique. Although if org and plugin name are the same then [plugin name]-[datasource|app|panel] is also valid. The org **cannot** be `grafana` unless it is a plugin created by the Grafana core team.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- raintank-worldping-app
|
||||||
|
- ryantxu-ajax-panel
|
||||||
|
- alexanderzobnin-zabbix-app
|
||||||
|
- hawkular-datasource
|
||||||
|
|
||||||
|
- The `type` field should be either `datasource` `app` or `panel`.
|
||||||
|
- The `version` field should be in the form: x.x.x e.g. `1.0.0` or `0.4.1`.
|
||||||
|
|
||||||
|
The full file format for the `plugin.json` file is described [here](http://docs.grafana.org/plugins/developing/plugin.json/).
|
||||||
|
|
||||||
|
## Plugin Language
|
||||||
|
|
||||||
|
JavaScript, TypeScript, ES6 (or any other language) are all fine as long as the contents of the `dist` subdirectory are transpiled to JavaScript (ES5).
|
||||||
|
|
||||||
|
## File and Directory Structure Conventions
|
||||||
|
|
||||||
|
Here is a typical directory structure for a plugin.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
johnnyb-awesome-datasource
|
||||||
|
|-- dist
|
||||||
|
|-- src
|
||||||
|
| |-- img
|
||||||
|
| | |-- logo.svg
|
||||||
|
| |-- partials
|
||||||
|
| | |-- annotations.editor.html
|
||||||
|
| | |-- config.html
|
||||||
|
| | |-- query.editor.html
|
||||||
|
| |-- datasource.js
|
||||||
|
| |-- module.js
|
||||||
|
| |-- plugin.json
|
||||||
|
| |-- query_ctrl.js
|
||||||
|
|-- Gruntfile.js
|
||||||
|
|-- LICENSE
|
||||||
|
|-- package.json
|
||||||
|
|-- README.md
|
||||||
|
```
|
||||||
|
|
||||||
|
Most JavaScript projects have a build step. The generated JavaScript should be placed in the `dist` directory and the source code in the `src` directory. We recommend that the plugin.json file be placed in the src directory and then copied over to the dist directory when building. The `README.md` can be placed in the root or in the dist directory.
|
||||||
|
|
||||||
|
Directories:
|
||||||
|
|
||||||
|
- `src/` contains plugin source files.
|
||||||
|
- `src/partials` contains html templates.
|
||||||
|
- `src/img` contains plugin logos and other images.
|
||||||
|
- `dist/` contains built content.
|
||||||
|
|
||||||
|
## HTML and CSS
|
||||||
|
|
||||||
|
For the HTML on editor tabs, we recommend using the inbuilt Grafana styles rather than defining your own. This makes plugins feel like a more natural part of Grafana. If done correctly, the html will also be responsive and adapt to smaller screens. The `gf-form` css classes should be used for labels and inputs.
|
||||||
|
|
||||||
|
Below is a minimal example of an editor row with one form group and two fields, a dropdown and a text input:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<div class="editor-row">
|
||||||
|
<div class="section gf-form-group">
|
||||||
|
<h5 class="section-heading">My Plugin Options</h5>
|
||||||
|
<div class="gf-form">
|
||||||
|
<label class="gf-form-label width-10">Label1</label>
|
||||||
|
<div class="gf-form-select-wrapper max-width-10">
|
||||||
|
<select class="input-small gf-form-input" ng-model="ctrl.panel.mySelectProperty" ng-options="t for t in ['option1', 'option2', 'option3']" ng-change="ctrl.onSelectChange()"></select>
|
||||||
|
</div>
|
||||||
|
<div class="gf-form">
|
||||||
|
<label class="gf-form-label width-10">Label2</label>
|
||||||
|
<input type="text" class="input-small gf-form-input width-10" ng-model="ctrl.panel.myProperty" ng-change="ctrl.onFieldChange()" placeholder="suggestion for user" ng-model-onblur />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
Use the `width-x` and `max-width-x` classes to control the width of your labels and input fields. Try to get labels and input fields to line up neatly by having the same width for all the labels in a group and the same width for all inputs in a group if possible.
|
||||||
|
|
||||||
|
## Data Sources
|
||||||
|
|
||||||
|
A basic guide for data sources can be found [here](http://docs.grafana.org/plugins/developing/datasources/).
|
||||||
|
|
||||||
|
### Config Page Guidelines
|
||||||
|
|
||||||
|
- It should be as easy as possible for a user to configure a url. If the data source is using the `datasource-http-settings` component, it should use the `suggest-url` attribute to suggest the default url or a url that is similar to what it should be (especially important if the url refers to a REST endpoint that is not common knowledge for most users e.g. `https://yourserver:4000/api/custom-endpoint`).
|
||||||
|
|
||||||
|
```html
|
||||||
|
<datasource-http-settings
|
||||||
|
current="ctrl.current"
|
||||||
|
suggest-url="http://localhost:8080">
|
||||||
|
</datasource-http-settings>
|
||||||
|
```
|
||||||
|
|
||||||
|
- The `testDatasource` function should make a query to the data source that will also test that the authentication details are correct. This is so the data source is correctly configured when the user tries to write a query in a new dashboard.
|
||||||
|
|
||||||
|
#### Password Security
|
||||||
|
|
||||||
|
If possible, any passwords or secrets should be saved in the `secureJsonData` blob. To encrypt sensitive data, the Grafana server's proxy feature must be used. The Grafana server has support for token authentication (OAuth) and HTTP Header authentication. If the calls have to be sent directly from the browser to a third-party API then this will not be possible and sensitive data will not be encrypted.
|
||||||
|
|
||||||
|
Read more here about how [Authentication for Datasources]({{< relref "auth-for-datasources.md" >}}) works.
|
||||||
|
|
||||||
|
If using the proxy feature then the Config page should use the `secureJsonData` blob like this:
|
||||||
|
|
||||||
|
- good: `<input type="password" class="gf-form-input" ng-model='ctrl.current.secureJsonData.password' placeholder="password"></input>`
|
||||||
|
- bad: `<input type="password" class="gf-form-input" ng-model='ctrl.current.password' placeholder="password"></input>`
|
||||||
|
|
||||||
|
### Query Editor
|
||||||
|
|
||||||
|
Each query editor is unique and can have a unique style. It should be adapted to what the users of the data source are used to.
|
||||||
|
|
||||||
|
- Should use the Grafana CSS `gf-form` classes.
|
||||||
|
- Should be neat and tidy. Labels and fields in columns should be aligned and should be the same width if possible.
|
||||||
|
- The datasource should be able to handle when a user toggles a query (by clicking on the eye icon) and not execute the query. This is done by checking the `hide` property - an [example](https://github.com/grafana/grafana/blob/master/public/app/plugins/datasource/postgres/datasource.ts#L35-L38).
|
||||||
|
- Should not execute queries if fields in the Query Editor are empty and the query will throw an exception (defensive programming).
|
||||||
|
- Should handle errors. There are two main ways to do this:
|
||||||
|
- use the notification system in Grafana to show a toaster popup with the error message. Example [here](https://github.com/alexanderzobnin/grafana-zabbix/blob/fdbbba2fb03f5f2a4b3b0715415e09d5a4cf6cde/src/panel-triggers/triggers_panel_ctrl.js#L467-L471).
|
||||||
|
- provide an error notification in the query editor like the MySQL/Postgres data sources do. Example code in the `query_ctrl` [here](https://github.com/grafana/azure-monitor-datasource/blob/b184d077f082a69f962120ef0d1f8296a0d46f03/src/query_ctrl.ts#L36-L51) and in the [html](https://github.com/grafana/azure-monitor-datasource/blob/b184d077f082a69f962120ef0d1f8296a0d46f03/src/partials/query.editor.html#L190-L193).
|
@ -19,8 +19,8 @@ module.exports = function(config) {
|
|||||||
},
|
},
|
||||||
|
|
||||||
webpack: webpackTestConfig,
|
webpack: webpackTestConfig,
|
||||||
webpackServer: {
|
webpackMiddleware: {
|
||||||
noInfo: true, // please don't spam the console when running in karma!
|
stats: 'minimal',
|
||||||
},
|
},
|
||||||
|
|
||||||
// list of files to exclude
|
// list of files to exclude
|
||||||
|
35
package.json
35
package.json
@ -16,11 +16,11 @@
|
|||||||
"@types/node": "^8.0.31",
|
"@types/node": "^8.0.31",
|
||||||
"@types/react": "^16.0.25",
|
"@types/react": "^16.0.25",
|
||||||
"@types/react-dom": "^16.0.3",
|
"@types/react-dom": "^16.0.3",
|
||||||
"angular-mocks": "^1.6.6",
|
"angular-mocks": "1.6.6",
|
||||||
"autoprefixer": "^6.4.0",
|
"autoprefixer": "^6.4.0",
|
||||||
"awesome-typescript-loader": "^4.0.0",
|
|
||||||
"axios": "^0.17.1",
|
"axios": "^0.17.1",
|
||||||
"babel-core": "^6.26.0",
|
"babel-core": "^6.26.0",
|
||||||
|
"babel-loader": "^7.1.4",
|
||||||
"babel-plugin-syntax-dynamic-import": "^6.18.0",
|
"babel-plugin-syntax-dynamic-import": "^6.18.0",
|
||||||
"babel-preset-es2015": "^6.24.1",
|
"babel-preset-es2015": "^6.24.1",
|
||||||
"clean-webpack-plugin": "^0.1.19",
|
"clean-webpack-plugin": "^0.1.19",
|
||||||
@ -32,8 +32,9 @@
|
|||||||
"es6-shim": "^0.35.3",
|
"es6-shim": "^0.35.3",
|
||||||
"expect.js": "~0.2.0",
|
"expect.js": "~0.2.0",
|
||||||
"expose-loader": "^0.7.3",
|
"expose-loader": "^0.7.3",
|
||||||
"extract-text-webpack-plugin": "^3.0.0",
|
"extract-text-webpack-plugin": "^4.0.0-beta.0",
|
||||||
"file-loader": "^1.1.11",
|
"file-loader": "^1.1.11",
|
||||||
|
"fork-ts-checker-webpack-plugin": "^0.4.1",
|
||||||
"gaze": "^1.1.2",
|
"gaze": "^1.1.2",
|
||||||
"glob": "~7.0.0",
|
"glob": "~7.0.0",
|
||||||
"grunt": "1.0.1",
|
"grunt": "1.0.1",
|
||||||
@ -56,7 +57,7 @@
|
|||||||
"grunt-webpack": "^3.0.2",
|
"grunt-webpack": "^3.0.2",
|
||||||
"html-loader": "^0.5.1",
|
"html-loader": "^0.5.1",
|
||||||
"html-webpack-harddisk-plugin": "^0.2.0",
|
"html-webpack-harddisk-plugin": "^0.2.0",
|
||||||
"html-webpack-plugin": "^2.30.1",
|
"html-webpack-plugin": "^3.2.0",
|
||||||
"husky": "^0.14.3",
|
"husky": "^0.14.3",
|
||||||
"jest": "^22.0.4",
|
"jest": "^22.0.4",
|
||||||
"jshint-stylish": "~2.2.1",
|
"jshint-stylish": "~2.2.1",
|
||||||
@ -67,7 +68,7 @@
|
|||||||
"karma-phantomjs-launcher": "1.0.4",
|
"karma-phantomjs-launcher": "1.0.4",
|
||||||
"karma-sinon": "^1.0.5",
|
"karma-sinon": "^1.0.5",
|
||||||
"karma-sourcemap-loader": "^0.3.7",
|
"karma-sourcemap-loader": "^0.3.7",
|
||||||
"karma-webpack": "^2.0.4",
|
"karma-webpack": "^3.0.0",
|
||||||
"lint-staged": "^6.0.0",
|
"lint-staged": "^6.0.0",
|
||||||
"load-grunt-tasks": "3.5.2",
|
"load-grunt-tasks": "3.5.2",
|
||||||
"mobx-react-devtools": "^4.2.15",
|
"mobx-react-devtools": "^4.2.15",
|
||||||
@ -89,21 +90,24 @@
|
|||||||
"style-loader": "^0.21.0",
|
"style-loader": "^0.21.0",
|
||||||
"systemjs": "0.20.19",
|
"systemjs": "0.20.19",
|
||||||
"systemjs-plugin-css": "^0.1.36",
|
"systemjs-plugin-css": "^0.1.36",
|
||||||
"ts-jest": "^22.0.0",
|
"ts-loader": "^4.3.0",
|
||||||
|
"ts-jest": "^22.4.6",
|
||||||
"tslint": "^5.8.0",
|
"tslint": "^5.8.0",
|
||||||
"tslint-loader": "^3.5.3",
|
"tslint-loader": "^3.5.3",
|
||||||
"typescript": "^2.6.2",
|
"typescript": "^2.6.2",
|
||||||
"webpack": "^3.10.0",
|
"webpack": "^4.8.0",
|
||||||
"webpack-bundle-analyzer": "^2.9.0",
|
"webpack-bundle-analyzer": "^2.9.0",
|
||||||
"webpack-cleanup-plugin": "^0.5.1",
|
"webpack-cleanup-plugin": "^0.5.1",
|
||||||
"webpack-dev-server": "2.11.1",
|
"fork-ts-checker-webpack-plugin": "^0.4.2",
|
||||||
|
"webpack-cli": "^2.1.4",
|
||||||
|
"webpack-dev-server": "^3.1.0",
|
||||||
"webpack-merge": "^4.1.0",
|
"webpack-merge": "^4.1.0",
|
||||||
"zone.js": "^0.7.2"
|
"zone.js": "^0.7.2"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "webpack --progress --colors --config scripts/webpack/webpack.dev.js",
|
"dev": "webpack --progress --colors --mode development --config scripts/webpack/webpack.dev.js",
|
||||||
"start": "webpack-dev-server --progress --colors --config scripts/webpack/webpack.hot.js",
|
"start": "webpack-dev-server --progress --colors --mode development --config scripts/webpack/webpack.hot.js",
|
||||||
"watch": "webpack --progress --colors --watch --config scripts/webpack/webpack.dev.js",
|
"watch": "webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js",
|
||||||
"build": "grunt build",
|
"build": "grunt build",
|
||||||
"test": "grunt test",
|
"test": "grunt test",
|
||||||
"test:coverage": "grunt test --coverage=true",
|
"test:coverage": "grunt test --coverage=true",
|
||||||
@ -135,8 +139,8 @@
|
|||||||
"license": "Apache-2.0",
|
"license": "Apache-2.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"angular": "1.6.6",
|
"angular": "1.6.6",
|
||||||
"angular-bindonce": "^0.3.1",
|
"angular-bindonce": "0.3.1",
|
||||||
"angular-native-dragdrop": "^1.2.2",
|
"angular-native-dragdrop": "1.2.2",
|
||||||
"angular-route": "1.6.6",
|
"angular-route": "1.6.6",
|
||||||
"angular-sanitize": "1.6.6",
|
"angular-sanitize": "1.6.6",
|
||||||
"babel-polyfill": "^6.26.0",
|
"babel-polyfill": "^6.26.0",
|
||||||
@ -151,12 +155,14 @@
|
|||||||
"immutable": "^3.8.2",
|
"immutable": "^3.8.2",
|
||||||
"jquery": "^3.2.1",
|
"jquery": "^3.2.1",
|
||||||
"lodash": "^4.17.4",
|
"lodash": "^4.17.4",
|
||||||
|
"mini-css-extract-plugin": "^0.4.0",
|
||||||
"mobx": "^3.4.1",
|
"mobx": "^3.4.1",
|
||||||
"mobx-react": "^4.3.5",
|
"mobx-react": "^4.3.5",
|
||||||
"mobx-state-tree": "^1.3.1",
|
"mobx-state-tree": "^1.3.1",
|
||||||
"moment": "^2.18.1",
|
"moment": "^2.18.1",
|
||||||
"mousetrap": "^1.6.0",
|
"mousetrap": "^1.6.0",
|
||||||
"mousetrap-global-bind": "^1.1.0",
|
"mousetrap-global-bind": "^1.1.0",
|
||||||
|
"optimize-css-assets-webpack-plugin": "^4.0.2",
|
||||||
"prismjs": "^1.6.0",
|
"prismjs": "^1.6.0",
|
||||||
"prop-types": "^15.6.0",
|
"prop-types": "^15.6.0",
|
||||||
"react": "^16.2.0",
|
"react": "^16.2.0",
|
||||||
@ -175,7 +181,8 @@
|
|||||||
"slate-react": "^0.12.4",
|
"slate-react": "^0.12.4",
|
||||||
"tether": "^1.4.0",
|
"tether": "^1.4.0",
|
||||||
"tether-drop": "https://github.com/torkelo/drop/tarball/master",
|
"tether-drop": "https://github.com/torkelo/drop/tarball/master",
|
||||||
"tinycolor2": "^1.4.1"
|
"tinycolor2": "^1.4.1",
|
||||||
|
"uglifyjs-webpack-plugin": "^1.2.7"
|
||||||
},
|
},
|
||||||
"resolutions": {
|
"resolutions": {
|
||||||
"caniuse-db": "1.0.30000772"
|
"caniuse-db": "1.0.30000772"
|
||||||
|
@ -37,7 +37,6 @@ func GetAnnotations(c *m.ReqContext) Response {
|
|||||||
if item.Email != "" {
|
if item.Email != "" {
|
||||||
item.AvatarUrl = dtos.GetGravatarUrl(item.Email)
|
item.AvatarUrl = dtos.GetGravatarUrl(item.Email)
|
||||||
}
|
}
|
||||||
item.Time = item.Time
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return JSON(200, items)
|
return JSON(200, items)
|
||||||
@ -214,7 +213,9 @@ func DeleteAnnotations(c *m.ReqContext, cmd dtos.DeleteAnnotationsCmd) Response
|
|||||||
repo := annotations.GetRepository()
|
repo := annotations.GetRepository()
|
||||||
|
|
||||||
err := repo.Delete(&annotations.DeleteParams{
|
err := repo.Delete(&annotations.DeleteParams{
|
||||||
AlertId: cmd.PanelId,
|
OrgId: c.OrgId,
|
||||||
|
Id: cmd.AnnotationId,
|
||||||
|
RegionId: cmd.RegionId,
|
||||||
DashboardId: cmd.DashboardId,
|
DashboardId: cmd.DashboardId,
|
||||||
PanelId: cmd.PanelId,
|
PanelId: cmd.PanelId,
|
||||||
})
|
})
|
||||||
@ -235,7 +236,8 @@ func DeleteAnnotationByID(c *m.ReqContext) Response {
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := repo.Delete(&annotations.DeleteParams{
|
err := repo.Delete(&annotations.DeleteParams{
|
||||||
Id: annotationID,
|
OrgId: c.OrgId,
|
||||||
|
Id: annotationID,
|
||||||
})
|
})
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -254,6 +256,7 @@ func DeleteAnnotationRegion(c *m.ReqContext) Response {
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := repo.Delete(&annotations.DeleteParams{
|
err := repo.Delete(&annotations.DeleteParams{
|
||||||
|
OrgId: c.OrgId,
|
||||||
RegionId: regionID,
|
RegionId: regionID,
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -269,9 +272,9 @@ func canSaveByDashboardID(c *m.ReqContext, dashboardID int64) (bool, error) {
|
|||||||
return false, nil
|
return false, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if dashboardID > 0 {
|
if dashboardID != 0 {
|
||||||
guardian := guardian.New(dashboardID, c.OrgId, c.SignedInUser)
|
guard := guardian.New(dashboardID, c.OrgId, c.SignedInUser)
|
||||||
if canEdit, err := guardian.CanEdit(); err != nil || !canEdit {
|
if canEdit, err := guard.CanEdit(); err != nil || !canEdit {
|
||||||
return false, err
|
return false, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -100,6 +100,11 @@ func TestAnnotationsApiEndpoint(t *testing.T) {
|
|||||||
Id: 1,
|
Id: 1,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
deleteCmd := dtos.DeleteAnnotationsCmd{
|
||||||
|
DashboardId: 1,
|
||||||
|
PanelId: 1,
|
||||||
|
}
|
||||||
|
|
||||||
viewerRole := m.ROLE_VIEWER
|
viewerRole := m.ROLE_VIEWER
|
||||||
editorRole := m.ROLE_EDITOR
|
editorRole := m.ROLE_EDITOR
|
||||||
|
|
||||||
@ -171,6 +176,25 @@ func TestAnnotationsApiEndpoint(t *testing.T) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
Convey("When user is an Admin", func() {
|
||||||
|
role := m.ROLE_ADMIN
|
||||||
|
Convey("Should be able to do anything", func() {
|
||||||
|
postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) {
|
||||||
|
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
|
||||||
|
So(sc.resp.Code, ShouldEqual, 200)
|
||||||
|
})
|
||||||
|
|
||||||
|
putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) {
|
||||||
|
sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec()
|
||||||
|
So(sc.resp.Code, ShouldEqual, 200)
|
||||||
|
})
|
||||||
|
deleteAnnotationsScenario("When calling POST on", "/api/annotations/mass-delete", "/api/annotations/mass-delete", role, deleteCmd, func(sc *scenarioContext) {
|
||||||
|
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec()
|
||||||
|
So(sc.resp.Code, ShouldEqual, 200)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -239,3 +263,26 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m.
|
|||||||
fn(sc)
|
fn(sc)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func deleteAnnotationsScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.DeleteAnnotationsCmd, fn scenarioFunc) {
|
||||||
|
Convey(desc+" "+url, func() {
|
||||||
|
defer bus.ClearBusHandlers()
|
||||||
|
|
||||||
|
sc := setupScenarioContext(url)
|
||||||
|
sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
|
||||||
|
sc.context = c
|
||||||
|
sc.context.UserId = TestUserID
|
||||||
|
sc.context.OrgId = TestOrgID
|
||||||
|
sc.context.OrgRole = role
|
||||||
|
|
||||||
|
return DeleteAnnotations(c, cmd)
|
||||||
|
})
|
||||||
|
|
||||||
|
fakeAnnoRepo = &fakeAnnotationsRepo{}
|
||||||
|
annotations.SetRepository(fakeAnnoRepo)
|
||||||
|
|
||||||
|
sc.m.Post(routePattern, sc.defaultHandler)
|
||||||
|
|
||||||
|
fn(sc)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
@ -4,6 +4,7 @@ import (
|
|||||||
"github.com/go-macaron/binding"
|
"github.com/go-macaron/binding"
|
||||||
"github.com/grafana/grafana/pkg/api/avatar"
|
"github.com/grafana/grafana/pkg/api/avatar"
|
||||||
"github.com/grafana/grafana/pkg/api/dtos"
|
"github.com/grafana/grafana/pkg/api/dtos"
|
||||||
|
"github.com/grafana/grafana/pkg/api/routing"
|
||||||
"github.com/grafana/grafana/pkg/middleware"
|
"github.com/grafana/grafana/pkg/middleware"
|
||||||
m "github.com/grafana/grafana/pkg/models"
|
m "github.com/grafana/grafana/pkg/models"
|
||||||
)
|
)
|
||||||
@ -117,10 +118,10 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
r.Get("/api/login/ping", quota("session"), LoginAPIPing)
|
r.Get("/api/login/ping", quota("session"), LoginAPIPing)
|
||||||
|
|
||||||
// authed api
|
// authed api
|
||||||
r.Group("/api", func(apiRoute RouteRegister) {
|
r.Group("/api", func(apiRoute routing.RouteRegister) {
|
||||||
|
|
||||||
// user (signed in)
|
// user (signed in)
|
||||||
apiRoute.Group("/user", func(userRoute RouteRegister) {
|
apiRoute.Group("/user", func(userRoute routing.RouteRegister) {
|
||||||
userRoute.Get("/", wrap(GetSignedInUser))
|
userRoute.Get("/", wrap(GetSignedInUser))
|
||||||
userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser))
|
userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser))
|
||||||
userRoute.Post("/using/:id", wrap(UserSetUsingOrg))
|
userRoute.Post("/using/:id", wrap(UserSetUsingOrg))
|
||||||
@ -140,7 +141,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
})
|
})
|
||||||
|
|
||||||
// users (admin permission required)
|
// users (admin permission required)
|
||||||
apiRoute.Group("/users", func(usersRoute RouteRegister) {
|
apiRoute.Group("/users", func(usersRoute routing.RouteRegister) {
|
||||||
usersRoute.Get("/", wrap(SearchUsers))
|
usersRoute.Get("/", wrap(SearchUsers))
|
||||||
usersRoute.Get("/search", wrap(SearchUsersWithPaging))
|
usersRoute.Get("/search", wrap(SearchUsersWithPaging))
|
||||||
usersRoute.Get("/:id", wrap(GetUserByID))
|
usersRoute.Get("/:id", wrap(GetUserByID))
|
||||||
@ -152,7 +153,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
}, reqGrafanaAdmin)
|
}, reqGrafanaAdmin)
|
||||||
|
|
||||||
// team (admin permission required)
|
// team (admin permission required)
|
||||||
apiRoute.Group("/teams", func(teamsRoute RouteRegister) {
|
apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) {
|
||||||
teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam))
|
teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam))
|
||||||
teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam))
|
teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam))
|
||||||
teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID))
|
teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID))
|
||||||
@ -162,19 +163,19 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
}, reqOrgAdmin)
|
}, reqOrgAdmin)
|
||||||
|
|
||||||
// team without requirement of user to be org admin
|
// team without requirement of user to be org admin
|
||||||
apiRoute.Group("/teams", func(teamsRoute RouteRegister) {
|
apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) {
|
||||||
teamsRoute.Get("/:teamId", wrap(GetTeamByID))
|
teamsRoute.Get("/:teamId", wrap(GetTeamByID))
|
||||||
teamsRoute.Get("/search", wrap(SearchTeams))
|
teamsRoute.Get("/search", wrap(SearchTeams))
|
||||||
})
|
})
|
||||||
|
|
||||||
// org information available to all users.
|
// org information available to all users.
|
||||||
apiRoute.Group("/org", func(orgRoute RouteRegister) {
|
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
|
||||||
orgRoute.Get("/", wrap(GetOrgCurrent))
|
orgRoute.Get("/", wrap(GetOrgCurrent))
|
||||||
orgRoute.Get("/quotas", wrap(GetOrgQuotas))
|
orgRoute.Get("/quotas", wrap(GetOrgQuotas))
|
||||||
})
|
})
|
||||||
|
|
||||||
// current org
|
// current org
|
||||||
apiRoute.Group("/org", func(orgRoute RouteRegister) {
|
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
|
||||||
orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent))
|
orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent))
|
||||||
orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent))
|
orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent))
|
||||||
orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg))
|
orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg))
|
||||||
@ -192,7 +193,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
}, reqOrgAdmin)
|
}, reqOrgAdmin)
|
||||||
|
|
||||||
// current org without requirement of user to be org admin
|
// current org without requirement of user to be org admin
|
||||||
apiRoute.Group("/org", func(orgRoute RouteRegister) {
|
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
|
||||||
orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg))
|
orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg))
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -203,7 +204,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs))
|
apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs))
|
||||||
|
|
||||||
// orgs (admin routes)
|
// orgs (admin routes)
|
||||||
apiRoute.Group("/orgs/:orgId", func(orgsRoute RouteRegister) {
|
apiRoute.Group("/orgs/:orgId", func(orgsRoute routing.RouteRegister) {
|
||||||
orgsRoute.Get("/", wrap(GetOrgByID))
|
orgsRoute.Get("/", wrap(GetOrgByID))
|
||||||
orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg))
|
orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg))
|
||||||
orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress))
|
orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress))
|
||||||
@ -217,24 +218,24 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
}, reqGrafanaAdmin)
|
}, reqGrafanaAdmin)
|
||||||
|
|
||||||
// orgs (admin routes)
|
// orgs (admin routes)
|
||||||
apiRoute.Group("/orgs/name/:name", func(orgsRoute RouteRegister) {
|
apiRoute.Group("/orgs/name/:name", func(orgsRoute routing.RouteRegister) {
|
||||||
orgsRoute.Get("/", wrap(GetOrgByName))
|
orgsRoute.Get("/", wrap(GetOrgByName))
|
||||||
}, reqGrafanaAdmin)
|
}, reqGrafanaAdmin)
|
||||||
|
|
||||||
// auth api keys
|
// auth api keys
|
||||||
apiRoute.Group("/auth/keys", func(keysRoute RouteRegister) {
|
apiRoute.Group("/auth/keys", func(keysRoute routing.RouteRegister) {
|
||||||
keysRoute.Get("/", wrap(GetAPIKeys))
|
keysRoute.Get("/", wrap(GetAPIKeys))
|
||||||
keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddAPIKey))
|
keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddAPIKey))
|
||||||
keysRoute.Delete("/:id", wrap(DeleteAPIKey))
|
keysRoute.Delete("/:id", wrap(DeleteAPIKey))
|
||||||
}, reqOrgAdmin)
|
}, reqOrgAdmin)
|
||||||
|
|
||||||
// Preferences
|
// Preferences
|
||||||
apiRoute.Group("/preferences", func(prefRoute RouteRegister) {
|
apiRoute.Group("/preferences", func(prefRoute routing.RouteRegister) {
|
||||||
prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard))
|
prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard))
|
||||||
})
|
})
|
||||||
|
|
||||||
// Data sources
|
// Data sources
|
||||||
apiRoute.Group("/datasources", func(datasourceRoute RouteRegister) {
|
apiRoute.Group("/datasources", func(datasourceRoute routing.RouteRegister) {
|
||||||
datasourceRoute.Get("/", wrap(GetDataSources))
|
datasourceRoute.Get("/", wrap(GetDataSources))
|
||||||
datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource))
|
datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource))
|
||||||
datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource))
|
datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource))
|
||||||
@ -250,7 +251,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingByID))
|
apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingByID))
|
||||||
apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown))
|
apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown))
|
||||||
|
|
||||||
apiRoute.Group("/plugins", func(pluginRoute RouteRegister) {
|
apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) {
|
||||||
pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards))
|
pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards))
|
||||||
pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting))
|
pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting))
|
||||||
}, reqOrgAdmin)
|
}, reqOrgAdmin)
|
||||||
@ -260,17 +261,17 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
apiRoute.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest)
|
apiRoute.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest)
|
||||||
|
|
||||||
// Folders
|
// Folders
|
||||||
apiRoute.Group("/folders", func(folderRoute RouteRegister) {
|
apiRoute.Group("/folders", func(folderRoute routing.RouteRegister) {
|
||||||
folderRoute.Get("/", wrap(GetFolders))
|
folderRoute.Get("/", wrap(GetFolders))
|
||||||
folderRoute.Get("/id/:id", wrap(GetFolderByID))
|
folderRoute.Get("/id/:id", wrap(GetFolderByID))
|
||||||
folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder))
|
folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder))
|
||||||
|
|
||||||
folderRoute.Group("/:uid", func(folderUidRoute RouteRegister) {
|
folderRoute.Group("/:uid", func(folderUidRoute routing.RouteRegister) {
|
||||||
folderUidRoute.Get("/", wrap(GetFolderByUID))
|
folderUidRoute.Get("/", wrap(GetFolderByUID))
|
||||||
folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder))
|
folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder))
|
||||||
folderUidRoute.Delete("/", wrap(DeleteFolder))
|
folderUidRoute.Delete("/", wrap(DeleteFolder))
|
||||||
|
|
||||||
folderUidRoute.Group("/permissions", func(folderPermissionRoute RouteRegister) {
|
folderUidRoute.Group("/permissions", func(folderPermissionRoute routing.RouteRegister) {
|
||||||
folderPermissionRoute.Get("/", wrap(GetFolderPermissionList))
|
folderPermissionRoute.Get("/", wrap(GetFolderPermissionList))
|
||||||
folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions))
|
folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions))
|
||||||
})
|
})
|
||||||
@ -278,7 +279,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
})
|
})
|
||||||
|
|
||||||
// Dashboard
|
// Dashboard
|
||||||
apiRoute.Group("/dashboards", func(dashboardRoute RouteRegister) {
|
apiRoute.Group("/dashboards", func(dashboardRoute routing.RouteRegister) {
|
||||||
dashboardRoute.Get("/uid/:uid", wrap(GetDashboard))
|
dashboardRoute.Get("/uid/:uid", wrap(GetDashboard))
|
||||||
dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUID))
|
dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUID))
|
||||||
|
|
||||||
@ -292,12 +293,12 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
dashboardRoute.Get("/tags", GetDashboardTags)
|
dashboardRoute.Get("/tags", GetDashboardTags)
|
||||||
dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard))
|
dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard))
|
||||||
|
|
||||||
dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute RouteRegister) {
|
dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) {
|
||||||
dashIdRoute.Get("/versions", wrap(GetDashboardVersions))
|
dashIdRoute.Get("/versions", wrap(GetDashboardVersions))
|
||||||
dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion))
|
dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion))
|
||||||
dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion))
|
dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion))
|
||||||
|
|
||||||
dashIdRoute.Group("/permissions", func(dashboardPermissionRoute RouteRegister) {
|
dashIdRoute.Group("/permissions", func(dashboardPermissionRoute routing.RouteRegister) {
|
||||||
dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList))
|
dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList))
|
||||||
dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions))
|
dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions))
|
||||||
})
|
})
|
||||||
@ -305,12 +306,12 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
})
|
})
|
||||||
|
|
||||||
// Dashboard snapshots
|
// Dashboard snapshots
|
||||||
apiRoute.Group("/dashboard/snapshots", func(dashboardRoute RouteRegister) {
|
apiRoute.Group("/dashboard/snapshots", func(dashboardRoute routing.RouteRegister) {
|
||||||
dashboardRoute.Get("/", wrap(SearchDashboardSnapshots))
|
dashboardRoute.Get("/", wrap(SearchDashboardSnapshots))
|
||||||
})
|
})
|
||||||
|
|
||||||
// Playlist
|
// Playlist
|
||||||
apiRoute.Group("/playlists", func(playlistRoute RouteRegister) {
|
apiRoute.Group("/playlists", func(playlistRoute routing.RouteRegister) {
|
||||||
playlistRoute.Get("/", wrap(SearchPlaylists))
|
playlistRoute.Get("/", wrap(SearchPlaylists))
|
||||||
playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist))
|
playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist))
|
||||||
playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems))
|
playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems))
|
||||||
@ -329,7 +330,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSQLTestData))
|
apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSQLTestData))
|
||||||
apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk))
|
apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk))
|
||||||
|
|
||||||
apiRoute.Group("/alerts", func(alertsRoute RouteRegister) {
|
apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) {
|
||||||
alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
|
alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
|
||||||
alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), wrap(PauseAlert))
|
alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), wrap(PauseAlert))
|
||||||
alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
|
alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
|
||||||
@ -340,7 +341,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications))
|
apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications))
|
||||||
apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers))
|
apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers))
|
||||||
|
|
||||||
apiRoute.Group("/alert-notifications", func(alertNotifications RouteRegister) {
|
apiRoute.Group("/alert-notifications", func(alertNotifications routing.RouteRegister) {
|
||||||
alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest))
|
alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest))
|
||||||
alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification))
|
alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification))
|
||||||
alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification))
|
alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification))
|
||||||
@ -351,7 +352,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
apiRoute.Get("/annotations", wrap(GetAnnotations))
|
apiRoute.Get("/annotations", wrap(GetAnnotations))
|
||||||
apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations))
|
apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations))
|
||||||
|
|
||||||
apiRoute.Group("/annotations", func(annotationsRoute RouteRegister) {
|
apiRoute.Group("/annotations", func(annotationsRoute routing.RouteRegister) {
|
||||||
annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation))
|
annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation))
|
||||||
annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationByID))
|
annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationByID))
|
||||||
annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation))
|
annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation))
|
||||||
@ -365,7 +366,7 @@ func (hs *HTTPServer) registerRoutes() {
|
|||||||
}, reqSignedIn)
|
}, reqSignedIn)
|
||||||
|
|
||||||
// admin api
|
// admin api
|
||||||
r.Group("/api/admin", func(adminRoute RouteRegister) {
|
r.Group("/api/admin", func(adminRoute routing.RouteRegister) {
|
||||||
adminRoute.Get("/settings", AdminGetSettings)
|
adminRoute.Get("/settings", AdminGetSettings)
|
||||||
adminRoute.Post("/users", bind(dtos.AdminCreateUserForm{}), AdminCreateUser)
|
adminRoute.Post("/users", bind(dtos.AdminCreateUserForm{}), AdminCreateUser)
|
||||||
adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword)
|
adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword)
|
||||||
|
@ -103,6 +103,9 @@ func DeleteDataSourceByName(c *m.ReqContext) Response {
|
|||||||
|
|
||||||
getCmd := &m.GetDataSourceByNameQuery{Name: name, OrgId: c.OrgId}
|
getCmd := &m.GetDataSourceByNameQuery{Name: name, OrgId: c.OrgId}
|
||||||
if err := bus.Dispatch(getCmd); err != nil {
|
if err := bus.Dispatch(getCmd); err != nil {
|
||||||
|
if err == m.ErrDataSourceNotFound {
|
||||||
|
return Error(404, "Data source not found", nil)
|
||||||
|
}
|
||||||
return Error(500, "Failed to delete datasource", err)
|
return Error(500, "Failed to delete datasource", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -46,5 +46,13 @@ func TestDataSourcesProxy(t *testing.T) {
|
|||||||
So(respJSON[3]["name"], ShouldEqual, "ZZZ")
|
So(respJSON[3]["name"], ShouldEqual, "ZZZ")
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
Convey("Should be able to save a data source", func() {
|
||||||
|
loggedInUserScenario("When calling DELETE on non-existing", "/api/datasources/name/12345", func(sc *scenarioContext) {
|
||||||
|
sc.handlerFunc = DeleteDataSourceByName
|
||||||
|
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
|
||||||
|
So(sc.resp.Code, ShouldEqual, 404)
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -11,6 +11,7 @@ import (
|
|||||||
"path"
|
"path"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/grafana/grafana/pkg/api/routing"
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
|
|
||||||
"github.com/prometheus/client_golang/prometheus/promhttp"
|
"github.com/prometheus/client_golang/prometheus/promhttp"
|
||||||
@ -43,10 +44,10 @@ type HTTPServer struct {
|
|||||||
cache *gocache.Cache
|
cache *gocache.Cache
|
||||||
httpSrv *http.Server
|
httpSrv *http.Server
|
||||||
|
|
||||||
RouteRegister RouteRegister `inject:""`
|
RouteRegister routing.RouteRegister `inject:""`
|
||||||
Bus bus.Bus `inject:""`
|
Bus bus.Bus `inject:""`
|
||||||
RenderService rendering.Service `inject:""`
|
RenderService rendering.Service `inject:""`
|
||||||
Cfg *setting.Cfg `inject:""`
|
Cfg *setting.Cfg `inject:""`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (hs *HTTPServer) Init() error {
|
func (hs *HTTPServer) Init() error {
|
||||||
|
@ -117,6 +117,28 @@ func (proxy *DataSourceProxy) addTraceFromHeaderValue(span opentracing.Span, hea
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (proxy *DataSourceProxy) useCustomHeaders(req *http.Request) {
|
||||||
|
decryptSdj := proxy.ds.SecureJsonData.Decrypt()
|
||||||
|
index := 1
|
||||||
|
for {
|
||||||
|
headerNameSuffix := fmt.Sprintf("httpHeaderName%d", index)
|
||||||
|
headerValueSuffix := fmt.Sprintf("httpHeaderValue%d", index)
|
||||||
|
if key := proxy.ds.JsonData.Get(headerNameSuffix).MustString(); key != "" {
|
||||||
|
if val, ok := decryptSdj[headerValueSuffix]; ok {
|
||||||
|
// remove if exists
|
||||||
|
if req.Header.Get(key) != "" {
|
||||||
|
req.Header.Del(key)
|
||||||
|
}
|
||||||
|
req.Header.Add(key, val)
|
||||||
|
logger.Debug("Using custom header ", "CustomHeaders", key)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
index += 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
|
func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
|
||||||
return func(req *http.Request) {
|
return func(req *http.Request) {
|
||||||
req.URL.Scheme = proxy.targetUrl.Scheme
|
req.URL.Scheme = proxy.targetUrl.Scheme
|
||||||
@ -146,6 +168,11 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
|
|||||||
req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword))
|
req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Lookup and use custom headers
|
||||||
|
if proxy.ds.SecureJsonData != nil {
|
||||||
|
proxy.useCustomHeaders(req)
|
||||||
|
}
|
||||||
|
|
||||||
dsAuth := req.Header.Get("X-DS-Authorization")
|
dsAuth := req.Header.Get("X-DS-Authorization")
|
||||||
if len(dsAuth) > 0 {
|
if len(dsAuth) > 0 {
|
||||||
req.Header.Del("X-DS-Authorization")
|
req.Header.Del("X-DS-Authorization")
|
||||||
|
@ -12,6 +12,7 @@ import (
|
|||||||
macaron "gopkg.in/macaron.v1"
|
macaron "gopkg.in/macaron.v1"
|
||||||
|
|
||||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||||
|
"github.com/grafana/grafana/pkg/log"
|
||||||
m "github.com/grafana/grafana/pkg/models"
|
m "github.com/grafana/grafana/pkg/models"
|
||||||
"github.com/grafana/grafana/pkg/plugins"
|
"github.com/grafana/grafana/pkg/plugins"
|
||||||
"github.com/grafana/grafana/pkg/setting"
|
"github.com/grafana/grafana/pkg/setting"
|
||||||
@ -322,6 +323,37 @@ func TestDSRouteRule(t *testing.T) {
|
|||||||
So(interpolated, ShouldEqual, "0asd+asd")
|
So(interpolated, ShouldEqual, "0asd+asd")
|
||||||
})
|
})
|
||||||
|
|
||||||
|
Convey("When proxying a data source with custom headers specified", func() {
|
||||||
|
plugin := &plugins.DataSourcePlugin{}
|
||||||
|
|
||||||
|
encryptedData, err := util.Encrypt([]byte(`Bearer xf5yhfkpsnmgo`), setting.SecretKey)
|
||||||
|
ds := &m.DataSource{
|
||||||
|
Type: m.DS_PROMETHEUS,
|
||||||
|
Url: "http://prometheus:9090",
|
||||||
|
JsonData: simplejson.NewFromAny(map[string]interface{}{
|
||||||
|
"httpHeaderName1": "Authorization",
|
||||||
|
}),
|
||||||
|
SecureJsonData: map[string][]byte{
|
||||||
|
"httpHeaderValue1": encryptedData,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := &m.ReqContext{}
|
||||||
|
proxy := NewDataSourceProxy(ds, plugin, ctx, "")
|
||||||
|
|
||||||
|
requestURL, _ := url.Parse("http://grafana.com/sub")
|
||||||
|
req := http.Request{URL: requestURL, Header: make(http.Header)}
|
||||||
|
proxy.getDirector()(&req)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(4, err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
Convey("Match header value after decryption", func() {
|
||||||
|
So(req.Header.Get("Authorization"), ShouldEqual, "Bearer xf5yhfkpsnmgo")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3,7 +3,9 @@ package api
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"runtime"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
m "github.com/grafana/grafana/pkg/models"
|
m "github.com/grafana/grafana/pkg/models"
|
||||||
@ -55,6 +57,15 @@ func (hs *HTTPServer) RenderToPng(c *m.ReqContext) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err != nil && err == rendering.ErrPhantomJSNotInstalled {
|
||||||
|
if strings.HasPrefix(runtime.GOARCH, "arm") {
|
||||||
|
c.Handle(500, "Rendering failed - PhantomJS isn't included in arm build per default", err)
|
||||||
|
} else {
|
||||||
|
c.Handle(500, "Rendering failed - PhantomJS isn't installed correctly", err)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.Handle(500, "Rendering failed.", err)
|
c.Handle(500, "Rendering failed.", err)
|
||||||
return
|
return
|
||||||
|
@ -1,9 +1,10 @@
|
|||||||
package api
|
package routing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
macaron "gopkg.in/macaron.v1"
|
"gopkg.in/macaron.v1"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Router interface {
|
type Router interface {
|
||||||
@ -14,16 +15,34 @@ type Router interface {
|
|||||||
// RouteRegister allows you to add routes and macaron.Handlers
|
// RouteRegister allows you to add routes and macaron.Handlers
|
||||||
// that the web server should serve.
|
// that the web server should serve.
|
||||||
type RouteRegister interface {
|
type RouteRegister interface {
|
||||||
|
// Get adds a list of handlers to a given route with a GET HTTP verb
|
||||||
Get(string, ...macaron.Handler)
|
Get(string, ...macaron.Handler)
|
||||||
|
|
||||||
|
// Post adds a list of handlers to a given route with a POST HTTP verb
|
||||||
Post(string, ...macaron.Handler)
|
Post(string, ...macaron.Handler)
|
||||||
|
|
||||||
|
// Delete adds a list of handlers to a given route with a DELETE HTTP verb
|
||||||
Delete(string, ...macaron.Handler)
|
Delete(string, ...macaron.Handler)
|
||||||
|
|
||||||
|
// Put adds a list of handlers to a given route with a PUT HTTP verb
|
||||||
Put(string, ...macaron.Handler)
|
Put(string, ...macaron.Handler)
|
||||||
|
|
||||||
|
// Patch adds a list of handlers to a given route with a PATCH HTTP verb
|
||||||
Patch(string, ...macaron.Handler)
|
Patch(string, ...macaron.Handler)
|
||||||
|
|
||||||
|
// Any adds a list of handlers to a given route with any HTTP verb
|
||||||
Any(string, ...macaron.Handler)
|
Any(string, ...macaron.Handler)
|
||||||
|
|
||||||
|
// Group allows you to pass a function that can add multiple routes
|
||||||
|
// with a shared prefix route.
|
||||||
Group(string, func(RouteRegister), ...macaron.Handler)
|
Group(string, func(RouteRegister), ...macaron.Handler)
|
||||||
|
|
||||||
Register(Router) *macaron.Router
|
// Insert adds more routes to an existing Group.
|
||||||
|
Insert(string, func(RouteRegister), ...macaron.Handler)
|
||||||
|
|
||||||
|
// Register iterates over all routes added to the RouteRegister
|
||||||
|
// and add them to the `Router` pass as an parameter.
|
||||||
|
Register(Router)
|
||||||
}
|
}
|
||||||
|
|
||||||
type RegisterNamedMiddleware func(name string) macaron.Handler
|
type RegisterNamedMiddleware func(name string) macaron.Handler
|
||||||
@ -52,6 +71,24 @@ type routeRegister struct {
|
|||||||
groups []*routeRegister
|
groups []*routeRegister
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (rr *routeRegister) Insert(pattern string, fn func(RouteRegister), handlers ...macaron.Handler) {
|
||||||
|
|
||||||
|
//loop over all groups at current level
|
||||||
|
for _, g := range rr.groups {
|
||||||
|
|
||||||
|
// apply routes if the prefix matches the pattern
|
||||||
|
if g.prefix == pattern {
|
||||||
|
g.Group("", fn)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
// go down one level if the prefix can be find in the pattern
|
||||||
|
if strings.HasPrefix(pattern, g.prefix) {
|
||||||
|
g.Insert(pattern, fn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handlers ...macaron.Handler) {
|
func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handlers ...macaron.Handler) {
|
||||||
group := &routeRegister{
|
group := &routeRegister{
|
||||||
prefix: rr.prefix + pattern,
|
prefix: rr.prefix + pattern,
|
||||||
@ -64,7 +101,7 @@ func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handle
|
|||||||
rr.groups = append(rr.groups, group)
|
rr.groups = append(rr.groups, group)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (rr *routeRegister) Register(router Router) *macaron.Router {
|
func (rr *routeRegister) Register(router Router) {
|
||||||
for _, r := range rr.routes {
|
for _, r := range rr.routes {
|
||||||
// GET requests have to be added to macaron routing using Get()
|
// GET requests have to be added to macaron routing using Get()
|
||||||
// Otherwise HEAD requests will not be allowed.
|
// Otherwise HEAD requests will not be allowed.
|
||||||
@ -79,8 +116,6 @@ func (rr *routeRegister) Register(router Router) *macaron.Router {
|
|||||||
for _, g := range rr.groups {
|
for _, g := range rr.groups {
|
||||||
g.Register(router)
|
g.Register(router)
|
||||||
}
|
}
|
||||||
|
|
||||||
return &macaron.Router{}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handler) {
|
func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handler) {
|
||||||
@ -92,6 +127,12 @@ func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handl
|
|||||||
h = append(h, rr.subfixHandlers...)
|
h = append(h, rr.subfixHandlers...)
|
||||||
h = append(h, handlers...)
|
h = append(h, handlers...)
|
||||||
|
|
||||||
|
for _, r := range rr.routes {
|
||||||
|
if r.pattern == rr.prefix+pattern && r.method == method {
|
||||||
|
panic("cannot add duplicate route")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
rr.routes = append(rr.routes, route{
|
rr.routes = append(rr.routes, route{
|
||||||
method: method,
|
method: method,
|
||||||
pattern: rr.prefix + pattern,
|
pattern: rr.prefix + pattern,
|
@ -1,11 +1,11 @@
|
|||||||
package api
|
package routing
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
"strconv"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
macaron "gopkg.in/macaron.v1"
|
"gopkg.in/macaron.v1"
|
||||||
)
|
)
|
||||||
|
|
||||||
type fakeRouter struct {
|
type fakeRouter struct {
|
||||||
@ -33,7 +33,7 @@ func (fr *fakeRouter) Get(pattern string, handlers ...macaron.Handler) *macaron.
|
|||||||
}
|
}
|
||||||
|
|
||||||
func emptyHandlers(n int) []macaron.Handler {
|
func emptyHandlers(n int) []macaron.Handler {
|
||||||
res := []macaron.Handler{}
|
var res []macaron.Handler
|
||||||
for i := 1; n >= i; i++ {
|
for i := 1; n >= i; i++ {
|
||||||
res = append(res, emptyHandler(strconv.Itoa(i)))
|
res = append(res, emptyHandler(strconv.Itoa(i)))
|
||||||
}
|
}
|
||||||
@ -138,7 +138,78 @@ func TestRouteGroupedRegister(t *testing.T) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
func TestRouteGroupInserting(t *testing.T) {
|
||||||
|
testTable := []route{
|
||||||
|
{method: http.MethodGet, pattern: "/api/", handlers: emptyHandlers(1)},
|
||||||
|
{method: http.MethodPost, pattern: "/api/group/endpoint", handlers: emptyHandlers(1)},
|
||||||
|
|
||||||
|
{method: http.MethodGet, pattern: "/api/group/inserted", handlers: emptyHandlers(1)},
|
||||||
|
{method: http.MethodDelete, pattern: "/api/inserted-endpoint", handlers: emptyHandlers(1)},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Setup
|
||||||
|
rr := NewRouteRegister()
|
||||||
|
|
||||||
|
rr.Group("/api", func(api RouteRegister) {
|
||||||
|
api.Get("/", emptyHandler("1"))
|
||||||
|
|
||||||
|
api.Group("/group", func(group RouteRegister) {
|
||||||
|
group.Post("/endpoint", emptyHandler("1"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
rr.Insert("/api", func(api RouteRegister) {
|
||||||
|
api.Delete("/inserted-endpoint", emptyHandler("1"))
|
||||||
|
})
|
||||||
|
|
||||||
|
rr.Insert("/api/group", func(group RouteRegister) {
|
||||||
|
group.Get("/inserted", emptyHandler("1"))
|
||||||
|
})
|
||||||
|
|
||||||
|
fr := &fakeRouter{}
|
||||||
|
rr.Register(fr)
|
||||||
|
|
||||||
|
// Validation
|
||||||
|
if len(fr.route) != len(testTable) {
|
||||||
|
t.Fatalf("want %v routes, got %v", len(testTable), len(fr.route))
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := range testTable {
|
||||||
|
if testTable[i].method != fr.route[i].method {
|
||||||
|
t.Errorf("want %s got %v", testTable[i].method, fr.route[i].method)
|
||||||
|
}
|
||||||
|
|
||||||
|
if testTable[i].pattern != fr.route[i].pattern {
|
||||||
|
t.Errorf("want %s got %v", testTable[i].pattern, fr.route[i].pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(testTable[i].handlers) != len(fr.route[i].handlers) {
|
||||||
|
t.Errorf("want %d handlers got %d handlers \ntestcase: %v\nroute: %v\n",
|
||||||
|
len(testTable[i].handlers),
|
||||||
|
len(fr.route[i].handlers),
|
||||||
|
testTable[i],
|
||||||
|
fr.route[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDuplicateRoutShouldPanic(t *testing.T) {
|
||||||
|
defer func() {
|
||||||
|
if recover() != "cannot add duplicate route" {
|
||||||
|
t.Errorf("Should cause panic if duplicate routes are added ")
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
rr := NewRouteRegister(func(name string) macaron.Handler {
|
||||||
|
return emptyHandler(name)
|
||||||
|
})
|
||||||
|
|
||||||
|
rr.Get("/api", emptyHandler("1"))
|
||||||
|
rr.Get("/api", emptyHandler("1"))
|
||||||
|
|
||||||
|
fr := &fakeRouter{}
|
||||||
|
rr.Register(fr)
|
||||||
|
}
|
||||||
func TestNamedMiddlewareRouteRegister(t *testing.T) {
|
func TestNamedMiddlewareRouteRegister(t *testing.T) {
|
||||||
testTable := []route{
|
testTable := []route{
|
||||||
{method: "DELETE", pattern: "/admin", handlers: emptyHandlers(2)},
|
{method: "DELETE", pattern: "/admin", handlers: emptyHandlers(2)},
|
@ -12,6 +12,7 @@ import (
|
|||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/facebookgo/inject"
|
"github.com/facebookgo/inject"
|
||||||
|
"github.com/grafana/grafana/pkg/api/routing"
|
||||||
"github.com/grafana/grafana/pkg/bus"
|
"github.com/grafana/grafana/pkg/bus"
|
||||||
"github.com/grafana/grafana/pkg/middleware"
|
"github.com/grafana/grafana/pkg/middleware"
|
||||||
"github.com/grafana/grafana/pkg/registry"
|
"github.com/grafana/grafana/pkg/registry"
|
||||||
@ -61,8 +62,8 @@ type GrafanaServerImpl struct {
|
|||||||
shutdownReason string
|
shutdownReason string
|
||||||
shutdownInProgress bool
|
shutdownInProgress bool
|
||||||
|
|
||||||
RouteRegister api.RouteRegister `inject:""`
|
RouteRegister routing.RouteRegister `inject:""`
|
||||||
HttpServer *api.HTTPServer `inject:""`
|
HttpServer *api.HTTPServer `inject:""`
|
||||||
}
|
}
|
||||||
|
|
||||||
func (g *GrafanaServerImpl) Run() error {
|
func (g *GrafanaServerImpl) Run() error {
|
||||||
@ -75,7 +76,7 @@ func (g *GrafanaServerImpl) Run() error {
|
|||||||
serviceGraph := inject.Graph{}
|
serviceGraph := inject.Graph{}
|
||||||
serviceGraph.Provide(&inject.Object{Value: bus.GetBus()})
|
serviceGraph.Provide(&inject.Object{Value: bus.GetBus()})
|
||||||
serviceGraph.Provide(&inject.Object{Value: g.cfg})
|
serviceGraph.Provide(&inject.Object{Value: g.cfg})
|
||||||
serviceGraph.Provide(&inject.Object{Value: api.NewRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)})
|
serviceGraph.Provide(&inject.Object{Value: routing.NewRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)})
|
||||||
|
|
||||||
// self registered services
|
// self registered services
|
||||||
services := registry.GetServices()
|
services := registry.GetServices()
|
||||||
|
@ -308,6 +308,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
|
|||||||
} else {
|
} else {
|
||||||
filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult)
|
filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult)
|
||||||
}
|
}
|
||||||
|
|
||||||
filter := strings.Replace(a.server.GroupSearchFilter, "%s", ldap.EscapeFilter(filter_replace), -1)
|
filter := strings.Replace(a.server.GroupSearchFilter, "%s", ldap.EscapeFilter(filter_replace), -1)
|
||||||
|
|
||||||
a.log.Info("Searching for user's groups", "filter", filter)
|
a.log.Info("Searching for user's groups", "filter", filter)
|
||||||
@ -348,7 +349,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func getLdapAttrN(name string, result *ldap.SearchResult, n int) string {
|
func getLdapAttrN(name string, result *ldap.SearchResult, n int) string {
|
||||||
if name == "DN" {
|
if strings.ToLower(name) == "dn" {
|
||||||
return result.Entries[n].DN
|
return result.Entries[n].DN
|
||||||
}
|
}
|
||||||
for _, attr := range result.Entries[n].Attributes {
|
for _, attr := range result.Entries[n].Attributes {
|
||||||
|
@ -35,11 +35,12 @@ type PostParams struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type DeleteParams struct {
|
type DeleteParams struct {
|
||||||
Id int64 `json:"id"`
|
OrgId int64
|
||||||
AlertId int64 `json:"alertId"`
|
Id int64
|
||||||
DashboardId int64 `json:"dashboardId"`
|
AlertId int64
|
||||||
PanelId int64 `json:"panelId"`
|
DashboardId int64
|
||||||
RegionId int64 `json:"regionId"`
|
PanelId int64
|
||||||
|
RegionId int64
|
||||||
}
|
}
|
||||||
|
|
||||||
var repositoryInstance Repository
|
var repositoryInstance Repository
|
||||||
|
@ -83,7 +83,7 @@ func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.D
|
|||||||
|
|
||||||
for _, p := range acl {
|
for _, p := range acl {
|
||||||
// user match
|
// user match
|
||||||
if !g.user.IsAnonymous {
|
if !g.user.IsAnonymous && p.UserId > 0 {
|
||||||
if p.UserId == g.user.UserId && p.Permission >= permission {
|
if p.UserId == g.user.UserId && p.Permission >= permission {
|
||||||
return true, nil
|
return true, nil
|
||||||
}
|
}
|
||||||
|
@ -28,7 +28,7 @@ func TestGuardianAdmin(t *testing.T) {
|
|||||||
Convey("Guardian admin org role tests", t, func() {
|
Convey("Guardian admin org role tests", t, func() {
|
||||||
orgRoleScenario("Given user has admin org role", t, m.ROLE_ADMIN, func(sc *scenarioContext) {
|
orgRoleScenario("Given user has admin org role", t, m.ROLE_ADMIN, func(sc *scenarioContext) {
|
||||||
// dashboard has default permissions
|
// dashboard has default permissions
|
||||||
sc.defaultPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
sc.defaultPermissionScenario(USER, FULL_ACCESS)
|
||||||
|
|
||||||
// dashboard has user with permission
|
// dashboard has user with permission
|
||||||
sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
||||||
@ -76,6 +76,9 @@ func TestGuardianAdmin(t *testing.T) {
|
|||||||
func TestGuardianEditor(t *testing.T) {
|
func TestGuardianEditor(t *testing.T) {
|
||||||
Convey("Guardian editor org role tests", t, func() {
|
Convey("Guardian editor org role tests", t, func() {
|
||||||
orgRoleScenario("Given user has editor org role", t, m.ROLE_EDITOR, func(sc *scenarioContext) {
|
orgRoleScenario("Given user has editor org role", t, m.ROLE_EDITOR, func(sc *scenarioContext) {
|
||||||
|
// dashboard has default permissions
|
||||||
|
sc.defaultPermissionScenario(USER, EDITOR_ACCESS)
|
||||||
|
|
||||||
// dashboard has user with permission
|
// dashboard has user with permission
|
||||||
sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
||||||
sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
|
sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
|
||||||
@ -122,6 +125,9 @@ func TestGuardianEditor(t *testing.T) {
|
|||||||
func TestGuardianViewer(t *testing.T) {
|
func TestGuardianViewer(t *testing.T) {
|
||||||
Convey("Guardian viewer org role tests", t, func() {
|
Convey("Guardian viewer org role tests", t, func() {
|
||||||
orgRoleScenario("Given user has viewer org role", t, m.ROLE_VIEWER, func(sc *scenarioContext) {
|
orgRoleScenario("Given user has viewer org role", t, m.ROLE_VIEWER, func(sc *scenarioContext) {
|
||||||
|
// dashboard has default permissions
|
||||||
|
sc.defaultPermissionScenario(USER, VIEWER_ACCESS)
|
||||||
|
|
||||||
// dashboard has user with permission
|
// dashboard has user with permission
|
||||||
sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
|
||||||
sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
|
sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
|
||||||
@ -162,10 +168,15 @@ func TestGuardianViewer(t *testing.T) {
|
|||||||
sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS)
|
sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS)
|
||||||
sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS)
|
sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
apiKeyScenario("Given api key with viewer role", t, m.ROLE_VIEWER, func(sc *scenarioContext) {
|
||||||
|
// dashboard has default permissions
|
||||||
|
sc.defaultPermissionScenario(VIEWER, VIEWER_ACCESS)
|
||||||
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (sc *scenarioContext) defaultPermissionScenario(pt permissionType, permission m.PermissionType, flag permissionFlags) {
|
func (sc *scenarioContext) defaultPermissionScenario(pt permissionType, flag permissionFlags) {
|
||||||
_, callerFile, callerLine, _ := runtime.Caller(1)
|
_, callerFile, callerLine, _ := runtime.Caller(1)
|
||||||
sc.callerFile = callerFile
|
sc.callerFile = callerFile
|
||||||
sc.callerLine = callerLine
|
sc.callerLine = callerLine
|
||||||
@ -267,7 +278,7 @@ func (sc *scenarioContext) verifyExpectedPermissionsFlags() {
|
|||||||
actualFlag = NO_ACCESS
|
actualFlag = NO_ACCESS
|
||||||
}
|
}
|
||||||
|
|
||||||
if sc.expectedFlags&actualFlag != sc.expectedFlags {
|
if actualFlag&sc.expectedFlags != actualFlag {
|
||||||
sc.reportFailure(tc, sc.expectedFlags.String(), actualFlag.String())
|
sc.reportFailure(tc, sc.expectedFlags.String(), actualFlag.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -48,6 +48,27 @@ func orgRoleScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func apiKeyScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc) {
|
||||||
|
user := &m.SignedInUser{
|
||||||
|
UserId: 0,
|
||||||
|
OrgId: orgID,
|
||||||
|
OrgRole: role,
|
||||||
|
ApiKeyId: 10,
|
||||||
|
}
|
||||||
|
guard := New(dashboardID, orgID, user)
|
||||||
|
sc := &scenarioContext{
|
||||||
|
t: t,
|
||||||
|
orgRoleScenario: desc,
|
||||||
|
givenUser: user,
|
||||||
|
givenDashboardID: dashboardID,
|
||||||
|
g: guard,
|
||||||
|
}
|
||||||
|
|
||||||
|
Convey(desc, func() {
|
||||||
|
fn(sc)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) {
|
func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) {
|
||||||
bus.ClearBusHandlers()
|
bus.ClearBusHandlers()
|
||||||
|
|
||||||
|
@ -10,6 +10,7 @@ import (
|
|||||||
|
|
||||||
var ErrTimeout = errors.New("Timeout error. You can set timeout in seconds with &timeout url parameter")
|
var ErrTimeout = errors.New("Timeout error. You can set timeout in seconds with &timeout url parameter")
|
||||||
var ErrNoRenderer = errors.New("No renderer plugin found nor is an external render server configured")
|
var ErrNoRenderer = errors.New("No renderer plugin found nor is an external render server configured")
|
||||||
|
var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found")
|
||||||
|
|
||||||
type Opts struct {
|
type Opts struct {
|
||||||
Width int
|
Width int
|
||||||
|
@ -24,6 +24,11 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (
|
|||||||
|
|
||||||
url := rs.getURL(opts.Path)
|
url := rs.getURL(opts.Path)
|
||||||
binPath, _ := filepath.Abs(filepath.Join(rs.Cfg.PhantomDir, executable))
|
binPath, _ := filepath.Abs(filepath.Join(rs.Cfg.PhantomDir, executable))
|
||||||
|
if _, err := os.Stat(binPath); os.IsNotExist(err) {
|
||||||
|
rs.log.Error("executable not found", "executable", binPath)
|
||||||
|
return nil, ErrPhantomJSNotInstalled
|
||||||
|
}
|
||||||
|
|
||||||
scriptPath, _ := filepath.Abs(filepath.Join(rs.Cfg.PhantomDir, "render.js"))
|
scriptPath, _ := filepath.Abs(filepath.Join(rs.Cfg.PhantomDir, "render.js"))
|
||||||
pngPath := rs.getFilePathForNewImage()
|
pngPath := rs.getFilePathForNewImage()
|
||||||
|
|
||||||
|
@ -238,18 +238,19 @@ func (r *SqlAnnotationRepo) Delete(params *annotations.DeleteParams) error {
|
|||||||
queryParams []interface{}
|
queryParams []interface{}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
sqlog.Info("delete", "orgId", params.OrgId)
|
||||||
if params.RegionId != 0 {
|
if params.RegionId != 0 {
|
||||||
annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE region_id = ?)"
|
annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE region_id = ? AND org_id = ?)"
|
||||||
sql = "DELETE FROM annotation WHERE region_id = ?"
|
sql = "DELETE FROM annotation WHERE region_id = ? AND org_id = ?"
|
||||||
queryParams = []interface{}{params.RegionId}
|
queryParams = []interface{}{params.RegionId, params.OrgId}
|
||||||
} else if params.Id != 0 {
|
} else if params.Id != 0 {
|
||||||
annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE id = ?)"
|
annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE id = ? AND org_id = ?)"
|
||||||
sql = "DELETE FROM annotation WHERE id = ?"
|
sql = "DELETE FROM annotation WHERE id = ? AND org_id = ?"
|
||||||
queryParams = []interface{}{params.Id}
|
queryParams = []interface{}{params.Id, params.OrgId}
|
||||||
} else {
|
} else {
|
||||||
annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE dashboard_id = ? AND panel_id = ?)"
|
annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE dashboard_id = ? AND panel_id = ? AND org_id = ?)"
|
||||||
sql = "DELETE FROM annotation WHERE dashboard_id = ? AND panel_id = ?"
|
sql = "DELETE FROM annotation WHERE dashboard_id = ? AND panel_id = ? AND org_id = ?"
|
||||||
queryParams = []interface{}{params.DashboardId, params.PanelId}
|
queryParams = []interface{}{params.DashboardId, params.PanelId, params.OrgId}
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err := sess.Exec(annoTagSql, queryParams...); err != nil {
|
if _, err := sess.Exec(annoTagSql, queryParams...); err != nil {
|
||||||
|
@ -268,7 +268,7 @@ func TestAnnotations(t *testing.T) {
|
|||||||
|
|
||||||
annotationId := items[0].Id
|
annotationId := items[0].Id
|
||||||
|
|
||||||
err = repo.Delete(&annotations.DeleteParams{Id: annotationId})
|
err = repo.Delete(&annotations.DeleteParams{Id: annotationId, OrgId: 1})
|
||||||
So(err, ShouldBeNil)
|
So(err, ShouldBeNil)
|
||||||
|
|
||||||
items, err = repo.Find(query)
|
items, err = repo.Find(query)
|
||||||
|
@ -27,18 +27,18 @@ func startSession(ctx context.Context, engine *xorm.Engine, beginTran bool) (*DB
|
|||||||
var sess *DBSession
|
var sess *DBSession
|
||||||
sess, ok := value.(*DBSession)
|
sess, ok := value.(*DBSession)
|
||||||
|
|
||||||
if !ok {
|
if ok {
|
||||||
newSess := &DBSession{Session: engine.NewSession()}
|
return sess, nil
|
||||||
if beginTran {
|
|
||||||
err := newSess.Begin()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return newSess, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return sess, nil
|
newSess := &DBSession{Session: engine.NewSession()}
|
||||||
|
if beginTran {
|
||||||
|
err := newSess.Begin()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return newSess, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func withDbSession(ctx context.Context, callback dbTransactionFunc) error {
|
func withDbSession(ctx context.Context, callback dbTransactionFunc) error {
|
||||||
|
@ -26,7 +26,7 @@ import (
|
|||||||
|
|
||||||
_ "github.com/grafana/grafana/pkg/tsdb/mssql"
|
_ "github.com/grafana/grafana/pkg/tsdb/mssql"
|
||||||
_ "github.com/lib/pq"
|
_ "github.com/lib/pq"
|
||||||
_ "github.com/mattn/go-sqlite3"
|
sqlite3 "github.com/mattn/go-sqlite3"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@ -56,6 +56,64 @@ type SqlStore struct {
|
|||||||
skipEnsureAdmin bool
|
skipEnsureAdmin bool
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// NewSession returns a new DBSession
|
||||||
|
func (ss *SqlStore) NewSession() *DBSession {
|
||||||
|
return &DBSession{Session: ss.engine.NewSession()}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithDbSession calls the callback with an session attached to the context.
|
||||||
|
func (ss *SqlStore) WithDbSession(ctx context.Context, callback dbTransactionFunc) error {
|
||||||
|
sess, err := startSession(ctx, ss.engine, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return callback(sess)
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithTransactionalDbSession calls the callback with an session within a transaction
|
||||||
|
func (ss *SqlStore) WithTransactionalDbSession(ctx context.Context, callback dbTransactionFunc) error {
|
||||||
|
return ss.inTransactionWithRetryCtx(ctx, callback, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ss *SqlStore) inTransactionWithRetryCtx(ctx context.Context, callback dbTransactionFunc, retry int) error {
|
||||||
|
sess, err := startSession(ctx, ss.engine, true)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer sess.Close()
|
||||||
|
|
||||||
|
err = callback(sess)
|
||||||
|
|
||||||
|
// special handling of database locked errors for sqlite, then we can retry 3 times
|
||||||
|
if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 {
|
||||||
|
if sqlError.Code == sqlite3.ErrLocked {
|
||||||
|
sess.Rollback()
|
||||||
|
time.Sleep(time.Millisecond * time.Duration(10))
|
||||||
|
sqlog.Info("Database table locked, sleeping then retrying", "retry", retry)
|
||||||
|
return ss.inTransactionWithRetryCtx(ctx, callback, retry+1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
sess.Rollback()
|
||||||
|
return err
|
||||||
|
} else if err = sess.Commit(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(sess.events) > 0 {
|
||||||
|
for _, e := range sess.events {
|
||||||
|
if err = bus.Publish(e); err != nil {
|
||||||
|
log.Error(3, "Failed to publish event after commit", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (ss *SqlStore) Init() error {
|
func (ss *SqlStore) Init() error {
|
||||||
ss.log = log.New("sqlstore")
|
ss.log = log.New("sqlstore")
|
||||||
ss.readConfig()
|
ss.readConfig()
|
||||||
|
@ -63,7 +63,8 @@ export class SearchResultsCtrl {
|
|||||||
}
|
}
|
||||||
|
|
||||||
onItemClick(item) {
|
onItemClick(item) {
|
||||||
if (this.$location.path().indexOf(item.url) > -1) {
|
//Check if one string can be found in the other
|
||||||
|
if (this.$location.path().indexOf(item.url) > -1 || item.url.indexOf(this.$location.path()) > -1) {
|
||||||
appEvents.emit('hide-dash-search');
|
appEvents.emit('hide-dash-search');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
25
public/app/core/specs/ticks.jest.ts
Normal file
25
public/app/core/specs/ticks.jest.ts
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
import * as ticks from '../utils/ticks';
|
||||||
|
|
||||||
|
describe('ticks', () => {
|
||||||
|
describe('getFlotTickDecimals()', () => {
|
||||||
|
let ctx: any = {};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
ctx.axis = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should calculate decimals precision based on graph height', () => {
|
||||||
|
let dec = ticks.getFlotTickDecimals(0, 10, ctx.axis, 200);
|
||||||
|
expect(dec.tickDecimals).toBe(1);
|
||||||
|
expect(dec.scaledDecimals).toBe(1);
|
||||||
|
|
||||||
|
dec = ticks.getFlotTickDecimals(0, 100, ctx.axis, 200);
|
||||||
|
expect(dec.tickDecimals).toBe(0);
|
||||||
|
expect(dec.scaledDecimals).toBe(-1);
|
||||||
|
|
||||||
|
dec = ticks.getFlotTickDecimals(0, 1, ctx.axis, 200);
|
||||||
|
expect(dec.tickDecimals).toBe(2);
|
||||||
|
expect(dec.scaledDecimals).toBe(3);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@ -1,4 +1,5 @@
|
|||||||
import TimeSeries from 'app/core/time_series2';
|
import TimeSeries from 'app/core/time_series2';
|
||||||
|
import { updateLegendValues } from 'app/core/time_series2';
|
||||||
|
|
||||||
describe('TimeSeries', function() {
|
describe('TimeSeries', function() {
|
||||||
var points, series;
|
var points, series;
|
||||||
@ -311,4 +312,55 @@ describe('TimeSeries', function() {
|
|||||||
expect(series.formatValue(-Infinity)).toBe('');
|
expect(series.formatValue(-Infinity)).toBe('');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('legend decimals', function() {
|
||||||
|
let series, panel;
|
||||||
|
let height = 200;
|
||||||
|
beforeEach(function() {
|
||||||
|
testData = {
|
||||||
|
alias: 'test',
|
||||||
|
datapoints: [[1, 2], [0, 3], [10, 4], [8, 5]],
|
||||||
|
};
|
||||||
|
series = new TimeSeries(testData);
|
||||||
|
series.getFlotPairs();
|
||||||
|
panel = {
|
||||||
|
decimals: null,
|
||||||
|
yaxes: [
|
||||||
|
{
|
||||||
|
decimals: null,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set decimals based on Y axis (expect calculated decimals = 1)', function() {
|
||||||
|
let data = [series];
|
||||||
|
// Expect ticks with this data will have decimals = 1
|
||||||
|
updateLegendValues(data, panel, height);
|
||||||
|
expect(data[0].decimals).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set decimals based on Y axis to 0 if calculated decimals = 0)', function() {
|
||||||
|
testData.datapoints = [[10, 2], [0, 3], [100, 4], [80, 5]];
|
||||||
|
series = new TimeSeries(testData);
|
||||||
|
series.getFlotPairs();
|
||||||
|
let data = [series];
|
||||||
|
updateLegendValues(data, panel, height);
|
||||||
|
expect(data[0].decimals).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set decimals to Y axis decimals + 1', function() {
|
||||||
|
panel.yaxes[0].decimals = 2;
|
||||||
|
let data = [series];
|
||||||
|
updateLegendValues(data, panel, height);
|
||||||
|
expect(data[0].decimals).toBe(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set decimals to legend decimals value if it was set explicitly', function() {
|
||||||
|
panel.decimals = 3;
|
||||||
|
let data = [series];
|
||||||
|
updateLegendValues(data, panel, height);
|
||||||
|
expect(data[0].decimals).toBe(3);
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
@ -23,23 +23,27 @@ function translateFillOption(fill) {
|
|||||||
* Calculate decimals for legend and update values for each series.
|
* Calculate decimals for legend and update values for each series.
|
||||||
* @param data series data
|
* @param data series data
|
||||||
* @param panel
|
* @param panel
|
||||||
|
* @param height
|
||||||
*/
|
*/
|
||||||
export function updateLegendValues(data: TimeSeries[], panel) {
|
export function updateLegendValues(data: TimeSeries[], panel, height) {
|
||||||
for (let i = 0; i < data.length; i++) {
|
for (let i = 0; i < data.length; i++) {
|
||||||
let series = data[i];
|
let series = data[i];
|
||||||
let yaxes = panel.yaxes;
|
const yaxes = panel.yaxes;
|
||||||
const seriesYAxis = series.yaxis || 1;
|
const seriesYAxis = series.yaxis || 1;
|
||||||
let axis = yaxes[seriesYAxis - 1];
|
const axis = yaxes[seriesYAxis - 1];
|
||||||
let { tickDecimals, scaledDecimals } = getFlotTickDecimals(data, axis);
|
let formater = kbn.valueFormats[axis.format];
|
||||||
let formater = kbn.valueFormats[panel.yaxes[seriesYAxis - 1].format];
|
|
||||||
|
|
||||||
// decimal override
|
// decimal override
|
||||||
if (_.isNumber(panel.decimals)) {
|
if (_.isNumber(panel.decimals)) {
|
||||||
series.updateLegendValues(formater, panel.decimals, null);
|
series.updateLegendValues(formater, panel.decimals, null);
|
||||||
|
} else if (_.isNumber(axis.decimals)) {
|
||||||
|
series.updateLegendValues(formater, axis.decimals + 1, null);
|
||||||
} else {
|
} else {
|
||||||
// auto decimals
|
// auto decimals
|
||||||
// legend and tooltip gets one more decimal precision
|
// legend and tooltip gets one more decimal precision
|
||||||
// than graph legend ticks
|
// than graph legend ticks
|
||||||
|
const { datamin, datamax } = getDataMinMax(data);
|
||||||
|
let { tickDecimals, scaledDecimals } = getFlotTickDecimals(datamin, datamax, axis, height);
|
||||||
tickDecimals = (tickDecimals || -1) + 1;
|
tickDecimals = (tickDecimals || -1) + 1;
|
||||||
series.updateLegendValues(formater, tickDecimals, scaledDecimals + 2);
|
series.updateLegendValues(formater, tickDecimals, scaledDecimals + 2);
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
import { getDataMinMax } from 'app/core/time_series2';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Calculate tick step.
|
* Calculate tick step.
|
||||||
* Implementation from d3-array (ticks.js)
|
* Implementation from d3-array (ticks.js)
|
||||||
@ -121,12 +119,10 @@ export function getFlotRange(panelMin, panelMax, datamin, datamax) {
|
|||||||
* Calculate tick decimals.
|
* Calculate tick decimals.
|
||||||
* Implementation from Flot.
|
* Implementation from Flot.
|
||||||
*/
|
*/
|
||||||
export function getFlotTickDecimals(data, axis) {
|
export function getFlotTickDecimals(datamin, datamax, axis, height) {
|
||||||
let { datamin, datamax } = getDataMinMax(data);
|
const { min, max } = getFlotRange(axis.min, axis.max, datamin, datamax);
|
||||||
let { min, max } = getFlotRange(axis.min, axis.max, datamin, datamax);
|
const noTicks = 0.3 * Math.sqrt(height);
|
||||||
let noTicks = 3;
|
const delta = (max - min) / noTicks;
|
||||||
let tickDecimals, maxDec;
|
|
||||||
let delta = (max - min) / noTicks;
|
|
||||||
let dec = -Math.floor(Math.log(delta) / Math.LN10);
|
let dec = -Math.floor(Math.log(delta) / Math.LN10);
|
||||||
|
|
||||||
let magn = Math.pow(10, -dec);
|
let magn = Math.pow(10, -dec);
|
||||||
@ -139,19 +135,17 @@ export function getFlotTickDecimals(data, axis) {
|
|||||||
} else if (norm < 3) {
|
} else if (norm < 3) {
|
||||||
size = 2;
|
size = 2;
|
||||||
// special case for 2.5, requires an extra decimal
|
// special case for 2.5, requires an extra decimal
|
||||||
if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) {
|
if (norm > 2.25) {
|
||||||
size = 2.5;
|
size = 2.5;
|
||||||
++dec;
|
|
||||||
}
|
}
|
||||||
} else if (norm < 7.5) {
|
} else if (norm < 7.5) {
|
||||||
size = 5;
|
size = 5;
|
||||||
} else {
|
} else {
|
||||||
size = 10;
|
size = 10;
|
||||||
}
|
}
|
||||||
|
|
||||||
size *= magn;
|
size *= magn;
|
||||||
|
|
||||||
tickDecimals = Math.max(0, maxDec != null ? maxDec : dec);
|
const tickDecimals = Math.max(0, -Math.floor(Math.log(delta) / Math.LN10) + 1);
|
||||||
// grafana addition
|
// grafana addition
|
||||||
const scaledDecimals = tickDecimals - Math.floor(Math.log(size) / Math.LN10);
|
const scaledDecimals = tickDecimals - Math.floor(Math.log(size) / Math.LN10);
|
||||||
return { tickDecimals, scaledDecimals };
|
return { tickDecimals, scaledDecimals };
|
||||||
|
@ -1,15 +1,18 @@
|
|||||||
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
|
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
|
||||||
import '../annotations_srv';
|
import '../annotations_srv';
|
||||||
import helpers from 'test/specs/helpers';
|
import helpers from 'test/specs/helpers';
|
||||||
|
import 'app/features/dashboard/time_srv';
|
||||||
|
|
||||||
describe('AnnotationsSrv', function() {
|
describe('AnnotationsSrv', function() {
|
||||||
var ctx = new helpers.ServiceTestContext();
|
var ctx = new helpers.ServiceTestContext();
|
||||||
|
|
||||||
beforeEach(angularMocks.module('grafana.core'));
|
beforeEach(angularMocks.module('grafana.core'));
|
||||||
beforeEach(angularMocks.module('grafana.services'));
|
beforeEach(angularMocks.module('grafana.services'));
|
||||||
|
beforeEach(ctx.createService('timeSrv'));
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
ctx.createService('annotationsSrv');
|
ctx.createService('annotationsSrv');
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('When translating the query result', () => {
|
describe('When translating the query result', () => {
|
||||||
const annotationSource = {
|
const annotationSource = {
|
||||||
datasource: '-- Grafana --',
|
datasource: '-- Grafana --',
|
||||||
|
@ -22,10 +22,10 @@ export class DashboardModel {
|
|||||||
editable: any;
|
editable: any;
|
||||||
graphTooltip: any;
|
graphTooltip: any;
|
||||||
time: any;
|
time: any;
|
||||||
originalTime: any;
|
private originalTime: any;
|
||||||
timepicker: any;
|
timepicker: any;
|
||||||
templating: any;
|
templating: any;
|
||||||
originalTemplating: any;
|
private originalTemplating: any;
|
||||||
annotations: any;
|
annotations: any;
|
||||||
refresh: any;
|
refresh: any;
|
||||||
snapshot: any;
|
snapshot: any;
|
||||||
@ -50,6 +50,8 @@ export class DashboardModel {
|
|||||||
meta: true,
|
meta: true,
|
||||||
panels: true, // needs special handling
|
panels: true, // needs special handling
|
||||||
templating: true, // needs special handling
|
templating: true, // needs special handling
|
||||||
|
originalTime: true,
|
||||||
|
originalTemplating: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
constructor(data, meta?) {
|
constructor(data, meta?) {
|
||||||
@ -70,12 +72,8 @@ export class DashboardModel {
|
|||||||
this.editable = data.editable !== false;
|
this.editable = data.editable !== false;
|
||||||
this.graphTooltip = data.graphTooltip || 0;
|
this.graphTooltip = data.graphTooltip || 0;
|
||||||
this.time = data.time || { from: 'now-6h', to: 'now' };
|
this.time = data.time || { from: 'now-6h', to: 'now' };
|
||||||
this.originalTime = _.cloneDeep(this.time);
|
|
||||||
this.timepicker = data.timepicker || {};
|
this.timepicker = data.timepicker || {};
|
||||||
this.templating = this.ensureListExist(data.templating);
|
this.templating = this.ensureListExist(data.templating);
|
||||||
this.originalTemplating = _.map(this.templating.list, variable => {
|
|
||||||
return { name: variable.name, current: _.clone(variable.current) };
|
|
||||||
});
|
|
||||||
this.annotations = this.ensureListExist(data.annotations);
|
this.annotations = this.ensureListExist(data.annotations);
|
||||||
this.refresh = data.refresh;
|
this.refresh = data.refresh;
|
||||||
this.snapshot = data.snapshot;
|
this.snapshot = data.snapshot;
|
||||||
@ -85,6 +83,9 @@ export class DashboardModel {
|
|||||||
this.gnetId = data.gnetId || null;
|
this.gnetId = data.gnetId || null;
|
||||||
this.panels = _.map(data.panels || [], panelData => new PanelModel(panelData));
|
this.panels = _.map(data.panels || [], panelData => new PanelModel(panelData));
|
||||||
|
|
||||||
|
this.resetOriginalVariables();
|
||||||
|
this.resetOriginalTime();
|
||||||
|
|
||||||
this.initMeta(meta);
|
this.initMeta(meta);
|
||||||
this.updateSchema(data);
|
this.updateSchema(data);
|
||||||
|
|
||||||
@ -138,8 +139,8 @@ export class DashboardModel {
|
|||||||
// cleans meta data and other non persistent state
|
// cleans meta data and other non persistent state
|
||||||
getSaveModelClone(options?) {
|
getSaveModelClone(options?) {
|
||||||
let defaults = _.defaults(options || {}, {
|
let defaults = _.defaults(options || {}, {
|
||||||
saveVariables: false,
|
saveVariables: true,
|
||||||
saveTimerange: false,
|
saveTimerange: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
// make clone
|
// make clone
|
||||||
@ -153,15 +154,23 @@ export class DashboardModel {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// get variable save models
|
// get variable save models
|
||||||
//console.log(this.templating.list);
|
|
||||||
copy.templating = {
|
copy.templating = {
|
||||||
list: _.map(this.templating.list, variable => (variable.getSaveModel ? variable.getSaveModel() : variable)),
|
list: _.map(this.templating.list, variable => (variable.getSaveModel ? variable.getSaveModel() : variable)),
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!defaults.saveVariables && copy.templating.list.length === this.originalTemplating.length) {
|
if (!defaults.saveVariables) {
|
||||||
for (let i = 0; i < copy.templating.list.length; i++) {
|
for (let i = 0; i < copy.templating.list.length; i++) {
|
||||||
if (copy.templating.list[i].name === this.originalTemplating[i].name) {
|
let current = copy.templating.list[i];
|
||||||
copy.templating.list[i].current = this.originalTemplating[i].current;
|
let original = _.find(this.originalTemplating, { name: current.name, type: current.type });
|
||||||
|
|
||||||
|
if (!original) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current.type === 'adhoc') {
|
||||||
|
copy.templating.list[i].filters = original.filters;
|
||||||
|
} else {
|
||||||
|
copy.templating.list[i].current = original.current;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -785,4 +794,40 @@ export class DashboardModel {
|
|||||||
let migrator = new DashboardMigrator(this);
|
let migrator = new DashboardMigrator(this);
|
||||||
migrator.updateSchema(old);
|
migrator.updateSchema(old);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
resetOriginalTime() {
|
||||||
|
this.originalTime = _.cloneDeep(this.time);
|
||||||
|
}
|
||||||
|
|
||||||
|
hasTimeChanged() {
|
||||||
|
return !_.isEqual(this.time, this.originalTime);
|
||||||
|
}
|
||||||
|
|
||||||
|
resetOriginalVariables() {
|
||||||
|
this.originalTemplating = _.map(this.templating.list, variable => {
|
||||||
|
return {
|
||||||
|
name: variable.name,
|
||||||
|
type: variable.type,
|
||||||
|
current: _.cloneDeep(variable.current),
|
||||||
|
filters: _.cloneDeep(variable.filters),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
hasVariableValuesChanged() {
|
||||||
|
if (this.templating.list.length !== this.originalTemplating.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const updated = _.map(this.templating.list, variable => {
|
||||||
|
return {
|
||||||
|
name: variable.name,
|
||||||
|
type: variable.type,
|
||||||
|
current: _.cloneDeep(variable.current),
|
||||||
|
filters: _.cloneDeep(variable.filters),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return !_.isEqual(updated, this.originalTemplating);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -63,8 +63,7 @@ export class DashboardExporter {
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
// check up panel data sources
|
const processPanel = panel => {
|
||||||
for (let panel of saveModel.panels) {
|
|
||||||
if (panel.datasource !== undefined) {
|
if (panel.datasource !== undefined) {
|
||||||
templateizeDatasourceUsage(panel);
|
templateizeDatasourceUsage(panel);
|
||||||
}
|
}
|
||||||
@ -86,6 +85,18 @@ export class DashboardExporter {
|
|||||||
version: panelDef.info.version,
|
version: panelDef.info.version,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// check up panel data sources
|
||||||
|
for (let panel of saveModel.panels) {
|
||||||
|
processPanel(panel);
|
||||||
|
|
||||||
|
// handle collapsed rows
|
||||||
|
if (panel.collapsed !== undefined && panel.collapsed === true && panel.panels) {
|
||||||
|
for (let rowPanel of panel.panels) {
|
||||||
|
processPanel(rowPanel);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// templatize template vars
|
// templatize template vars
|
||||||
|
@ -32,11 +32,11 @@ export interface DiffTarget {
|
|||||||
|
|
||||||
export class HistorySrv {
|
export class HistorySrv {
|
||||||
/** @ngInject */
|
/** @ngInject */
|
||||||
constructor(private backendSrv, private $q) {}
|
constructor(private backendSrv) {}
|
||||||
|
|
||||||
getHistoryList(dashboard: DashboardModel, options: HistoryListOpts) {
|
getHistoryList(dashboard: DashboardModel, options: HistoryListOpts) {
|
||||||
const id = dashboard && dashboard.id ? dashboard.id : void 0;
|
const id = dashboard && dashboard.id ? dashboard.id : void 0;
|
||||||
return id ? this.backendSrv.get(`api/dashboards/id/${id}/versions`, options) : this.$q.when([]);
|
return id ? this.backendSrv.get(`api/dashboards/id/${id}/versions`, options) : Promise.resolve([]);
|
||||||
}
|
}
|
||||||
|
|
||||||
calculateDiff(options: CalculateDiffOptions) {
|
calculateDiff(options: CalculateDiffOptions) {
|
||||||
@ -46,7 +46,8 @@ export class HistorySrv {
|
|||||||
restoreDashboard(dashboard: DashboardModel, version: number) {
|
restoreDashboard(dashboard: DashboardModel, version: number) {
|
||||||
const id = dashboard && dashboard.id ? dashboard.id : void 0;
|
const id = dashboard && dashboard.id ? dashboard.id : void 0;
|
||||||
const url = `api/dashboards/id/${id}/restore`;
|
const url = `api/dashboards/id/${id}/restore`;
|
||||||
return id && _.isNumber(version) ? this.backendSrv.post(url, { version }) : this.$q.when({});
|
|
||||||
|
return id && _.isNumber(version) ? this.backendSrv.post(url, { version }) : Promise.resolve({});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
import coreModule from 'app/core/core_module';
|
import coreModule from 'app/core/core_module';
|
||||||
import _ from 'lodash';
|
|
||||||
|
|
||||||
const template = `
|
const template = `
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
@ -70,7 +69,6 @@ export class SaveDashboardModalCtrl {
|
|||||||
message: string;
|
message: string;
|
||||||
saveVariables = false;
|
saveVariables = false;
|
||||||
saveTimerange = false;
|
saveTimerange = false;
|
||||||
templating: any;
|
|
||||||
time: any;
|
time: any;
|
||||||
originalTime: any;
|
originalTime: any;
|
||||||
current = [];
|
current = [];
|
||||||
@ -87,40 +85,8 @@ export class SaveDashboardModalCtrl {
|
|||||||
this.message = '';
|
this.message = '';
|
||||||
this.max = 64;
|
this.max = 64;
|
||||||
this.isSaving = false;
|
this.isSaving = false;
|
||||||
this.templating = dashboardSrv.dash.templating.list;
|
this.timeChange = this.dashboardSrv.getCurrent().hasTimeChanged();
|
||||||
|
this.variableValueChange = this.dashboardSrv.getCurrent().hasVariableValuesChanged();
|
||||||
this.compareTemplating();
|
|
||||||
this.compareTime();
|
|
||||||
}
|
|
||||||
|
|
||||||
compareTime() {
|
|
||||||
if (_.isEqual(this.dashboardSrv.dash.time, this.dashboardSrv.dash.originalTime)) {
|
|
||||||
this.timeChange = false;
|
|
||||||
} else {
|
|
||||||
this.timeChange = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
compareTemplating() {
|
|
||||||
//checks if variables has been added or removed, if so variables will be saved automatically
|
|
||||||
if (this.dashboardSrv.dash.originalTemplating.length !== this.dashboardSrv.dash.templating.list.length) {
|
|
||||||
return (this.variableValueChange = false);
|
|
||||||
}
|
|
||||||
|
|
||||||
//checks if variable value has changed
|
|
||||||
if (this.dashboardSrv.dash.templating.list.length > 0) {
|
|
||||||
for (let i = 0; i < this.dashboardSrv.dash.templating.list.length; i++) {
|
|
||||||
if (
|
|
||||||
this.dashboardSrv.dash.templating.list[i].current.text !==
|
|
||||||
this.dashboardSrv.dash.originalTemplating[i].current.text
|
|
||||||
) {
|
|
||||||
return (this.variableValueChange = true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return (this.variableValueChange = false);
|
|
||||||
} else {
|
|
||||||
return (this.variableValueChange = false);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
save() {
|
save() {
|
||||||
@ -139,7 +105,19 @@ export class SaveDashboardModalCtrl {
|
|||||||
|
|
||||||
this.isSaving = true;
|
this.isSaving = true;
|
||||||
|
|
||||||
return this.dashboardSrv.save(saveModel, options).then(this.dismiss);
|
return this.dashboardSrv.save(saveModel, options).then(this.postSave.bind(this, options));
|
||||||
|
}
|
||||||
|
|
||||||
|
postSave(options) {
|
||||||
|
if (options.saveVariables) {
|
||||||
|
this.dashboardSrv.getCurrent().resetOriginalVariables();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.saveTimerange) {
|
||||||
|
this.dashboardSrv.getCurrent().resetOriginalTime();
|
||||||
|
}
|
||||||
|
|
||||||
|
this.dismiss();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -435,8 +435,67 @@ describe('DashboardModel', function() {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('save variables and timeline', () => {
|
describe('Given model with time', () => {
|
||||||
let model;
|
let model: DashboardModel;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
model = new DashboardModel({
|
||||||
|
time: {
|
||||||
|
from: 'now-6h',
|
||||||
|
to: 'now',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(model.hasTimeChanged()).toBeFalsy();
|
||||||
|
model.time = {
|
||||||
|
from: 'now-3h',
|
||||||
|
to: 'now-1h',
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hasTimeChanged should be true', () => {
|
||||||
|
expect(model.hasTimeChanged()).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return original time when saveTimerange=false', () => {
|
||||||
|
let options = { saveTimerange: false };
|
||||||
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
|
expect(saveModel.time.from).toBe('now-6h');
|
||||||
|
expect(saveModel.time.to).toBe('now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return updated time when saveTimerange=true', () => {
|
||||||
|
let options = { saveTimerange: true };
|
||||||
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
|
expect(saveModel.time.from).toBe('now-3h');
|
||||||
|
expect(saveModel.time.to).toBe('now-1h');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hasTimeChanged should be false when reset original time', () => {
|
||||||
|
model.resetOriginalTime();
|
||||||
|
expect(model.hasTimeChanged()).toBeFalsy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return original time when saveTimerange=false', () => {
|
||||||
|
let options = { saveTimerange: false };
|
||||||
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
|
expect(saveModel.time.from).toBe('now-6h');
|
||||||
|
expect(saveModel.time.to).toBe('now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return updated time when saveTimerange=true', () => {
|
||||||
|
let options = { saveTimerange: true };
|
||||||
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
|
expect(saveModel.time.from).toBe('now-3h');
|
||||||
|
expect(saveModel.time.to).toBe('now-1h');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Given model with template variable of type query', () => {
|
||||||
|
let model: DashboardModel;
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
model = new DashboardModel({
|
model = new DashboardModel({
|
||||||
@ -444,6 +503,7 @@ describe('DashboardModel', function() {
|
|||||||
list: [
|
list: [
|
||||||
{
|
{
|
||||||
name: 'Server',
|
name: 'Server',
|
||||||
|
type: 'query',
|
||||||
current: {
|
current: {
|
||||||
selected: true,
|
selected: true,
|
||||||
text: 'server_001',
|
text: 'server_001',
|
||||||
@ -452,45 +512,127 @@ describe('DashboardModel', function() {
|
|||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
time: {
|
|
||||||
from: 'now-6h',
|
|
||||||
to: 'now',
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
model.templating.list[0] = {
|
expect(model.hasVariableValuesChanged()).toBeFalsy();
|
||||||
name: 'Server',
|
});
|
||||||
|
|
||||||
|
it('hasVariableValuesChanged should be false when adding a template variable', () => {
|
||||||
|
model.templating.list.push({
|
||||||
|
name: 'Server2',
|
||||||
|
type: 'query',
|
||||||
current: {
|
current: {
|
||||||
selected: true,
|
selected: true,
|
||||||
text: 'server_002',
|
text: 'server_002',
|
||||||
value: 'server_002',
|
value: 'server_002',
|
||||||
},
|
},
|
||||||
};
|
});
|
||||||
model.time = {
|
expect(model.hasVariableValuesChanged()).toBeFalsy();
|
||||||
from: 'now-3h',
|
|
||||||
to: 'now',
|
|
||||||
};
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not save variables and timeline', () => {
|
it('hasVariableValuesChanged should be false when removing existing template variable', () => {
|
||||||
let options = {
|
model.templating.list = [];
|
||||||
saveVariables: false,
|
expect(model.hasVariableValuesChanged()).toBeFalsy();
|
||||||
saveTimerange: false,
|
});
|
||||||
};
|
|
||||||
|
it('hasVariableValuesChanged should be true when changing value of template variable', () => {
|
||||||
|
model.templating.list[0].current.text = 'server_002';
|
||||||
|
expect(model.hasVariableValuesChanged()).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return original variable when saveVariables=false', () => {
|
||||||
|
model.templating.list[0].current.text = 'server_002';
|
||||||
|
|
||||||
|
let options = { saveVariables: false };
|
||||||
let saveModel = model.getSaveModelClone(options);
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
expect(saveModel.templating.list[0].current.text).toBe('server_001');
|
expect(saveModel.templating.list[0].current.text).toBe('server_001');
|
||||||
expect(saveModel.time.from).toBe('now-6h');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should save variables and timeline', () => {
|
it('getSaveModelClone should return updated variable when saveVariables=true', () => {
|
||||||
let options = {
|
model.templating.list[0].current.text = 'server_002';
|
||||||
saveVariables: true,
|
|
||||||
saveTimerange: true,
|
let options = { saveVariables: true };
|
||||||
};
|
|
||||||
let saveModel = model.getSaveModelClone(options);
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
expect(saveModel.templating.list[0].current.text).toBe('server_002');
|
expect(saveModel.templating.list[0].current.text).toBe('server_002');
|
||||||
expect(saveModel.time.from).toBe('now-3h');
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Given model with template variable of type adhoc', () => {
|
||||||
|
let model: DashboardModel;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
model = new DashboardModel({
|
||||||
|
templating: {
|
||||||
|
list: [
|
||||||
|
{
|
||||||
|
name: 'Filter',
|
||||||
|
type: 'adhoc',
|
||||||
|
filters: [
|
||||||
|
{
|
||||||
|
key: '@hostname',
|
||||||
|
operator: '=',
|
||||||
|
value: 'server 20',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(model.hasVariableValuesChanged()).toBeFalsy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hasVariableValuesChanged should be false when adding a template variable', () => {
|
||||||
|
model.templating.list.push({
|
||||||
|
name: 'Filter',
|
||||||
|
type: 'adhoc',
|
||||||
|
filters: [
|
||||||
|
{
|
||||||
|
key: '@hostname',
|
||||||
|
operator: '=',
|
||||||
|
value: 'server 1',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
expect(model.hasVariableValuesChanged()).toBeFalsy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hasVariableValuesChanged should be false when removing existing template variable', () => {
|
||||||
|
model.templating.list = [];
|
||||||
|
expect(model.hasVariableValuesChanged()).toBeFalsy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hasVariableValuesChanged should be true when changing value of filter', () => {
|
||||||
|
model.templating.list[0].filters[0].value = 'server 1';
|
||||||
|
expect(model.hasVariableValuesChanged()).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hasVariableValuesChanged should be true when adding an additional condition', () => {
|
||||||
|
model.templating.list[0].filters[0].condition = 'AND';
|
||||||
|
model.templating.list[0].filters[1] = {
|
||||||
|
key: '@metric',
|
||||||
|
operator: '=',
|
||||||
|
value: 'logins.count',
|
||||||
|
};
|
||||||
|
expect(model.hasVariableValuesChanged()).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return original variable when saveVariables=false', () => {
|
||||||
|
model.templating.list[0].filters[0].value = 'server 1';
|
||||||
|
|
||||||
|
let options = { saveVariables: false };
|
||||||
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
|
expect(saveModel.templating.list[0].filters[0].value).toBe('server 20');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getSaveModelClone should return updated variable when saveVariables=true', () => {
|
||||||
|
model.templating.list[0].filters[0].value = 'server 1';
|
||||||
|
|
||||||
|
let options = { saveVariables: true };
|
||||||
|
let saveModel = model.getSaveModelClone(options);
|
||||||
|
|
||||||
|
expect(saveModel.templating.list[0].filters[0].value).toBe('server 1');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -62,6 +62,27 @@ describe('given dashboard with repeated panels', () => {
|
|||||||
type: 'graph',
|
type: 'graph',
|
||||||
},
|
},
|
||||||
{ id: 3, repeat: null, repeatPanelId: 2 },
|
{ id: 3, repeat: null, repeatPanelId: 2 },
|
||||||
|
{
|
||||||
|
id: 4,
|
||||||
|
collapsed: true,
|
||||||
|
panels: [
|
||||||
|
{ id: 10, datasource: 'gfdb', type: 'table' },
|
||||||
|
{ id: 11 },
|
||||||
|
{
|
||||||
|
id: 12,
|
||||||
|
datasource: '-- Mixed --',
|
||||||
|
targets: [{ datasource: 'other' }],
|
||||||
|
},
|
||||||
|
{ id: 13, datasource: '$ds' },
|
||||||
|
{
|
||||||
|
id: 14,
|
||||||
|
repeat: 'apps',
|
||||||
|
datasource: 'gfdb',
|
||||||
|
type: 'heatmap',
|
||||||
|
},
|
||||||
|
{ id: 15, repeat: null, repeatPanelId: 14 },
|
||||||
|
],
|
||||||
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -78,6 +99,18 @@ describe('given dashboard with repeated panels', () => {
|
|||||||
info: { version: '1.1.0' },
|
info: { version: '1.1.0' },
|
||||||
};
|
};
|
||||||
|
|
||||||
|
config.panels['table'] = {
|
||||||
|
id: 'table',
|
||||||
|
name: 'Table',
|
||||||
|
info: { version: '1.1.1' },
|
||||||
|
};
|
||||||
|
|
||||||
|
config.panels['heatmap'] = {
|
||||||
|
id: 'heatmap',
|
||||||
|
name: 'Heatmap',
|
||||||
|
info: { version: '1.1.2' },
|
||||||
|
};
|
||||||
|
|
||||||
dash = new DashboardModel(dash, {});
|
dash = new DashboardModel(dash, {});
|
||||||
var exporter = new DashboardExporter(datasourceSrvStub);
|
var exporter = new DashboardExporter(datasourceSrvStub);
|
||||||
exporter.makeExportable(dash).then(clean => {
|
exporter.makeExportable(dash).then(clean => {
|
||||||
@ -91,6 +124,11 @@ describe('given dashboard with repeated panels', () => {
|
|||||||
expect(panel.datasource).toBe('${DS_GFDB}');
|
expect(panel.datasource).toBe('${DS_GFDB}');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should replace datasource refs in collapsed row', () => {
|
||||||
|
var panel = exported.panels[5].panels[0];
|
||||||
|
expect(panel.datasource).toBe('${DS_GFDB}');
|
||||||
|
});
|
||||||
|
|
||||||
it('should replace datasource in variable query', () => {
|
it('should replace datasource in variable query', () => {
|
||||||
expect(exported.templating.list[0].datasource).toBe('${DS_GFDB}');
|
expect(exported.templating.list[0].datasource).toBe('${DS_GFDB}');
|
||||||
expect(exported.templating.list[0].options.length).toBe(0);
|
expect(exported.templating.list[0].options.length).toBe(0);
|
||||||
@ -126,13 +164,27 @@ describe('given dashboard with repeated panels', () => {
|
|||||||
expect(require).not.toBe(undefined);
|
expect(require).not.toBe(undefined);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should add panel to required', () => {
|
it('should add graph panel to required', () => {
|
||||||
var require = _.find(exported.__requires, { name: 'Graph' });
|
var require = _.find(exported.__requires, { name: 'Graph' });
|
||||||
expect(require.name).toBe('Graph');
|
expect(require.name).toBe('Graph');
|
||||||
expect(require.id).toBe('graph');
|
expect(require.id).toBe('graph');
|
||||||
expect(require.version).toBe('1.1.0');
|
expect(require.version).toBe('1.1.0');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should add table panel to required', () => {
|
||||||
|
var require = _.find(exported.__requires, { name: 'Table' });
|
||||||
|
expect(require.name).toBe('Table');
|
||||||
|
expect(require.id).toBe('table');
|
||||||
|
expect(require.version).toBe('1.1.1');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add heatmap panel to required', () => {
|
||||||
|
var require = _.find(exported.__requires, { name: 'Heatmap' });
|
||||||
|
expect(require.name).toBe('Heatmap');
|
||||||
|
expect(require.id).toBe('heatmap');
|
||||||
|
expect(require.version).toBe('1.1.2');
|
||||||
|
});
|
||||||
|
|
||||||
it('should add grafana version', () => {
|
it('should add grafana version', () => {
|
||||||
var require = _.find(exported.__requires, { name: 'Grafana' });
|
var require = _.find(exported.__requires, { name: 'Grafana' });
|
||||||
expect(require.type).toBe('grafana');
|
expect(require.type).toBe('grafana');
|
||||||
|
313
public/app/features/dashboard/specs/history_ctrl.jest.ts
Normal file
313
public/app/features/dashboard/specs/history_ctrl.jest.ts
Normal file
@ -0,0 +1,313 @@
|
|||||||
|
import _ from 'lodash';
|
||||||
|
import { HistoryListCtrl } from 'app/features/dashboard/history/history';
|
||||||
|
import { versions, compare, restore } from './history_mocks';
|
||||||
|
import $q from 'q';
|
||||||
|
|
||||||
|
describe('HistoryListCtrl', () => {
|
||||||
|
const RESTORE_ID = 4;
|
||||||
|
|
||||||
|
const versionsResponse: any = versions();
|
||||||
|
|
||||||
|
restore(7, RESTORE_ID);
|
||||||
|
|
||||||
|
let historySrv;
|
||||||
|
let $rootScope;
|
||||||
|
let historyListCtrl;
|
||||||
|
beforeEach(() => {
|
||||||
|
historySrv = {
|
||||||
|
calculateDiff: jest.fn(),
|
||||||
|
restoreDashboard: jest.fn(() => $q.when({})),
|
||||||
|
};
|
||||||
|
$rootScope = {
|
||||||
|
appEvent: jest.fn(),
|
||||||
|
onAppEvent: jest.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('when the history list component is loaded', () => {
|
||||||
|
let deferred;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
deferred = $q.defer({});
|
||||||
|
historySrv.getHistoryList = jest.fn(() => deferred.promise);
|
||||||
|
|
||||||
|
historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {});
|
||||||
|
|
||||||
|
historyListCtrl.dashboard = {
|
||||||
|
id: 2,
|
||||||
|
version: 3,
|
||||||
|
formatDate: jest.fn(() => 'date'),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should immediately attempt to fetch the history list', () => {
|
||||||
|
expect(historySrv.getHistoryList).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and the history list is successfully fetched', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred.resolve(versionsResponse);
|
||||||
|
await historyListCtrl.getLog();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reset the controller's state", async () => {
|
||||||
|
expect(historyListCtrl.mode).toBe('list');
|
||||||
|
expect(historyListCtrl.delta).toEqual({ basic: '', json: '' });
|
||||||
|
|
||||||
|
expect(historyListCtrl.canCompare).toBe(false);
|
||||||
|
expect(_.find(historyListCtrl.revisions, rev => rev.checked)).toBe(undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should store the revisions sorted desc by version id', () => {
|
||||||
|
expect(historyListCtrl.revisions[0].version).toBe(4);
|
||||||
|
expect(historyListCtrl.revisions[1].version).toBe(3);
|
||||||
|
expect(historyListCtrl.revisions[2].version).toBe(2);
|
||||||
|
expect(historyListCtrl.revisions[3].version).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add a checked property to each revision', () => {
|
||||||
|
let actual = _.filter(historyListCtrl.revisions, rev => rev.hasOwnProperty('checked'));
|
||||||
|
expect(actual.length).toBe(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set all checked properties to false on reset', () => {
|
||||||
|
historyListCtrl.revisions[0].checked = true;
|
||||||
|
historyListCtrl.revisions[2].checked = true;
|
||||||
|
historyListCtrl.reset();
|
||||||
|
let actual = _.filter(historyListCtrl.revisions, rev => !rev.checked);
|
||||||
|
expect(actual.length).toBe(4);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and fetching the history list fails', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer();
|
||||||
|
|
||||||
|
historySrv.getHistoryList = jest.fn(() => deferred.promise);
|
||||||
|
|
||||||
|
historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {});
|
||||||
|
|
||||||
|
deferred.reject(new Error('HistoryListError'));
|
||||||
|
|
||||||
|
await historyListCtrl.getLog();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reset the controller's state", () => {
|
||||||
|
expect(historyListCtrl.mode).toBe('list');
|
||||||
|
expect(historyListCtrl.delta).toEqual({ basic: '', json: '' });
|
||||||
|
expect(_.find(historyListCtrl.revisions, rev => rev.checked)).toBe(undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have an empty revisions list', () => {
|
||||||
|
expect(historyListCtrl.revisions).toEqual([]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('should update the history list when the dashboard is saved', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
historyListCtrl.dashboard = { version: 3 };
|
||||||
|
historyListCtrl.resetFromSource = jest.fn();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should listen for the `dashboard-saved` appEvent', () => {
|
||||||
|
expect($rootScope.onAppEvent).toHaveBeenCalledTimes(1);
|
||||||
|
expect($rootScope.onAppEvent.mock.calls[0][0]).toBe('dashboard-saved');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should call `onDashboardSaved` when the appEvent is received', () => {
|
||||||
|
expect($rootScope.onAppEvent.mock.calls[0][1]).not.toBe(historyListCtrl.onDashboardSaved);
|
||||||
|
expect($rootScope.onAppEvent.mock.calls[0][1].toString).toBe(historyListCtrl.onDashboardSaved.toString);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('when the user wants to compare two revisions', () => {
|
||||||
|
let deferred;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer({});
|
||||||
|
historySrv.getHistoryList = jest.fn(() => $q.when(versionsResponse));
|
||||||
|
historySrv.calculateDiff = jest.fn(() => deferred.promise);
|
||||||
|
|
||||||
|
historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {});
|
||||||
|
|
||||||
|
historyListCtrl.dashboard = {
|
||||||
|
id: 2,
|
||||||
|
version: 3,
|
||||||
|
formatDate: jest.fn(() => 'date'),
|
||||||
|
};
|
||||||
|
|
||||||
|
deferred.resolve(versionsResponse);
|
||||||
|
await historyListCtrl.getLog();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have already fetched the history list', () => {
|
||||||
|
expect(historySrv.getHistoryList).toHaveBeenCalled();
|
||||||
|
expect(historyListCtrl.revisions.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should check that two valid versions are selected', () => {
|
||||||
|
// []
|
||||||
|
expect(historyListCtrl.canCompare).toBe(false);
|
||||||
|
|
||||||
|
// single value
|
||||||
|
historyListCtrl.revisions = [{ checked: true }];
|
||||||
|
historyListCtrl.revisionSelectionChanged();
|
||||||
|
expect(historyListCtrl.canCompare).toBe(false);
|
||||||
|
|
||||||
|
// both values in range
|
||||||
|
historyListCtrl.revisions = [{ checked: true }, { checked: true }];
|
||||||
|
historyListCtrl.revisionSelectionChanged();
|
||||||
|
expect(historyListCtrl.canCompare).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and the basic diff is successfully fetched', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer({});
|
||||||
|
historySrv.calculateDiff = jest.fn(() => deferred.promise);
|
||||||
|
deferred.resolve(compare('basic'));
|
||||||
|
historyListCtrl.revisions[1].checked = true;
|
||||||
|
historyListCtrl.revisions[3].checked = true;
|
||||||
|
await historyListCtrl.getDiff('basic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fetch the basic diff if two valid versions are selected', () => {
|
||||||
|
expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1);
|
||||||
|
expect(historyListCtrl.delta.basic).toBe('<div></div>');
|
||||||
|
expect(historyListCtrl.delta.json).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set the basic diff view as active', () => {
|
||||||
|
expect(historyListCtrl.mode).toBe('compare');
|
||||||
|
expect(historyListCtrl.diff).toBe('basic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and the json diff is successfully fetched', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer({});
|
||||||
|
historySrv.calculateDiff = jest.fn(() => deferred.promise);
|
||||||
|
deferred.resolve(compare('json'));
|
||||||
|
historyListCtrl.revisions[1].checked = true;
|
||||||
|
historyListCtrl.revisions[3].checked = true;
|
||||||
|
await historyListCtrl.getDiff('json');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fetch the json diff if two valid versions are selected', () => {
|
||||||
|
expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1);
|
||||||
|
expect(historyListCtrl.delta.basic).toBe('');
|
||||||
|
expect(historyListCtrl.delta.json).toBe('<pre><code></code></pre>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set the json diff view as active', () => {
|
||||||
|
expect(historyListCtrl.mode).toBe('compare');
|
||||||
|
expect(historyListCtrl.diff).toBe('json');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and diffs have already been fetched', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred.resolve(compare('basic'));
|
||||||
|
|
||||||
|
historyListCtrl.revisions[3].checked = true;
|
||||||
|
historyListCtrl.revisions[1].checked = true;
|
||||||
|
historyListCtrl.delta.basic = 'cached basic';
|
||||||
|
historyListCtrl.getDiff('basic');
|
||||||
|
await historySrv.calculateDiff();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should use the cached diffs instead of fetching', () => {
|
||||||
|
expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1);
|
||||||
|
expect(historyListCtrl.delta.basic).toBe('cached basic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and fetching the diff fails', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer({});
|
||||||
|
historySrv.calculateDiff = jest.fn(() => deferred.promise);
|
||||||
|
|
||||||
|
historyListCtrl.revisions[3].checked = true;
|
||||||
|
historyListCtrl.revisions[1].checked = true;
|
||||||
|
deferred.reject();
|
||||||
|
await historyListCtrl.getDiff('basic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fetch the diff if two valid versions are selected', () => {
|
||||||
|
expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return to the history list view', () => {
|
||||||
|
expect(historyListCtrl.mode).toBe('list');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have an empty delta/changeset', () => {
|
||||||
|
expect(historyListCtrl.delta).toEqual({ basic: '', json: '' });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('when the user wants to restore a revision', () => {
|
||||||
|
let deferred;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer();
|
||||||
|
historySrv.getHistoryList = jest.fn(() => $q.when(versionsResponse));
|
||||||
|
historySrv.restoreDashboard = jest.fn(() => deferred.promise);
|
||||||
|
|
||||||
|
historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {});
|
||||||
|
|
||||||
|
historyListCtrl.dashboard = {
|
||||||
|
id: 1,
|
||||||
|
};
|
||||||
|
historyListCtrl.restore();
|
||||||
|
deferred.resolve(versionsResponse);
|
||||||
|
await historyListCtrl.getLog();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should display a modal allowing the user to restore or cancel', () => {
|
||||||
|
expect($rootScope.appEvent).toHaveBeenCalledTimes(1);
|
||||||
|
expect($rootScope.appEvent.mock.calls[0][0]).toBe('confirm-modal');
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('and restore fails to fetch', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
deferred = $q.defer();
|
||||||
|
historySrv.getHistoryList = jest.fn(() => $q.when(versionsResponse));
|
||||||
|
historySrv.restoreDashboard = jest.fn(() => deferred.promise);
|
||||||
|
historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {});
|
||||||
|
deferred.reject(new Error('RestoreError'));
|
||||||
|
historyListCtrl.restoreConfirm(RESTORE_ID);
|
||||||
|
await historyListCtrl.getLog();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should indicate loading has finished', () => {
|
||||||
|
expect(historyListCtrl.loading).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@ -1,329 +0,0 @@
|
|||||||
import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common';
|
|
||||||
|
|
||||||
import _ from 'lodash';
|
|
||||||
import { HistoryListCtrl } from 'app/features/dashboard/history/history';
|
|
||||||
import { versions, compare, restore } from './history_mocks';
|
|
||||||
|
|
||||||
describe('HistoryListCtrl', function() {
|
|
||||||
var RESTORE_ID = 4;
|
|
||||||
|
|
||||||
var ctx: any = {};
|
|
||||||
var versionsResponse: any = versions();
|
|
||||||
|
|
||||||
restore(7, RESTORE_ID);
|
|
||||||
|
|
||||||
beforeEach(angularMocks.module('grafana.core'));
|
|
||||||
beforeEach(angularMocks.module('grafana.services'));
|
|
||||||
beforeEach(
|
|
||||||
angularMocks.inject($rootScope => {
|
|
||||||
ctx.scope = $rootScope.$new();
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
var historySrv;
|
|
||||||
var $rootScope;
|
|
||||||
beforeEach(function() {
|
|
||||||
historySrv = {
|
|
||||||
getHistoryList: sinon.stub(),
|
|
||||||
calculateDiff: sinon.stub(),
|
|
||||||
restoreDashboard: sinon.stub(),
|
|
||||||
};
|
|
||||||
$rootScope = {
|
|
||||||
appEvent: sinon.spy(),
|
|
||||||
onAppEvent: sinon.spy(),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('when the history list component is loaded', function() {
|
|
||||||
var deferred;
|
|
||||||
|
|
||||||
beforeEach(
|
|
||||||
angularMocks.inject(($controller, $q) => {
|
|
||||||
deferred = $q.defer();
|
|
||||||
historySrv.getHistoryList.returns(deferred.promise);
|
|
||||||
ctx.ctrl = $controller(
|
|
||||||
HistoryListCtrl,
|
|
||||||
{
|
|
||||||
historySrv,
|
|
||||||
$rootScope,
|
|
||||||
$scope: ctx.scope,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
dashboard: {
|
|
||||||
id: 2,
|
|
||||||
version: 3,
|
|
||||||
formatDate: sinon.stub().returns('date'),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
it('should immediately attempt to fetch the history list', function() {
|
|
||||||
expect(historySrv.getHistoryList.calledOnce).to.be(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and the history list is successfully fetched', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.resolve(versionsResponse);
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should reset the controller's state", function() {
|
|
||||||
expect(ctx.ctrl.mode).to.be('list');
|
|
||||||
expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' });
|
|
||||||
expect(ctx.ctrl.canCompare).to.be(false);
|
|
||||||
expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be(undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should store the revisions sorted desc by version id', function() {
|
|
||||||
expect(ctx.ctrl.revisions[0].version).to.be(4);
|
|
||||||
expect(ctx.ctrl.revisions[1].version).to.be(3);
|
|
||||||
expect(ctx.ctrl.revisions[2].version).to.be(2);
|
|
||||||
expect(ctx.ctrl.revisions[3].version).to.be(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should add a checked property to each revision', function() {
|
|
||||||
var actual = _.filter(ctx.ctrl.revisions, rev => rev.hasOwnProperty('checked'));
|
|
||||||
expect(actual.length).to.be(4);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set all checked properties to false on reset', function() {
|
|
||||||
ctx.ctrl.revisions[0].checked = true;
|
|
||||||
ctx.ctrl.revisions[2].checked = true;
|
|
||||||
ctx.ctrl.reset();
|
|
||||||
var actual = _.filter(ctx.ctrl.revisions, rev => !rev.checked);
|
|
||||||
expect(actual.length).to.be(4);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and fetching the history list fails', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.reject(new Error('HistoryListError'));
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should reset the controller's state", function() {
|
|
||||||
expect(ctx.ctrl.mode).to.be('list');
|
|
||||||
expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' });
|
|
||||||
expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be(undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should have an empty revisions list', function() {
|
|
||||||
expect(ctx.ctrl.revisions).to.eql([]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('should update the history list when the dashboard is saved', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
ctx.ctrl.dashboard = { version: 3 };
|
|
||||||
ctx.ctrl.resetFromSource = sinon.spy();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should listen for the `dashboard-saved` appEvent', function() {
|
|
||||||
expect($rootScope.onAppEvent.calledOnce).to.be(true);
|
|
||||||
expect($rootScope.onAppEvent.getCall(0).args[0]).to.be('dashboard-saved');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should call `onDashboardSaved` when the appEvent is received', function() {
|
|
||||||
expect($rootScope.onAppEvent.getCall(0).args[1]).to.not.be(ctx.ctrl.onDashboardSaved);
|
|
||||||
expect($rootScope.onAppEvent.getCall(0).args[1].toString).to.be(ctx.ctrl.onDashboardSaved.toString);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('when the user wants to compare two revisions', function() {
|
|
||||||
var deferred;
|
|
||||||
|
|
||||||
beforeEach(
|
|
||||||
angularMocks.inject(($controller, $q) => {
|
|
||||||
deferred = $q.defer();
|
|
||||||
historySrv.getHistoryList.returns($q.when(versionsResponse));
|
|
||||||
historySrv.calculateDiff.returns(deferred.promise);
|
|
||||||
ctx.ctrl = $controller(
|
|
||||||
HistoryListCtrl,
|
|
||||||
{
|
|
||||||
historySrv,
|
|
||||||
$rootScope,
|
|
||||||
$scope: ctx.scope,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
dashboard: {
|
|
||||||
id: 2,
|
|
||||||
version: 3,
|
|
||||||
formatDate: sinon.stub().returns('date'),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
ctx.ctrl.$scope.onDashboardSaved = sinon.spy();
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
it('should have already fetched the history list', function() {
|
|
||||||
expect(historySrv.getHistoryList.calledOnce).to.be(true);
|
|
||||||
expect(ctx.ctrl.revisions.length).to.be.above(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should check that two valid versions are selected', function() {
|
|
||||||
// []
|
|
||||||
expect(ctx.ctrl.canCompare).to.be(false);
|
|
||||||
|
|
||||||
// single value
|
|
||||||
ctx.ctrl.revisions = [{ checked: true }];
|
|
||||||
ctx.ctrl.revisionSelectionChanged();
|
|
||||||
expect(ctx.ctrl.canCompare).to.be(false);
|
|
||||||
|
|
||||||
// both values in range
|
|
||||||
ctx.ctrl.revisions = [{ checked: true }, { checked: true }];
|
|
||||||
ctx.ctrl.revisionSelectionChanged();
|
|
||||||
expect(ctx.ctrl.canCompare).to.be(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and the basic diff is successfully fetched', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.resolve(compare('basic'));
|
|
||||||
ctx.ctrl.revisions[1].checked = true;
|
|
||||||
ctx.ctrl.revisions[3].checked = true;
|
|
||||||
ctx.ctrl.getDiff('basic');
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should fetch the basic diff if two valid versions are selected', function() {
|
|
||||||
expect(historySrv.calculateDiff.calledOnce).to.be(true);
|
|
||||||
expect(ctx.ctrl.delta.basic).to.be('<div></div>');
|
|
||||||
expect(ctx.ctrl.delta.json).to.be('');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set the basic diff view as active', function() {
|
|
||||||
expect(ctx.ctrl.mode).to.be('compare');
|
|
||||||
expect(ctx.ctrl.diff).to.be('basic');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and the json diff is successfully fetched', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.resolve(compare('json'));
|
|
||||||
ctx.ctrl.revisions[1].checked = true;
|
|
||||||
ctx.ctrl.revisions[3].checked = true;
|
|
||||||
ctx.ctrl.getDiff('json');
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should fetch the json diff if two valid versions are selected', function() {
|
|
||||||
expect(historySrv.calculateDiff.calledOnce).to.be(true);
|
|
||||||
expect(ctx.ctrl.delta.basic).to.be('');
|
|
||||||
expect(ctx.ctrl.delta.json).to.be('<pre><code></code></pre>');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should set the json diff view as active', function() {
|
|
||||||
expect(ctx.ctrl.mode).to.be('compare');
|
|
||||||
expect(ctx.ctrl.diff).to.be('json');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and diffs have already been fetched', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.resolve(compare('basic'));
|
|
||||||
ctx.ctrl.revisions[3].checked = true;
|
|
||||||
ctx.ctrl.revisions[1].checked = true;
|
|
||||||
ctx.ctrl.delta.basic = 'cached basic';
|
|
||||||
ctx.ctrl.getDiff('basic');
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should use the cached diffs instead of fetching', function() {
|
|
||||||
expect(historySrv.calculateDiff.calledOnce).to.be(false);
|
|
||||||
expect(ctx.ctrl.delta.basic).to.be('cached basic');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and fetching the diff fails', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.reject(new Error('DiffError'));
|
|
||||||
ctx.ctrl.revisions[3].checked = true;
|
|
||||||
ctx.ctrl.revisions[1].checked = true;
|
|
||||||
ctx.ctrl.getDiff('basic');
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should fetch the diff if two valid versions are selected', function() {
|
|
||||||
expect(historySrv.calculateDiff.calledOnce).to.be(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return to the history list view', function() {
|
|
||||||
expect(ctx.ctrl.mode).to.be('list');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should have an empty delta/changeset', function() {
|
|
||||||
expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' });
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('when the user wants to restore a revision', function() {
|
|
||||||
var deferred;
|
|
||||||
|
|
||||||
beforeEach(
|
|
||||||
angularMocks.inject(($controller, $q) => {
|
|
||||||
deferred = $q.defer();
|
|
||||||
historySrv.getHistoryList.returns($q.when(versionsResponse));
|
|
||||||
historySrv.restoreDashboard.returns(deferred.promise);
|
|
||||||
ctx.ctrl = $controller(HistoryListCtrl, {
|
|
||||||
historySrv,
|
|
||||||
contextSrv: { user: { name: 'Carlos' } },
|
|
||||||
$rootScope,
|
|
||||||
$scope: ctx.scope,
|
|
||||||
});
|
|
||||||
ctx.ctrl.dashboard = { id: 1 };
|
|
||||||
ctx.ctrl.restore();
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
it('should display a modal allowing the user to restore or cancel', function() {
|
|
||||||
expect($rootScope.appEvent.calledOnce).to.be(true);
|
|
||||||
expect($rootScope.appEvent.calledWith('confirm-modal')).to.be(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('and restore fails to fetch', function() {
|
|
||||||
beforeEach(function() {
|
|
||||||
deferred.reject(new Error('RestoreError'));
|
|
||||||
ctx.ctrl.restoreConfirm(RESTORE_ID);
|
|
||||||
try {
|
|
||||||
// this throws error, due to promise rejection
|
|
||||||
ctx.ctrl.$scope.$apply();
|
|
||||||
} catch (e) {}
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should indicate loading has finished', function() {
|
|
||||||
expect(ctx.ctrl.loading).to.be(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
61
public/app/features/dashboard/specs/history_srv.jest.ts
Normal file
61
public/app/features/dashboard/specs/history_srv.jest.ts
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import '../history/history_srv';
|
||||||
|
import { versions, restore } from './history_mocks';
|
||||||
|
import { HistorySrv } from '../history/history_srv';
|
||||||
|
import { DashboardModel } from '../dashboard_model';
|
||||||
|
jest.mock('app/core/store');
|
||||||
|
|
||||||
|
describe('historySrv', function() {
|
||||||
|
const versionsResponse = versions();
|
||||||
|
const restoreResponse = restore;
|
||||||
|
|
||||||
|
let backendSrv = {
|
||||||
|
get: jest.fn(() => Promise.resolve({})),
|
||||||
|
post: jest.fn(() => Promise.resolve({})),
|
||||||
|
};
|
||||||
|
|
||||||
|
let historySrv = new HistorySrv(backendSrv);
|
||||||
|
|
||||||
|
const dash = new DashboardModel({ id: 1 });
|
||||||
|
const emptyDash = new DashboardModel({});
|
||||||
|
const historyListOpts = { limit: 10, start: 0 };
|
||||||
|
|
||||||
|
describe('getHistoryList', function() {
|
||||||
|
it('should return a versions array for the given dashboard id', function() {
|
||||||
|
backendSrv.get = jest.fn(() => Promise.resolve(versionsResponse));
|
||||||
|
historySrv = new HistorySrv(backendSrv);
|
||||||
|
|
||||||
|
return historySrv.getHistoryList(dash, historyListOpts).then(function(versions) {
|
||||||
|
expect(versions).toEqual(versionsResponse);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return an empty array when not given an id', function() {
|
||||||
|
return historySrv.getHistoryList(emptyDash, historyListOpts).then(function(versions) {
|
||||||
|
expect(versions).toEqual([]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return an empty array when not given a dashboard', function() {
|
||||||
|
return historySrv.getHistoryList(null, historyListOpts).then(function(versions) {
|
||||||
|
expect(versions).toEqual([]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('restoreDashboard', () => {
|
||||||
|
it('should return a success response given valid parameters', function() {
|
||||||
|
let version = 6;
|
||||||
|
backendSrv.post = jest.fn(() => Promise.resolve(restoreResponse(version)));
|
||||||
|
historySrv = new HistorySrv(backendSrv);
|
||||||
|
return historySrv.restoreDashboard(dash, version).then(function(response) {
|
||||||
|
expect(response).toEqual(restoreResponse(version));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return an empty object when not given an id', async () => {
|
||||||
|
historySrv = new HistorySrv(backendSrv);
|
||||||
|
let rsp = await historySrv.restoreDashboard(emptyDash, 6);
|
||||||
|
expect(rsp).toEqual({});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@ -1,86 +0,0 @@
|
|||||||
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
|
|
||||||
|
|
||||||
import helpers from 'test/specs/helpers';
|
|
||||||
import '../history/history_srv';
|
|
||||||
import { versions, restore } from './history_mocks';
|
|
||||||
|
|
||||||
describe('historySrv', function() {
|
|
||||||
var ctx = new helpers.ServiceTestContext();
|
|
||||||
|
|
||||||
var versionsResponse = versions();
|
|
||||||
var restoreResponse = restore;
|
|
||||||
|
|
||||||
beforeEach(angularMocks.module('grafana.core'));
|
|
||||||
beforeEach(angularMocks.module('grafana.services'));
|
|
||||||
beforeEach(
|
|
||||||
angularMocks.inject(function($httpBackend) {
|
|
||||||
ctx.$httpBackend = $httpBackend;
|
|
||||||
$httpBackend.whenRoute('GET', 'api/dashboards/id/:id/versions').respond(versionsResponse);
|
|
||||||
$httpBackend
|
|
||||||
.whenRoute('POST', 'api/dashboards/id/:id/restore')
|
|
||||||
.respond(function(method, url, data, headers, params) {
|
|
||||||
const parsedData = JSON.parse(data);
|
|
||||||
return [200, restoreResponse(parsedData.version)];
|
|
||||||
});
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
beforeEach(ctx.createService('historySrv'));
|
|
||||||
|
|
||||||
function wrapPromise(ctx, angularPromise) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
angularPromise.then(resolve, reject);
|
|
||||||
ctx.$httpBackend.flush();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('getHistoryList', function() {
|
|
||||||
it('should return a versions array for the given dashboard id', function() {
|
|
||||||
return wrapPromise(
|
|
||||||
ctx,
|
|
||||||
ctx.service.getHistoryList({ id: 1 }).then(function(versions) {
|
|
||||||
expect(versions).to.eql(versionsResponse);
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return an empty array when not given an id', function() {
|
|
||||||
return wrapPromise(
|
|
||||||
ctx,
|
|
||||||
ctx.service.getHistoryList({}).then(function(versions) {
|
|
||||||
expect(versions).to.eql([]);
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return an empty array when not given a dashboard', function() {
|
|
||||||
return wrapPromise(
|
|
||||||
ctx,
|
|
||||||
ctx.service.getHistoryList().then(function(versions) {
|
|
||||||
expect(versions).to.eql([]);
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('restoreDashboard', function() {
|
|
||||||
it('should return a success response given valid parameters', function() {
|
|
||||||
let version = 6;
|
|
||||||
return wrapPromise(
|
|
||||||
ctx,
|
|
||||||
ctx.service.restoreDashboard({ id: 1 }, version).then(function(response) {
|
|
||||||
expect(response).to.eql(restoreResponse(version));
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return an empty object when not given an id', function() {
|
|
||||||
return wrapPromise(
|
|
||||||
ctx,
|
|
||||||
ctx.service.restoreDashboard({}, 6).then(function(response) {
|
|
||||||
expect(response).to.eql({});
|
|
||||||
})
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
@ -1,128 +1,57 @@
|
|||||||
import { SaveDashboardModalCtrl } from '../save_modal';
|
import { SaveDashboardModalCtrl } from '../save_modal';
|
||||||
|
|
||||||
jest.mock('app/core/services/context_srv', () => ({}));
|
const setup = (timeChanged, variableValuesChanged, cb) => {
|
||||||
|
const dash = {
|
||||||
|
hasTimeChanged: jest.fn().mockReturnValue(timeChanged),
|
||||||
|
hasVariableValuesChanged: jest.fn().mockReturnValue(variableValuesChanged),
|
||||||
|
resetOriginalTime: jest.fn(),
|
||||||
|
resetOriginalVariables: jest.fn(),
|
||||||
|
getSaveModelClone: jest.fn().mockReturnValue({}),
|
||||||
|
};
|
||||||
|
const dashboardSrvMock = {
|
||||||
|
getCurrent: jest.fn().mockReturnValue(dash),
|
||||||
|
save: jest.fn().mockReturnValue(Promise.resolve()),
|
||||||
|
};
|
||||||
|
const ctrl = new SaveDashboardModalCtrl(dashboardSrvMock);
|
||||||
|
ctrl.saveForm = {
|
||||||
|
$valid: true,
|
||||||
|
};
|
||||||
|
ctrl.dismiss = () => Promise.resolve();
|
||||||
|
cb(dash, ctrl, dashboardSrvMock);
|
||||||
|
};
|
||||||
|
|
||||||
describe('SaveDashboardModal', () => {
|
describe('SaveDashboardModal', () => {
|
||||||
describe('save modal checkboxes', () => {
|
describe('Given time and template variable values have not changed', () => {
|
||||||
it('should show checkboxes', () => {
|
setup(false, false, (dash, ctrl: SaveDashboardModalCtrl) => {
|
||||||
let fakeDashboardSrv = {
|
it('When creating ctrl should set time and template variable values changed', () => {
|
||||||
dash: {
|
expect(ctrl.timeChange).toBeFalsy();
|
||||||
templating: {
|
expect(ctrl.variableValueChange).toBeFalsy();
|
||||||
list: [
|
});
|
||||||
{
|
|
||||||
current: {
|
|
||||||
selected: true,
|
|
||||||
tags: Array(0),
|
|
||||||
text: 'server_001',
|
|
||||||
value: 'server_001',
|
|
||||||
},
|
|
||||||
name: 'Server',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
originalTemplating: [
|
|
||||||
{
|
|
||||||
current: {
|
|
||||||
selected: true,
|
|
||||||
text: 'server_002',
|
|
||||||
value: 'server_002',
|
|
||||||
},
|
|
||||||
name: 'Server',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
time: {
|
|
||||||
from: 'now-3h',
|
|
||||||
to: 'now',
|
|
||||||
},
|
|
||||||
originalTime: {
|
|
||||||
from: 'now-6h',
|
|
||||||
to: 'now',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
let modal = new SaveDashboardModalCtrl(fakeDashboardSrv);
|
|
||||||
|
|
||||||
expect(modal.timeChange).toBe(true);
|
|
||||||
expect(modal.variableValueChange).toBe(true);
|
|
||||||
});
|
});
|
||||||
|
});
|
||||||
|
|
||||||
it('should hide checkboxes', () => {
|
describe('Given time and template variable values have changed', () => {
|
||||||
let fakeDashboardSrv = {
|
setup(true, true, (dash, ctrl: SaveDashboardModalCtrl) => {
|
||||||
dash: {
|
it('When creating ctrl should set time and template variable values changed', () => {
|
||||||
templating: {
|
expect(ctrl.timeChange).toBeTruthy();
|
||||||
list: [
|
expect(ctrl.variableValueChange).toBeTruthy();
|
||||||
{
|
});
|
||||||
current: {
|
|
||||||
selected: true,
|
|
||||||
text: 'server_002',
|
|
||||||
value: 'server_002',
|
|
||||||
},
|
|
||||||
name: 'Server',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
originalTemplating: [
|
|
||||||
{
|
|
||||||
current: {
|
|
||||||
selected: true,
|
|
||||||
text: 'server_002',
|
|
||||||
value: 'server_002',
|
|
||||||
},
|
|
||||||
name: 'Server',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
time: {
|
|
||||||
from: 'now-3h',
|
|
||||||
to: 'now',
|
|
||||||
},
|
|
||||||
originalTime: {
|
|
||||||
from: 'now-3h',
|
|
||||||
to: 'now',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
let modal = new SaveDashboardModalCtrl(fakeDashboardSrv);
|
|
||||||
expect(modal.timeChange).toBe(false);
|
|
||||||
expect(modal.variableValueChange).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should hide variable checkboxes', () => {
|
it('When save time and variable value changes disabled and saving should reset original time and template variable values', async () => {
|
||||||
let fakeDashboardSrv = {
|
ctrl.saveTimerange = false;
|
||||||
dash: {
|
ctrl.saveVariables = false;
|
||||||
templating: {
|
await ctrl.save();
|
||||||
list: [
|
expect(dash.resetOriginalTime).toHaveBeenCalledTimes(0);
|
||||||
{
|
expect(dash.resetOriginalVariables).toHaveBeenCalledTimes(0);
|
||||||
current: {
|
});
|
||||||
selected: true,
|
|
||||||
text: 'server_002',
|
it('When save time and variable value changes enabled and saving should reset original time and template variable values', async () => {
|
||||||
value: 'server_002',
|
ctrl.saveTimerange = true;
|
||||||
},
|
ctrl.saveVariables = true;
|
||||||
name: 'Server',
|
await ctrl.save();
|
||||||
},
|
expect(dash.resetOriginalTime).toHaveBeenCalledTimes(1);
|
||||||
{
|
expect(dash.resetOriginalVariables).toHaveBeenCalledTimes(1);
|
||||||
current: {
|
});
|
||||||
selected: true,
|
|
||||||
text: 'web_002',
|
|
||||||
value: 'web_002',
|
|
||||||
},
|
|
||||||
name: 'Web',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
originalTemplating: [
|
|
||||||
{
|
|
||||||
current: {
|
|
||||||
selected: true,
|
|
||||||
text: 'server_002',
|
|
||||||
value: 'server_002',
|
|
||||||
},
|
|
||||||
name: 'Server',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
let modal = new SaveDashboardModalCtrl(fakeDashboardSrv);
|
|
||||||
expect(modal.variableValueChange).toBe(false);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
163
public/app/features/dashboard/specs/time_srv.jest.ts
Normal file
163
public/app/features/dashboard/specs/time_srv.jest.ts
Normal file
@ -0,0 +1,163 @@
|
|||||||
|
import { TimeSrv } from '../time_srv';
|
||||||
|
import '../time_srv';
|
||||||
|
import moment from 'moment';
|
||||||
|
|
||||||
|
describe('timeSrv', function() {
|
||||||
|
var rootScope = {
|
||||||
|
$on: jest.fn(),
|
||||||
|
onAppEvent: jest.fn(),
|
||||||
|
appEvent: jest.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
var timer = {
|
||||||
|
register: jest.fn(),
|
||||||
|
cancel: jest.fn(),
|
||||||
|
cancelAll: jest.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
var location = {
|
||||||
|
search: jest.fn(() => ({})),
|
||||||
|
};
|
||||||
|
|
||||||
|
var timeSrv;
|
||||||
|
|
||||||
|
var _dashboard: any = {
|
||||||
|
time: { from: 'now-6h', to: 'now' },
|
||||||
|
getTimezone: jest.fn(() => 'browser'),
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(function() {
|
||||||
|
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||||
|
timeSrv.init(_dashboard);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('timeRange', function() {
|
||||||
|
it('should return unparsed when parse is false', function() {
|
||||||
|
timeSrv.setTime({ from: 'now', to: 'now-1h' });
|
||||||
|
var time = timeSrv.timeRange();
|
||||||
|
expect(time.raw.from).toBe('now');
|
||||||
|
expect(time.raw.to).toBe('now-1h');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return parsed when parse is true', function() {
|
||||||
|
timeSrv.setTime({ from: 'now', to: 'now-1h' });
|
||||||
|
var time = timeSrv.timeRange();
|
||||||
|
expect(moment.isMoment(time.from)).toBe(true);
|
||||||
|
expect(moment.isMoment(time.to)).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('init time from url', function() {
|
||||||
|
it('should handle relative times', function() {
|
||||||
|
location = {
|
||||||
|
search: jest.fn(() => ({
|
||||||
|
from: 'now-2d',
|
||||||
|
to: 'now',
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
|
||||||
|
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||||
|
timeSrv.init(_dashboard);
|
||||||
|
var time = timeSrv.timeRange();
|
||||||
|
expect(time.raw.from).toBe('now-2d');
|
||||||
|
expect(time.raw.to).toBe('now');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle formatted dates', function() {
|
||||||
|
location = {
|
||||||
|
search: jest.fn(() => ({
|
||||||
|
from: '20140410T052010',
|
||||||
|
to: '20140520T031022',
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
|
||||||
|
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||||
|
|
||||||
|
timeSrv.init(_dashboard);
|
||||||
|
var time = timeSrv.timeRange();
|
||||||
|
expect(time.from.valueOf()).toEqual(new Date('2014-04-10T05:20:10Z').getTime());
|
||||||
|
expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle formatted dates without time', function() {
|
||||||
|
location = {
|
||||||
|
search: jest.fn(() => ({
|
||||||
|
from: '20140410',
|
||||||
|
to: '20140520',
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
|
||||||
|
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||||
|
|
||||||
|
timeSrv.init(_dashboard);
|
||||||
|
var time = timeSrv.timeRange();
|
||||||
|
expect(time.from.valueOf()).toEqual(new Date('2014-04-10T00:00:00Z').getTime());
|
||||||
|
expect(time.to.valueOf()).toEqual(new Date('2014-05-20T00:00:00Z').getTime());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle epochs', function() {
|
||||||
|
location = {
|
||||||
|
search: jest.fn(() => ({
|
||||||
|
from: '1410337646373',
|
||||||
|
to: '1410337665699',
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
|
||||||
|
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||||
|
|
||||||
|
timeSrv.init(_dashboard);
|
||||||
|
var time = timeSrv.timeRange();
|
||||||
|
expect(time.from.valueOf()).toEqual(1410337646373);
|
||||||
|
expect(time.to.valueOf()).toEqual(1410337665699);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle bad dates', function() {
|
||||||
|
location = {
|
||||||
|
search: jest.fn(() => ({
|
||||||
|
from: '20151126T00010%3C%2Fp%3E%3Cspan%20class',
|
||||||
|
to: 'now',
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
|
||||||
|
timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() });
|
||||||
|
|
||||||
|
_dashboard.time.from = 'now-6h';
|
||||||
|
timeSrv.init(_dashboard);
|
||||||
|
expect(timeSrv.time.from).toEqual('now-6h');
|
||||||
|
expect(timeSrv.time.to).toEqual('now');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('setTime', function() {
|
||||||
|
it('should return disable refresh if refresh is disabled for any range', function() {
|
||||||
|
_dashboard.refresh = false;
|
||||||
|
|
||||||
|
timeSrv.setTime({ from: '2011-01-01', to: '2015-01-01' });
|
||||||
|
expect(_dashboard.refresh).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should restore refresh for absolute time range', function() {
|
||||||
|
_dashboard.refresh = '30s';
|
||||||
|
|
||||||
|
timeSrv.setTime({ from: '2011-01-01', to: '2015-01-01' });
|
||||||
|
expect(_dashboard.refresh).toBe('30s');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should restore refresh after relative time range is set', function() {
|
||||||
|
_dashboard.refresh = '10s';
|
||||||
|
timeSrv.setTime({
|
||||||
|
from: moment([2011, 1, 1]),
|
||||||
|
to: moment([2015, 1, 1]),
|
||||||
|
});
|
||||||
|
expect(_dashboard.refresh).toBe(false);
|
||||||
|
timeSrv.setTime({ from: '2011-01-01', to: 'now' });
|
||||||
|
expect(_dashboard.refresh).toBe('10s');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should keep refresh after relative time range is changed and now delay exists', function() {
|
||||||
|
_dashboard.refresh = '10s';
|
||||||
|
timeSrv.setTime({ from: 'now-1h', to: 'now-10s' });
|
||||||
|
expect(_dashboard.refresh).toBe('10s');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@ -1,115 +0,0 @@
|
|||||||
import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common';
|
|
||||||
|
|
||||||
import helpers from 'test/specs/helpers';
|
|
||||||
import '../time_srv';
|
|
||||||
import moment from 'moment';
|
|
||||||
|
|
||||||
describe('timeSrv', function() {
|
|
||||||
var ctx = new helpers.ServiceTestContext();
|
|
||||||
var _dashboard: any = {
|
|
||||||
time: { from: 'now-6h', to: 'now' },
|
|
||||||
getTimezone: sinon.stub().returns('browser'),
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(angularMocks.module('grafana.core'));
|
|
||||||
beforeEach(angularMocks.module('grafana.services'));
|
|
||||||
beforeEach(ctx.createService('timeSrv'));
|
|
||||||
|
|
||||||
beforeEach(function() {
|
|
||||||
ctx.service.init(_dashboard);
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('timeRange', function() {
|
|
||||||
it('should return unparsed when parse is false', function() {
|
|
||||||
ctx.service.setTime({ from: 'now', to: 'now-1h' });
|
|
||||||
var time = ctx.service.timeRange();
|
|
||||||
expect(time.raw.from).to.be('now');
|
|
||||||
expect(time.raw.to).to.be('now-1h');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return parsed when parse is true', function() {
|
|
||||||
ctx.service.setTime({ from: 'now', to: 'now-1h' });
|
|
||||||
var time = ctx.service.timeRange();
|
|
||||||
expect(moment.isMoment(time.from)).to.be(true);
|
|
||||||
expect(moment.isMoment(time.to)).to.be(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('init time from url', function() {
|
|
||||||
it('should handle relative times', function() {
|
|
||||||
ctx.$location.search({ from: 'now-2d', to: 'now' });
|
|
||||||
ctx.service.init(_dashboard);
|
|
||||||
var time = ctx.service.timeRange();
|
|
||||||
expect(time.raw.from).to.be('now-2d');
|
|
||||||
expect(time.raw.to).to.be('now');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle formatted dates', function() {
|
|
||||||
ctx.$location.search({ from: '20140410T052010', to: '20140520T031022' });
|
|
||||||
ctx.service.init(_dashboard);
|
|
||||||
var time = ctx.service.timeRange(true);
|
|
||||||
expect(time.from.valueOf()).to.equal(new Date('2014-04-10T05:20:10Z').getTime());
|
|
||||||
expect(time.to.valueOf()).to.equal(new Date('2014-05-20T03:10:22Z').getTime());
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle formatted dates without time', function() {
|
|
||||||
ctx.$location.search({ from: '20140410', to: '20140520' });
|
|
||||||
ctx.service.init(_dashboard);
|
|
||||||
var time = ctx.service.timeRange(true);
|
|
||||||
expect(time.from.valueOf()).to.equal(new Date('2014-04-10T00:00:00Z').getTime());
|
|
||||||
expect(time.to.valueOf()).to.equal(new Date('2014-05-20T00:00:00Z').getTime());
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle epochs', function() {
|
|
||||||
ctx.$location.search({ from: '1410337646373', to: '1410337665699' });
|
|
||||||
ctx.service.init(_dashboard);
|
|
||||||
var time = ctx.service.timeRange(true);
|
|
||||||
expect(time.from.valueOf()).to.equal(1410337646373);
|
|
||||||
expect(time.to.valueOf()).to.equal(1410337665699);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle bad dates', function() {
|
|
||||||
ctx.$location.search({
|
|
||||||
from: '20151126T00010%3C%2Fp%3E%3Cspan%20class',
|
|
||||||
to: 'now',
|
|
||||||
});
|
|
||||||
_dashboard.time.from = 'now-6h';
|
|
||||||
ctx.service.init(_dashboard);
|
|
||||||
expect(ctx.service.time.from).to.equal('now-6h');
|
|
||||||
expect(ctx.service.time.to).to.equal('now');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('setTime', function() {
|
|
||||||
it('should return disable refresh if refresh is disabled for any range', function() {
|
|
||||||
_dashboard.refresh = false;
|
|
||||||
|
|
||||||
ctx.service.setTime({ from: '2011-01-01', to: '2015-01-01' });
|
|
||||||
expect(_dashboard.refresh).to.be(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should restore refresh for absolute time range', function() {
|
|
||||||
_dashboard.refresh = '30s';
|
|
||||||
|
|
||||||
ctx.service.setTime({ from: '2011-01-01', to: '2015-01-01' });
|
|
||||||
expect(_dashboard.refresh).to.be('30s');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should restore refresh after relative time range is set', function() {
|
|
||||||
_dashboard.refresh = '10s';
|
|
||||||
ctx.service.setTime({
|
|
||||||
from: moment([2011, 1, 1]),
|
|
||||||
to: moment([2015, 1, 1]),
|
|
||||||
});
|
|
||||||
expect(_dashboard.refresh).to.be(false);
|
|
||||||
ctx.service.setTime({ from: '2011-01-01', to: 'now' });
|
|
||||||
expect(_dashboard.refresh).to.be('10s');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should keep refresh after relative time range is changed and now delay exists', function() {
|
|
||||||
_dashboard.refresh = '10s';
|
|
||||||
ctx.service.setTime({ from: 'now-1h', to: 'now-10s' });
|
|
||||||
expect(_dashboard.refresh).to.be('10s');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
@ -4,7 +4,7 @@ import coreModule from 'app/core/core_module';
|
|||||||
import kbn from 'app/core/utils/kbn';
|
import kbn from 'app/core/utils/kbn';
|
||||||
import * as dateMath from 'app/core/utils/datemath';
|
import * as dateMath from 'app/core/utils/datemath';
|
||||||
|
|
||||||
class TimeSrv {
|
export class TimeSrv {
|
||||||
time: any;
|
time: any;
|
||||||
refreshTimer: any;
|
refreshTimer: any;
|
||||||
refresh: boolean;
|
refresh: boolean;
|
||||||
|
@ -48,9 +48,11 @@ function dashLink($compile, $sanitize, linkSrv) {
|
|||||||
function update() {
|
function update() {
|
||||||
var linkInfo = linkSrv.getAnchorInfo(link);
|
var linkInfo = linkSrv.getAnchorInfo(link);
|
||||||
span.text(linkInfo.title);
|
span.text(linkInfo.title);
|
||||||
anchor.attr('href', linkInfo.href);
|
if (!link.asDropdown) {
|
||||||
sanitizeAnchor();
|
anchor.attr('href', linkInfo.href);
|
||||||
|
sanitizeAnchor();
|
||||||
|
}
|
||||||
|
elem.find('a').attr('data-placement', 'bottom');
|
||||||
// tooltip
|
// tooltip
|
||||||
elem.find('a').tooltip({
|
elem.find('a').tooltip({
|
||||||
title: $sanitize(scope.link.tooltip),
|
title: $sanitize(scope.link.tooltip),
|
||||||
|
42
public/app/features/org/specs/team_details_ctrl.jest.ts
Normal file
42
public/app/features/org/specs/team_details_ctrl.jest.ts
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
import '../team_details_ctrl';
|
||||||
|
import TeamDetailsCtrl from '../team_details_ctrl';
|
||||||
|
|
||||||
|
describe('TeamDetailsCtrl', () => {
|
||||||
|
var backendSrv = {
|
||||||
|
searchUsers: jest.fn(() => Promise.resolve([])),
|
||||||
|
get: jest.fn(() => Promise.resolve([])),
|
||||||
|
post: jest.fn(() => Promise.resolve([])),
|
||||||
|
};
|
||||||
|
|
||||||
|
//Team id
|
||||||
|
var routeParams = {
|
||||||
|
id: 1,
|
||||||
|
};
|
||||||
|
|
||||||
|
var navModelSrv = {
|
||||||
|
getNav: jest.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
var teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv);
|
||||||
|
|
||||||
|
describe('when user is chosen to be added to team', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv);
|
||||||
|
const userItem = {
|
||||||
|
id: 2,
|
||||||
|
login: 'user2',
|
||||||
|
};
|
||||||
|
teamDetailsCtrl.userPicked(userItem);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should parse the result and save to db', () => {
|
||||||
|
expect(backendSrv.post.mock.calls[0][0]).toBe('/api/teams/1/members');
|
||||||
|
expect(backendSrv.post.mock.calls[0][1].userId).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should refresh the list after saving.', () => {
|
||||||
|
expect(backendSrv.get.mock.calls[0][0]).toBe('/api/teams/1');
|
||||||
|
expect(backendSrv.get.mock.calls[1][0]).toBe('/api/teams/1/members');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
@ -1,48 +0,0 @@
|
|||||||
import '../team_details_ctrl';
|
|
||||||
import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common';
|
|
||||||
import TeamDetailsCtrl from '../team_details_ctrl';
|
|
||||||
|
|
||||||
describe('TeamDetailsCtrl', () => {
|
|
||||||
var ctx: any = {};
|
|
||||||
var backendSrv = {
|
|
||||||
searchUsers: sinon.stub().returns(Promise.resolve([])),
|
|
||||||
get: sinon.stub().returns(Promise.resolve([])),
|
|
||||||
post: sinon.stub().returns(Promise.resolve([])),
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(angularMocks.module('grafana.core'));
|
|
||||||
beforeEach(angularMocks.module('grafana.controllers'));
|
|
||||||
|
|
||||||
beforeEach(
|
|
||||||
angularMocks.inject(($rootScope, $controller, $q) => {
|
|
||||||
ctx.$q = $q;
|
|
||||||
ctx.scope = $rootScope.$new();
|
|
||||||
ctx.ctrl = $controller(TeamDetailsCtrl, {
|
|
||||||
$scope: ctx.scope,
|
|
||||||
backendSrv: backendSrv,
|
|
||||||
$routeParams: { id: 1 },
|
|
||||||
navModelSrv: { getNav: sinon.stub() },
|
|
||||||
});
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
describe('when user is chosen to be added to team', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
const userItem = {
|
|
||||||
id: 2,
|
|
||||||
login: 'user2',
|
|
||||||
};
|
|
||||||
ctx.ctrl.userPicked(userItem);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should parse the result and save to db', () => {
|
|
||||||
expect(backendSrv.post.getCall(0).args[0]).to.eql('/api/teams/1/members');
|
|
||||||
expect(backendSrv.post.getCall(0).args[1].userId).to.eql(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should refresh the list after saving.', () => {
|
|
||||||
expect(backendSrv.get.getCall(0).args[0]).to.eql('/api/teams/1');
|
|
||||||
expect(backendSrv.get.getCall(1).args[0]).to.eql('/api/teams/1/members');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
@ -7,7 +7,7 @@ export class DatasourceSrv {
|
|||||||
datasources: any;
|
datasources: any;
|
||||||
|
|
||||||
/** @ngInject */
|
/** @ngInject */
|
||||||
constructor(private $q, private $injector, $rootScope, private templateSrv) {
|
constructor(private $q, private $injector, private $rootScope, private templateSrv) {
|
||||||
this.init();
|
this.init();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -61,7 +61,7 @@ export class DatasourceSrv {
|
|||||||
this.datasources[name] = instance;
|
this.datasources[name] = instance;
|
||||||
deferred.resolve(instance);
|
deferred.resolve(instance);
|
||||||
})
|
})
|
||||||
.catch(function(err) {
|
.catch(err => {
|
||||||
this.$rootScope.appEvent('alert-error', [dsConfig.name + ' plugin failed', err.toString()]);
|
this.$rootScope.appEvent('alert-error', [dsConfig.name + ' plugin failed', err.toString()]);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -13,6 +13,7 @@ var defaults = {
|
|||||||
access: 'proxy',
|
access: 'proxy',
|
||||||
jsonData: {},
|
jsonData: {},
|
||||||
secureJsonFields: {},
|
secureJsonFields: {},
|
||||||
|
secureJsonData: {},
|
||||||
};
|
};
|
||||||
|
|
||||||
var datasourceCreated = false;
|
var datasourceCreated = false;
|
||||||
|
@ -179,4 +179,38 @@ describe('VariableSrv init', function() {
|
|||||||
expect(variable.options[2].selected).to.be(false);
|
expect(variable.options[2].selected).to.be(false);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describeInitScenario('when template variable is present in url multiple times using key/values', scenario => {
|
||||||
|
scenario.setup(() => {
|
||||||
|
scenario.variables = [
|
||||||
|
{
|
||||||
|
name: 'apps',
|
||||||
|
type: 'query',
|
||||||
|
multi: true,
|
||||||
|
current: { text: 'Val1', value: 'val1' },
|
||||||
|
options: [
|
||||||
|
{ text: 'Val1', value: 'val1' },
|
||||||
|
{ text: 'Val2', value: 'val2' },
|
||||||
|
{ text: 'Val3', value: 'val3', selected: true },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
scenario.urlParams['var-apps'] = ['val2', 'val1'];
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should update current value', function() {
|
||||||
|
var variable = ctx.variableSrv.variables[0];
|
||||||
|
expect(variable.current.value.length).to.be(2);
|
||||||
|
expect(variable.current.value[0]).to.be('val2');
|
||||||
|
expect(variable.current.value[1]).to.be('val1');
|
||||||
|
expect(variable.current.text).to.be('Val2 + Val1');
|
||||||
|
expect(variable.options[0].selected).to.be(true);
|
||||||
|
expect(variable.options[1].selected).to.be(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set options that are not in value to selected false', function() {
|
||||||
|
var variable = ctx.variableSrv.variables[0];
|
||||||
|
expect(variable.options[2].selected).to.be(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
@ -209,7 +209,24 @@ export class VariableSrv {
|
|||||||
return op.text === urlValue || op.value === urlValue;
|
return op.text === urlValue || op.value === urlValue;
|
||||||
});
|
});
|
||||||
|
|
||||||
option = option || { text: urlValue, value: urlValue };
|
let defaultText = urlValue;
|
||||||
|
let defaultValue = urlValue;
|
||||||
|
|
||||||
|
if (!option && _.isArray(urlValue)) {
|
||||||
|
defaultText = [];
|
||||||
|
|
||||||
|
for (let n = 0; n < urlValue.length; n++) {
|
||||||
|
let t = _.find(variable.options, op => {
|
||||||
|
return op.value === urlValue[n];
|
||||||
|
});
|
||||||
|
|
||||||
|
if (t) {
|
||||||
|
defaultText.push(t.text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
option = option || { text: defaultText, value: defaultValue };
|
||||||
return variable.setValue(option);
|
return variable.setValue(option);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -374,23 +374,33 @@ export default class CloudWatchDatasource {
|
|||||||
getExpandedVariables(target, dimensionKey, variable, templateSrv) {
|
getExpandedVariables(target, dimensionKey, variable, templateSrv) {
|
||||||
/* if the all checkbox is marked we should add all values to the targets */
|
/* if the all checkbox is marked we should add all values to the targets */
|
||||||
var allSelected = _.find(variable.options, { selected: true, text: 'All' });
|
var allSelected = _.find(variable.options, { selected: true, text: 'All' });
|
||||||
return _.chain(variable.options)
|
var selectedVariables = _.filter(variable.options, v => {
|
||||||
.filter(v => {
|
if (allSelected) {
|
||||||
if (allSelected) {
|
return v.text !== 'All';
|
||||||
return v.text !== 'All';
|
} else {
|
||||||
} else {
|
return v.selected;
|
||||||
return v.selected;
|
}
|
||||||
}
|
});
|
||||||
})
|
var currentVariables = !_.isArray(variable.current.value)
|
||||||
.map(v => {
|
? [variable.current]
|
||||||
var t = angular.copy(target);
|
: variable.current.value.map(v => {
|
||||||
var scopedVar = {};
|
return {
|
||||||
scopedVar[variable.name] = v;
|
text: v,
|
||||||
t.refId = target.refId + '_' + v.value;
|
value: v,
|
||||||
t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar);
|
};
|
||||||
return t;
|
});
|
||||||
})
|
let useSelectedVariables =
|
||||||
.value();
|
selectedVariables.some(s => {
|
||||||
|
return s.value === currentVariables[0].value;
|
||||||
|
}) || currentVariables[0].value === '$__all';
|
||||||
|
return (useSelectedVariables ? selectedVariables : currentVariables).map(v => {
|
||||||
|
var t = angular.copy(target);
|
||||||
|
var scopedVar = {};
|
||||||
|
scopedVar[variable.name] = v;
|
||||||
|
t.refId = target.refId + '_' + v.value;
|
||||||
|
t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar);
|
||||||
|
return t;
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
expandTemplateVariable(targets, scopedVars, templateSrv) {
|
expandTemplateVariable(targets, scopedVars, templateSrv) {
|
||||||
|
@ -2,6 +2,7 @@ import '../datasource';
|
|||||||
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
|
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
|
||||||
import helpers from 'test/specs/helpers';
|
import helpers from 'test/specs/helpers';
|
||||||
import CloudWatchDatasource from '../datasource';
|
import CloudWatchDatasource from '../datasource';
|
||||||
|
import 'app/features/dashboard/time_srv';
|
||||||
|
|
||||||
describe('CloudWatchDatasource', function() {
|
describe('CloudWatchDatasource', function() {
|
||||||
var ctx = new helpers.ServiceTestContext();
|
var ctx = new helpers.ServiceTestContext();
|
||||||
@ -13,6 +14,7 @@ describe('CloudWatchDatasource', function() {
|
|||||||
beforeEach(angularMocks.module('grafana.services'));
|
beforeEach(angularMocks.module('grafana.services'));
|
||||||
beforeEach(angularMocks.module('grafana.controllers'));
|
beforeEach(angularMocks.module('grafana.controllers'));
|
||||||
beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
|
beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
|
||||||
|
beforeEach(ctx.createService('timeSrv'));
|
||||||
|
|
||||||
beforeEach(
|
beforeEach(
|
||||||
angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
|
angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
|
||||||
@ -133,6 +135,10 @@ describe('CloudWatchDatasource', function() {
|
|||||||
{ text: 'i-23456789', value: 'i-23456789', selected: false },
|
{ text: 'i-23456789', value: 'i-23456789', selected: false },
|
||||||
{ text: 'i-34567890', value: 'i-34567890', selected: true },
|
{ text: 'i-34567890', value: 'i-34567890', selected: true },
|
||||||
],
|
],
|
||||||
|
current: {
|
||||||
|
text: 'i-34567890',
|
||||||
|
value: 'i-34567890',
|
||||||
|
},
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
replace: function(target, scopedVars) {
|
replace: function(target, scopedVars) {
|
||||||
@ -169,6 +175,53 @@ describe('CloudWatchDatasource', function() {
|
|||||||
var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv);
|
var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv);
|
||||||
expect(result[0].dimensions.InstanceId).to.be('i-34567890');
|
expect(result[0].dimensions.InstanceId).to.be('i-34567890');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should generate the correct targets by expanding template variables from url', function() {
|
||||||
|
var templateSrv = {
|
||||||
|
variables: [
|
||||||
|
{
|
||||||
|
name: 'instance_id',
|
||||||
|
options: [
|
||||||
|
{ text: 'i-23456789', value: 'i-23456789', selected: false },
|
||||||
|
{ text: 'i-34567890', value: 'i-34567890', selected: false },
|
||||||
|
],
|
||||||
|
current: 'i-45678901',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
replace: function(target, scopedVars) {
|
||||||
|
if (target === '$instance_id') {
|
||||||
|
return 'i-45678901';
|
||||||
|
} else {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
},
|
||||||
|
getVariableName: function(e) {
|
||||||
|
return 'instance_id';
|
||||||
|
},
|
||||||
|
variableExists: function(e) {
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
containsVariable: function(str, variableName) {
|
||||||
|
return str.indexOf('$' + variableName) !== -1;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
var targets = [
|
||||||
|
{
|
||||||
|
region: 'us-east-1',
|
||||||
|
namespace: 'AWS/EC2',
|
||||||
|
metricName: 'CPUUtilization',
|
||||||
|
dimensions: {
|
||||||
|
InstanceId: '$instance_id',
|
||||||
|
},
|
||||||
|
statistics: ['Average'],
|
||||||
|
period: 300,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv);
|
||||||
|
expect(result[0].dimensions.InstanceId).to.be('i-45678901');
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('When query region is "default"', function() {
|
describe('When query region is "default"', function() {
|
||||||
|
@ -11,14 +11,30 @@ export default class ResponseParser {
|
|||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
var influxdb11format = query.toLowerCase().indexOf('show tag values') >= 0;
|
var normalizedQuery = query.toLowerCase();
|
||||||
|
var isValueFirst =
|
||||||
|
normalizedQuery.indexOf('show field keys') >= 0 || normalizedQuery.indexOf('show retention policies') >= 0;
|
||||||
|
|
||||||
var res = {};
|
var res = {};
|
||||||
_.each(influxResults.series, serie => {
|
_.each(influxResults.series, serie => {
|
||||||
_.each(serie.values, value => {
|
_.each(serie.values, value => {
|
||||||
if (_.isArray(value)) {
|
if (_.isArray(value)) {
|
||||||
if (influxdb11format) {
|
// In general, there are 2 possible shapes for the returned value.
|
||||||
addUnique(res, value[1] || value[0]);
|
// The first one is a two-element array,
|
||||||
|
// where the first element is somewhat a metadata value:
|
||||||
|
// the tag name for SHOW TAG VALUES queries,
|
||||||
|
// the time field for SELECT queries, etc.
|
||||||
|
// The second shape is an one-element array,
|
||||||
|
// that is containing an immediate value.
|
||||||
|
// For example, SHOW FIELD KEYS queries return such shape.
|
||||||
|
// Note, pre-0.11 versions return
|
||||||
|
// the second shape for SHOW TAG VALUES queries
|
||||||
|
// (while the newer versions—first).
|
||||||
|
|
||||||
|
if (isValueFirst) {
|
||||||
|
addUnique(res, value[0]);
|
||||||
|
} else if (value[1] !== undefined) {
|
||||||
|
addUnique(res, value[1]);
|
||||||
} else {
|
} else {
|
||||||
addUnique(res, value[0]);
|
addUnique(res, value[0]);
|
||||||
}
|
}
|
||||||
@ -29,7 +45,7 @@ export default class ResponseParser {
|
|||||||
});
|
});
|
||||||
|
|
||||||
return _.map(res, value => {
|
return _.map(res, value => {
|
||||||
return { text: value };
|
return { text: value.toString() };
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -85,30 +85,36 @@ describe('influxdb response parser', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('SELECT response', () => {
|
||||||
|
var query = 'SELECT "usage_iowait" FROM "cpu" LIMIT 10';
|
||||||
|
var response = {
|
||||||
|
results: [
|
||||||
|
{
|
||||||
|
series: [
|
||||||
|
{
|
||||||
|
name: 'cpu',
|
||||||
|
columns: ['time', 'usage_iowait'],
|
||||||
|
values: [[1488465190006040638, 0.0], [1488465190006040638, 15.0], [1488465190006040638, 20.2]],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
var result = parser.parse(query, response);
|
||||||
|
|
||||||
|
it('should return second column', () => {
|
||||||
|
expect(_.size(result)).toBe(3);
|
||||||
|
expect(result[0].text).toBe('0');
|
||||||
|
expect(result[1].text).toBe('15');
|
||||||
|
expect(result[2].text).toBe('20.2');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('SHOW FIELD response', () => {
|
describe('SHOW FIELD response', () => {
|
||||||
var query = 'SHOW FIELD KEYS FROM "cpu"';
|
var query = 'SHOW FIELD KEYS FROM "cpu"';
|
||||||
describe('response from 0.10.0', () => {
|
|
||||||
var response = {
|
|
||||||
results: [
|
|
||||||
{
|
|
||||||
series: [
|
|
||||||
{
|
|
||||||
name: 'measurements',
|
|
||||||
columns: ['name'],
|
|
||||||
values: [['cpu'], ['derivative'], ['logins.count'], ['logs'], ['payment.ended'], ['payment.started']],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
var result = parser.parse(query, response);
|
describe('response from pre-1.0', () => {
|
||||||
it('should get two responses', () => {
|
|
||||||
expect(_.size(result)).toBe(6);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('response from 0.11.0', () => {
|
|
||||||
var response = {
|
var response = {
|
||||||
results: [
|
results: [
|
||||||
{
|
{
|
||||||
@ -129,5 +135,28 @@ describe('influxdb response parser', () => {
|
|||||||
expect(_.size(result)).toBe(1);
|
expect(_.size(result)).toBe(1);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('response from 1.0', () => {
|
||||||
|
var response = {
|
||||||
|
results: [
|
||||||
|
{
|
||||||
|
series: [
|
||||||
|
{
|
||||||
|
name: 'cpu',
|
||||||
|
columns: ['fieldKey', 'fieldType'],
|
||||||
|
values: [['time', 'float']],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
var result = parser.parse(query, response);
|
||||||
|
|
||||||
|
it('should return first column', () => {
|
||||||
|
expect(_.size(result)).toBe(1);
|
||||||
|
expect(result[0].text).toBe('time');
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -64,7 +64,8 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) {
|
|||||||
}
|
}
|
||||||
annotations = ctrl.annotations || [];
|
annotations = ctrl.annotations || [];
|
||||||
buildFlotPairs(data);
|
buildFlotPairs(data);
|
||||||
updateLegendValues(data, panel);
|
const graphHeight = elem.height();
|
||||||
|
updateLegendValues(data, panel, graphHeight);
|
||||||
|
|
||||||
ctrl.events.emit('render-legend');
|
ctrl.events.emit('render-legend');
|
||||||
});
|
});
|
||||||
|
@ -6,4 +6,7 @@ gpg --allow-secret-key-import --import ~/private-repo/signing/private.key
|
|||||||
|
|
||||||
cp ./scripts/build/rpmmacros ~/.rpmmacros
|
cp ./scripts/build/rpmmacros ~/.rpmmacros
|
||||||
|
|
||||||
./scripts/build/sign_expect $GPG_KEY_PASSWORD dist/*.rpm
|
for package in dist/*.rpm; do
|
||||||
|
[ -e "$package" ] || continue
|
||||||
|
./scripts/build/sign_expect $GPG_KEY_PASSWORD $package
|
||||||
|
done
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
module.exports = function(config) {
|
module.exports = function (config) {
|
||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@ -10,7 +10,10 @@ module.exports = function(config) {
|
|||||||
debug: {
|
debug: {
|
||||||
configFile: 'karma.conf.js',
|
configFile: 'karma.conf.js',
|
||||||
singleRun: false,
|
singleRun: false,
|
||||||
browsers: ['Chrome']
|
browsers: ['Chrome'],
|
||||||
|
mime: {
|
||||||
|
'text/x-typescript': ['ts', 'tsx']
|
||||||
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
test: {
|
test: {
|
||||||
|
@ -1,37 +1,29 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
const ExtractTextPlugin = require("extract-text-webpack-plugin");
|
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
|
||||||
|
|
||||||
module.exports = function (options, extractSass) {
|
module.exports = function(options) {
|
||||||
return {
|
return {
|
||||||
test: /\.scss$/,
|
test: /\.scss$/,
|
||||||
use: (extractSass || ExtractTextPlugin).extract({
|
use: [
|
||||||
use: [
|
MiniCssExtractPlugin.loader,
|
||||||
{
|
{
|
||||||
loader: 'css-loader',
|
loader: 'css-loader',
|
||||||
options: {
|
|
||||||
importLoaders: 2,
|
|
||||||
url: options.preserveUrl,
|
|
||||||
sourceMap: options.sourceMap,
|
|
||||||
minimize: options.minimize,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
loader: 'postcss-loader',
|
|
||||||
options: {
|
|
||||||
sourceMap: options.sourceMap,
|
|
||||||
config: { path: __dirname + '/postcss.config.js' }
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{ loader: 'sass-loader', options: { sourceMap: options.sourceMap } }
|
|
||||||
],
|
|
||||||
fallback: [{
|
|
||||||
loader: 'style-loader',
|
|
||||||
options: {
|
options: {
|
||||||
sourceMap: true
|
importLoaders: 2,
|
||||||
}
|
url: options.preserveUrl,
|
||||||
}]
|
sourceMap: options.sourceMap,
|
||||||
})
|
minimize: options.minimize,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
loader: 'postcss-loader',
|
||||||
|
options: {
|
||||||
|
sourceMap: options.sourceMap,
|
||||||
|
config: { path: __dirname + '/postcss.config.js' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{ loader: 'sass-loader', options: { sourceMap: options.sourceMap } },
|
||||||
|
],
|
||||||
};
|
};
|
||||||
}
|
};
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { CheckerPlugin } = require('awesome-typescript-loader');
|
const ForkTsCheckerWebpackPlugin = require('fork-ts-checker-webpack-plugin');
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
target: 'web',
|
target: 'web',
|
||||||
@ -61,6 +61,8 @@ module.exports = {
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
plugins: [
|
plugins: [
|
||||||
new CheckerPlugin(),
|
new ForkTsCheckerWebpackPlugin({
|
||||||
|
checkSyntacticErrors: true,
|
||||||
|
}),
|
||||||
]
|
]
|
||||||
};
|
};
|
||||||
|
@ -7,20 +7,17 @@ const webpack = require('webpack');
|
|||||||
const HtmlWebpackPlugin = require("html-webpack-plugin");
|
const HtmlWebpackPlugin = require("html-webpack-plugin");
|
||||||
const ExtractTextPlugin = require("extract-text-webpack-plugin");
|
const ExtractTextPlugin = require("extract-text-webpack-plugin");
|
||||||
const CleanWebpackPlugin = require('clean-webpack-plugin');
|
const CleanWebpackPlugin = require('clean-webpack-plugin');
|
||||||
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
|
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
|
||||||
|
// const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
|
||||||
const extractSass = new ExtractTextPlugin({
|
|
||||||
filename: "grafana.[name].css"
|
|
||||||
});
|
|
||||||
|
|
||||||
module.exports = merge(common, {
|
module.exports = merge(common, {
|
||||||
devtool: "cheap-module-source-map",
|
devtool: "cheap-module-source-map",
|
||||||
|
mode: 'development',
|
||||||
|
|
||||||
entry: {
|
entry: {
|
||||||
app: './public/app/index.ts',
|
app: './public/app/index.ts',
|
||||||
dark: './public/sass/grafana.dark.scss',
|
dark: './public/sass/grafana.dark.scss',
|
||||||
light: './public/sass/grafana.light.scss',
|
light: './public/sass/grafana.light.scss',
|
||||||
vendor: require('./dependencies'),
|
|
||||||
},
|
},
|
||||||
|
|
||||||
output: {
|
output: {
|
||||||
@ -48,15 +45,13 @@ module.exports = merge(common, {
|
|||||||
test: /\.tsx?$/,
|
test: /\.tsx?$/,
|
||||||
exclude: /node_modules/,
|
exclude: /node_modules/,
|
||||||
use: {
|
use: {
|
||||||
loader: 'awesome-typescript-loader',
|
loader: 'ts-loader',
|
||||||
options: {
|
options: {
|
||||||
useCache: true,
|
transpileOnly: true
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
require('./sass.rule.js')({
|
require('./sass.rule.js')({ sourceMap: false, minimize: false, preserveUrl: false }),
|
||||||
sourceMap: true, minimize: false, preserveUrl: false
|
|
||||||
}, extractSass),
|
|
||||||
{
|
{
|
||||||
test: /\.(png|jpg|gif|ttf|eot|svg|woff(2)?)(\?[a-z0-9=&.]+)?$/,
|
test: /\.(png|jpg|gif|ttf|eot|svg|woff(2)?)(\?[a-z0-9=&.]+)?$/,
|
||||||
loader: 'file-loader'
|
loader: 'file-loader'
|
||||||
@ -64,9 +59,30 @@ module.exports = merge(common, {
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
|
||||||
|
optimization: {
|
||||||
|
splitChunks: {
|
||||||
|
cacheGroups: {
|
||||||
|
manifest: {
|
||||||
|
chunks: "initial",
|
||||||
|
test: "vendor",
|
||||||
|
name: "vendor",
|
||||||
|
enforce: true
|
||||||
|
},
|
||||||
|
vendor: {
|
||||||
|
chunks: "initial",
|
||||||
|
test: "vendor",
|
||||||
|
name: "vendor",
|
||||||
|
enforce: true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
plugins: [
|
plugins: [
|
||||||
new CleanWebpackPlugin('../public/build', { allowExternal: true }),
|
new CleanWebpackPlugin('../../public/build', { allowExternal: true }),
|
||||||
extractSass,
|
new MiniCssExtractPlugin({
|
||||||
|
filename: "grafana.[name].css"
|
||||||
|
}),
|
||||||
new HtmlWebpackPlugin({
|
new HtmlWebpackPlugin({
|
||||||
filename: path.resolve(__dirname, '../../public/views/index.html'),
|
filename: path.resolve(__dirname, '../../public/views/index.html'),
|
||||||
template: path.resolve(__dirname, '../../public/views/index.template.html'),
|
template: path.resolve(__dirname, '../../public/views/index.template.html'),
|
||||||
@ -80,9 +96,6 @@ module.exports = merge(common, {
|
|||||||
'NODE_ENV': JSON.stringify('development')
|
'NODE_ENV': JSON.stringify('development')
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
new webpack.optimize.CommonsChunkPlugin({
|
|
||||||
names: ['vendor', 'manifest'],
|
|
||||||
}),
|
|
||||||
// new BundleAnalyzerPlugin({
|
// new BundleAnalyzerPlugin({
|
||||||
// analyzerPort: 8889
|
// analyzerPort: 8889
|
||||||
// })
|
// })
|
||||||
|
@ -42,20 +42,23 @@ module.exports = merge(common, {
|
|||||||
{
|
{
|
||||||
test: /\.tsx?$/,
|
test: /\.tsx?$/,
|
||||||
exclude: /node_modules/,
|
exclude: /node_modules/,
|
||||||
use: {
|
use: [{
|
||||||
loader: 'awesome-typescript-loader',
|
loader: 'babel-loader',
|
||||||
options: {
|
options: {
|
||||||
useCache: true,
|
cacheDirectory: true,
|
||||||
useBabel: true,
|
babelrc: false,
|
||||||
babelOptions: {
|
plugins: [
|
||||||
babelrc: false,
|
'syntax-dynamic-import',
|
||||||
plugins: [
|
'react-hot-loader/babel'
|
||||||
'syntax-dynamic-import',
|
]
|
||||||
'react-hot-loader/babel'
|
}
|
||||||
]
|
},
|
||||||
}
|
{
|
||||||
|
loader: 'ts-loader',
|
||||||
|
options: {
|
||||||
|
transpileOnly: true
|
||||||
},
|
},
|
||||||
}
|
}],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
test: /\.scss$/,
|
test: /\.scss$/,
|
||||||
|
@ -1,21 +1,22 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
const merge = require('webpack-merge');
|
const merge = require('webpack-merge');
|
||||||
const UglifyJSPlugin = require('uglifyjs-webpack-plugin');
|
const UglifyJsPlugin = require('uglifyjs-webpack-plugin');
|
||||||
const common = require('./webpack.common.js');
|
const common = require('./webpack.common.js');
|
||||||
const webpack = require('webpack');
|
const webpack = require('webpack');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const ngAnnotatePlugin = require('ng-annotate-webpack-plugin');
|
const ngAnnotatePlugin = require('ng-annotate-webpack-plugin');
|
||||||
const HtmlWebpackPlugin = require("html-webpack-plugin");
|
const HtmlWebpackPlugin = require("html-webpack-plugin");
|
||||||
const ExtractTextPlugin = require("extract-text-webpack-plugin");
|
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
|
||||||
|
const OptimizeCSSAssetsPlugin = require("optimize-css-assets-webpack-plugin");
|
||||||
|
|
||||||
module.exports = merge(common, {
|
module.exports = merge(common, {
|
||||||
|
mode: 'production',
|
||||||
devtool: "source-map",
|
devtool: "source-map",
|
||||||
|
|
||||||
entry: {
|
entry: {
|
||||||
dark: './public/sass/grafana.dark.scss',
|
dark: './public/sass/grafana.dark.scss',
|
||||||
light: './public/sass/grafana.light.scss',
|
light: './public/sass/grafana.light.scss',
|
||||||
vendor: require('./dependencies'),
|
|
||||||
},
|
},
|
||||||
|
|
||||||
module: {
|
module: {
|
||||||
@ -35,49 +36,49 @@ module.exports = merge(common, {
|
|||||||
{
|
{
|
||||||
test: /\.tsx?$/,
|
test: /\.tsx?$/,
|
||||||
exclude: /node_modules/,
|
exclude: /node_modules/,
|
||||||
use: [
|
use: {
|
||||||
{
|
loader: 'ts-loader',
|
||||||
loader: 'awesome-typescript-loader',
|
options: {
|
||||||
options: {
|
transpileOnly: true
|
||||||
errorsAsWarnings: false,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
]
|
},
|
||||||
},
|
},
|
||||||
require('./sass.rule.js')({
|
require('./sass.rule.js')({
|
||||||
sourceMap: false, minimize: true, preserveUrl: false
|
sourceMap: false, minimize: false, preserveUrl: false
|
||||||
})
|
})
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
|
||||||
devServer: {
|
optimization: {
|
||||||
noInfo: true,
|
splitChunks: {
|
||||||
stats: {
|
cacheGroups: {
|
||||||
chunks: false,
|
commons: {
|
||||||
|
test: /[\\/]node_modules[\\/].*[jt]sx?$/,
|
||||||
|
name: "vendor",
|
||||||
|
chunks: "all"
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
|
minimizer: [
|
||||||
|
new UglifyJsPlugin({
|
||||||
|
cache: true,
|
||||||
|
parallel: true,
|
||||||
|
sourceMap: true
|
||||||
|
}),
|
||||||
|
new OptimizeCSSAssetsPlugin({})
|
||||||
|
]
|
||||||
},
|
},
|
||||||
|
|
||||||
plugins: [
|
plugins: [
|
||||||
new ExtractTextPlugin({
|
new MiniCssExtractPlugin({
|
||||||
filename: 'grafana.[name].css',
|
filename: "grafana.[name].css"
|
||||||
}),
|
}),
|
||||||
new ngAnnotatePlugin(),
|
new ngAnnotatePlugin(),
|
||||||
new UglifyJSPlugin({
|
|
||||||
sourceMap: true,
|
|
||||||
}),
|
|
||||||
new webpack.DefinePlugin({
|
|
||||||
'process.env': {
|
|
||||||
'NODE_ENV': JSON.stringify('production')
|
|
||||||
}
|
|
||||||
}),
|
|
||||||
new HtmlWebpackPlugin({
|
new HtmlWebpackPlugin({
|
||||||
filename: path.resolve(__dirname, '../../public/views/index.html'),
|
filename: path.resolve(__dirname, '../../public/views/index.html'),
|
||||||
template: path.resolve(__dirname, '../../public/views/index.template.html'),
|
template: path.resolve(__dirname, '../../public/views/index.template.html'),
|
||||||
inject: 'body',
|
inject: 'body',
|
||||||
chunks: ['manifest', 'vendor', 'app'],
|
chunks: ['vendor', 'app'],
|
||||||
}),
|
|
||||||
new webpack.optimize.CommonsChunkPlugin({
|
|
||||||
names: ['vendor', 'manifest'],
|
|
||||||
}),
|
}),
|
||||||
function () {
|
function () {
|
||||||
this.plugin("done", function (stats) {
|
this.plugin("done", function (stats) {
|
||||||
|
@ -3,29 +3,36 @@ const merge = require('webpack-merge');
|
|||||||
const common = require('./webpack.common.js');
|
const common = require('./webpack.common.js');
|
||||||
|
|
||||||
config = merge(common, {
|
config = merge(common, {
|
||||||
|
mode: 'development',
|
||||||
devtool: 'cheap-module-source-map',
|
devtool: 'cheap-module-source-map',
|
||||||
|
|
||||||
externals: {
|
externals: {
|
||||||
'react/addons': true,
|
'react/addons': true,
|
||||||
'react/lib/ExecutionEnvironment': true,
|
'react/lib/ExecutionEnvironment': true,
|
||||||
'react/lib/ReactContext': true,
|
'react/lib/ReactContext': true,
|
||||||
},
|
},
|
||||||
|
|
||||||
module: {
|
module: {
|
||||||
rules: [
|
rules: [
|
||||||
{
|
{
|
||||||
test: /\.tsx?$/,
|
test: /\.tsx?$/,
|
||||||
exclude: /node_modules/,
|
exclude: /node_modules/,
|
||||||
use: [
|
use: {
|
||||||
{ loader: "awesome-typescript-loader" }
|
loader: 'ts-loader',
|
||||||
]
|
options: {
|
||||||
|
transpileOnly: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
]
|
],
|
||||||
},
|
},
|
||||||
|
|
||||||
plugins: [
|
plugins: [
|
||||||
new webpack.SourceMapDevToolPlugin({
|
new webpack.SourceMapDevToolPlugin({
|
||||||
filename: null, // if no value is provided the sourcemap is inlined
|
filename: null, // if no value is provided the sourcemap is inlined
|
||||||
test: /\.(ts|js)($|\?)/i // process .js and .ts files only
|
test: /\.(ts|js)($|\?)/i, // process .js and .ts files only
|
||||||
}),
|
}),
|
||||||
]
|
],
|
||||||
});
|
});
|
||||||
|
|
||||||
module.exports = config;
|
module.exports = config;
|
||||||
|
Loading…
Reference in New Issue
Block a user