diff --git a/.circleci/config.yml b/.circleci/config.yml
index 6e3cfc1e840..f351040fe2f 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -8,6 +8,9 @@ aliases:
- &filter-not-release
tags:
ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
+ - &filter-only-master
+ branches:
+ only: master
version: 2
@@ -91,9 +94,6 @@ jobs:
- image: circleci/node:8
steps:
- checkout
- - run:
- name: install yarn
- command: 'sudo npm install -g yarn --quiet'
- restore_cache:
key: dependency-cache-{{ checksum "yarn.lock" }}
- run:
@@ -163,7 +163,7 @@ jobs:
steps:
- checkout
- run:
- name: build and package grafana
+ name: build, test and package grafana enterprise
command: './scripts/build/build_enterprise.sh'
- run:
name: sign packages
@@ -171,6 +171,26 @@ jobs:
- run:
name: sha-sum packages
command: 'go run build.go sha-dist'
+ - run:
+ name: move enterprise packages into their own folder
+ command: 'mv dist enterprise-dist'
+ - persist_to_workspace:
+ root: .
+ paths:
+ - enterprise-dist/grafana-enterprise*
+
+ deploy-enterprise-master:
+ docker:
+ - image: circleci/python:2.7-stretch
+ steps:
+ - attach_workspace:
+ at: .
+ - run:
+ name: install awscli
+ command: 'sudo pip install awscli'
+ - run:
+ name: deploy to s3
+ command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/master'
deploy-master:
docker:
@@ -224,6 +244,8 @@ workflows:
jobs:
- build-all:
filters: *filter-not-release
+ - build-enterprise:
+ filters: *filter-only-master
- codespell:
filters: *filter-not-release
- gometalinter:
@@ -248,6 +270,18 @@ workflows:
filters:
branches:
only: master
+ - deploy-enterprise-master:
+ requires:
+ - build-all
+ - test-backend
+ - test-frontend
+ - codespell
+ - gometalinter
+ - mysql-integration-test
+ - postgres-integration-test
+ - build-enterprise
+ filters: *filter-only-master
+
release:
jobs:
- build-all:
diff --git a/.gitignore b/.gitignore
index 25325b37890..accc24d84cd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,6 +43,7 @@ fig.yml
docker-compose.yml
docker-compose.yaml
/conf/provisioning/**/custom.yaml
+/conf/ldap_dev.toml
profile.cov
/grafana
/local
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7df1533e86a..0177546b643 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,8 +7,24 @@
* **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
* **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248)
+* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
+* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
+* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
+* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
+* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
+* **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
+* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
-# 5.2.0 (unreleased)
+
+# 5.2.1 (2018-06-29)
+
+### Minor
+
+* **Auth Proxy**: Important security fix for whitelist of IP address feature [#12444](https://github.com/grafana/grafana/pull/12444)
+* **UI**: Fix - Grafana footer overlapping page [#12430](https://github.com/grafana/grafana/issues/12430)
+* **Logging**: Errors should be reported before crashing [#12438](https://github.com/grafana/grafana/issues/12438)
+
+# 5.2.0-stable (2018-06-27)
### Minor
@@ -16,6 +32,10 @@
* **Render**: Enhance error message if phantomjs executable is not found [#11868](https://github.com/grafana/grafana/issues/11868)
* **Dashboard**: Set correct text in drop down when variable is present in url [#11968](https://github.com/grafana/grafana/issues/11968)
+### 5.2.0-beta3 fixes
+
+* **LDAP**: Handle "dn" ldap attribute more gracefully [#12385](https://github.com/grafana/grafana/pull/12385), reverts [#10970](https://github.com/grafana/grafana/pull/10970)
+
# 5.2.0-beta3 (2018-06-21)
### Minor
@@ -57,6 +77,7 @@
### New Features
* **Elasticsearch**: Alerting support [#5893](https://github.com/grafana/grafana/issues/5893), thx [@WPH95](https://github.com/WPH95)
+* **Build**: Crosscompile and packages Grafana on arm, windows, linux and darwin [#11920](https://github.com/grafana/grafana/pull/11920), thx [@fg2it](https://github.com/fg2it)
* **Login**: Change admin password after first login [#11882](https://github.com/grafana/grafana/issues/11882)
* **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541)
@@ -92,6 +113,10 @@
* **Dashboard list panel**: Search dashboards by folder [#11525](https://github.com/grafana/grafana/issues/11525)
* **Sidenav**: Always show server admin link in sidenav if grafana admin [#11657](https://github.com/grafana/grafana/issues/11657)
+# 5.1.5 (2018-06-27)
+
+* **Docker**: Config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170)
+
# 5.1.4 (2018-06-19)
* **Permissions**: Important security fix for API keys with viewer role [#12343](https://github.com/grafana/grafana/issues/12343)
@@ -1319,7 +1344,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated
**New features**
- [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site
- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site
-- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is promted with a warning if he really wants to overwrite the other's changes
+- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes
- [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views
- [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data
- [Issue #171](https://github.com/grafana/grafana/issues/171). Panel: Different time periods, panels can override dashboard relative time and/or add a time shift
diff --git a/build.go b/build.go
index 3f92f8833a2..77cbde50c41 100644
--- a/build.go
+++ b/build.go
@@ -465,7 +465,6 @@ func ldflags() string {
b.WriteString(fmt.Sprintf(" -X main.version=%s", version))
b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha()))
b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp()))
- b.WriteString(fmt.Sprintf(" -X main.enterprise=%t", enterprise))
return b.String()
}
diff --git a/docker/blocks/openldap/Dockerfile b/docker/blocks/openldap/Dockerfile
index c9b928ad56a..76172e133a4 100644
--- a/docker/blocks/openldap/Dockerfile
+++ b/docker/blocks/openldap/Dockerfile
@@ -8,7 +8,8 @@ ENV OPENLDAP_VERSION 2.4.40
RUN apt-get update && \
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
- slapd=${OPENLDAP_VERSION}* && \
+ slapd=${OPENLDAP_VERSION}* \
+ ldap-utils && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
@@ -22,6 +23,7 @@ COPY modules/ /etc/ldap.dist/modules
COPY prepopulate/ /etc/ldap.dist/prepopulate
COPY entrypoint.sh /entrypoint.sh
+COPY prepopulate.sh /prepopulate.sh
ENTRYPOINT ["/entrypoint.sh"]
diff --git a/docker/blocks/openldap/entrypoint.sh b/docker/blocks/openldap/entrypoint.sh
index d560b78d388..d202ed14b31 100755
--- a/docker/blocks/openldap/entrypoint.sh
+++ b/docker/blocks/openldap/entrypoint.sh
@@ -76,13 +76,14 @@ EOF
IFS=","; declare -a modules=($SLAPD_ADDITIONAL_MODULES); unset IFS
for module in "${modules[@]}"; do
- slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1
+ echo "Adding module ${module}"
+ slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1
done
fi
- for file in `ls /etc/ldap/prepopulate/*.ldif`; do
- slapadd -F /etc/ldap/slapd.d -l "$file"
- done
+ # This needs to run in background
+ # Will prepopulate entries after ldap daemon has started
+ ./prepopulate.sh &
chown -R openldap:openldap /etc/ldap/slapd.d/ /var/lib/ldap/ /var/run/slapd/
else
diff --git a/docker/blocks/openldap/notes.md b/docker/blocks/openldap/notes.md
index 483266f0d88..8de23d5ccf2 100644
--- a/docker/blocks/openldap/notes.md
+++ b/docker/blocks/openldap/notes.md
@@ -1,6 +1,6 @@
# Notes on OpenLdap Docker Block
-Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database.
+Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database.
The ldif files add three users, `ldapviewer`, `ldapeditor` and `ldapadmin`. Two groups, `admins` and `users`, are added that correspond with the group mappings in the default conf/ldap.toml. `ldapadmin` is a member of `admins` and `ldapeditor` is a member of `users`.
@@ -22,3 +22,27 @@ enabled = true
config_file = conf/ldap.toml
; allow_sign_up = true
```
+
+Test groups & users
+
+admins
+ ldap-admin
+ ldap-torkel
+ ldap-daniel
+backend
+ ldap-carl
+ ldap-torkel
+ ldap-leo
+frontend
+  ldap-torkel
+  ldap-daniel
+  ldap-leo
+editors
+  ldap-editor
+
+
+no groups
+  ldap-viewer
+  ldap-tobias
+
+
diff --git a/docker/blocks/openldap/prepopulate.sh b/docker/blocks/openldap/prepopulate.sh
new file mode 100755
index 00000000000..aa11f8aba4f
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+echo "Pre-populating ldap entries, first waiting for ldap to start"
+
+sleep 3
+
+adminUserDn="cn=admin,dc=grafana,dc=org"
+adminPassword="grafana"
+
+for file in /etc/ldap/prepopulate/*.ldif; do
+  ldapadd -x -D "$adminUserDn" -w "$adminPassword" -f "$file"
+done
+
+
diff --git a/docker/blocks/openldap/prepopulate/1_units.ldif b/docker/blocks/openldap/prepopulate/1_units.ldif
new file mode 100644
index 00000000000..22e06303688
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate/1_units.ldif
@@ -0,0 +1,9 @@
+dn: ou=groups,dc=grafana,dc=org
+ou: Groups
+objectclass: top
+objectclass: organizationalUnit
+
+dn: ou=users,dc=grafana,dc=org
+ou: Users
+objectclass: top
+objectclass: organizationalUnit
diff --git a/docker/blocks/openldap/prepopulate/2_users.ldif b/docker/blocks/openldap/prepopulate/2_users.ldif
new file mode 100644
index 00000000000..52e74b1e4b1
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate/2_users.ldif
@@ -0,0 +1,80 @@
+# ldap-admin
+dn: cn=ldap-admin,ou=users,dc=grafana,dc=org
+mail: ldap-admin@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-admin
+cn: ldap-admin
+
+dn: cn=ldap-editor,ou=users,dc=grafana,dc=org
+mail: ldap-editor@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-editor
+cn: ldap-editor
+
+dn: cn=ldap-viewer,ou=users,dc=grafana,dc=org
+mail: ldap-viewer@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-viewer
+cn: ldap-viewer
+
+dn: cn=ldap-carl,ou=users,dc=grafana,dc=org
+mail: ldap-carl@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-carl
+cn: ldap-carl
+
+dn: cn=ldap-daniel,ou=users,dc=grafana,dc=org
+mail: ldap-daniel@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-daniel
+cn: ldap-daniel
+
+dn: cn=ldap-leo,ou=users,dc=grafana,dc=org
+mail: ldap-leo@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-leo
+cn: ldap-leo
+
+dn: cn=ldap-tobias,ou=users,dc=grafana,dc=org
+mail: ldap-tobias@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-tobias
+cn: ldap-tobias
+
+dn: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+mail: ldap-torkel@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-torkel
+cn: ldap-torkel
diff --git a/docker/blocks/openldap/prepopulate/3_groups.ldif b/docker/blocks/openldap/prepopulate/3_groups.ldif
new file mode 100644
index 00000000000..8638a089cc8
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate/3_groups.ldif
@@ -0,0 +1,25 @@
+dn: cn=admins,ou=groups,dc=grafana,dc=org
+cn: admins
+objectClass: groupOfNames
+objectClass: top
+member: cn=ldap-admin,ou=users,dc=grafana,dc=org
+member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+
+dn: cn=editors,ou=groups,dc=grafana,dc=org
+cn: editors
+objectClass: groupOfNames
+member: cn=ldap-editor,ou=users,dc=grafana,dc=org
+
+dn: cn=backend,ou=groups,dc=grafana,dc=org
+cn: backend
+objectClass: groupOfNames
+member: cn=ldap-carl,ou=users,dc=grafana,dc=org
+member: cn=ldap-leo,ou=users,dc=grafana,dc=org
+member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+
+dn: cn=frontend,ou=groups,dc=grafana,dc=org
+cn: frontend
+objectClass: groupOfNames
+member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
+member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
+member: cn=ldap-leo,ou=users,dc=grafana,dc=org
diff --git a/docker/blocks/openldap/prepopulate/admin.ldif b/docker/blocks/openldap/prepopulate/admin.ldif
deleted file mode 100644
index 3f4406d5810..00000000000
--- a/docker/blocks/openldap/prepopulate/admin.ldif
+++ /dev/null
@@ -1,10 +0,0 @@
-dn: cn=ldapadmin,dc=grafana,dc=org
-mail: ldapadmin@grafana.com
-userPassword: grafana
-objectClass: person
-objectClass: top
-objectClass: inetOrgPerson
-objectClass: organizationalPerson
-sn: ldapadmin
-cn: ldapadmin
-memberOf: cn=admins,dc=grafana,dc=org
diff --git a/docker/blocks/openldap/prepopulate/adminsgroup.ldif b/docker/blocks/openldap/prepopulate/adminsgroup.ldif
deleted file mode 100644
index d8dece4e458..00000000000
--- a/docker/blocks/openldap/prepopulate/adminsgroup.ldif
+++ /dev/null
@@ -1,5 +0,0 @@
-dn: cn=admins,dc=grafana,dc=org
-cn: admins
-member: cn=ldapadmin,dc=grafana,dc=org
-objectClass: groupOfNames
-objectClass: top
diff --git a/docker/blocks/openldap/prepopulate/editor.ldif b/docker/blocks/openldap/prepopulate/editor.ldif
deleted file mode 100644
index eba3adc4352..00000000000
--- a/docker/blocks/openldap/prepopulate/editor.ldif
+++ /dev/null
@@ -1,10 +0,0 @@
-dn: cn=ldapeditor,dc=grafana,dc=org
-mail: ldapeditor@grafana.com
-userPassword: grafana
-objectClass: person
-objectClass: top
-objectClass: inetOrgPerson
-objectClass: organizationalPerson
-sn: ldapeditor
-cn: ldapeditor
-memberOf: cn=users,dc=grafana,dc=org
diff --git a/docker/blocks/openldap/prepopulate/usersgroup.ldif b/docker/blocks/openldap/prepopulate/usersgroup.ldif
deleted file mode 100644
index a1de3a50d38..00000000000
--- a/docker/blocks/openldap/prepopulate/usersgroup.ldif
+++ /dev/null
@@ -1,5 +0,0 @@
-dn: cn=users,dc=grafana,dc=org
-cn: users
-member: cn=ldapeditor,dc=grafana,dc=org
-objectClass: groupOfNames
-objectClass: top
diff --git a/docker/blocks/openldap/prepopulate/viewer.ldif b/docker/blocks/openldap/prepopulate/viewer.ldif
deleted file mode 100644
index f699a7df57b..00000000000
--- a/docker/blocks/openldap/prepopulate/viewer.ldif
+++ /dev/null
@@ -1,9 +0,0 @@
-dn: cn=ldapviewer,dc=grafana,dc=org
-mail: ldapviewer@grafana.com
-userPassword: grafana
-objectClass: person
-objectClass: top
-objectClass: inetOrgPerson
-objectClass: organizationalPerson
-sn: ldapviewer
-cn: ldapviewer
diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
index 1676cffa0a8..d4d5cc6d73e 100644
--- a/docs/sources/features/datasources/mssql.md
+++ b/docs/sources/features/datasources/mssql.md
@@ -77,9 +77,9 @@ Macro example | Description
------------ | -------------
*$__time(dateColumn)* | Will be replaced by an expression to rename the column to *time*. For example, *dateColumn as time*
*$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert a DATETIME column type to unix timestamp and rename it to *time*. For example, *DATEDIFF(second, '1970-01-01', dateColumn) AS time*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn >= DATEADD(s, 1494410783, '1970-01-01') AND dateColumn <= DATEADD(s, 1494410783, '1970-01-01')*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in GROUP BY clause. Providing a *fillValue* of *NULL* or *floating value* will automatically fill empty series in timerange with that value. For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*.
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
index f91417a43b7..ce50053c7ea 100644
--- a/docs/sources/features/datasources/mysql.md
+++ b/docs/sources/features/datasources/mysql.md
@@ -60,9 +60,9 @@ Macro example | Description
------------ | -------------
*$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
*$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
-*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn > FROM_UNIXTIME(1494410783) AND dateColumn < FROM_UNIXTIME(1494497183)*
-*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
-*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494497183)*
+*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
*$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
diff --git a/docs/sources/guides/whats-new-in-v5-2.md b/docs/sources/guides/whats-new-in-v5-2.md
index 554f8f073d8..e084f8618e4 100644
--- a/docs/sources/guides/whats-new-in-v5-2.md
+++ b/docs/sources/guides/whats-new-in-v5-2.md
@@ -14,14 +14,14 @@ weight = -8
Grafana v5.2 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements.
-* [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here!
-* [Cross platform build support]({{< relref "#cross-platform-build-support" >}}) enables native builds of Grafana for many more platforms!
-* [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets
-* [Security]({{< relref "#security" >}}) make your Grafana instance more secure
-* [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements
-* [InfluxDB]({{< relref "#influxdb" >}}) with support for a new function
-* [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord
-* [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) with save & import enhancements
+- [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here!
+- [Native builds for ARM]({{< relref "#native-builds-for-arm" >}}) native builds of Grafana for many more platforms!
+- [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets
+- [Security]({{< relref "#security" >}}) make your Grafana instance more secure
+- [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements
+- [InfluxDB]({{< relref "#influxdb" >}}) now supports the `mode` function
+- [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord
+- [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) with save & import enhancements
## Elasticsearch alerting
@@ -32,16 +32,18 @@ the most requested features by our community and now it's finally here. Please t
-## Cross platform build support
+## Native builds for ARM
-Grafana v5.2 brings an improved build pipeline with cross platform support. This enables native builds of Grafana for ARMv7 (x32), ARM64 (x64),
-MacOS/Darwin (x64) and Windows (x64) in both stable and nightly builds.
+Grafana v5.2 brings an improved build pipeline with cross-platform support. This enables native builds of Grafana for ARMv7 (x32) and ARM64 (x64).
+We've been longing for native ARM build support for ages. With the help from our amazing community this is now finally available.
+Please try it out and let us know what you think.
-We've been longing for native ARM build support for a long time. With the help from our amazing community this is now finally available.
+Another great addition with the improved build pipeline is that binaries for MacOS/Darwin (x64) and Windows (x64) are now automatically built and
+published for both stable and nightly builds.
## Improved Docker image
-The Grafana docker image now includes support for Docker secrets which enables you to supply Grafana with configuration through files. More
+The Grafana docker image adds support for Docker secrets which enables you to supply Grafana with configuration through files. More
information in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets).
## Security
@@ -49,18 +51,18 @@ information in the [Installing using Docker documentation](/installation/docker/
{{< docs-imagebox img="/img/docs/v52/login_change_password.png" max-width="800px" class="docs-image--right" >}}
Starting from Grafana v5.2, when you login with the administrator account using the default password you'll be presented with a form to change the password.
-By this we hope to encourage users to follow Grafana's best practices and change the default administrator password.
+We hope this encourages users to follow Grafana's best practices and change the default administrator password.
## Prometheus
The Prometheus datasource now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with *rate*
-functions get consistent results, and thus avoid graphs jumping around on reload.
+functions get consistent results, and thus avoids graphs jumping around on reload.
## InfluxDB
-The InfluxDB datasource now includes support for the *mode* function which allows to return the most frequent value in a list of field values.
+The InfluxDB datasource now includes support for the *mode* function which returns the most frequent value in a list of field values.
## Alerting
@@ -72,9 +74,9 @@ By popular demand Grafana now includes support for an alert notification channel
{{< docs-imagebox img="/img/docs/v52/dashboard_save_modal.png" max-width="800px" class="docs-image--right" >}}
-Starting from Grafana v5.2 a modified time range or variable are no longer saved by default. To save a modified
-time range or variable you'll need to actively select that when saving a dashboard, see screenshot.
-This should hopefully make it easier to have sane defaults of time and variables in dashboards and make it more explicit
+Starting from Grafana v5.2, a modified time range or variable are no longer saved by default. To save a modified
+time range or variable, you'll need to actively select that when saving a dashboard, see screenshot.
+This should hopefully make it easier to have sane defaults for time and variables in dashboards and make it more explicit
when you actually want to overwrite those settings.
@@ -83,13 +85,13 @@ when you actually want to overwrite those settings.
{{< docs-imagebox img="/img/docs/v52/dashboard_import.png" max-width="800px" class="docs-image--right" >}}
-Grafana v5.2 adds support for specifying an existing folder or create a new one when importing a dashboard, a long awaited feature since
-Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page have also got some general improvements
+Grafana v5.2 adds support for specifying an existing folder or creating a new one when importing a dashboard - a long-awaited feature since
+Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page has also got some general improvements
and should now make it more clear if a possible import will overwrite an existing dashboard, or not.
-This release also adds some improvements for those users only having editor or admin permissions in certain folders. Now the links to
-*Create Dashboard* and *Import Dashboard* is available in side navigation, dashboard search and manage dashboards/folder page for a
-user that has editor role in an organization or edit permission in at least one folder.
+This release also adds some improvements for those users only having editor or admin permissions in certain folders. The links to
+*Create Dashboard* and *Import Dashboard* are now available in the side navigation, in dashboard search and on the manage dashboards/folder page for a
+user that has editor role in an organization or the edit permission in at least one folder.
diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md
index 0194c69caac..2d4be21bb78 100644
--- a/docs/sources/http_api/admin.md
+++ b/docs/sources/http_api/admin.md
@@ -36,11 +36,10 @@ HTTP/1.1 200
Content-Type: application/json
{
-"DEFAULT":
-{
- "app_mode":"production"},
- "analytics":
- {
+ "DEFAULT": {
+ "app_mode":"production"
+ },
+ "analytics": {
"google_analytics_ua_id":"",
"reporting_enabled":"false"
},
@@ -195,15 +194,16 @@ HTTP/1.1 200
Content-Type: application/json
{
- "user_count":2,
- "org_count":1,
- "dashboard_count":4,
- "db_snapshot_count":2,
- "db_tag_count":6,
- "data_source_count":1,
- "playlist_count":1,
- "starred_db_count":2,
- "grafana_admin_count":2
+ "users":2,
+ "orgs":1,
+ "dashboards":4,
+ "snapshots":2,
+ "tags":6,
+ "datasources":1,
+ "playlists":1,
+ "stars":2,
+ "alerts":2,
+ "activeUsers":1
}
```
@@ -340,4 +340,4 @@ HTTP/1.1 200
Content-Type: application/json
{state: "new state", message: "alerts pause/un paused", "alertsAffected": 100}
-```
\ No newline at end of file
+```
diff --git a/docs/sources/http_api/auth.md b/docs/sources/http_api/auth.md
index 166a5a4fdb9..8ff40b5ef04 100644
--- a/docs/sources/http_api/auth.md
+++ b/docs/sources/http_api/auth.md
@@ -44,6 +44,14 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
The `Authorization` header value should be `Bearer `.
+The API Token can also be passed as a Basic authorization password with the special username `api_key`:
+
+curl example:
+```bash
+curl http://api_key:eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk@localhost:3000/api/org
+{"id":1,"name":"Main Org."}
+```
+
# Auth HTTP resources / actions
## Api Keys
diff --git a/docs/sources/http_api/folder.md b/docs/sources/http_api/folder.md
index 7ee1f737799..fb318ecf58e 100644
--- a/docs/sources/http_api/folder.md
+++ b/docs/sources/http_api/folder.md
@@ -19,6 +19,10 @@ The unique identifier (uid) of a folder can be used for uniquely identify folder
The uid can have a maximum length of 40 characters.
+## A note about the General folder
+
+The General folder (id=0) is special and is not part of the Folder API which means
+that you cannot use this API for retrieving information about the General folder.
## Get all folders
@@ -273,14 +277,14 @@ Status Codes:
## Get folder by id
-`GET /api/folders/:id`
+`GET /api/folders/id/:id`
Will return the folder identified by id.
**Example Request**:
```http
-GET /api/folders/1 HTTP/1.1
+GET /api/folders/id/1 HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
diff --git a/docs/sources/http_api/org.md b/docs/sources/http_api/org.md
index 1ebde0878e7..c55107d42f8 100644
--- a/docs/sources/http_api/org.md
+++ b/docs/sources/http_api/org.md
@@ -12,7 +12,13 @@ parent = "http_api"
# Organisation API
-## Get current Organisation
+The Organisation HTTP API is divided in two resources, `/api/org` (current organisation)
+and `/api/orgs` (admin organisations). One big difference between these are that
+the admin of all organisations API only works with basic authentication, see [Admin Organisations API](#admin-organisations-api) for more information.
+
+## Current Organisation API
+
+### Get current Organisation
`GET /api/org/`
@@ -37,135 +43,7 @@ Content-Type: application/json
}
```
-## Get Organisation by Id
-
-`GET /api/orgs/:orgId`
-
-**Example Request**:
-
-```http
-GET /api/orgs/1 HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/1
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{
- "id":1,
- "name":"Main Org.",
- "address":{
- "address1":"",
- "address2":"",
- "city":"",
- "zipCode":"",
- "state":"",
- "country":""
- }
-}
-```
-## Get Organisation by Name
-
-`GET /api/orgs/name/:orgName`
-
-**Example Request**:
-
-```http
-GET /api/orgs/name/Main%20Org%2E HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/name/Main%20Org%2E
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{
- "id":1,
- "name":"Main Org.",
- "address":{
- "address1":"",
- "address2":"",
- "city":"",
- "zipCode":"",
- "state":"",
- "country":""
- }
-}
-```
-
-## Create Organisation
-
-`POST /api/orgs`
-
-**Example Request**:
-
-```http
-POST /api/orgs HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
- "name":"New Org."
-}
-```
-Note: The api will work in the following two ways
-1) Need to set GF_USERS_ALLOW_ORG_CREATE=true
-2) Set the config users.allow_org_create to true in ini file
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{
- "orgId":"1",
- "message":"Organization created"
-}
-```
-
-
-## Update current Organisation
-
-`PUT /api/org`
-
-**Example Request**:
-
-```http
-PUT /api/org HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
- "name":"Main Org."
-}
-```
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{"message":"Organization updated"}
-```
-
-## Get all users within the actual organisation
+### Get all users within the current organisation
`GET /api/org/users`
@@ -195,36 +73,7 @@ Content-Type: application/json
]
```
-## Add a new user to the actual organisation
-
-`POST /api/org/users`
-
-Adds a global user to the actual organisation.
-
-**Example Request**:
-
-```http
-POST /api/org/users HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
- "role": "Admin",
- "loginOrEmail": "admin"
-}
-```
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{"message":"User added to organization"}
-```
-
-## Updates the given user
+### Updates the given user
`PATCH /api/org/users/:userId`
@@ -250,7 +99,7 @@ Content-Type: application/json
{"message":"Organization user updated"}
```
-## Delete user in actual organisation
+### Delete user in current organisation
`DELETE /api/org/users/:userId`
@@ -272,19 +121,181 @@ Content-Type: application/json
{"message":"User removed from organization"}
```
-# Organisations
+### Update current Organisation
-## Search all Organisations
+`PUT /api/org`
+
+**Example Request**:
+
+```http
+PUT /api/org HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+ "name":"Main Org."
+}
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{"message":"Organization updated"}
+```
+
+### Add a new user to the current organisation
+
+`POST /api/org/users`
+
+Adds a global user to the current organisation.
+
+**Example Request**:
+
+```http
+POST /api/org/users HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+
+{
+ "role": "Admin",
+ "loginOrEmail": "admin"
+}
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{"message":"User added to organization"}
+```
+
+## Admin Organisations API
+
+The Admin Organisations HTTP API does not currently work with an API Token. API Tokens are currently
+only linked to an organization and an organization role. They cannot be given the permission of server
+admin; only users can be given that permission. So in order to use these API calls you will have to
+use Basic Auth and the Grafana user must have the Grafana Admin permission (the default admin user
+is called `admin` and has permission to use this API).
+
+### Get Organisation by Id
+
+`GET /api/orgs/:orgId`
+
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
+**Example Request**:
+
+```http
+GET /api/orgs/1 HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{
+ "id":1,
+ "name":"Main Org.",
+ "address":{
+ "address1":"",
+ "address2":"",
+ "city":"",
+ "zipCode":"",
+ "state":"",
+ "country":""
+ }
+}
+```
+### Get Organisation by Name
+
+`GET /api/orgs/name/:orgName`
+
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
+**Example Request**:
+
+```http
+GET /api/orgs/name/Main%20Org%2E HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+```
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{
+ "id":1,
+ "name":"Main Org.",
+ "address":{
+ "address1":"",
+ "address2":"",
+ "city":"",
+ "zipCode":"",
+ "state":"",
+ "country":""
+ }
+}
+```
+
+### Create Organisation
+
+`POST /api/orgs`
+
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
+**Example Request**:
+
+```http
+POST /api/orgs HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+
+{
+ "name":"New Org."
+}
+```
+Note: The api will work in the following two ways
+1) Need to set GF_USERS_ALLOW_ORG_CREATE=true
+2) Set the config users.allow_org_create to true in ini file
+
+**Example Response**:
+
+```http
+HTTP/1.1 200
+Content-Type: application/json
+
+{
+ "orgId":"1",
+ "message":"Organization created"
+}
+```
+
+### Search all Organisations
`GET /api/orgs`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
**Example Request**:
```http
GET /api/orgs HTTP/1.1
Accept: application/json
Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```
Note: The api will only work when you pass the admin name and password
to the request http url, like http://admin:admin@localhost:3000/api/orgs
@@ -303,11 +314,12 @@ Content-Type: application/json
]
```
-## Update Organisation
+### Update Organisation
`PUT /api/orgs/:orgId`
Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet.
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
**Example Request**:
@@ -315,7 +327,6 @@ Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented
PUT /api/orgs/1 HTTP/1.1
Accept: application/json
Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
{
"name":"Main Org 2."
@@ -331,16 +342,17 @@ Content-Type: application/json
{"message":"Organization updated"}
```
-## Delete Organisation
+### Delete Organisation
`DELETE /api/orgs/:orgId`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
**Example Request**:
```http
DELETE /api/orgs/1 HTTP/1.1
Accept: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```
**Example Response**:
@@ -352,17 +364,18 @@ Content-Type: application/json
{"message":"Organization deleted"}
```
-## Get Users in Organisation
+### Get Users in Organisation
`GET /api/orgs/:orgId/users`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
**Example Request**:
```http
GET /api/orgs/1/users HTTP/1.1
Accept: application/json
Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```
Note: The api will only work when you pass the admin name and password
to the request http url, like http://admin:admin@localhost:3000/api/orgs/1/users
@@ -384,25 +397,24 @@ Content-Type: application/json
]
```
-## Add User in Organisation
+### Add User in Organisation
`POST /api/orgs/:orgId/users`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
**Example Request**:
```http
POST /api/orgs/1/users HTTP/1.1
Accept: application/json
Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
{
"loginOrEmail":"user",
"role":"Viewer"
}
```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/1/users
**Example Response**:
@@ -413,17 +425,18 @@ Content-Type: application/json
{"message":"User added to organization"}
```
-## Update Users in Organisation
+### Update Users in Organisation
`PATCH /api/orgs/:orgId/users/:userId`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
**Example Request**:
```http
PATCH /api/orgs/1/users/2 HTTP/1.1
Accept: application/json
Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
{
"role":"Admin"
@@ -439,17 +452,18 @@ Content-Type: application/json
{"message":"Organization user updated"}
```
-## Delete User in Organisation
+### Delete User in Organisation
`DELETE /api/orgs/:orgId/users/:userId`
+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
**Example Request**:
```http
DELETE /api/orgs/1/users/2 HTTP/1.1
Accept: application/json
Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```
**Example Response**:
diff --git a/docs/sources/index.md b/docs/sources/index.md
index 3c59b9baba0..da977b73e0c 100644
--- a/docs/sources/index.md
+++ b/docs/sources/index.md
@@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]
Provisioning
A guide to help you automate your Grafana setup & configuration.
- }}" class="nav-cards__item nav-cards__item--guide">
- What's new in v5.0
- Article on all the new cool features and enhancements in v5.0
+ }}" class="nav-cards__item nav-cards__item--guide">
+ What's new in v5.2
+ Article on all the new cool features and enhancements in v5.2
}}" class="nav-cards__item nav-cards__item--guide">
Screencasts
diff --git a/docs/sources/installation/behind_proxy.md b/docs/sources/installation/behind_proxy.md
index 89711aecb46..6e3884456ac 100644
--- a/docs/sources/installation/behind_proxy.md
+++ b/docs/sources/installation/behind_proxy.md
@@ -26,7 +26,7 @@ Otherwise Grafana will not behave correctly. See example below.
## Examples
Here are some example configurations for running Grafana behind a reverse proxy.
-### Grafana configuration (ex http://foo.bar.com)
+### Grafana configuration (ex http://foo.bar)
```bash
[server]
@@ -47,7 +47,7 @@ server {
}
```
-### Examples with **sub path** (ex http://foo.bar.com/grafana)
+### Examples with **sub path** (ex http://foo.bar/grafana)
#### Grafana configuration with sub path
```bash
diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md
index a9a7b5053c3..5dc87984512 100644
--- a/docs/sources/installation/windows.md
+++ b/docs/sources/installation/windows.md
@@ -19,6 +19,8 @@ installation.
## Configure
+**Important:** After you've downloaded the zip file and before extracting it, make sure to open properties for that file (right-click Properties), check the `unblock` checkbox and click `Ok`.
+
The zip file contains a folder with the current Grafana version. Extract
this folder to anywhere you want Grafana to run from. Go into the
`conf` directory and copy `sample.ini` to `custom.ini`. You should edit
diff --git a/docs/sources/reference/scripting.md b/docs/sources/reference/scripting.md
index 551805b567a..7f218765d39 100644
--- a/docs/sources/reference/scripting.md
+++ b/docs/sources/reference/scripting.md
@@ -21,42 +21,32 @@ If you open scripted.js you can see how it reads url parameters from ARGS variab
## Example
```javascript
-var rows = 1;
var seriesName = 'argName';
-if(!_.isUndefined(ARGS.rows)) {
- rows = parseInt(ARGS.rows, 10);
-}
-
if(!_.isUndefined(ARGS.name)) {
seriesName = ARGS.name;
}
-for (var i = 0; i < rows; i++) {
-
- dashboard.rows.push({
- title: 'Scripted Graph ' + i,
- height: '300px',
- panels: [
- {
- title: 'Events',
- type: 'graph',
- span: 12,
- fill: 1,
- linewidth: 2,
- targets: [
- {
- 'target': "randomWalk('" + seriesName + "')"
- },
- {
- 'target': "randomWalk('random walk2')"
- }
- ],
- }
- ]
- });
-
-}
+dashboard.panels.push({
+ title: 'Events',
+ type: 'graph',
+ fill: 1,
+ linewidth: 2,
+ gridPos: {
+ h: 10,
+ w: 24,
+ x: 0,
+ y: 10,
+ },
+ targets: [
+ {
+ 'target': "randomWalk('" + seriesName + "')"
+ },
+ {
+ 'target': "randomWalk('random walk2')"
+ }
+ ]
+});
return dashboard;
```
diff --git a/docs/versions.json b/docs/versions.json
index 61e471938f2..caefbe198d6 100644
--- a/docs/versions.json
+++ b/docs/versions.json
@@ -1,5 +1,6 @@
[
- { "version": "v5.1", "path": "/", "archived": false, "current": true },
+ { "version": "v5.2", "path": "/", "archived": false, "current": true },
+ { "version": "v5.1", "path": "/v5.1", "archived": true },
{ "version": "v5.0", "path": "/v5.0", "archived": true },
{ "version": "v4.6", "path": "/v4.6", "archived": true },
{ "version": "v4.5", "path": "/v4.5", "archived": true },
diff --git a/latest.json b/latest.json
index d8804f98441..8e26289c856 100644
--- a/latest.json
+++ b/latest.json
@@ -1,4 +1,4 @@
{
- "stable": "5.1.3",
- "testing": "5.1.3"
+ "stable": "5.2.0",
+ "testing": "5.2.0"
}
diff --git a/package.json b/package.json
index 0433dc3e986..4f2220abbae 100644
--- a/package.json
+++ b/package.json
@@ -4,7 +4,7 @@
"company": "Grafana Labs"
},
"name": "grafana",
- "version": "5.2.0-pre1",
+ "version": "5.3.0-pre1",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"
diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go
index abfdfb66322..9eba0e0d5b6 100644
--- a/pkg/api/alerting_test.go
+++ b/pkg/api/alerting_test.go
@@ -135,7 +135,7 @@ func postAlertScenario(desc string, url string, routePattern string, role m.Role
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go
index e5f63ce022b..6590eb19ff2 100644
--- a/pkg/api/annotations_test.go
+++ b/pkg/api/annotations_test.go
@@ -223,7 +223,7 @@ func postAnnotationScenario(desc string, url string, routePattern string, role m
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
@@ -246,7 +246,7 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m.
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
@@ -269,7 +269,7 @@ func deleteAnnotationsScenario(desc string, url string, routePattern string, rol
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 39dae56eb69..8870b9b095e 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -9,9 +9,7 @@ import (
m "github.com/grafana/grafana/pkg/models"
)
-// Register adds http routes
func (hs *HTTPServer) registerRoutes() {
- macaronR := hs.macaron
reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true})
reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true})
reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN)
@@ -21,15 +19,12 @@ func (hs *HTTPServer) registerRoutes() {
quota := middleware.Quota
bind := binding.Bind
- // automatically set HEAD for every GET
- macaronR.SetAutoHead(true)
-
r := hs.RouteRegister
// not logged in views
r.Get("/", reqSignedIn, Index)
r.Get("/logout", Logout)
- r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), wrap(LoginPost))
+ r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), Wrap(LoginPost))
r.Get("/login/:name", quota("session"), OAuthLogin)
r.Get("/login", LoginView)
r.Get("/invite/:code", Index)
@@ -88,20 +83,20 @@ func (hs *HTTPServer) registerRoutes() {
// sign up
r.Get("/signup", Index)
- r.Get("/api/user/signup/options", wrap(GetSignUpOptions))
- r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), wrap(SignUp))
- r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), wrap(SignUpStep2))
+ r.Get("/api/user/signup/options", Wrap(GetSignUpOptions))
+ r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), Wrap(SignUp))
+ r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), Wrap(SignUpStep2))
// invited
- r.Get("/api/user/invite/:code", wrap(GetInviteInfoByCode))
- r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), wrap(CompleteInvite))
+ r.Get("/api/user/invite/:code", Wrap(GetInviteInfoByCode))
+ r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), Wrap(CompleteInvite))
// reset password
r.Get("/user/password/send-reset-email", Index)
r.Get("/user/password/reset", Index)
- r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), wrap(SendResetPasswordEmail))
- r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), wrap(ResetPassword))
+ r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), Wrap(SendResetPasswordEmail))
+ r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), Wrap(ResetPassword))
// dashboard snapshots
r.Get("/dashboard/snapshot/*", Index)
@@ -111,8 +106,8 @@ func (hs *HTTPServer) registerRoutes() {
r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot)
r.Get("/api/snapshot/shared-options/", GetSharingOptions)
r.Get("/api/snapshots/:key", GetDashboardSnapshot)
- r.Get("/api/snapshots-delete/:deleteKey", wrap(DeleteDashboardSnapshotByDeleteKey))
- r.Delete("/api/snapshots/:key", reqEditorRole, wrap(DeleteDashboardSnapshot))
+ r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey))
+ r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot))
// api renew session based on remember cookie
r.Get("/api/login/ping", quota("session"), LoginAPIPing)
@@ -122,138 +117,138 @@ func (hs *HTTPServer) registerRoutes() {
// user (signed in)
apiRoute.Group("/user", func(userRoute routing.RouteRegister) {
- userRoute.Get("/", wrap(GetSignedInUser))
- userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser))
- userRoute.Post("/using/:id", wrap(UserSetUsingOrg))
- userRoute.Get("/orgs", wrap(GetSignedInUserOrgList))
+ userRoute.Get("/", Wrap(GetSignedInUser))
+ userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser))
+ userRoute.Post("/using/:id", Wrap(UserSetUsingOrg))
+ userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList))
- userRoute.Post("/stars/dashboard/:id", wrap(StarDashboard))
- userRoute.Delete("/stars/dashboard/:id", wrap(UnstarDashboard))
+ userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard))
+ userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard))
- userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword))
- userRoute.Get("/quotas", wrap(GetUserQuotas))
- userRoute.Put("/helpflags/:id", wrap(SetHelpFlag))
+ userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), Wrap(ChangeUserPassword))
+ userRoute.Get("/quotas", Wrap(GetUserQuotas))
+ userRoute.Put("/helpflags/:id", Wrap(SetHelpFlag))
// For dev purpose
- userRoute.Get("/helpflags/clear", wrap(ClearHelpFlags))
+ userRoute.Get("/helpflags/clear", Wrap(ClearHelpFlags))
- userRoute.Get("/preferences", wrap(GetUserPreferences))
- userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences))
+ userRoute.Get("/preferences", Wrap(GetUserPreferences))
+ userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateUserPreferences))
})
// users (admin permission required)
apiRoute.Group("/users", func(usersRoute routing.RouteRegister) {
- usersRoute.Get("/", wrap(SearchUsers))
- usersRoute.Get("/search", wrap(SearchUsersWithPaging))
- usersRoute.Get("/:id", wrap(GetUserByID))
- usersRoute.Get("/:id/orgs", wrap(GetUserOrgList))
+ usersRoute.Get("/", Wrap(SearchUsers))
+ usersRoute.Get("/search", Wrap(SearchUsersWithPaging))
+ usersRoute.Get("/:id", Wrap(GetUserByID))
+ usersRoute.Get("/:id/orgs", Wrap(GetUserOrgList))
// query parameters /users/lookup?loginOrEmail=admin@example.com
- usersRoute.Get("/lookup", wrap(GetUserByLoginOrEmail))
- usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), wrap(UpdateUser))
- usersRoute.Post("/:id/using/:orgId", wrap(UpdateUserActiveOrg))
+ usersRoute.Get("/lookup", Wrap(GetUserByLoginOrEmail))
+ usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), Wrap(UpdateUser))
+ usersRoute.Post("/:id/using/:orgId", Wrap(UpdateUserActiveOrg))
}, reqGrafanaAdmin)
// team (admin permission required)
apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) {
- teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam))
- teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam))
- teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID))
- teamsRoute.Get("/:teamId/members", wrap(GetTeamMembers))
- teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember))
- teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember))
+ teamsRoute.Post("/", bind(m.CreateTeamCommand{}), Wrap(CreateTeam))
+ teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), Wrap(UpdateTeam))
+ teamsRoute.Delete("/:teamId", Wrap(DeleteTeamByID))
+ teamsRoute.Get("/:teamId/members", Wrap(GetTeamMembers))
+ teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), Wrap(AddTeamMember))
+ teamsRoute.Delete("/:teamId/members/:userId", Wrap(RemoveTeamMember))
}, reqOrgAdmin)
// team without requirement of user to be org admin
apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) {
- teamsRoute.Get("/:teamId", wrap(GetTeamByID))
- teamsRoute.Get("/search", wrap(SearchTeams))
+ teamsRoute.Get("/:teamId", Wrap(GetTeamByID))
+ teamsRoute.Get("/search", Wrap(SearchTeams))
})
// org information available to all users.
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
- orgRoute.Get("/", wrap(GetOrgCurrent))
- orgRoute.Get("/quotas", wrap(GetOrgQuotas))
+ orgRoute.Get("/", Wrap(GetOrgCurrent))
+ orgRoute.Get("/quotas", Wrap(GetOrgQuotas))
})
// current org
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
- orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent))
- orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent))
- orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg))
- orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg))
- orgRoute.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg))
+ orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrgCurrent))
+ orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddressCurrent))
+ orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), Wrap(AddOrgUserToCurrentOrg))
+ orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUserForCurrentOrg))
+ orgRoute.Delete("/users/:userId", Wrap(RemoveOrgUserForCurrentOrg))
// invites
- orgRoute.Get("/invites", wrap(GetPendingOrgInvites))
- orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), wrap(AddOrgInvite))
- orgRoute.Patch("/invites/:code/revoke", wrap(RevokeInvite))
+ orgRoute.Get("/invites", Wrap(GetPendingOrgInvites))
+ orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), Wrap(AddOrgInvite))
+ orgRoute.Patch("/invites/:code/revoke", Wrap(RevokeInvite))
// prefs
- orgRoute.Get("/preferences", wrap(GetOrgPreferences))
- orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences))
+ orgRoute.Get("/preferences", Wrap(GetOrgPreferences))
+ orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateOrgPreferences))
}, reqOrgAdmin)
// current org without requirement of user to be org admin
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
- orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg))
+ orgRoute.Get("/users", Wrap(GetOrgUsersForCurrentOrg))
})
// create new org
- apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg))
+ apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), Wrap(CreateOrg))
// search all orgs
- apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs))
+ apiRoute.Get("/orgs", reqGrafanaAdmin, Wrap(SearchOrgs))
// orgs (admin routes)
apiRoute.Group("/orgs/:orgId", func(orgsRoute routing.RouteRegister) {
- orgsRoute.Get("/", wrap(GetOrgByID))
- orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg))
- orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress))
- orgsRoute.Delete("/", wrap(DeleteOrgByID))
- orgsRoute.Get("/users", wrap(GetOrgUsers))
- orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), wrap(AddOrgUser))
- orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUser))
- orgsRoute.Delete("/users/:userId", wrap(RemoveOrgUser))
- orgsRoute.Get("/quotas", wrap(GetOrgQuotas))
- orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), wrap(UpdateOrgQuota))
+ orgsRoute.Get("/", Wrap(GetOrgByID))
+ orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrg))
+ orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddress))
+ orgsRoute.Delete("/", Wrap(DeleteOrgByID))
+ orgsRoute.Get("/users", Wrap(GetOrgUsers))
+ orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), Wrap(AddOrgUser))
+ orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUser))
+ orgsRoute.Delete("/users/:userId", Wrap(RemoveOrgUser))
+ orgsRoute.Get("/quotas", Wrap(GetOrgQuotas))
+ orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), Wrap(UpdateOrgQuota))
}, reqGrafanaAdmin)
// orgs (admin routes)
apiRoute.Group("/orgs/name/:name", func(orgsRoute routing.RouteRegister) {
- orgsRoute.Get("/", wrap(GetOrgByName))
+ orgsRoute.Get("/", Wrap(GetOrgByName))
}, reqGrafanaAdmin)
// auth api keys
apiRoute.Group("/auth/keys", func(keysRoute routing.RouteRegister) {
- keysRoute.Get("/", wrap(GetAPIKeys))
- keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddAPIKey))
- keysRoute.Delete("/:id", wrap(DeleteAPIKey))
+ keysRoute.Get("/", Wrap(GetAPIKeys))
+ keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), Wrap(AddAPIKey))
+ keysRoute.Delete("/:id", Wrap(DeleteAPIKey))
}, reqOrgAdmin)
// Preferences
apiRoute.Group("/preferences", func(prefRoute routing.RouteRegister) {
- prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard))
+ prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), Wrap(SetHomeDashboard))
})
// Data sources
apiRoute.Group("/datasources", func(datasourceRoute routing.RouteRegister) {
- datasourceRoute.Get("/", wrap(GetDataSources))
- datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource))
- datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource))
- datasourceRoute.Delete("/:id", wrap(DeleteDataSourceByID))
- datasourceRoute.Delete("/name/:name", wrap(DeleteDataSourceByName))
- datasourceRoute.Get("/:id", wrap(GetDataSourceByID))
- datasourceRoute.Get("/name/:name", wrap(GetDataSourceByName))
+ datasourceRoute.Get("/", Wrap(GetDataSources))
+ datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), Wrap(AddDataSource))
+ datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), Wrap(UpdateDataSource))
+ datasourceRoute.Delete("/:id", Wrap(DeleteDataSourceByID))
+ datasourceRoute.Delete("/name/:name", Wrap(DeleteDataSourceByName))
+ datasourceRoute.Get("/:id", Wrap(GetDataSourceByID))
+ datasourceRoute.Get("/name/:name", Wrap(GetDataSourceByName))
}, reqOrgAdmin)
- apiRoute.Get("/datasources/id/:name", wrap(GetDataSourceIDByName), reqSignedIn)
+ apiRoute.Get("/datasources/id/:name", Wrap(GetDataSourceIDByName), reqSignedIn)
- apiRoute.Get("/plugins", wrap(GetPluginList))
- apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingByID))
- apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown))
+ apiRoute.Get("/plugins", Wrap(GetPluginList))
+ apiRoute.Get("/plugins/:pluginId/settings", Wrap(GetPluginSettingByID))
+ apiRoute.Get("/plugins/:pluginId/markdown/:name", Wrap(GetPluginMarkdown))
apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) {
- pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards))
- pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting))
+ pluginRoute.Get("/:pluginId/dashboards/", Wrap(GetPluginDashboards))
+ pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), Wrap(UpdatePluginSetting))
}, reqOrgAdmin)
apiRoute.Get("/frontend/settings/", GetFrontendSettings)
@@ -262,106 +257,106 @@ func (hs *HTTPServer) registerRoutes() {
// Folders
apiRoute.Group("/folders", func(folderRoute routing.RouteRegister) {
- folderRoute.Get("/", wrap(GetFolders))
- folderRoute.Get("/id/:id", wrap(GetFolderByID))
- folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder))
+ folderRoute.Get("/", Wrap(GetFolders))
+ folderRoute.Get("/id/:id", Wrap(GetFolderByID))
+ folderRoute.Post("/", bind(m.CreateFolderCommand{}), Wrap(CreateFolder))
folderRoute.Group("/:uid", func(folderUidRoute routing.RouteRegister) {
- folderUidRoute.Get("/", wrap(GetFolderByUID))
- folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder))
- folderUidRoute.Delete("/", wrap(DeleteFolder))
+ folderUidRoute.Get("/", Wrap(GetFolderByUID))
+ folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), Wrap(UpdateFolder))
+ folderUidRoute.Delete("/", Wrap(DeleteFolder))
folderUidRoute.Group("/permissions", func(folderPermissionRoute routing.RouteRegister) {
- folderPermissionRoute.Get("/", wrap(GetFolderPermissionList))
- folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions))
+ folderPermissionRoute.Get("/", Wrap(GetFolderPermissionList))
+ folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateFolderPermissions))
})
})
})
// Dashboard
apiRoute.Group("/dashboards", func(dashboardRoute routing.RouteRegister) {
- dashboardRoute.Get("/uid/:uid", wrap(GetDashboard))
- dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUID))
+ dashboardRoute.Get("/uid/:uid", Wrap(GetDashboard))
+ dashboardRoute.Delete("/uid/:uid", Wrap(DeleteDashboardByUID))
- dashboardRoute.Get("/db/:slug", wrap(GetDashboard))
- dashboardRoute.Delete("/db/:slug", wrap(DeleteDashboard))
+ dashboardRoute.Get("/db/:slug", Wrap(GetDashboard))
+ dashboardRoute.Delete("/db/:slug", Wrap(DeleteDashboard))
- dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff))
+ dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), Wrap(CalculateDashboardDiff))
- dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), wrap(PostDashboard))
- dashboardRoute.Get("/home", wrap(GetHomeDashboard))
+ dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), Wrap(PostDashboard))
+ dashboardRoute.Get("/home", Wrap(GetHomeDashboard))
dashboardRoute.Get("/tags", GetDashboardTags)
- dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard))
+ dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), Wrap(ImportDashboard))
dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) {
- dashIdRoute.Get("/versions", wrap(GetDashboardVersions))
- dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion))
- dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion))
+ dashIdRoute.Get("/versions", Wrap(GetDashboardVersions))
+ dashIdRoute.Get("/versions/:id", Wrap(GetDashboardVersion))
+ dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), Wrap(RestoreDashboardVersion))
dashIdRoute.Group("/permissions", func(dashboardPermissionRoute routing.RouteRegister) {
- dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList))
- dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions))
+ dashboardPermissionRoute.Get("/", Wrap(GetDashboardPermissionList))
+ dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateDashboardPermissions))
})
})
})
// Dashboard snapshots
apiRoute.Group("/dashboard/snapshots", func(dashboardRoute routing.RouteRegister) {
- dashboardRoute.Get("/", wrap(SearchDashboardSnapshots))
+ dashboardRoute.Get("/", Wrap(SearchDashboardSnapshots))
})
// Playlist
apiRoute.Group("/playlists", func(playlistRoute routing.RouteRegister) {
- playlistRoute.Get("/", wrap(SearchPlaylists))
- playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist))
- playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems))
- playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, wrap(GetPlaylistDashboards))
- playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, wrap(DeletePlaylist))
- playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, wrap(UpdatePlaylist))
- playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), wrap(CreatePlaylist))
+ playlistRoute.Get("/", Wrap(SearchPlaylists))
+ playlistRoute.Get("/:id", ValidateOrgPlaylist, Wrap(GetPlaylist))
+ playlistRoute.Get("/:id/items", ValidateOrgPlaylist, Wrap(GetPlaylistItems))
+ playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, Wrap(GetPlaylistDashboards))
+ playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, Wrap(DeletePlaylist))
+ playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, Wrap(UpdatePlaylist))
+ playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), Wrap(CreatePlaylist))
})
// Search
apiRoute.Get("/search/", Search)
// metrics
- apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics))
- apiRoute.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios))
- apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSQLTestData))
- apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk))
+ apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(QueryMetrics))
+ apiRoute.Get("/tsdb/testdata/scenarios", Wrap(GetTestDataScenarios))
+ apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, Wrap(GenerateSQLTestData))
+ apiRoute.Get("/tsdb/testdata/random-walk", Wrap(GetTestDataRandomWalk))
apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) {
- alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
- alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), wrap(PauseAlert))
- alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
- alertsRoute.Get("/", wrap(GetAlerts))
- alertsRoute.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard))
+ alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), Wrap(AlertTest))
+ alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), Wrap(PauseAlert))
+ alertsRoute.Get("/:alertId", ValidateOrgAlert, Wrap(GetAlert))
+ alertsRoute.Get("/", Wrap(GetAlerts))
+ alertsRoute.Get("/states-for-dashboard", Wrap(GetAlertStatesForDashboard))
})
- apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications))
- apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers))
+ apiRoute.Get("/alert-notifications", Wrap(GetAlertNotifications))
+ apiRoute.Get("/alert-notifiers", Wrap(GetAlertNotifiers))
apiRoute.Group("/alert-notifications", func(alertNotifications routing.RouteRegister) {
- alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest))
- alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification))
- alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification))
- alertNotifications.Get("/:notificationId", wrap(GetAlertNotificationByID))
- alertNotifications.Delete("/:notificationId", wrap(DeleteAlertNotification))
+ alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), Wrap(NotificationTest))
+ alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), Wrap(CreateAlertNotification))
+ alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), Wrap(UpdateAlertNotification))
+ alertNotifications.Get("/:notificationId", Wrap(GetAlertNotificationByID))
+ alertNotifications.Delete("/:notificationId", Wrap(DeleteAlertNotification))
}, reqEditorRole)
- apiRoute.Get("/annotations", wrap(GetAnnotations))
- apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations))
+ apiRoute.Get("/annotations", Wrap(GetAnnotations))
+ apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), Wrap(DeleteAnnotations))
apiRoute.Group("/annotations", func(annotationsRoute routing.RouteRegister) {
- annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation))
- annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationByID))
- annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation))
- annotationsRoute.Delete("/region/:regionId", wrap(DeleteAnnotationRegion))
- annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation))
+ annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), Wrap(PostAnnotation))
+ annotationsRoute.Delete("/:annotationId", Wrap(DeleteAnnotationByID))
+ annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), Wrap(UpdateAnnotation))
+ annotationsRoute.Delete("/region/:regionId", Wrap(DeleteAnnotationRegion))
+ annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), Wrap(PostGraphiteAnnotation))
})
// error test
- r.Get("/metrics/error", wrap(GenerateError))
+ r.Get("/metrics/error", Wrap(GenerateError))
}, reqSignedIn)
@@ -372,10 +367,10 @@ func (hs *HTTPServer) registerRoutes() {
adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword)
adminRoute.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions)
adminRoute.Delete("/users/:id", AdminDeleteUser)
- adminRoute.Get("/users/:id/quotas", wrap(GetUserQuotas))
- adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), wrap(UpdateUserQuota))
+ adminRoute.Get("/users/:id/quotas", Wrap(GetUserQuotas))
+ adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), Wrap(UpdateUserQuota))
adminRoute.Get("/stats", AdminGetStats)
- adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), wrap(PauseAllAlerts))
+ adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), Wrap(PauseAllAlerts))
}, reqGrafanaAdmin)
// rendering
@@ -393,10 +388,4 @@ func (hs *HTTPServer) registerRoutes() {
// streams
//r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream)
-
- r.Register(macaronR)
-
- InitAppPluginRoutes(macaronR)
-
- macaronR.NotFound(NotFoundHandler)
}
diff --git a/pkg/api/app_routes.go b/pkg/api/app_routes.go
index 0b7dcd32ce3..a2137089fc6 100644
--- a/pkg/api/app_routes.go
+++ b/pkg/api/app_routes.go
@@ -18,7 +18,7 @@ import (
var pluginProxyTransport *http.Transport
-func InitAppPluginRoutes(r *macaron.Macaron) {
+func (hs *HTTPServer) initAppPluginRoutes(r *macaron.Macaron) {
pluginProxyTransport = &http.Transport{
TLSClientConfig: &tls.Config{
InsecureSkipVerify: setting.PluginAppsSkipVerifyTLS,
diff --git a/pkg/api/common.go b/pkg/api/common.go
index 97f41ff7c72..7973c72c8fa 100644
--- a/pkg/api/common.go
+++ b/pkg/api/common.go
@@ -30,7 +30,7 @@ type NormalResponse struct {
err error
}
-func wrap(action interface{}) macaron.Handler {
+func Wrap(action interface{}) macaron.Handler {
return func(c *m.ReqContext) {
var res Response
diff --git a/pkg/api/common_test.go b/pkg/api/common_test.go
index 40c438b607a..8b66a7a468b 100644
--- a/pkg/api/common_test.go
+++ b/pkg/api/common_test.go
@@ -23,7 +23,7 @@ func loggedInUserScenarioWithRole(desc string, method string, url string, routeP
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.UserId = TestUserID
sc.context.OrgId = TestOrgID
@@ -51,7 +51,7 @@ func anonymousUserScenario(desc string, method string, url string, routePattern
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
if sc.handlerFunc != nil {
return sc.handlerFunc(sc.context)
diff --git a/pkg/api/dashboard_permission_test.go b/pkg/api/dashboard_permission_test.go
index 24f0bdca365..f65c5f1f5fa 100644
--- a/pkg/api/dashboard_permission_test.go
+++ b/pkg/api/dashboard_permission_test.go
@@ -194,7 +194,7 @@ func updateDashboardPermissionScenario(desc string, url string, routePattern str
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.OrgId = TestOrgID
sc.context.UserId = TestUserID
diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go
index ccde2382787..50a2e314f5c 100644
--- a/pkg/api/dashboard_test.go
+++ b/pkg/api/dashboard_test.go
@@ -882,7 +882,7 @@ func postDashboardScenario(desc string, url string, routePattern string, mock *d
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.SignedInUser = &m.SignedInUser{OrgId: cmd.OrgId, UserId: cmd.UserId}
@@ -907,7 +907,7 @@ func postDiffScenario(desc string, url string, routePattern string, cmd dtos.Cal
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.SignedInUser = &m.SignedInUser{
OrgId: TestOrgID,
diff --git a/pkg/api/dtos/index.go b/pkg/api/dtos/index.go
index 8c7f505277d..77004899fc3 100644
--- a/pkg/api/dtos/index.go
+++ b/pkg/api/dtos/index.go
@@ -13,6 +13,7 @@ type IndexViewData struct {
Theme string
NewGrafanaVersionExists bool
NewGrafanaVersion string
+ AppName string
}
type PluginCss struct {
diff --git a/pkg/api/folder_permission_test.go b/pkg/api/folder_permission_test.go
index f7458af6dce..64a746ca937 100644
--- a/pkg/api/folder_permission_test.go
+++ b/pkg/api/folder_permission_test.go
@@ -226,7 +226,7 @@ func updateFolderPermissionScenario(desc string, url string, routePattern string
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.OrgId = TestOrgID
sc.context.UserId = TestUserID
diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go
index 0d9b9495686..6e24e432535 100644
--- a/pkg/api/folder_test.go
+++ b/pkg/api/folder_test.go
@@ -152,7 +152,7 @@ func createFolderScenario(desc string, url string, routePattern string, mock *fa
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID}
@@ -181,7 +181,7 @@ func updateFolderScenario(desc string, url string, routePattern string, mock *fa
defer bus.ClearBusHandlers()
sc := setupScenarioContext(url)
- sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+ sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
sc.context = c
sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID}
diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go
index e7272e68997..da3c88566c1 100644
--- a/pkg/api/frontendsettings.go
+++ b/pkg/api/frontendsettings.go
@@ -153,6 +153,7 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) {
"latestVersion": plugins.GrafanaLatestVersion,
"hasUpdate": plugins.GrafanaHasUpdate,
"env": setting.Env,
+ "isEnterprise": setting.IsEnterprise,
},
}
diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go
index e1a10fb468f..0de63ce5e08 100644
--- a/pkg/api/http_server.go
+++ b/pkg/api/http_server.go
@@ -33,7 +33,11 @@ import (
)
func init() {
- registry.RegisterService(&HTTPServer{})
+ registry.Register(®istry.Descriptor{
+ Name: "HTTPServer",
+ Instance: &HTTPServer{},
+ InitPriority: registry.High,
+ })
}
type HTTPServer struct {
@@ -54,6 +58,10 @@ func (hs *HTTPServer) Init() error {
hs.log = log.New("http.server")
hs.cache = gocache.New(5*time.Minute, 10*time.Minute)
+ hs.streamManager = live.NewStreamManager()
+ hs.macaron = hs.newMacaron()
+ hs.registerRoutes()
+
return nil
}
@@ -61,10 +69,8 @@ func (hs *HTTPServer) Run(ctx context.Context) error {
var err error
hs.context = ctx
- hs.streamManager = live.NewStreamManager()
- hs.macaron = hs.newMacaron()
- hs.registerRoutes()
+ hs.applyRoutes()
hs.streamManager.Run(ctx)
listenAddr := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
@@ -164,6 +170,26 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron {
macaron.Env = setting.Env
m := macaron.New()
+ // automatically set HEAD for every GET
+ m.SetAutoHead(true)
+
+ return m
+}
+
+func (hs *HTTPServer) applyRoutes() {
+ // start with middlewares & static routes
+ hs.addMiddlewaresAndStaticRoutes()
+ // then add view routes & api routes
+ hs.RouteRegister.Register(hs.macaron)
+ // then custom app proxy routes
+ hs.initAppPluginRoutes(hs.macaron)
+ // lastly not found route
+ hs.macaron.NotFound(NotFoundHandler)
+}
+
+func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
+ m := hs.macaron
+
m.Use(middleware.Logger())
if setting.EnableGzip {
@@ -175,7 +201,7 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron {
for _, route := range plugins.StaticRoutes {
pluginRoute := path.Join("/public/plugins/", route.PluginId)
hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory)
- hs.mapStatic(m, route.Directory, "", pluginRoute)
+ hs.mapStatic(hs.macaron, route.Directory, "", pluginRoute)
}
hs.mapStatic(m, setting.StaticRootPath, "build", "public/build")
@@ -204,8 +230,6 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron {
}
m.Use(middleware.AddDefaultResponseHeaders())
-
- return m
}
func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
diff --git a/pkg/api/index.go b/pkg/api/index.go
index a52bd3e77b0..ea10940d3ba 100644
--- a/pkg/api/index.go
+++ b/pkg/api/index.go
@@ -76,6 +76,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
BuildCommit: setting.BuildCommit,
NewGrafanaVersion: plugins.GrafanaLatestVersion,
NewGrafanaVersionExists: plugins.GrafanaHasUpdate,
+ AppName: setting.ApplicationName,
}
if setting.DisableGravatar {
diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go
index 976c027d749..f00e6bba0fd 100644
--- a/pkg/cmd/grafana-server/main.go
+++ b/pkg/cmd/grafana-server/main.go
@@ -18,7 +18,7 @@ import (
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/setting"
- _ "github.com/grafana/grafana/pkg/extensions"
+ extensions "github.com/grafana/grafana/pkg/extensions"
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
_ "github.com/grafana/grafana/pkg/tsdb/cloudwatch"
@@ -35,7 +35,6 @@ import (
var version = "5.0.0"
var commit = "NA"
var buildstamp string
-var enterprise string
var configFile = flag.String("config", "", "path to config file")
var homePath = flag.String("homepath", "", "path to grafana install/home path, defaults to working directory")
@@ -78,7 +77,7 @@ func main() {
setting.BuildVersion = version
setting.BuildCommit = commit
setting.BuildStamp = buildstampInt64
- setting.Enterprise, _ = strconv.ParseBool(enterprise)
+ setting.IsEnterprise = extensions.IsEnterprise
metrics.M_Grafana_Version.WithLabelValues(version).Set(1)
@@ -88,10 +87,11 @@ func main() {
err := server.Run()
+ code := server.Exit(err)
trace.Stop()
log.Close()
- server.Exit(err)
+ os.Exit(code)
}
func listenToSystemSignals(server *GrafanaServerImpl) {
diff --git a/pkg/cmd/grafana-server/server.go b/pkg/cmd/grafana-server/server.go
index 6444528f7f0..8794d7d8338 100644
--- a/pkg/cmd/grafana-server/server.go
+++ b/pkg/cmd/grafana-server/server.go
@@ -175,7 +175,7 @@ func (g *GrafanaServerImpl) Shutdown(reason string) {
g.childRoutines.Wait()
}
-func (g *GrafanaServerImpl) Exit(reason error) {
+func (g *GrafanaServerImpl) Exit(reason error) int {
// default exit code is 1
code := 1
@@ -185,7 +185,7 @@ func (g *GrafanaServerImpl) Exit(reason error) {
}
g.log.Error("Server shutdown", "reason", reason)
- os.Exit(code)
+ return code
}
func (g *GrafanaServerImpl) writePIDFile() {
diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go
index 34ac9da7e86..6e3461da8a8 100644
--- a/pkg/extensions/main.go
+++ b/pkg/extensions/main.go
@@ -1,3 +1,3 @@
package extensions
-import _ "github.com/pkg/errors"
+var IsEnterprise bool = false
diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go
index e1d5e3e3b48..d6eaf9a975e 100644
--- a/pkg/login/ext_user.go
+++ b/pkg/login/ext_user.go
@@ -21,6 +21,7 @@ func UpsertUser(cmd *m.UpsertUserCommand) error {
Email: extUser.Email,
Login: extUser.Login,
}
+
err := bus.Dispatch(userQuery)
if err != m.ErrUserNotFound && err != nil {
return err
@@ -66,7 +67,21 @@ func UpsertUser(cmd *m.UpsertUserCommand) error {
}
}
- return syncOrgRoles(cmd.Result, extUser)
+ err = syncOrgRoles(cmd.Result, extUser)
+ if err != nil {
+ return err
+ }
+
+ err = bus.Dispatch(&m.SyncTeamsCommand{
+ User: cmd.Result,
+ ExternalUser: extUser,
+ })
+
+ if err == bus.ErrHandlerNotFound {
+ return nil
+ }
+
+ return err
}
func createUser(extUser *m.ExternalUserInfo) (*m.User, error) {
@@ -76,6 +91,7 @@ func createUser(extUser *m.ExternalUserInfo) (*m.User, error) {
Name: extUser.Name,
SkipOrgSetup: len(extUser.OrgRoles) > 0,
}
+
if err := bus.Dispatch(cmd); err != nil {
return nil, err
}
diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go
index 026a94fa43e..bdf87b2db54 100644
--- a/pkg/login/ldap.go
+++ b/pkg/login/ldap.go
@@ -163,6 +163,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
Name: fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName),
Login: ldapUser.Username,
Email: ldapUser.Email,
+ Groups: ldapUser.MemberOf,
OrgRoles: map[int64]m.RoleType{},
}
@@ -194,6 +195,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
ExternalUser: extUser,
SignupAllowed: setting.LdapAllowSignup,
}
+
err := bus.Dispatch(userQuery)
if err != nil {
return nil, err
diff --git a/pkg/login/ldap_test.go b/pkg/login/ldap_test.go
index 34932926406..5080840704e 100644
--- a/pkg/login/ldap_test.go
+++ b/pkg/login/ldap_test.go
@@ -1,6 +1,7 @@
package login
import (
+ "context"
"crypto/tls"
"testing"
@@ -14,6 +15,14 @@ func TestLdapAuther(t *testing.T) {
Convey("When translating ldap user to grafana user", t, func() {
+ var user1 = &m.User{}
+
+ bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpsertUserCommand) error {
+ cmd.Result = user1
+ cmd.Result.Login = "torkelo"
+ return nil
+ })
+
Convey("Given no ldap group map match", func() {
ldapAuther := NewLdapAuthenticator(&LdapServerConf{
LdapGroups: []*LdapGroupToOrgRole{{}},
@@ -23,8 +32,6 @@ func TestLdapAuther(t *testing.T) {
So(err, ShouldEqual, ErrInvalidCredentials)
})
- var user1 = &m.User{}
-
ldapAutherScenario("Given wildcard group match", func(sc *scenarioContext) {
ldapAuther := NewLdapAuthenticator(&LdapServerConf{
LdapGroups: []*LdapGroupToOrgRole{
@@ -96,7 +103,6 @@ func TestLdapAuther(t *testing.T) {
})
Convey("When syncing ldap groups to grafana org roles", t, func() {
-
ldapAutherScenario("given no current user orgs", func(sc *scenarioContext) {
ldapAuther := NewLdapAuthenticator(&LdapServerConf{
LdapGroups: []*LdapGroupToOrgRole{
@@ -322,6 +328,10 @@ func ldapAutherScenario(desc string, fn scenarioFunc) {
bus.AddHandler("test", UpsertUser)
+ bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.SyncTeamsCommand) error {
+ return nil
+ })
+
bus.AddHandler("test", func(cmd *m.GetUserByAuthInfoQuery) error {
sc.getUserByAuthInfoQuery = cmd
sc.getUserByAuthInfoQuery.Result = &m.User{Login: cmd.Login}
diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go
index 3d3cfc2e1b6..4dd84c12151 100644
--- a/pkg/metrics/metrics.go
+++ b/pkg/metrics/metrics.go
@@ -334,6 +334,14 @@ func updateTotalStats() {
var usageStatsURL = "https://stats.grafana.org/grafana-usage-report"
+func getEdition() string {
+ if setting.IsEnterprise {
+ return "enterprise"
+ } else {
+ return "oss"
+ }
+}
+
func sendUsageStats() {
if !setting.ReportingEnabled {
return
@@ -349,6 +357,7 @@ func sendUsageStats() {
"metrics": metrics,
"os": runtime.GOOS,
"arch": runtime.GOARCH,
+ "edition": getEdition(),
}
statsQuery := models.GetSystemStatsQuery{}
diff --git a/pkg/middleware/auth.go b/pkg/middleware/auth.go
index 37e79c01071..5faee1e3fa7 100644
--- a/pkg/middleware/auth.go
+++ b/pkg/middleware/auth.go
@@ -9,6 +9,7 @@ import (
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/session"
"github.com/grafana/grafana/pkg/setting"
+ "github.com/grafana/grafana/pkg/util"
)
type AuthOptions struct {
@@ -34,6 +35,11 @@ func getApiKey(c *m.ReqContext) string {
return key
}
+ username, password, err := util.DecodeBasicAuthHeader(header)
+ if err == nil && username == "api_key" {
+ return password
+ }
+
return ""
}
diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go
index eff532b0da2..144a0ae3a69 100644
--- a/pkg/middleware/auth_proxy.go
+++ b/pkg/middleware/auth_proxy.go
@@ -2,6 +2,7 @@ package middleware
import (
"fmt"
+ "net"
"net/mail"
"reflect"
"strings"
@@ -28,7 +29,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool {
}
// if auth proxy ip(s) defined, check if request comes from one of those
- if err := checkAuthenticationProxy(ctx.RemoteAddr(), proxyHeaderValue); err != nil {
+ if err := checkAuthenticationProxy(ctx.Req.RemoteAddr, proxyHeaderValue); err != nil {
ctx.Handle(407, "Proxy authentication required", err)
return true
}
@@ -196,23 +197,18 @@ func checkAuthenticationProxy(remoteAddr string, proxyHeaderValue string) error
return nil
}
- // Multiple ip addresses? Right-most IP address is the IP address of the most recent proxy
- if strings.Contains(remoteAddr, ",") {
- sourceIPs := strings.Split(remoteAddr, ",")
- remoteAddr = strings.TrimSpace(sourceIPs[len(sourceIPs)-1])
- }
-
- remoteAddr = strings.TrimPrefix(remoteAddr, "[")
- remoteAddr = strings.TrimSuffix(remoteAddr, "]")
-
proxies := strings.Split(setting.AuthProxyWhitelist, ",")
+ sourceIP, _, err := net.SplitHostPort(remoteAddr)
+ if err != nil {
+ return err
+ }
// Compare allowed IP addresses to actual address
for _, proxyIP := range proxies {
- if remoteAddr == strings.TrimSpace(proxyIP) {
+ if sourceIP == strings.TrimSpace(proxyIP) {
return nil
}
}
- return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, remoteAddr)
+ return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, sourceIP)
}
diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go
index 0b50358ad73..87c23a7b49a 100644
--- a/pkg/middleware/middleware_test.go
+++ b/pkg/middleware/middleware_test.go
@@ -82,7 +82,7 @@ func TestMiddlewareContext(t *testing.T) {
setting.BasicAuthEnabled = true
authHeader := util.GetBasicAuthHeader("myUser", "myPass")
- sc.fakeReq("GET", "/").withAuthoriziationHeader(authHeader).exec()
+ sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec()
Convey("Should init middleware context with user", func() {
So(sc.context.IsSignedIn, ShouldEqual, true)
@@ -128,6 +128,28 @@ func TestMiddlewareContext(t *testing.T) {
})
})
+ middlewareScenario("Valid api key via Basic auth", func(sc *scenarioContext) {
+ keyhash := util.EncodePassword("v5nAwpMafFP6znaS4urhdWDLS5511M42", "asd")
+
+ bus.AddHandler("test", func(query *m.GetApiKeyByNameQuery) error {
+ query.Result = &m.ApiKey{OrgId: 12, Role: m.ROLE_EDITOR, Key: keyhash}
+ return nil
+ })
+
+ authHeader := util.GetBasicAuthHeader("api_key", "eyJrIjoidjVuQXdwTWFmRlA2em5hUzR1cmhkV0RMUzU1MTFNNDIiLCJuIjoiYXNkIiwiaWQiOjF9")
+ sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec()
+
+ Convey("Should return 200", func() {
+ So(sc.resp.Code, ShouldEqual, 200)
+ })
+
+ Convey("Should init middleware context", func() {
+ So(sc.context.IsSignedIn, ShouldEqual, true)
+ So(sc.context.OrgId, ShouldEqual, 12)
+ So(sc.context.OrgRole, ShouldEqual, m.ROLE_EDITOR)
+ })
+ })
+
middlewareScenario("UserId in session", func(sc *scenarioContext) {
sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) {
@@ -293,61 +315,6 @@ func TestMiddlewareContext(t *testing.T) {
})
})
- middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is not trusted", func(sc *scenarioContext) {
- setting.AuthProxyEnabled = true
- setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
- setting.AuthProxyHeaderProperty = "username"
- setting.AuthProxyWhitelist = "192.168.1.1, 2001::23"
-
- bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
- query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
- return nil
- })
-
- bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
- cmd.Result = &m.User{Id: 33}
- return nil
- })
-
- sc.fakeReq("GET", "/")
- sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
- sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.1, 192.168.1.2")
- sc.exec()
-
- Convey("should return 407 status code", func() {
- So(sc.resp.Code, ShouldEqual, 407)
- So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 192.168.1.2 is not from the authentication proxy")
- })
- })
-
- middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is trusted", func(sc *scenarioContext) {
- setting.AuthProxyEnabled = true
- setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
- setting.AuthProxyHeaderProperty = "username"
- setting.AuthProxyWhitelist = "192.168.1.1, 2001::23"
-
- bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
- query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
- return nil
- })
-
- bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
- cmd.Result = &m.User{Id: 33}
- return nil
- })
-
- sc.fakeReq("GET", "/")
- sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
- sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.2, 192.168.1.1")
- sc.exec()
-
- Convey("Should init context with user info", func() {
- So(sc.context.IsSignedIn, ShouldBeTrue)
- So(sc.context.UserId, ShouldEqual, 33)
- So(sc.context.OrgId, ShouldEqual, 4)
- })
- })
-
middlewareScenario("When session exists for previous user, create a new session", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
@@ -473,7 +440,7 @@ func (sc *scenarioContext) withInvalidApiKey() *scenarioContext {
return sc
}
-func (sc *scenarioContext) withAuthoriziationHeader(authHeader string) *scenarioContext {
+func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext {
sc.authHeader = authHeader
return sc
}
diff --git a/pkg/models/team_member.go b/pkg/models/team_member.go
index 19cf657292d..9434dad8ecd 100644
--- a/pkg/models/team_member.go
+++ b/pkg/models/team_member.go
@@ -42,6 +42,7 @@ type RemoveTeamMemberCommand struct {
type GetTeamMembersQuery struct {
OrgId int64
TeamId int64
+ UserId int64
Result []*TeamMemberDTO
}
diff --git a/pkg/models/user_auth.go b/pkg/models/user_auth.go
index 0ecd144d52c..162a4d867a9 100644
--- a/pkg/models/user_auth.go
+++ b/pkg/models/user_auth.go
@@ -19,6 +19,7 @@ type ExternalUserInfo struct {
Email string
Login string
Name string
+ Groups []string
OrgRoles map[int64]RoleType
}
@@ -70,3 +71,8 @@ type GetAuthInfoQuery struct {
Result *UserAuth
}
+
+type SyncTeamsCommand struct {
+ ExternalUser *ExternalUserInfo
+ User *User
+}
diff --git a/pkg/registry/registry.go b/pkg/registry/registry.go
index e597a06f15f..87fca27f6c1 100644
--- a/pkg/registry/registry.go
+++ b/pkg/registry/registry.go
@@ -4,6 +4,8 @@ import (
"context"
"reflect"
"sort"
+
+ "github.com/grafana/grafana/pkg/services/sqlstore/migrator"
)
type Descriptor struct {
@@ -34,23 +36,45 @@ func GetServices() []*Descriptor {
return services
}
+// Service interface is the lowest common shape that services
+// are expected to forfill to be started within Grafana.
type Service interface {
+
+ // Init is called by Grafana main process which gives the service
+ // the possibility do some initial work before its started. Things
+ // like adding routes, bus handlers should be done in the Init function
Init() error
}
-// Useful for alerting service
+// CanBeDisabled allows the services to decide if it should
+// be started or not by itself. This is useful for services
+// that might not always be started, ex alerting.
+// This will be called after `Init()`.
type CanBeDisabled interface {
+
+ // IsDisabled should return a bool saying if it can be started or not.
IsDisabled() bool
}
+// BackgroundService should be implemented for services that have
+// long running tasks in the background.
type BackgroundService interface {
+ // Run starts the background process of the service after `Init` has been called
+ // on all services. The `context.Context` passed into the function should be used
+ // to subscribe to ctx.Done() so the service can be notified when Grafana shuts down.
Run(ctx context.Context) error
}
-type HasInitPriority interface {
- GetInitPriority() Priority
+// DatabaseMigrator allows the caller to add migrations to
+// the migrator passed as argument
+type DatabaseMigrator interface {
+
+ // AddMigrations allows the service to add migrations to
+ // the database migrator.
+ AddMigration(mg *migrator.Migrator)
}
+// IsDisabled takes a service and returns true if it's disabled
func IsDisabled(srv Service) bool {
canBeDisabled, ok := srv.(CanBeDisabled)
return ok && canBeDisabled.IsDisabled()
diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go
index 861e9b9cbfc..c7212e48174 100644
--- a/pkg/services/alerting/extractor_test.go
+++ b/pkg/services/alerting/extractor_test.go
@@ -50,7 +50,7 @@ func TestAlertRuleExtraction(t *testing.T) {
So(err, ShouldBeNil)
Convey("Extractor should not modify the original json", func() {
- dashJson, err := simplejson.NewJson([]byte(json))
+ dashJson, err := simplejson.NewJson(json)
So(err, ShouldBeNil)
dash := m.NewDashboardFromJson(dashJson)
@@ -79,7 +79,7 @@ func TestAlertRuleExtraction(t *testing.T) {
Convey("Parsing and validating dashboard containing graphite alerts", func() {
- dashJson, err := simplejson.NewJson([]byte(json))
+ dashJson, err := simplejson.NewJson(json)
So(err, ShouldBeNil)
dash := m.NewDashboardFromJson(dashJson)
@@ -143,7 +143,7 @@ func TestAlertRuleExtraction(t *testing.T) {
panelWithoutId, err := ioutil.ReadFile("./test-data/panels-missing-id.json")
So(err, ShouldBeNil)
- dashJson, err := simplejson.NewJson([]byte(panelWithoutId))
+ dashJson, err := simplejson.NewJson(panelWithoutId)
So(err, ShouldBeNil)
dash := m.NewDashboardFromJson(dashJson)
extractor := NewDashAlertExtractor(dash, 1)
@@ -159,7 +159,7 @@ func TestAlertRuleExtraction(t *testing.T) {
panelWithIdZero, err := ioutil.ReadFile("./test-data/panel-with-id-0.json")
So(err, ShouldBeNil)
- dashJson, err := simplejson.NewJson([]byte(panelWithIdZero))
+ dashJson, err := simplejson.NewJson(panelWithIdZero)
So(err, ShouldBeNil)
dash := m.NewDashboardFromJson(dashJson)
extractor := NewDashAlertExtractor(dash, 1)
diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go
index 2ea68cf5085..07212746f7e 100644
--- a/pkg/services/alerting/notifier.go
+++ b/pkg/services/alerting/notifier.go
@@ -104,7 +104,10 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) {
return err
}
- n.log.Info("uploaded", "url", context.ImagePublicUrl)
+ if context.ImagePublicUrl != "" {
+ n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicUrl)
+ }
+
return nil
}
diff --git a/pkg/services/sqlstore/migrations/team_mig.go b/pkg/services/sqlstore/migrations/team_mig.go
index eb0641fbc32..9800d27f8ab 100644
--- a/pkg/services/sqlstore/migrations/team_mig.go
+++ b/pkg/services/sqlstore/migrations/team_mig.go
@@ -50,4 +50,5 @@ func addTeamMigrations(mg *Migrator) {
mg.AddMigration("Add column email to team table", NewAddColumnMigration(teamV1, &Column{
Name: "email", Type: DB_NVarchar, Nullable: true, Length: 190,
}))
+
}
diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go
index b0edc1676e0..13d706b6198 100644
--- a/pkg/services/sqlstore/sqlstore.go
+++ b/pkg/services/sqlstore/sqlstore.go
@@ -132,6 +132,13 @@ func (ss *SqlStore) Init() error {
migrator := migrator.NewMigrator(x)
migrations.AddMigrations(migrator)
+ for _, descriptor := range registry.GetServices() {
+ sc, ok := descriptor.Instance.(registry.DatabaseMigrator)
+ if ok {
+ sc.AddMigration(migrator)
+ }
+ }
+
if err := migrator.Start(); err != nil {
return fmt.Errorf("Migration failed err: %v", err)
}
diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go
index 7d53d114235..9378ca37f60 100644
--- a/pkg/services/sqlstore/team.go
+++ b/pkg/services/sqlstore/team.go
@@ -268,7 +268,15 @@ func GetTeamMembers(query *m.GetTeamMembersQuery) error {
query.Result = make([]*m.TeamMemberDTO, 0)
sess := x.Table("team_member")
sess.Join("INNER", "user", fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
- sess.Where("team_member.org_id=? and team_member.team_id=?", query.OrgId, query.TeamId)
+ if query.OrgId != 0 {
+ sess.Where("team_member.org_id=?", query.OrgId)
+ }
+ if query.TeamId != 0 {
+ sess.Where("team_member.team_id=?", query.TeamId)
+ }
+ if query.UserId != 0 {
+ sess.Where("team_member.user_id=?", query.UserId)
+ }
sess.Cols("user.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login")
sess.Asc("user.login", "user.email")
diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go
index e71a3619aa5..d8c8e6431c0 100644
--- a/pkg/setting/setting.go
+++ b/pkg/setting/setting.go
@@ -18,9 +18,10 @@ import (
"github.com/go-macaron/session"
+ "time"
+
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/util"
- "time"
)
type Scheme string
@@ -49,7 +50,7 @@ var (
BuildVersion string
BuildCommit string
BuildStamp int64
- Enterprise bool
+ IsEnterprise bool
ApplicationName string
// Paths
@@ -517,7 +518,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
Raw = cfg.Raw
ApplicationName = "Grafana"
- if Enterprise {
+ if IsEnterprise {
ApplicationName += " Enterprise"
}
diff --git a/pkg/social/github_oauth.go b/pkg/social/github_oauth.go
index 815c684cf03..b07f112b8d3 100644
--- a/pkg/social/github_oauth.go
+++ b/pkg/social/github_oauth.go
@@ -213,6 +213,7 @@ func (s *SocialGithub) UserInfo(client *http.Client, token *oauth2.Token) (*Basi
userInfo := &BasicUserInfo{
Name: data.Login,
Login: data.Login,
+ Id: fmt.Sprintf("%d", data.Id),
Email: data.Email,
}
diff --git a/pkg/tsdb/elasticsearch/client/search_request_test.go b/pkg/tsdb/elasticsearch/client/search_request_test.go
index b026578d64f..862b8058cba 100644
--- a/pkg/tsdb/elasticsearch/client/search_request_test.go
+++ b/pkg/tsdb/elasticsearch/client/search_request_test.go
@@ -32,7 +32,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
So(json.Get("size").MustInt(500), ShouldEqual, 0)
So(json.Get("sort").Interface(), ShouldBeNil)
@@ -81,7 +81,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
So(json.Get("size").MustInt(0), ShouldEqual, 200)
@@ -124,7 +124,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
scriptFields, err := json.Get("script_fields").Map()
@@ -163,7 +163,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
So(json.Get("aggs").MustMap(), ShouldHaveLength, 2)
@@ -200,7 +200,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
So(json.Get("aggs").MustMap(), ShouldHaveLength, 1)
@@ -251,7 +251,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
topAggOne := json.GetPath("aggs", "1")
@@ -300,7 +300,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
topAgg := json.GetPath("aggs", "1")
@@ -364,7 +364,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
termsAgg := json.GetPath("aggs", "1")
@@ -419,7 +419,7 @@ func TestSearchRequest(t *testing.T) {
Convey("When marshal to JSON should generate correct json", func() {
body, err := json.Marshal(sr)
So(err, ShouldBeNil)
- json, err := simplejson.NewJson([]byte(body))
+ json, err := simplejson.NewJson(body)
So(err, ShouldBeNil)
scriptFields, err := json.Get("script_fields").Map()
diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go
index bb9489cd654..ad3d1edd5d7 100644
--- a/pkg/tsdb/mssql/macros.go
+++ b/pkg/tsdb/mssql/macros.go
@@ -82,11 +82,12 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
- return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+
+ return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeFrom":
- return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetFromAsSecondsEpoch()), nil
+ return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
case "__timeTo":
- return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetToAsSecondsEpoch()), nil
+ return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -108,7 +109,7 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
m.Query.Model.Set("fillValue", floatVal)
}
}
- return fmt.Sprintf("CAST(ROUND(DATEDIFF(second, '1970-01-01', %s)/%.1f, 0) as bigint)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
+ return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go
index ae0d4f67d2b..49368fe3631 100644
--- a/pkg/tsdb/mssql/macros_test.go
+++ b/pkg/tsdb/mssql/macros_test.go
@@ -49,21 +49,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeGroup function", func() {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300")
+ So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300")
})
Convey("interpolate __timeGroup function with spaces around arguments", func() {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300")
+ So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300")
})
Convey("interpolate __timeGroup function with fill (value = NULL)", func() {
@@ -96,14 +96,14 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -137,21 +137,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -185,21 +185,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
index e62d30a6325..db04d6d1f02 100644
--- a/pkg/tsdb/mssql/mssql_test.go
+++ b/pkg/tsdb/mssql/mssql_test.go
@@ -210,11 +210,12 @@ func TestMSSQL(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
- So(len(points), ShouldEqual, 6)
+ // without fill this should result in 4 buckets
+ So(len(points), ShouldEqual, 4)
dt := fromStart
- for i := 0; i < 3; i++ {
+ for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
@@ -222,9 +223,9 @@ func TestMSSQL(t *testing.T) {
dt = dt.Add(5 * time.Minute)
}
- // adjust for 5 minute gap
- dt = dt.Add(5 * time.Minute)
- for i := 3; i < 6; i++ {
+ // adjust for 10 minute gap between first and second set of points
+ dt = dt.Add(10 * time.Minute)
+ for i := 2; i < 4; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
@@ -260,7 +261,7 @@ func TestMSSQL(t *testing.T) {
dt := fromStart
- for i := 0; i < 3; i++ {
+ for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
@@ -268,17 +269,22 @@ func TestMSSQL(t *testing.T) {
dt = dt.Add(5 * time.Minute)
}
+ // check for NULL values inserted by fill
+ So(points[2][0].Valid, ShouldBeFalse)
So(points[3][0].Valid, ShouldBeFalse)
- // adjust for 5 minute gap
- dt = dt.Add(5 * time.Minute)
- for i := 4; i < 7; i++ {
+ // adjust for 10 minute gap between first and second set of points
+ dt = dt.Add(10 * time.Minute)
+ for i := 4; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
+
+ So(points[6][0].Valid, ShouldBeFalse)
+
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -525,7 +531,7 @@ func TestMSSQL(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 1)
- So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+ So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
})
Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -547,7 +553,7 @@ func TestMSSQL(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 1)
- So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+ So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
})
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -924,7 +930,7 @@ func TestMSSQL(t *testing.T) {
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
- So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+ So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
@@ -954,7 +960,7 @@ func TestMSSQL(t *testing.T) {
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
- So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+ So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go
index fadcbe4edbc..584f731f3b8 100644
--- a/pkg/tsdb/mysql/macros.go
+++ b/pkg/tsdb/mysql/macros.go
@@ -77,11 +77,12 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
- return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
+
+ return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeFrom":
- return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetFromAsSecondsEpoch()), nil
+ return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
case "__timeTo":
- return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetToAsSecondsEpoch()), nil
+ return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
@@ -103,7 +104,7 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
m.Query.Model.Set("fillValue", floatVal)
}
}
- return fmt.Sprintf("cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)", args[0], interval.Seconds(), interval.Seconds()), nil
+ return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go
index 66ec143eac8..2561661b385 100644
--- a/pkg/tsdb/mysql/macros_test.go
+++ b/pkg/tsdb/mysql/macros_test.go
@@ -39,7 +39,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)")
+ So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
})
Convey("interpolate __timeGroup function with spaces around arguments", func() {
@@ -47,28 +47,28 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)")
+ So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
})
Convey("interpolate __timeFilter function", func() {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -150,21 +150,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+ So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go
index 5650de237c5..850a37617e2 100644
--- a/pkg/tsdb/mysql/mysql_test.go
+++ b/pkg/tsdb/mysql/mysql_test.go
@@ -132,8 +132,8 @@ func TestMySQL(t *testing.T) {
So(column[7].(float64), ShouldEqual, 1.11)
So(column[8].(float64), ShouldEqual, 2.22)
So(*column[9].(*float32), ShouldEqual, 3.33)
- So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now())
- So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now())
+ So(column[10].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now())
+ So(column[11].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now())
So(column[12].(string), ShouldEqual, "11:11:11")
So(column[13].(int64), ShouldEqual, 2018)
So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
@@ -209,11 +209,12 @@ func TestMySQL(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
- So(len(points), ShouldEqual, 6)
+ // without fill this should result in 4 buckets
+ So(len(points), ShouldEqual, 4)
dt := fromStart
- for i := 0; i < 3; i++ {
+ for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
@@ -221,9 +222,9 @@ func TestMySQL(t *testing.T) {
dt = dt.Add(5 * time.Minute)
}
- // adjust for 5 minute gap
- dt = dt.Add(5 * time.Minute)
- for i := 3; i < 6; i++ {
+ // adjust for 10 minute gap between first and second set of points
+ dt = dt.Add(10 * time.Minute)
+ for i := 2; i < 4; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
@@ -259,7 +260,7 @@ func TestMySQL(t *testing.T) {
dt := fromStart
- for i := 0; i < 3; i++ {
+ for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
@@ -267,17 +268,23 @@ func TestMySQL(t *testing.T) {
dt = dt.Add(5 * time.Minute)
}
+ // check for NULL values inserted by fill
+ So(points[2][0].Valid, ShouldBeFalse)
So(points[3][0].Valid, ShouldBeFalse)
- // adjust for 5 minute gap
- dt = dt.Add(5 * time.Minute)
- for i := 4; i < 7; i++ {
+ // adjust for 10 minute gap between first and second set of points
+ dt = dt.Add(10 * time.Minute)
+ for i := 4; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
+
+ // check for NULL values inserted by fill
+ So(points[6][0].Valid, ShouldBeFalse)
+
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -571,7 +578,7 @@ func TestMySQL(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 1)
- So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+ So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
})
Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -593,7 +600,7 @@ func TestMySQL(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 1)
- So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+ So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
})
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -810,7 +817,7 @@ func TestMySQL(t *testing.T) {
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
- So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+ So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go
index 05e39f2c762..61e88418ff4 100644
--- a/pkg/tsdb/postgres/macros.go
+++ b/pkg/tsdb/postgres/macros.go
@@ -109,7 +109,7 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string,
m.Query.Model.Set("fillValue", floatVal)
}
}
- return fmt.Sprintf("(extract(epoch from %s)/%v)::bigint*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
+ return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go
index c3c15691e42..8c581850430 100644
--- a/pkg/tsdb/postgres/macros_test.go
+++ b/pkg/tsdb/postgres/macros_test.go
@@ -53,7 +53,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time")
+ So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
})
Convey("interpolate __timeGroup function with spaces between args", func() {
@@ -61,7 +61,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
So(err, ShouldBeNil)
- So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time")
+ So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
})
Convey("interpolate __timeTo function", func() {
diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go
index 7f24d5a2063..a3a6d6546df 100644
--- a/pkg/tsdb/postgres/postgres_test.go
+++ b/pkg/tsdb/postgres/postgres_test.go
@@ -189,21 +189,23 @@ func TestPostgres(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
- So(len(points), ShouldEqual, 6)
+ // without fill this should result in 4 buckets
+ So(len(points), ShouldEqual, 4)
dt := fromStart
- for i := 0; i < 3; i++ {
+ for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
+ So(aTime.Unix()%300, ShouldEqual, 0)
dt = dt.Add(5 * time.Minute)
}
- // adjust for 5 minute gap
- dt = dt.Add(5 * time.Minute)
- for i := 3; i < 6; i++ {
+ // adjust for 10 minute gap between first and second set of points
+ dt = dt.Add(10 * time.Minute)
+ for i := 2; i < 4; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
@@ -239,7 +241,7 @@ func TestPostgres(t *testing.T) {
dt := fromStart
- for i := 0; i < 3; i++ {
+ for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
@@ -247,17 +249,23 @@ func TestPostgres(t *testing.T) {
dt = dt.Add(5 * time.Minute)
}
+ // check for NULL values inserted by fill
+ So(points[2][0].Valid, ShouldBeFalse)
So(points[3][0].Valid, ShouldBeFalse)
- // adjust for 5 minute gap
- dt = dt.Add(5 * time.Minute)
- for i := 4; i < 7; i++ {
+ // adjust for 10 minute gap between first and second set of points
+ dt = dt.Add(10 * time.Minute)
+ for i := 4; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
+
+ // check for NULL values inserted by fill
+ So(points[6][0].Valid, ShouldBeFalse)
+
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -504,7 +512,7 @@ func TestPostgres(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 1)
- So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+ So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
})
Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -526,7 +534,7 @@ func TestPostgres(t *testing.T) {
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 1)
- So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+ So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
})
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -713,7 +721,7 @@ func TestPostgres(t *testing.T) {
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
- So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+ So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
@@ -743,7 +751,7 @@ func TestPostgres(t *testing.T) {
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
- So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+ So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
diff --git a/public/app/core/config.ts b/public/app/core/config.ts
index e111d0d0e9f..e065ddb22fb 100644
--- a/public/app/core/config.ts
+++ b/public/app/core/config.ts
@@ -1,11 +1,18 @@
import _ from 'lodash';
-class Settings {
+export interface BuildInfo {
+ version: string;
+ commit: string;
+ isEnterprise: boolean;
+ env: string;
+}
+
+export class Settings {
datasources: any;
panels: any;
appSubUrl: string;
window_title_prefix: string;
- buildInfo: any;
+ buildInfo: BuildInfo;
new_panel_title: string;
bootData: any;
externalUserMngLinkUrl: string;
@@ -32,7 +39,14 @@ class Settings {
playlist_timespan: '1m',
unsaved_changes_warning: true,
appSubUrl: '',
+ buildInfo: {
+ version: 'v1.0',
+ commit: '1',
+ env: 'production',
+ isEnterprise: false,
+ },
};
+
_.extend(this, defaults, options);
}
}
diff --git a/public/app/core/directives/value_select_dropdown.ts b/public/app/core/directives/value_select_dropdown.ts
index d6c6c3af5c5..d384904c2d8 100644
--- a/public/app/core/directives/value_select_dropdown.ts
+++ b/public/app/core/directives/value_select_dropdown.ts
@@ -93,7 +93,7 @@ export class ValueSelectDropdownCtrl {
tagValuesPromise = this.$q.when(tag.values);
}
- tagValuesPromise.then(values => {
+ return tagValuesPromise.then(values => {
tag.values = values;
tag.valuesText = values.join(' + ');
_.each(this.options, option => {
@@ -132,7 +132,7 @@ export class ValueSelectDropdownCtrl {
this.highlightIndex = (this.highlightIndex + direction) % this.search.options.length;
}
- selectValue(option, event, commitChange, excludeOthers) {
+ selectValue(option, event, commitChange?, excludeOthers?) {
if (!option) {
return;
}
diff --git a/public/app/core/services/context_srv.ts b/public/app/core/services/context_srv.ts
index be8a0af7b7b..8959573e731 100644
--- a/public/app/core/services/context_srv.ts
+++ b/public/app/core/services/context_srv.ts
@@ -34,14 +34,10 @@ export class ContextSrv {
constructor() {
this.sidemenu = store.getBool('grafana.sidemenu', true);
- if (!config.buildInfo) {
- config.buildInfo = {};
- }
if (!config.bootData) {
config.bootData = { user: {}, settings: {} };
}
- this.version = config.buildInfo.version;
this.user = new User();
this.isSignedIn = this.user.isSignedIn;
this.isGrafanaAdmin = this.user.isGrafanaAdmin;
diff --git a/public/app/core/specs/table_model.jest.ts b/public/app/core/specs/table_model.jest.ts
index a2c1eb5e1af..3d4c526cfea 100644
--- a/public/app/core/specs/table_model.jest.ts
+++ b/public/app/core/specs/table_model.jest.ts
@@ -44,3 +44,38 @@ describe('when sorting table asc', () => {
expect(table.rows[2][1]).toBe(15);
});
});
+
+describe('when sorting with nulls', () => {
+ var table;
+ var values;
+
+ beforeEach(() => {
+ table = new TableModel();
+ table.columns = [{}, {}];
+ table.rows = [[42, ''], [19, 'a'], [null, 'b'], [0, 'd'], [null, null], [2, 'c'], [0, null], [-8, '']];
+ });
+
+ it('numbers with nulls at end with asc sort', () => {
+ table.sort({ col: 0, desc: false });
+ values = table.rows.map(row => row[0]);
+ expect(values).toEqual([-8, 0, 0, 2, 19, 42, null, null]);
+ });
+
+ it('numbers with nulls at start with desc sort', () => {
+ table.sort({ col: 0, desc: true });
+ values = table.rows.map(row => row[0]);
+ expect(values).toEqual([null, null, 42, 19, 2, 0, 0, -8]);
+ });
+
+ it('strings with nulls at end with asc sort', () => {
+ table.sort({ col: 1, desc: false });
+ values = table.rows.map(row => row[1]);
+ expect(values).toEqual(['', '', 'a', 'b', 'c', 'd', null, null]);
+ });
+
+ it('strings with nulls at start with desc sort', () => {
+ table.sort({ col: 1, desc: true });
+ values = table.rows.map(row => row[1]);
+ expect(values).toEqual([null, null, 'd', 'c', 'b', 'a', '', '']);
+ });
+});
diff --git a/public/app/core/specs/time_series.jest.ts b/public/app/core/specs/time_series.jest.ts
index f5245476218..bf50d807e03 100644
--- a/public/app/core/specs/time_series.jest.ts
+++ b/public/app/core/specs/time_series.jest.ts
@@ -119,6 +119,20 @@ describe('TimeSeries', function() {
series.getFlotPairs('null');
expect(series.stats.avg).toBe(null);
});
+
+ it('calculates timeStep', function() {
+ series = new TimeSeries({
+ datapoints: [[null, 1], [null, 2], [null, 3]],
+ });
+ series.getFlotPairs('null');
+ expect(series.stats.timeStep).toBe(1);
+
+ series = new TimeSeries({
+ datapoints: [[0, 1530529290], [0, 1530529305], [0, 1530529320]],
+ });
+ series.getFlotPairs('null');
+ expect(series.stats.timeStep).toBe(15);
+ });
});
describe('When checking if ms resolution is needed', function() {
diff --git a/public/app/core/specs/value_select_dropdown.jest.ts b/public/app/core/specs/value_select_dropdown.jest.ts
new file mode 100644
index 00000000000..3cc310435b7
--- /dev/null
+++ b/public/app/core/specs/value_select_dropdown.jest.ts
@@ -0,0 +1,159 @@
+import 'app/core/directives/value_select_dropdown';
+import { ValueSelectDropdownCtrl } from '../directives/value_select_dropdown';
+import q from 'q';
+
+describe('SelectDropdownCtrl', () => {
+ let tagValuesMap: any = {};
+
+ ValueSelectDropdownCtrl.prototype.onUpdated = jest.fn();
+ let ctrl;
+
+ describe('Given simple variable', () => {
+ beforeEach(() => {
+ ctrl = new ValueSelectDropdownCtrl(q);
+ ctrl.variable = {
+ current: { text: 'hej', value: 'hej' },
+ getValuesForTag: key => {
+ return Promise.resolve(tagValuesMap[key]);
+ },
+ };
+ ctrl.init();
+ });
+
+ it('Should init labelText and linkText', () => {
+ expect(ctrl.linkText).toBe('hej');
+ });
+ });
+
+ describe('Given variable with tags and dropdown is opened', () => {
+ beforeEach(() => {
+ ctrl = new ValueSelectDropdownCtrl(q);
+ ctrl.variable = {
+ current: { text: 'server-1', value: 'server-1' },
+ options: [
+ { text: 'server-1', value: 'server-1', selected: true },
+ { text: 'server-2', value: 'server-2' },
+ { text: 'server-3', value: 'server-3' },
+ ],
+ tags: ['key1', 'key2', 'key3'],
+ getValuesForTag: key => {
+ return Promise.resolve(tagValuesMap[key]);
+ },
+ multi: true,
+ };
+ tagValuesMap.key1 = ['server-1', 'server-3'];
+ tagValuesMap.key2 = ['server-2', 'server-3'];
+ tagValuesMap.key3 = ['server-1', 'server-2', 'server-3'];
+ ctrl.init();
+ ctrl.show();
+ });
+
+ it('should init tags model', () => {
+ expect(ctrl.tags.length).toBe(3);
+ expect(ctrl.tags[0].text).toBe('key1');
+ });
+
+ it('should init options model', () => {
+ expect(ctrl.options.length).toBe(3);
+ });
+
+ it('should init selected values array', () => {
+ expect(ctrl.selectedValues.length).toBe(1);
+ });
+
+ it('should set linkText', () => {
+ expect(ctrl.linkText).toBe('server-1');
+ });
+
+ describe('after additional value is selected', () => {
+ beforeEach(() => {
+ ctrl.selectValue(ctrl.options[2], {});
+ ctrl.commitChanges();
+ });
+
+ it('should update link text', () => {
+ expect(ctrl.linkText).toBe('server-1 + server-3');
+ });
+ });
+
+ describe('When tag is selected', () => {
+ beforeEach(async () => {
+ await ctrl.selectTag(ctrl.tags[0]);
+ ctrl.commitChanges();
+ });
+
+ it('should select tag', () => {
+ expect(ctrl.selectedTags.length).toBe(1);
+ });
+
+ it('should select values', () => {
+ expect(ctrl.options[0].selected).toBe(true);
+ expect(ctrl.options[2].selected).toBe(true);
+ });
+
+ it('link text should not include tag values', () => {
+ expect(ctrl.linkText).toBe('');
+ });
+
+ describe('and then dropdown is opened and closed without changes', () => {
+ beforeEach(() => {
+ ctrl.show();
+ ctrl.commitChanges();
+ });
+
+ it('should still have selected tag', () => {
+ expect(ctrl.selectedTags.length).toBe(1);
+ });
+ });
+
+ describe('and then unselected', () => {
+ beforeEach(async () => {
+ await ctrl.selectTag(ctrl.tags[0]);
+ });
+
+ it('should deselect tag', () => {
+ expect(ctrl.selectedTags.length).toBe(0);
+ });
+ });
+
+ describe('and then value is unselected', () => {
+ beforeEach(() => {
+ ctrl.selectValue(ctrl.options[0], {});
+ });
+
+ it('should deselect tag', () => {
+ expect(ctrl.selectedTags.length).toBe(0);
+ });
+ });
+ });
+ });
+
+ describe('Given variable with selected tags', () => {
+ beforeEach(() => {
+ ctrl = new ValueSelectDropdownCtrl(q);
+ ctrl.variable = {
+ current: {
+ text: 'server-1',
+ value: 'server-1',
+ tags: [{ text: 'key1', selected: true }],
+ },
+ options: [
+ { text: 'server-1', value: 'server-1' },
+ { text: 'server-2', value: 'server-2' },
+ { text: 'server-3', value: 'server-3' },
+ ],
+ tags: ['key1', 'key2', 'key3'],
+ getValuesForTag: key => {
+ return Promise.resolve(tagValuesMap[key]);
+ },
+ multi: true,
+ };
+ ctrl.init();
+ ctrl.show();
+ });
+
+ it('should set tag as selected', () => {
+ expect(ctrl.tags[0].selected).toBe(true);
+ });
+ });
+});
diff --git a/public/app/core/specs/value_select_dropdown_specs.ts b/public/app/core/specs/value_select_dropdown_specs.ts
deleted file mode 100644
index 8f6408fb389..00000000000
--- a/public/app/core/specs/value_select_dropdown_specs.ts
+++ /dev/null
@@ -1,171 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks, sinon } from 'test/lib/common';
-import 'app/core/directives/value_select_dropdown';
-
-describe('SelectDropdownCtrl', function() {
- var scope;
- var ctrl;
- var tagValuesMap: any = {};
- var rootScope;
- var q;
-
- beforeEach(angularMocks.module('grafana.core'));
- beforeEach(
- angularMocks.inject(function($controller, $rootScope, $q, $httpBackend) {
- rootScope = $rootScope;
- q = $q;
- scope = $rootScope.$new();
- ctrl = $controller('ValueSelectDropdownCtrl', { $scope: scope });
- ctrl.onUpdated = sinon.spy();
- $httpBackend.when('GET', /\.html$/).respond('');
- })
- );
-
- describe('Given simple variable', function() {
- beforeEach(function() {
- ctrl.variable = {
- current: { text: 'hej', value: 'hej' },
- getValuesForTag: function(key) {
- return q.when(tagValuesMap[key]);
- },
- };
- ctrl.init();
- });
-
- it('Should init labelText and linkText', function() {
- expect(ctrl.linkText).to.be('hej');
- });
- });
-
- describe('Given variable with tags and dropdown is opened', function() {
- beforeEach(function() {
- ctrl.variable = {
- current: { text: 'server-1', value: 'server-1' },
- options: [
- { text: 'server-1', value: 'server-1', selected: true },
- { text: 'server-2', value: 'server-2' },
- { text: 'server-3', value: 'server-3' },
- ],
- tags: ['key1', 'key2', 'key3'],
- getValuesForTag: function(key) {
- return q.when(tagValuesMap[key]);
- },
- multi: true,
- };
- tagValuesMap.key1 = ['server-1', 'server-3'];
- tagValuesMap.key2 = ['server-2', 'server-3'];
- tagValuesMap.key3 = ['server-1', 'server-2', 'server-3'];
- ctrl.init();
- ctrl.show();
- });
-
- it('should init tags model', function() {
- expect(ctrl.tags.length).to.be(3);
- expect(ctrl.tags[0].text).to.be('key1');
- });
-
- it('should init options model', function() {
- expect(ctrl.options.length).to.be(3);
- });
-
- it('should init selected values array', function() {
- expect(ctrl.selectedValues.length).to.be(1);
- });
-
- it('should set linkText', function() {
- expect(ctrl.linkText).to.be('server-1');
- });
-
- describe('after adititional value is selected', function() {
- beforeEach(function() {
- ctrl.selectValue(ctrl.options[2], {});
- ctrl.commitChanges();
- });
-
- it('should update link text', function() {
- expect(ctrl.linkText).to.be('server-1 + server-3');
- });
- });
-
- describe('When tag is selected', function() {
- beforeEach(function() {
- ctrl.selectTag(ctrl.tags[0]);
- rootScope.$digest();
- ctrl.commitChanges();
- });
-
- it('should select tag', function() {
- expect(ctrl.selectedTags.length).to.be(1);
- });
-
- it('should select values', function() {
- expect(ctrl.options[0].selected).to.be(true);
- expect(ctrl.options[2].selected).to.be(true);
- });
-
- it('link text should not include tag values', function() {
- expect(ctrl.linkText).to.be('');
- });
-
- describe('and then dropdown is opened and closed without changes', function() {
- beforeEach(function() {
- ctrl.show();
- ctrl.commitChanges();
- rootScope.$digest();
- });
-
- it('should still have selected tag', function() {
- expect(ctrl.selectedTags.length).to.be(1);
- });
- });
-
- describe('and then unselected', function() {
- beforeEach(function() {
- ctrl.selectTag(ctrl.tags[0]);
- rootScope.$digest();
- });
-
- it('should deselect tag', function() {
- expect(ctrl.selectedTags.length).to.be(0);
- });
- });
-
- describe('and then value is unselected', function() {
- beforeEach(function() {
- ctrl.selectValue(ctrl.options[0], {});
- });
-
- it('should deselect tag', function() {
- expect(ctrl.selectedTags.length).to.be(0);
- });
- });
- });
- });
-
- describe('Given variable with selected tags', function() {
- beforeEach(function() {
- ctrl.variable = {
- current: {
- text: 'server-1',
- value: 'server-1',
- tags: [{ text: 'key1', selected: true }],
- },
- options: [
- { text: 'server-1', value: 'server-1' },
- { text: 'server-2', value: 'server-2' },
- { text: 'server-3', value: 'server-3' },
- ],
- tags: ['key1', 'key2', 'key3'],
- getValuesForTag: function(key) {
- return q.when(tagValuesMap[key]);
- },
- multi: true,
- };
- ctrl.init();
- ctrl.show();
- });
-
- it('should set tag as selected', function() {
- expect(ctrl.tags[0].selected).to.be(true);
- });
- });
-});
diff --git a/public/app/core/table_model.ts b/public/app/core/table_model.ts
index 5716aac2be6..04857eb806d 100644
--- a/public/app/core/table_model.ts
+++ b/public/app/core/table_model.ts
@@ -19,23 +19,16 @@ export default class TableModel {
this.rows.sort(function(a, b) {
a = a[options.col];
b = b[options.col];
- if (a < b) {
- return -1;
- }
- if (a > b) {
- return 1;
- }
- return 0;
+ // Sort null or undefined separately from comparable values
+ return +(a == null) - +(b == null) || +(a > b) || -(a < b);
});
- this.columns[options.col].sort = true;
-
if (options.desc) {
this.rows.reverse();
- this.columns[options.col].desc = true;
- } else {
- this.columns[options.col].desc = false;
}
+
+ this.columns[options.col].sort = true;
+ this.columns[options.col].desc = options.desc;
}
addColumn(col) {
diff --git a/public/app/features/annotations/specs/annotations_srv_specs.ts b/public/app/features/annotations/specs/annotations_srv.jest.ts
similarity index 52%
rename from public/app/features/annotations/specs/annotations_srv_specs.ts
rename to public/app/features/annotations/specs/annotations_srv.jest.ts
index 932fcf9415c..7db7b6c9f05 100644
--- a/public/app/features/annotations/specs/annotations_srv_specs.ts
+++ b/public/app/features/annotations/specs/annotations_srv.jest.ts
@@ -1,17 +1,17 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
import '../annotations_srv';
-import helpers from 'test/specs/helpers';
import 'app/features/dashboard/time_srv';
+import { AnnotationsSrv } from '../annotations_srv';
describe('AnnotationsSrv', function() {
- var ctx = new helpers.ServiceTestContext();
+ let $rootScope = {
+ onAppEvent: jest.fn(),
+ };
+ let $q;
+ let datasourceSrv;
+ let backendSrv;
+ let timeSrv;
- beforeEach(angularMocks.module('grafana.core'));
- beforeEach(angularMocks.module('grafana.services'));
- beforeEach(ctx.createService('timeSrv'));
- beforeEach(() => {
- ctx.createService('annotationsSrv');
- });
+ let annotationsSrv = new AnnotationsSrv($rootScope, $q, datasourceSrv, backendSrv, timeSrv);
describe('When translating the query result', () => {
const annotationSource = {
@@ -30,11 +30,11 @@ describe('AnnotationsSrv', function() {
let translatedAnnotations;
beforeEach(() => {
- translatedAnnotations = ctx.service.translateQueryResult(annotationSource, annotations);
+ translatedAnnotations = annotationsSrv.translateQueryResult(annotationSource, annotations);
});
it('should set defaults', () => {
- expect(translatedAnnotations[0].source).to.eql(annotationSource);
+ expect(translatedAnnotations[0].source).toEqual(annotationSource);
});
});
});
diff --git a/public/app/features/dashboard/specs/exporter.jest.ts b/public/app/features/dashboard/specs/exporter.jest.ts
index aa574a4b85a..c7727a4af4d 100644
--- a/public/app/features/dashboard/specs/exporter.jest.ts
+++ b/public/app/features/dashboard/specs/exporter.jest.ts
@@ -86,9 +86,7 @@ describe('given dashboard with repeated panels', () => {
],
};
- config.buildInfo = {
- version: '3.0.2',
- };
+ config.buildInfo.version = '3.0.2';
//Stubs test function calls
var datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) };
diff --git a/public/app/features/dashboard/specs/viewstate_srv.jest.ts b/public/app/features/dashboard/specs/viewstate_srv.jest.ts
new file mode 100644
index 00000000000..08166c6f2bd
--- /dev/null
+++ b/public/app/features/dashboard/specs/viewstate_srv.jest.ts
@@ -0,0 +1,67 @@
+//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
+import 'app/features/dashboard/view_state_srv';
+import config from 'app/core/config';
+import { DashboardViewState } from '../view_state_srv';
+
+describe('when updating view state', () => {
+ let location = {
+ replace: jest.fn(),
+ search: jest.fn(),
+ };
+
+ let $scope = {
+ onAppEvent: jest.fn(() => {}),
+ dashboard: {
+ meta: {},
+ panels: [],
+ },
+ };
+
+ let $rootScope = {};
+ let viewState;
+
+ beforeEach(() => {
+ config.bootData = {
+ user: {
+ orgId: 1,
+ },
+ };
+ });
+
+ describe('to fullscreen true and edit true', () => {
+ beforeEach(() => {
+ location.search = jest.fn(() => {
+ return { fullscreen: true, edit: true, panelId: 1 };
+ });
+ viewState = new DashboardViewState($scope, location, {}, $rootScope);
+ });
+
+ it('should update querystring and view state', () => {
+ var updateState = { fullscreen: true, edit: true, panelId: 1 };
+
+ viewState.update(updateState);
+
+ expect(location.search).toHaveBeenCalledWith({
+ edit: true,
+ editview: null,
+ fullscreen: true,
+ orgId: 1,
+ panelId: 1,
+ });
+ expect(viewState.dashboard.meta.fullscreen).toBe(true);
+ expect(viewState.state.fullscreen).toBe(true);
+ });
+ });
+
+ describe('to fullscreen false', () => {
+ beforeEach(() => {
+ viewState = new DashboardViewState($scope, location, {}, $rootScope);
+ });
+ it('should remove params from query string', () => {
+ viewState.update({ fullscreen: true, panelId: 1, edit: true });
+ viewState.update({ fullscreen: false });
+ expect(viewState.dashboard.meta.fullscreen).toBe(false);
+ expect(viewState.state.fullscreen).toBe(null);
+ });
+ });
+});
diff --git a/public/app/features/dashboard/specs/viewstate_srv_specs.ts b/public/app/features/dashboard/specs/viewstate_srv_specs.ts
deleted file mode 100644
index d34b15b9113..00000000000
--- a/public/app/features/dashboard/specs/viewstate_srv_specs.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-import 'app/features/dashboard/view_state_srv';
-import config from 'app/core/config';
-
-describe('when updating view state', function() {
- var viewState, location;
- var timeSrv = {};
- var templateSrv = {};
- var contextSrv = {
- user: {
- orgId: 19,
- },
- };
- beforeEach(function() {
- config.bootData = {
- user: {
- orgId: 1,
- },
- };
- });
- beforeEach(angularMocks.module('grafana.services'));
- beforeEach(
- angularMocks.module(function($provide) {
- $provide.value('timeSrv', timeSrv);
- $provide.value('templateSrv', templateSrv);
- $provide.value('contextSrv', contextSrv);
- })
- );
-
- beforeEach(
- angularMocks.inject(function(dashboardViewStateSrv, $location, $rootScope) {
- $rootScope.onAppEvent = function() {};
- $rootScope.dashboard = {
- meta: {},
- panels: [],
- };
- viewState = dashboardViewStateSrv.create($rootScope);
- location = $location;
- })
- );
-
- describe('to fullscreen true and edit true', function() {
- it('should update querystring and view state', function() {
- var updateState = { fullscreen: true, edit: true, panelId: 1 };
- viewState.update(updateState);
- expect(location.search()).to.eql({
- fullscreen: true,
- edit: true,
- panelId: 1,
- orgId: 1,
- });
- expect(viewState.dashboard.meta.fullscreen).to.be(true);
- expect(viewState.state.fullscreen).to.be(true);
- });
- });
-
- describe('to fullscreen false', function() {
- it('should remove params from query string', function() {
- viewState.update({ fullscreen: true, panelId: 1, edit: true });
- viewState.update({ fullscreen: false });
- expect(viewState.dashboard.meta.fullscreen).to.be(false);
- expect(viewState.state.fullscreen).to.be(null);
- });
- });
-});
diff --git a/public/app/features/org/partials/team_details.html b/public/app/features/org/partials/team_details.html
index 3fce8b3c720..3ce851d5546 100644
--- a/public/app/features/org/partials/team_details.html
+++ b/public/app/features/org/partials/team_details.html
@@ -1,22 +1,22 @@
-
Team Details
+
Team Details
+
+
+ Email
+
+ This is optional and is primarily used for allowing custom team avatars.
+
+
+
+
diff --git a/public/app/features/plugins/specs/datasource_srv.jest.ts b/public/app/features/plugins/specs/datasource_srv.jest.ts
new file mode 100644
index 00000000000..f261c4e2249
--- /dev/null
+++ b/public/app/features/plugins/specs/datasource_srv.jest.ts
@@ -0,0 +1,59 @@
+import config from 'app/core/config';
+import 'app/features/plugins/datasource_srv';
+import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
+
+describe('datasource_srv', function() {
+ let _datasourceSrv = new DatasourceSrv({}, {}, {}, {});
+ let metricSources;
+
+ describe('when loading metric sources', () => {
+ let unsortedDatasources = {
+ mmm: {
+ type: 'test-db',
+ meta: { metrics: { m: 1 } },
+ },
+ '--Grafana--': {
+ type: 'grafana',
+ meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' },
+ },
+ '--Mixed--': {
+ type: 'test-db',
+ meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' },
+ },
+ ZZZ: {
+ type: 'test-db',
+ meta: { metrics: { m: 1 } },
+ },
+ aaa: {
+ type: 'test-db',
+ meta: { metrics: { m: 1 } },
+ },
+ BBB: {
+ type: 'test-db',
+ meta: { metrics: { m: 1 } },
+ },
+ };
+ beforeEach(() => {
+ config.datasources = unsortedDatasources;
+ metricSources = _datasourceSrv.getMetricSources({ skipVariables: true });
+ });
+
+ it('should return a list of sources sorted case insensitively with builtin sources last', () => {
+ expect(metricSources[0].name).toBe('aaa');
+ expect(metricSources[1].name).toBe('BBB');
+ expect(metricSources[2].name).toBe('mmm');
+ expect(metricSources[3].name).toBe('ZZZ');
+ expect(metricSources[4].name).toBe('--Grafana--');
+ expect(metricSources[5].name).toBe('--Mixed--');
+ });
+
+ beforeEach(() => {
+ config.defaultDatasource = 'BBB';
+ });
+
+ it('should set default data source', () => {
+ expect(metricSources[2].name).toBe('default');
+ expect(metricSources[2].sort).toBe('BBB');
+ });
+ });
+});
diff --git a/public/app/features/plugins/specs/datasource_srv_specs.ts b/public/app/features/plugins/specs/datasource_srv_specs.ts
deleted file mode 100644
index 85a66b59ee7..00000000000
--- a/public/app/features/plugins/specs/datasource_srv_specs.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
-import config from 'app/core/config';
-import 'app/features/plugins/datasource_srv';
-
-describe('datasource_srv', function() {
- var _datasourceSrv;
- var metricSources;
- var templateSrv = {};
-
- beforeEach(angularMocks.module('grafana.core'));
- beforeEach(
- angularMocks.module(function($provide) {
- $provide.value('templateSrv', templateSrv);
- })
- );
- beforeEach(angularMocks.module('grafana.services'));
- beforeEach(
- angularMocks.inject(function(datasourceSrv) {
- _datasourceSrv = datasourceSrv;
- })
- );
-
- describe('when loading metric sources', function() {
- var unsortedDatasources = {
- mmm: {
- type: 'test-db',
- meta: { metrics: { m: 1 } },
- },
- '--Grafana--': {
- type: 'grafana',
- meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' },
- },
- '--Mixed--': {
- type: 'test-db',
- meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' },
- },
- ZZZ: {
- type: 'test-db',
- meta: { metrics: { m: 1 } },
- },
- aaa: {
- type: 'test-db',
- meta: { metrics: { m: 1 } },
- },
- BBB: {
- type: 'test-db',
- meta: { metrics: { m: 1 } },
- },
- };
- beforeEach(function() {
- config.datasources = unsortedDatasources;
- metricSources = _datasourceSrv.getMetricSources({ skipVariables: true });
- });
-
- it('should return a list of sources sorted case insensitively with builtin sources last', function() {
- expect(metricSources[0].name).to.be('aaa');
- expect(metricSources[1].name).to.be('BBB');
- expect(metricSources[2].name).to.be('mmm');
- expect(metricSources[3].name).to.be('ZZZ');
- expect(metricSources[4].name).to.be('--Grafana--');
- expect(metricSources[5].name).to.be('--Mixed--');
- });
- });
-});
diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts
index 8a096dd9ad2..8ad3c2845e2 100644
--- a/public/app/features/templating/variable_srv.ts
+++ b/public/app/features/templating/variable_srv.ts
@@ -38,7 +38,11 @@ export class VariableSrv {
});
}
- onDashboardRefresh() {
+ onDashboardRefresh(evt, payload) {
+ if (payload && payload.fromVariableValueUpdated) {
+ return Promise.resolve({});
+ }
+
var promises = this.variables.filter(variable => variable.refresh === 2).map(variable => {
var previousOptions = variable.options.slice();
@@ -130,7 +134,7 @@ export class VariableSrv {
return this.$q.all(promises).then(() => {
if (emitChangeEvents) {
this.$rootScope.$emit('template-variable-value-updated');
- this.$rootScope.$broadcast('refresh');
+ this.$rootScope.$broadcast('refresh', { fromVariableValueUpdated: true });
}
});
}
diff --git a/public/app/partials/login.html b/public/app/partials/login.html
index 8be9e777b9f..1919759334b 100644
--- a/public/app/partials/login.html
+++ b/public/app/partials/login.html
@@ -89,7 +89,7 @@
Skip
- If you skip you will be promted to change password next time you login.
+ If you skip you will be prompted to change password next time you login.
diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts
similarity index 70%
rename from public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
rename to public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts
index 558bccf3d0f..36e7a63a005 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts
@@ -1,32 +1,46 @@
import _ from 'lodash';
-import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
import moment from 'moment';
import angular from 'angular';
-import helpers from 'test/specs/helpers';
import { ElasticDatasource } from '../datasource';
+import * as dateMath from 'app/core/utils/datemath';
+
describe('ElasticDatasource', function() {
- var ctx = new helpers.ServiceTestContext();
+ let backendSrv = {
+ datasourceRequest: jest.fn(),
+ };
- beforeEach(angularMocks.module('grafana.core'));
- beforeEach(angularMocks.module('grafana.services'));
- beforeEach(ctx.providePhase(['templateSrv', 'backendSrv', 'timeSrv']));
+ let $rootScope = {
+ $on: jest.fn(),
+ appEvent: jest.fn(),
+ };
- beforeEach(
- angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
- ctx.$q = $q;
- ctx.$httpBackend = $httpBackend;
- ctx.$rootScope = $rootScope;
- ctx.$injector = $injector;
- $httpBackend.when('GET', /\.html$/).respond('');
- })
- );
+ let templateSrv = {
+ replace: jest.fn(text => text),
+ getAdhocFilters: jest.fn(() => []),
+ };
+
+ let timeSrv = {
+ time: { from: 'now-1h', to: 'now' },
+ timeRange: jest.fn(() => {
+ return {
+ from: dateMath.parse(this.time.from, false),
+ to: dateMath.parse(this.time.to, true),
+ };
+ }),
+ setTime: jest.fn(time => {
+ this.time = time;
+ }),
+ };
+
+ let ctx = {
+ $rootScope,
+ backendSrv,
+ };
function createDatasource(instanceSettings) {
instanceSettings.jsonData = instanceSettings.jsonData || {};
- ctx.ds = ctx.$injector.instantiate(ElasticDatasource, {
- instanceSettings: instanceSettings,
- });
+ ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
}
describe('When testing datasource with index pattern', function() {
@@ -40,33 +54,32 @@ describe('ElasticDatasource', function() {
it('should translate index pattern to current day', function() {
var requestOptions;
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
- return ctx.$q.when({ data: {} });
- };
+ return Promise.resolve({ data: {} });
+ });
ctx.ds.testDatasource();
- ctx.$rootScope.$apply();
var today = moment.utc().format('YYYY.MM.DD');
- expect(requestOptions.url).to.be('http://es.com/asd-' + today + '/_mapping');
+ expect(requestOptions.url).toBe('http://es.com/asd-' + today + '/_mapping');
});
});
describe('When issuing metric query with interval pattern', function() {
var requestOptions, parts, header;
- beforeEach(function() {
+ beforeEach(() => {
createDatasource({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
jsonData: { interval: 'Daily', esVersion: '2' },
});
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
- return ctx.$q.when({ data: { responses: [] } });
- };
+ return Promise.resolve({ data: { responses: [] } });
+ });
ctx.ds.query({
range: {
@@ -82,19 +95,17 @@ describe('ElasticDatasource', function() {
],
});
- ctx.$rootScope.$apply();
-
parts = requestOptions.data.split('\n');
header = angular.fromJson(parts[0]);
});
it('should translate index pattern to current day', function() {
- expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
+ expect(header.index).toEqual(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
});
it('should json escape lucene query', function() {
var body = angular.fromJson(parts[1]);
- expect(body.query.bool.filter[1].query_string.query).to.be('escape\\:test');
+ expect(body.query.bool.filter[1].query_string.query).toBe('escape\\:test');
});
});
@@ -108,10 +119,10 @@ describe('ElasticDatasource', function() {
jsonData: { esVersion: '2' },
});
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
- return ctx.$q.when({ data: { responses: [] } });
- };
+ return Promise.resolve({ data: { responses: [] } });
+ });
ctx.ds.query({
range: {
@@ -127,27 +138,26 @@ describe('ElasticDatasource', function() {
],
});
- ctx.$rootScope.$apply();
parts = requestOptions.data.split('\n');
header = angular.fromJson(parts[0]);
});
it('should set search type to query_then_fetch', function() {
- expect(header.search_type).to.eql('query_then_fetch');
+ expect(header.search_type).toEqual('query_then_fetch');
});
it('should set size', function() {
var body = angular.fromJson(parts[1]);
- expect(body.size).to.be(500);
+ expect(body.size).toBe(500);
});
});
describe('When getting fields', function() {
- beforeEach(function() {
+ beforeEach(() => {
createDatasource({ url: 'http://es.com', index: 'metricbeat' });
- ctx.backendSrv.datasourceRequest = function(options) {
- return ctx.$q.when({
+ ctx.backendSrv.datasourceRequest = jest.fn(options => {
+ return Promise.resolve({
data: {
metricbeat: {
mappings: {
@@ -190,7 +200,7 @@ describe('ElasticDatasource', function() {
},
},
});
- };
+ });
});
it('should return nested fields', function() {
@@ -201,7 +211,7 @@ describe('ElasticDatasource', function() {
})
.then(fieldObjects => {
var fields = _.map(fieldObjects, 'text');
- expect(fields).to.eql([
+ expect(fields).toEqual([
'@timestamp',
'beat.name.raw',
'beat.name',
@@ -212,7 +222,6 @@ describe('ElasticDatasource', function() {
'system.process.name',
]);
});
- ctx.$rootScope.$apply();
});
it('should return fields related to query type', function() {
@@ -224,7 +233,7 @@ describe('ElasticDatasource', function() {
})
.then(fieldObjects => {
var fields = _.map(fieldObjects, 'text');
- expect(fields).to.eql(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']);
+ expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']);
});
ctx.ds
@@ -235,10 +244,8 @@ describe('ElasticDatasource', function() {
})
.then(fieldObjects => {
var fields = _.map(fieldObjects, 'text');
- expect(fields).to.eql(['@timestamp']);
+ expect(fields).toEqual(['@timestamp']);
});
-
- ctx.$rootScope.$apply();
});
});
@@ -252,10 +259,10 @@ describe('ElasticDatasource', function() {
jsonData: { esVersion: '5' },
});
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
- return ctx.$q.when({ data: { responses: [] } });
- };
+ return Promise.resolve({ data: { responses: [] } });
+ });
ctx.ds.query({
range: {
@@ -271,34 +278,33 @@ describe('ElasticDatasource', function() {
],
});
- ctx.$rootScope.$apply();
parts = requestOptions.data.split('\n');
header = angular.fromJson(parts[0]);
});
it('should not set search type to count', function() {
- expect(header.search_type).to.not.eql('count');
+ expect(header.search_type).not.toEqual('count');
});
it('should set size to 0', function() {
var body = angular.fromJson(parts[1]);
- expect(body.size).to.be(0);
+ expect(body.size).toBe(0);
});
});
describe('When issuing metricFind query on es5.x', function() {
var requestOptions, parts, header, body, results;
- beforeEach(function() {
+ beforeEach(() => {
createDatasource({
url: 'http://es.com',
index: 'test',
jsonData: { esVersion: '5' },
});
- ctx.backendSrv.datasourceRequest = function(options) {
+ ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
- return ctx.$q.when({
+ return Promise.resolve({
data: {
responses: [
{
@@ -318,38 +324,36 @@ describe('ElasticDatasource', function() {
],
},
});
- };
+ });
ctx.ds.metricFindQuery('{"find": "terms", "field": "test"}').then(res => {
results = res;
});
- ctx.$rootScope.$apply();
-
parts = requestOptions.data.split('\n');
header = angular.fromJson(parts[0]);
body = angular.fromJson(parts[1]);
});
- it('should get results', function() {
- expect(results.length).to.eql(2);
+ it('should get results', () => {
+ expect(results.length).toEqual(2);
});
- it('should use key or key_as_string', function() {
- expect(results[0].text).to.eql('test');
- expect(results[1].text).to.eql('test2_as_string');
+ it('should use key or key_as_string', () => {
+ expect(results[0].text).toEqual('test');
+ expect(results[1].text).toEqual('test2_as_string');
});
- it('should not set search type to count', function() {
- expect(header.search_type).to.not.eql('count');
+ it('should not set search type to count', () => {
+ expect(header.search_type).not.toEqual('count');
});
- it('should set size to 0', function() {
- expect(body.size).to.be(0);
+ it('should set size to 0', () => {
+ expect(body.size).toBe(0);
});
- it('should not set terms aggregation size to 0', function() {
- expect(body['aggs']['1']['terms'].size).to.not.be(0);
+ it('should not set terms aggregation size to 0', () => {
+ expect(body['aggs']['1']['terms'].size).not.toBe(0);
});
});
});
diff --git a/public/app/plugins/datasource/mssql/partials/annotations.editor.html b/public/app/plugins/datasource/mssql/partials/annotations.editor.html
index b2c0d7b97a6..0ad8b0c01f0 100644
--- a/public/app/plugins/datasource/mssql/partials/annotations.editor.html
+++ b/public/app/plugins/datasource/mssql/partials/annotations.editor.html
@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
Macros:
- $__time(column) -> column AS time
- $__timeEpoch(column) -> DATEDIFF(second, '1970-01-01', column) AS time
-- $__timeFilter(column) -> column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND column <= DATEADD(s, 18446744066914187038, '1970-01-01')
+- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877
Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> DATEADD(second, 1492750877, '1970-01-01')
-- $__timeTo() -> DATEADD(second, 1492750877, '1970-01-01')
+- $__timeFrom() -> '2017-04-21T05:01:17Z'
+- $__timeTo() -> '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
diff --git a/public/app/plugins/datasource/mssql/partials/query.editor.html b/public/app/plugins/datasource/mssql/partials/query.editor.html
index f29dfa18db2..ddc24475d60 100644
--- a/public/app/plugins/datasource/mssql/partials/query.editor.html
+++ b/public/app/plugins/datasource/mssql/partials/query.editor.html
@@ -49,7 +49,7 @@ Table:
Macros:
- $__time(column) -> column AS time
- $__timeEpoch(column) -> DATEDIFF(second, '1970-01-01', column) AS time
-- $__timeFilter(column) -> column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND column <= DATEADD(s, 18446744066914187038, '1970-01-01')
+- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877
- $__timeGroup(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300. Providing a fillValue of NULL or floating value will automatically fill empty series in timerange with that value.
@@ -62,8 +62,8 @@ GROUP BY $__timeGroup(date_time_col, '1h')
ORDER BY 1
Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> DATEADD(second, 1492750877, '1970-01-01')
-- $__timeTo() -> DATEADD(second, 1492750877, '1970-01-01')
+- $__timeFrom() -> '2017-04-21T05:01:17Z'
+- $__timeTo() -> '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
diff --git a/public/app/plugins/datasource/mysql/partials/annotations.editor.html b/public/app/plugins/datasource/mysql/partials/annotations.editor.html
index 23ec726a9f0..5f2e44887ba 100644
--- a/public/app/plugins/datasource/mysql/partials/annotations.editor.html
+++ b/public/app/plugins/datasource/mysql/partials/annotations.editor.html
@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
Macros:
- $__time(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec)
-- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) > 1492750877 AND UNIX_TIMESTAMP(time_date_time) < 1492750877
+- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> FROM_UNIXTIME(1492750877)
-- $__timeTo() -> FROM_UNIXTIME(1492750877)
+- $__timeFrom() -> '2017-04-21T05:01:17Z'
+- $__timeTo() -> '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
diff --git a/public/app/plugins/datasource/mysql/partials/query.editor.html b/public/app/plugins/datasource/mysql/partials/query.editor.html
index 9acf32405c1..df68982fcfa 100644
--- a/public/app/plugins/datasource/mysql/partials/query.editor.html
+++ b/public/app/plugins/datasource/mysql/partials/query.editor.html
@@ -48,7 +48,7 @@ Table:
Macros:
- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec
- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time_sec
-- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877
+- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
@@ -61,8 +61,8 @@ GROUP BY 1
ORDER BY 1
Or build your own conditionals using these macros which just return the values:
-- $__timeFrom() -> FROM_UNIXTIME(1492750877)
-- $__timeTo() -> FROM_UNIXTIME(1492750877)
+- $__timeFrom() -> '2017-04-21T05:01:17Z'
+- $__timeTo() -> '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts
index 46431a08ab1..d7d33264c99 100644
--- a/public/app/plugins/datasource/prometheus/datasource.ts
+++ b/public/app/plugins/datasource/prometheus/datasource.ts
@@ -162,8 +162,8 @@ export class PrometheusDatasource {
format: activeTargets[index].format,
step: queries[index].step,
legendFormat: activeTargets[index].legendFormat,
- start: start,
- end: end,
+ start: queries[index].start,
+ end: queries[index].end,
query: queries[index].expr,
responseListLength: responseList.length,
responseIndex: index,
diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
index 0157322da58..219b990e5dd 100644
--- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
+++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
@@ -68,7 +68,7 @@ describe('PrometheusDatasource', () => {
ctx.query = {
range: { from: moment(1443454528000), to: moment(1443454528000) },
targets: [{ expr: 'test{job="testjob"}', format: 'heatmap', legendFormat: '{{le}}' }],
- interval: '60s',
+ interval: '1s',
};
});
diff --git a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts
index 56a05d5aedb..b94cca79059 100644
--- a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts
+++ b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts
@@ -127,4 +127,82 @@ describe('Prometheus Result Transformer', () => {
]);
});
});
+
+ describe('When resultFormat is time series', () => {
+ it('should transform matrix into timeseries', () => {
+ const response = {
+ status: 'success',
+ data: {
+ resultType: 'matrix',
+ result: [
+ {
+ metric: { __name__: 'test', job: 'testjob' },
+ values: [[0, '10'], [1, '10'], [2, '0']],
+ },
+ ],
+ },
+ };
+ let result = [];
+ let options = {
+ format: 'timeseries',
+ start: 0,
+ end: 2,
+ };
+
+ ctx.resultTransformer.transform(result, { data: response }, options);
+ expect(result).toEqual([{ target: 'test{job="testjob"}', datapoints: [[10, 0], [10, 1000], [0, 2000]] }]);
+ });
+
+ it('should fill timeseries with null values', () => {
+ const response = {
+ status: 'success',
+ data: {
+ resultType: 'matrix',
+ result: [
+ {
+ metric: { __name__: 'test', job: 'testjob' },
+ values: [[1, '10'], [2, '0']],
+ },
+ ],
+ },
+ };
+ let result = [];
+ let options = {
+ format: 'timeseries',
+ step: 1,
+ start: 0,
+ end: 2,
+ };
+
+ ctx.resultTransformer.transform(result, { data: response }, options);
+ expect(result).toEqual([{ target: 'test{job="testjob"}', datapoints: [[null, 0], [10, 1000], [0, 2000]] }]);
+ });
+
+ it('should align null values with step', () => {
+ const response = {
+ status: 'success',
+ data: {
+ resultType: 'matrix',
+ result: [
+ {
+ metric: { __name__: 'test', job: 'testjob' },
+ values: [[4, '10'], [8, '10']],
+ },
+ ],
+ },
+ };
+ let result = [];
+ let options = {
+ format: 'timeseries',
+ step: 2,
+ start: 0,
+ end: 8,
+ };
+
+ ctx.resultTransformer.transform(result, { data: response }, options);
+ expect(result).toEqual([
+ { target: 'test{job="testjob"}', datapoints: [[null, 0], [null, 2000], [10, 4000], [null, 6000], [10, 8000]] },
+ ]);
+ });
+ });
});
diff --git a/public/app/plugins/panel/graph/series_overrides_ctrl.ts b/public/app/plugins/panel/graph/series_overrides_ctrl.ts
index ecf79a8a4fb..5958c80bac9 100644
--- a/public/app/plugins/panel/graph/series_overrides_ctrl.ts
+++ b/public/app/plugins/panel/graph/series_overrides_ctrl.ts
@@ -1,160 +1,158 @@
import _ from 'lodash';
import angular from 'angular';
-export class SeriesOverridesCtrl {
- /** @ngInject */
- constructor($scope, $element, popoverSrv) {
- $scope.overrideMenu = [];
- $scope.currentOverrides = [];
- $scope.override = $scope.override || {};
+/** @ngInject */
+export function SeriesOverridesCtrl($scope, $element, popoverSrv) {
+ $scope.overrideMenu = [];
+ $scope.currentOverrides = [];
+ $scope.override = $scope.override || {};
- $scope.addOverrideOption = function(name, propertyName, values) {
- var option = {
- text: name,
- propertyName: propertyName,
- index: $scope.overrideMenu.lenght,
- values: values,
- submenu: _.map(values, function(value) {
- return { text: String(value), value: value };
- }),
- };
-
- $scope.overrideMenu.push(option);
+ $scope.addOverrideOption = function(name, propertyName, values) {
+ var option = {
+ text: name,
+ propertyName: propertyName,
+ index: $scope.overrideMenu.length,
+ values: values,
+ submenu: _.map(values, function(value) {
+ return { text: String(value), value: value };
+ }),
};
- $scope.setOverride = function(item, subItem) {
- // handle color overrides
- if (item.propertyName === 'color') {
- $scope.openColorSelector($scope.override['color']);
+ $scope.overrideMenu.push(option);
+ };
+
+ $scope.setOverride = function(item, subItem) {
+ // handle color overrides
+ if (item.propertyName === 'color') {
+ $scope.openColorSelector($scope.override['color']);
+ return;
+ }
+
+ $scope.override[item.propertyName] = subItem.value;
+
+ // automatically disable lines for this series and the fill below to series
+ // can be removed by the user if they still want lines
+ if (item.propertyName === 'fillBelowTo') {
+ $scope.override['lines'] = false;
+ $scope.ctrl.addSeriesOverride({ alias: subItem.value, lines: false });
+ }
+
+ $scope.updateCurrentOverrides();
+ $scope.ctrl.render();
+ };
+
+ $scope.colorSelected = function(color) {
+ $scope.override['color'] = color;
+ $scope.updateCurrentOverrides();
+ $scope.ctrl.render();
+ };
+
+ $scope.openColorSelector = function(color) {
+ var fakeSeries = { color: color };
+ popoverSrv.show({
+ element: $element.find('.dropdown')[0],
+ position: 'top center',
+ openOn: 'click',
+ template: ' ',
+ model: {
+ autoClose: true,
+ colorSelected: $scope.colorSelected,
+ series: fakeSeries,
+ },
+ onClose: function() {
+ $scope.ctrl.render();
+ },
+ });
+ };
+
+ $scope.removeOverride = function(option) {
+ delete $scope.override[option.propertyName];
+ $scope.updateCurrentOverrides();
+ $scope.ctrl.refresh();
+ };
+
+ $scope.getSeriesNames = function() {
+ return _.map($scope.ctrl.seriesList, function(series) {
+ return series.alias;
+ });
+ };
+
+ $scope.updateCurrentOverrides = function() {
+ $scope.currentOverrides = [];
+ _.each($scope.overrideMenu, function(option) {
+ var value = $scope.override[option.propertyName];
+ if (_.isUndefined(value)) {
return;
}
-
- $scope.override[item.propertyName] = subItem.value;
-
- // automatically disable lines for this series and the fill below to series
- // can be removed by the user if they still want lines
- if (item.propertyName === 'fillBelowTo') {
- $scope.override['lines'] = false;
- $scope.ctrl.addSeriesOverride({ alias: subItem.value, lines: false });
- }
-
- $scope.updateCurrentOverrides();
- $scope.ctrl.render();
- };
-
- $scope.colorSelected = function(color) {
- $scope.override['color'] = color;
- $scope.updateCurrentOverrides();
- $scope.ctrl.render();
- };
-
- $scope.openColorSelector = function(color) {
- var fakeSeries = { color: color };
- popoverSrv.show({
- element: $element.find('.dropdown')[0],
- position: 'top center',
- openOn: 'click',
- template: ' ',
- model: {
- autoClose: true,
- colorSelected: $scope.colorSelected,
- series: fakeSeries,
- },
- onClose: function() {
- $scope.ctrl.render();
- },
+ $scope.currentOverrides.push({
+ name: option.text,
+ propertyName: option.propertyName,
+ value: String(value),
});
- };
+ });
+ };
- $scope.removeOverride = function(option) {
- delete $scope.override[option.propertyName];
- $scope.updateCurrentOverrides();
- $scope.ctrl.refresh();
- };
-
- $scope.getSeriesNames = function() {
- return _.map($scope.ctrl.seriesList, function(series) {
- return series.alias;
- });
- };
-
- $scope.updateCurrentOverrides = function() {
- $scope.currentOverrides = [];
- _.each($scope.overrideMenu, function(option) {
- var value = $scope.override[option.propertyName];
- if (_.isUndefined(value)) {
- return;
- }
- $scope.currentOverrides.push({
- name: option.text,
- propertyName: option.propertyName,
- value: String(value),
- });
- });
- };
-
- $scope.addOverrideOption('Bars', 'bars', [true, false]);
- $scope.addOverrideOption('Lines', 'lines', [true, false]);
- $scope.addOverrideOption('Line fill', 'fill', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
- $scope.addOverrideOption('Line width', 'linewidth', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
- $scope.addOverrideOption('Null point mode', 'nullPointMode', ['connected', 'null', 'null as zero']);
- $scope.addOverrideOption('Fill below to', 'fillBelowTo', $scope.getSeriesNames());
- $scope.addOverrideOption('Staircase line', 'steppedLine', [true, false]);
- $scope.addOverrideOption('Dashes', 'dashes', [true, false]);
- $scope.addOverrideOption('Dash Length', 'dashLength', [
- 1,
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- 16,
- 17,
- 18,
- 19,
- 20,
- ]);
- $scope.addOverrideOption('Dash Space', 'spaceLength', [
- 1,
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- 16,
- 17,
- 18,
- 19,
- 20,
- ]);
- $scope.addOverrideOption('Points', 'points', [true, false]);
- $scope.addOverrideOption('Points Radius', 'pointradius', [1, 2, 3, 4, 5]);
- $scope.addOverrideOption('Stack', 'stack', [true, false, 'A', 'B', 'C', 'D']);
- $scope.addOverrideOption('Color', 'color', ['change']);
- $scope.addOverrideOption('Y-axis', 'yaxis', [1, 2]);
- $scope.addOverrideOption('Z-index', 'zindex', [-3, -2, -1, 0, 1, 2, 3]);
- $scope.addOverrideOption('Transform', 'transform', ['negative-Y']);
- $scope.addOverrideOption('Legend', 'legend', [true, false]);
- $scope.updateCurrentOverrides();
- }
+ $scope.addOverrideOption('Bars', 'bars', [true, false]);
+ $scope.addOverrideOption('Lines', 'lines', [true, false]);
+ $scope.addOverrideOption('Line fill', 'fill', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+ $scope.addOverrideOption('Line width', 'linewidth', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+ $scope.addOverrideOption('Null point mode', 'nullPointMode', ['connected', 'null', 'null as zero']);
+ $scope.addOverrideOption('Fill below to', 'fillBelowTo', $scope.getSeriesNames());
+ $scope.addOverrideOption('Staircase line', 'steppedLine', [true, false]);
+ $scope.addOverrideOption('Dashes', 'dashes', [true, false]);
+ $scope.addOverrideOption('Dash Length', 'dashLength', [
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ ]);
+ $scope.addOverrideOption('Dash Space', 'spaceLength', [
+ 1,
+ 2,
+ 3,
+ 4,
+ 5,
+ 6,
+ 7,
+ 8,
+ 9,
+ 10,
+ 11,
+ 12,
+ 13,
+ 14,
+ 15,
+ 16,
+ 17,
+ 18,
+ 19,
+ 20,
+ ]);
+ $scope.addOverrideOption('Points', 'points', [true, false]);
+ $scope.addOverrideOption('Points Radius', 'pointradius', [1, 2, 3, 4, 5]);
+ $scope.addOverrideOption('Stack', 'stack', [true, false, 'A', 'B', 'C', 'D']);
+ $scope.addOverrideOption('Color', 'color', ['change']);
+ $scope.addOverrideOption('Y-axis', 'yaxis', [1, 2]);
+ $scope.addOverrideOption('Z-index', 'zindex', [-3, -2, -1, 0, 1, 2, 3]);
+ $scope.addOverrideOption('Transform', 'transform', ['negative-Y']);
+ $scope.addOverrideOption('Legend', 'legend', [true, false]);
+ $scope.updateCurrentOverrides();
}
angular.module('grafana.controllers').controller('SeriesOverridesCtrl', SeriesOverridesCtrl);
diff --git a/public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts
new file mode 100644
index 00000000000..2e7456a132a
--- /dev/null
+++ b/public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts
@@ -0,0 +1,42 @@
+import '../series_overrides_ctrl';
+import { SeriesOverridesCtrl } from '../series_overrides_ctrl';
+
+describe('SeriesOverridesCtrl', () => {
+ let popoverSrv = {};
+ let $scope;
+
+ beforeEach(() => {
+ $scope = {
+ ctrl: {
+ refresh: jest.fn(),
+ render: jest.fn(),
+ seriesList: [],
+ },
+ render: jest.fn(() => {}),
+ };
+ SeriesOverridesCtrl($scope, {}, popoverSrv);
+ });
+
+ describe('When setting an override', () => {
+ beforeEach(() => {
+ $scope.setOverride({ propertyName: 'lines' }, { value: true });
+ });
+
+ it('should set override property', () => {
+ expect($scope.override.lines).toBe(true);
+ });
+
+ it('should update view model', () => {
+ expect($scope.currentOverrides[0].name).toBe('Lines');
+ expect($scope.currentOverrides[0].value).toBe('true');
+ });
+ });
+
+ describe('When removing override', () => {
+ it('click should include option and value index', () => {
+ $scope.setOverride(1, 0);
+ $scope.removeOverride({ propertyName: 'lines' });
+ expect($scope.currentOverrides.length).toBe(0);
+ });
+ });
+});
diff --git a/public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts b/public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts
deleted file mode 100644
index 9e311c0775e..00000000000
--- a/public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common';
-import '../series_overrides_ctrl';
-import helpers from 'test/specs/helpers';
-
-describe('SeriesOverridesCtrl', function() {
- var ctx = new helpers.ControllerTestContext();
- var popoverSrv = {};
-
- beforeEach(angularMocks.module('grafana.services'));
- beforeEach(angularMocks.module('grafana.controllers'));
-
- beforeEach(
- ctx.providePhase({
- popoverSrv: popoverSrv,
- })
- );
-
- beforeEach(
- angularMocks.inject(function($rootScope, $controller) {
- ctx.scope = $rootScope.$new();
- ctx.scope.ctrl = {
- refresh: sinon.spy(),
- render: sinon.spy(),
- seriesList: [],
- };
- ctx.scope.render = function() {};
- ctx.controller = $controller('SeriesOverridesCtrl', {
- $scope: ctx.scope,
- });
- })
- );
-
- describe('When setting an override', function() {
- beforeEach(function() {
- ctx.scope.setOverride({ propertyName: 'lines' }, { value: true });
- });
-
- it('should set override property', function() {
- expect(ctx.scope.override.lines).to.be(true);
- });
-
- it('should update view model', function() {
- expect(ctx.scope.currentOverrides[0].name).to.be('Lines');
- expect(ctx.scope.currentOverrides[0].value).to.be('true');
- });
- });
-
- describe('When removing overide', function() {
- it('click should include option and value index', function() {
- ctx.scope.setOverride(1, 0);
- ctx.scope.removeOverride({ propertyName: 'lines' });
- expect(ctx.scope.currentOverrides.length).to.be(0);
- });
- });
-});
diff --git a/public/app/plugins/panel/singlestat/editor.html b/public/app/plugins/panel/singlestat/editor.html
index 15f4e6a9efa..96576fd3c41 100644
--- a/public/app/plugins/panel/singlestat/editor.html
+++ b/public/app/plugins/panel/singlestat/editor.html
@@ -29,7 +29,7 @@
Font size
-
+
@@ -39,7 +39,7 @@
Font size
-
+
@@ -58,6 +58,10 @@
+
+
+
+