Merge remote-tracking branch 'upstream/master' into postgres-query-builder

Sven Klemm 2018-07-07 20:54:33 +02:00
commit 3f4c808cc5
27 changed files with 181 additions and 116 deletions

.gitignore vendored
@@ -43,6 +43,7 @@ fig.yml
docker-compose.yml
docker-compose.yaml
/conf/provisioning/**/custom.yaml
/conf/provisioning/**/dev.yaml
/conf/ldap_dev.toml
profile.cov
/grafana

@@ -10,10 +10,19 @@
* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps)
* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2)
* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
* **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
* **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
# 5.2.2 (unreleased)
### Minor
* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379)
* **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506)
# 5.2.1 (2018-06-29)
### Minor

devenv/dashboards.yaml Normal file
@@ -0,0 +1,9 @@
apiVersion: 1
providers:
- name: 'gdev dashboards'
folder: 'gdev dashboards'
type: file
options:
path: devenv/dev-dashboards

@@ -1,9 +0,0 @@
apiVersion: 1
providers:
- name: 'dev dashboards'
folder: 'dev dashboards'
type: file
options:
path: devenv/dashboards/dev-dashboards

@@ -1,38 +1,38 @@
apiVersion: 1
datasources:
- name: Graphite
- name: gdev-graphite
type: graphite
access: proxy
url: http://localhost:8080
jsonData:
graphiteVersion: "1.1"
- name: Prometheus
- name: gdev-prometheus
type: prometheus
access: proxy
isDefault: true
url: http://localhost:9090
- name: InfluxDB
- name: gdev-influxdb
type: influxdb
access: proxy
database: site
user: grafana
password: grafana
url: http://localhost:8086
jsonData:
jsonData:
timeInterval: "15s"
- name: OpenTsdb
- name: gdev-opentsdb
type: opentsdb
access: proxy
url: http://localhost:4242
jsonData:
jsonData:
tsdbResolution: 1
tsdbVersion: 1
- name: Elastic
- name: gdev-elasticsearch-metrics
type: elasticsearch
access: proxy
database: "[metrics-]YYYY.MM.DD"
@@ -40,22 +40,22 @@ datasources:
jsonData:
interval: Daily
timeField: "@timestamp"
- name: MySQL
- name: gdev-mysql
type: mysql
url: localhost:3306
database: grafana
user: grafana
password: password
- name: MSSQL
- name: gdev-mssql
type: mssql
url: localhost:1433
database: grafana
user: grafana
password: "Password!"
- name: Postgres
- name: gdev-postgres
type: postgres
url: localhost:5432
database: grafana
@@ -64,7 +64,7 @@ datasources:
jsonData:
sslmode: "disable"
- name: Cloudwatch
- name: gdev-cloudwatch
type: cloudwatch
editable: true
jsonData:

@@ -23,41 +23,36 @@ requiresJsonnet() {
}
defaultDashboards() {
requiresJsonnet
ln -s -f -r ./dashboards/dev-dashboards/dev-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml
ln -s -f ../../../devenv/dashboards.yaml ../conf/provisioning/dashboards/dev.yaml
}
defaultDatasources() {
echo "setting up all default datasources using provisioning"
ln -s -f -r ./datasources/default/default.yaml ../conf/provisioning/datasources/custom.yaml
ln -s -f ../../../devenv/datasources.yaml ../conf/provisioning/datasources/dev.yaml
}
usage() {
echo -e "install.sh\n\tThis script installs my basic setup for a debian laptop\n"
echo -e "install.sh\n\tThis script setups dev provision for datasources and dashboards"
echo "Usage:"
echo " bulk-dashboards - create and provisioning 400 dashboards"
echo " default-datasources - provisiong all core datasources"
echo " no args - provisiong core datasources and dev dashboards"
}
main() {
local cmd=$1
if [[ -z "$cmd" ]]; then
usage
exit 1
fi
if [[ $cmd == "bulk-dashboards" ]]; then
bulkDashboard
elif [[ $cmd == "default-datasources" ]]; then
defaultDatasources
elif [[ $cmd == "default-dashboards" ]]; then
defaultDashboards
else
defaultDashboards
defaultDatasources
fi
if [[ -z "$cmd" ]]; then
usage
fi
}
main "$@"

@@ -77,9 +77,9 @@ Macro example | Description
------------ | -------------
*$__time(dateColumn)* | Will be replaced by an expression to rename the column to *time*. For example, *dateColumn as time*
*$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert a DATETIME column type to unix timestamp and rename it to *time*. <br/>For example, *DATEDIFF(second, '1970-01-01', dateColumn) AS time*
*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. <br/>For example, *dateColumn >= DATEADD(s, 1494410783, '1970-01-01') AND dateColumn <= DATEADD(s, 1494410783, '1970-01-01')*
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. <br/>For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in a GROUP BY clause. Providing a *fillValue* of *NULL* or a floating point value will automatically fill empty series in the time range with that value. <br/>For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*.
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
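For orientation, here is a sketch of how these macros are typically combined in a panel query; the *metric_values* table and its *time_date_time*, *measurement* and *value_double* columns are hypothetical placeholders, not part of this change. With this commit, *$__timeFilter* interpolates to the *BETWEEN* form with RFC3339 timestamps shown above rather than *DATEADD* expressions.

```sql
-- Hypothetical panel query; table and column names are placeholders.
SELECT
  $__timeGroup(time_date_time, '5m') AS time,
  measurement AS metric,
  avg(value_double) AS value
FROM metric_values
WHERE $__timeFilter(time_date_time)
GROUP BY $__timeGroup(time_date_time, '5m'), measurement
ORDER BY 1
```

Grafana expands the macros before the query reaches SQL Server, so the database only ever sees plain T-SQL.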

@@ -60,9 +60,9 @@ Macro example | Description
------------ | -------------
*$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
*$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn > FROM_UNIXTIME(1494410783) AND dateColumn < FROM_UNIXTIME(1494497183)*
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494497183)*
*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'*
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
*$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
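To make the datetime change concrete, the sketch below shows a hypothetical MySQL panel query and, using the sample timestamps from the table above, roughly what it interpolates to after this change; the *metric_values* table and *createdAt* column are placeholders.

```sql
-- Hypothetical query as written in the query editor; names are placeholders.
SELECT
  $__timeGroup(createdAt, '5m') AS time,
  avg(value) AS value
FROM metric_values
WHERE $__timeFilter(createdAt)
GROUP BY 1
ORDER BY 1

-- Roughly what Grafana sends to MySQL after macro interpolation.
SELECT
  cast(cast(UNIX_TIMESTAMP(createdAt)/(300) as signed)*300 as signed) AS time,
  avg(value) AS value
FROM metric_values
WHERE createdAt BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'
GROUP BY 1
ORDER BY 1
```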

@@ -82,11 +82,12 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeFrom":
return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetFromAsSecondsEpoch()), nil
return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
case "__timeTo":
return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetToAsSecondsEpoch()), nil
return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)

@@ -49,7 +49,7 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeGroup function", func() {
@@ -96,14 +96,14 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -137,21 +137,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -185,21 +185,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {

@@ -77,11 +77,12 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil
return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeFrom":
return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetFromAsSecondsEpoch()), nil
return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil
case "__timeTo":
return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetToAsSecondsEpoch()), nil
return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)

@@ -54,21 +54,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {
@@ -150,21 +150,21 @@ func TestMacroEngine(t *testing.T) {
sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
})
Convey("interpolate __unixEpochFilter function", func() {

@@ -41,20 +41,20 @@ function dashLink($compile, $sanitize, linkSrv) {
elem.html(template);
$compile(elem.contents())(scope);
var anchor = elem.find('a');
var icon = elem.find('i');
var span = elem.find('span');
function update() {
var linkInfo = linkSrv.getAnchorInfo(link);
const anchor = elem.find('a');
const span = elem.find('span');
span.text(linkInfo.title);
if (!link.asDropdown) {
anchor.attr('href', linkInfo.href);
sanitizeAnchor();
}
elem.find('a').attr('data-placement', 'bottom');
anchor.attr('data-placement', 'bottom');
// tooltip
elem.find('a').tooltip({
anchor.tooltip({
title: $sanitize(scope.link.tooltip),
html: true,
container: 'body',
@@ -62,12 +62,13 @@ function dashLink($compile, $sanitize, linkSrv) {
}
function sanitizeAnchor() {
const anchor = elem.find('a');
const anchorSanitized = $sanitize(anchor.parent().html());
anchor.parent().html(anchorSanitized);
}
icon.attr('class', 'fa fa-fw ' + scope.link.icon);
anchor.attr('target', scope.link.target);
elem.find('i').attr('class', 'fa fa-fw ' + scope.link.icon);
elem.find('a').attr('target', scope.link.target);
// fix for menus on the far right
if (link.asDropdown && scope.$last) {

@@ -142,10 +142,12 @@ export class DatasourceSrv {
var ds = config.datasources[first];
if (ds) {
const key = `$${variable.name}`;
list.push({
name: '$' + variable.name,
value: '$' + variable.name,
name: key,
value: key,
meta: ds.meta,
sort: key,
});
}
}

@@ -32,8 +32,8 @@
<div class="gf-form">
<label class="gf-form-label query-keyword pointer" ng-click="toggleAccessHelp()">
Help&nbsp;
<i class="fa fa-caret-down" ng-show="ctrl.showAccessHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showAccessHelp">&nbsp;</i>
<i class="fa fa-caret-down" ng-show="showAccessHelp"></i>
<i class="fa fa-caret-right" ng-hide="showAccessHelp">&nbsp;</i>
</label>
</div>
</div>

@@ -2,8 +2,21 @@ import config from 'app/core/config';
import 'app/features/plugins/datasource_srv';
import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
// Datasource variable $datasource with current value 'BBB'
const templateSrv = {
variables: [
{
type: 'datasource',
name: 'datasource',
current: {
value: 'BBB',
},
},
],
};
describe('datasource_srv', function() {
let _datasourceSrv = new DatasourceSrv({}, {}, {}, {});
let _datasourceSrv = new DatasourceSrv({}, {}, {}, templateSrv);
let metricSources;
describe('when loading metric sources', () => {
@@ -35,25 +48,27 @@ describe('datasource_srv', function() {
};
beforeEach(() => {
config.datasources = unsortedDatasources;
metricSources = _datasourceSrv.getMetricSources({ skipVariables: true });
});
it('should return a list of sources sorted case insensitively with builtin sources last', () => {
expect(metricSources[0].name).toBe('aaa');
expect(metricSources[1].name).toBe('BBB');
expect(metricSources[2].name).toBe('mmm');
expect(metricSources[3].name).toBe('ZZZ');
expect(metricSources[4].name).toBe('--Grafana--');
expect(metricSources[5].name).toBe('--Mixed--');
});
beforeEach(() => {
metricSources = _datasourceSrv.getMetricSources({});
config.defaultDatasource = 'BBB';
});
it('should return a list of sources sorted case insensitively with builtin sources last', () => {
expect(metricSources[1].name).toBe('aaa');
expect(metricSources[2].name).toBe('BBB');
expect(metricSources[3].name).toBe('mmm');
expect(metricSources[4].name).toBe('ZZZ');
expect(metricSources[5].name).toBe('--Grafana--');
expect(metricSources[6].name).toBe('--Mixed--');
});
it('should set default data source', () => {
expect(metricSources[2].name).toBe('default');
expect(metricSources[2].sort).toBe('BBB');
expect(metricSources[3].name).toBe('default');
expect(metricSources[3].sort).toBe('BBB');
});
it('should set default inject the variable datasources', () => {
expect(metricSources[0].name).toBe('$datasource');
expect(metricSources[0].sort).toBe('$datasource');
});
});
});

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
Macros:
- $__time(column) -&gt; column AS time
- $__timeEpoch(column) -&gt; DATEDIFF(second, '1970-01-01', column) AS time
- $__timeFilter(column) -&gt; column &gt;= DATEADD(s, 18446744066914186738, '1970-01-01') AND column &lt;= DATEADD(s, 18446744066914187038, '1970-01-01')
- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -&gt; column &gt;= 1492750877 AND column &lt;= 1492750877
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__timeTo() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__timeFrom() -&gt; '2017-04-21T05:01:17Z'
- $__timeTo() -&gt; '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
</pre>

@@ -49,7 +49,7 @@ Table:
Macros:
- $__time(column) -&gt; column AS time
- $__timeEpoch(column) -&gt; DATEDIFF(second, '1970-01-01', column) AS time
- $__timeFilter(column) -&gt; column &gt;= DATEADD(s, 18446744066914186738, '1970-01-01') AND column &lt;= DATEADD(s, 18446744066914187038, '1970-01-01')
- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -&gt; column &gt;= 1492750877 AND column &lt;= 1492750877
- $__timeGroup(column, '5m'[, fillvalue]) -&gt; CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300. Providing a <i>fillValue</i> of <i>NULL</i> or floating value will automatically fill empty series in timerange with that value.
@@ -62,8 +62,8 @@ GROUP BY $__timeGroup(date_time_col, '1h')
ORDER BY 1
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__timeTo() -&gt; DATEADD(second, 1492750877, '1970-01-01')
- $__timeFrom() -&gt; '2017-04-21T05:01:17Z'
- $__timeTo() -&gt; '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
</pre>

@@ -28,12 +28,12 @@ An annotation is an event that is overlaid on top of graphs. The query can have
Macros:
- $__time(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeFilter(column) -&gt; UNIX_TIMESTAMP(time_date_time) &gt; 1492750877 AND UNIX_TIMESTAMP(time_date_time) &lt; 1492750877
- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -&gt; time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
- $__timeTo() -&gt; FROM_UNIXTIME(1492750877)
- $__timeFrom() -&gt; '2017-04-21T05:01:17Z'
- $__timeTo() -&gt; '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
</pre>

@@ -48,7 +48,7 @@ Table:
Macros:
- $__time(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
- $__timeEpoch(column) -&gt; UNIX_TIMESTAMP(column) as time_sec
- $__timeFilter(column) -&gt; UNIX_TIMESTAMP(time_date_time) &ge; 1492750877 AND UNIX_TIMESTAMP(time_date_time) &le; 1492750877
- $__timeFilter(column) -&gt; column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -&gt; time_unix_epoch &gt; 1492750877 AND time_unix_epoch &lt; 1492750877
- $__timeGroup(column,'5m') -&gt; cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
@@ -61,8 +61,8 @@ GROUP BY 1
ORDER BY 1
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -&gt; FROM_UNIXTIME(1492750877)
- $__timeTo() -&gt; FROM_UNIXTIME(1492750877)
- $__timeFrom() -&gt; '2017-04-21T05:01:17Z'
- $__timeTo() -&gt; '2017-04-21T05:01:17Z'
- $__unixEpochFrom() -&gt; 1492750877
- $__unixEpochTo() -&gt; 1492750877
</pre>

@@ -28,15 +28,20 @@ export class ResultTransformer {
}
}
transformMetricData(md, options, start, end) {
transformMetricData(metricData, options, start, end) {
let dps = [],
metricLabel = null;
metricLabel = this.createMetricLabel(md.metric, options);
metricLabel = this.createMetricLabel(metricData.metric, options);
const stepMs = parseInt(options.step) * 1000;
let baseTimestamp = start * 1000;
for (let value of md.values) {
if (metricData.values === undefined) {
throw new Error('Prometheus heatmap error: data should be a time series');
}
for (let value of metricData.values) {
let dp_value = parseFloat(value[1]);
if (_.isNaN(dp_value)) {
dp_value = null;
@@ -164,8 +169,13 @@ export class ResultTransformer {
for (let i = seriesList.length - 1; i > 0; i--) {
let topSeries = seriesList[i].datapoints;
let bottomSeries = seriesList[i - 1].datapoints;
if (!topSeries || !bottomSeries) {
throw new Error('Prometheus heatmap transform error: data should be a time series');
}
for (let j = 0; j < topSeries.length; j++) {
topSeries[j][0] -= bottomSeries[j][0];
const bottomPoint = bottomSeries[j] || [0];
topSeries[j][0] -= bottomPoint[0];
}
}

@@ -126,6 +126,36 @@ describe('Prometheus Result Transformer', () => {
{ target: '3', datapoints: [[10, 1445000010000], [0, 1445000020000], [10, 1445000030000]] },
]);
});
it('should handle missing datapoints', () => {
const seriesList = [
{ datapoints: [[1, 1000], [2, 2000]] },
{ datapoints: [[2, 1000], [5, 2000], [1, 3000]] },
{ datapoints: [[3, 1000], [7, 2000]] },
];
const expected = [
{ datapoints: [[1, 1000], [2, 2000]] },
{ datapoints: [[1, 1000], [3, 2000], [1, 3000]] },
{ datapoints: [[1, 1000], [2, 2000]] },
];
const result = ctx.resultTransformer.transformToHistogramOverTime(seriesList);
expect(result).toEqual(expected);
});
it('should throw error when data in wrong format', () => {
const seriesList = [{ rows: [] }, { datapoints: [] }];
expect(() => {
ctx.resultTransformer.transformToHistogramOverTime(seriesList);
}).toThrow();
});
it('should throw error when prometheus returned non-timeseries', () => {
// should be { metric: {}, values: [] } for timeseries
const metricData = { metric: {}, value: [] };
expect(() => {
ctx.resultTransformer.transformMetricData(metricData, { step: 1 }, 1000, 2000);
}).toThrow();
});
});
describe('When resultFormat is time series', () => {

@@ -302,6 +302,10 @@ export class HeatmapCtrl extends MetricsPanelCtrl {
}
seriesHandler(seriesData) {
if (seriesData.datapoints === undefined) {
throw new Error('Heatmap error: data should be a time series');
}
let series = new TimeSeries({
datapoints: seriesData.datapoints,
alias: seriesData.target,

@@ -36,10 +36,6 @@
}
}
.sidemenu {
display: none;
}
.gf-timepicker-nav-btn {
transform: translate3d(40px, 0, 0);
}