Mirror of https://github.com/grafana/grafana.git

Merge branch 'master' into cloudwatch

.gitignore (vendored, 1 change)
@@ -6,6 +6,7 @@ awsconfig
/emails/dist
/public_gen
/tmp
vendor/phantomjs/phantomjs

docs/AWS_S3_BUCKET
docs/GIT_BRANCH
@@ -25,6 +25,7 @@ module.exports = function(config) {
    browsers: ['PhantomJS'],
    captureTimeout: 60000,
    singleRun: true,
    autoWatchBatchDelay: 1000,

  });
@@ -148,7 +148,7 @@ case "$1" in
    $0 start
    ;;
  *)
    echo -n "Usage: $0 {start|stop|restart|force-reload|status}"
    echo "Usage: $0 {start|stop|restart|force-reload|status}"
    exit 3
    ;;
esac
@@ -36,12 +36,6 @@ func sendUsageStats() {
        "metrics": metrics,
    }

    statsQuery := m.GetSystemStatsQuery{}
    if err := bus.Dispatch(&statsQuery); err != nil {
        log.Error(3, "Failed to get system stats", err)
        return
    }

    UsageStats.Each(func(name string, i interface{}) {
        switch metric := i.(type) {
        case Counter:
@@ -52,11 +46,36 @@ func sendUsageStats() {
        }
    })

    statsQuery := m.GetSystemStatsQuery{}
    if err := bus.Dispatch(&statsQuery); err != nil {
        log.Error(3, "Failed to get system stats", err)
        return
    }

    metrics["stats.dashboards.count"] = statsQuery.Result.DashboardCount
    metrics["stats.users.count"] = statsQuery.Result.UserCount
    metrics["stats.orgs.count"] = statsQuery.Result.OrgCount

    out, _ := json.Marshal(report)
    dsStats := m.GetDataSourceStatsQuery{}
    if err := bus.Dispatch(&dsStats); err != nil {
        log.Error(3, "Failed to get datasource stats", err)
        return
    }

    // send counters for each data source
    // but ignore any custom data sources
    // as sending that name could be sensitive information
    dsOtherCount := 0
    for _, dsStat := range dsStats.Result {
        if m.IsStandardDataSource(dsStat.Type) {
            metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count
        } else {
            dsOtherCount += dsStat.Count
        }
    }
    metrics["stats.ds.other.count"] = dsOtherCount

    out, _ := json.MarshalIndent(report, "", "  ")
    data := bytes.NewBuffer(out)

    client := http.Client{Timeout: time.Duration(5 * time.Second)}
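For context on the change above: after this refactor, sendUsageStats builds one metrics map (dashboard/user/org counts plus per-datasource-type counters) and POSTs it as indented JSON. A minimal standalone sketch of that flow; the report envelope, the endpoint URL, and the example counts are assumptions for illustration, not values taken from the diff:

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
    "time"
)

func main() {
    // Metrics assembled the same way the diff does: system stats plus
    // per-type datasource counters, with non-standard types in "other".
    metrics := map[string]interface{}{
        "stats.dashboards.count":  235,
        "stats.users.count":       10,
        "stats.orgs.count":        2,
        "stats.ds.graphite.count": 3,
        "stats.ds.other.count":    1,
    }
    // The report envelope and endpoint URL are hypothetical.
    report := map[string]interface{}{"metrics": metrics}

    out, _ := json.MarshalIndent(report, "", "  ")
    data := bytes.NewBuffer(out)

    client := http.Client{Timeout: time.Duration(5 * time.Second)}
    resp, err := client.Post("https://stats.example.com/usage", "application/json", data)
    if err != nil {
        fmt.Println("send failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("response status:", resp.Status)
}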
@@ -12,6 +12,8 @@ const (
    DS_ES            = "elasticsearch"
    DS_OPENTSDB      = "opentsdb"
    DS_CLOUDWATCH    = "cloudwatch"
    DS_KAIROSDB      = "kairosdb"
    DS_PROMETHEUS    = "prometheus"
    DS_ACCESS_DIRECT = "direct"
    DS_ACCESS_PROXY  = "proxy"
)
@@ -45,6 +47,25 @@ type DataSource struct {
    Updated time.Time
}

func IsStandardDataSource(dsType string) bool {
    switch dsType {
    case DS_ES:
        return true
    case DS_INFLUXDB:
        return true
    case DS_OPENTSDB:
        return true
    case DS_CLOUDWATCH:
        return true
    case DS_PROMETHEUS:
        return true
    case DS_GRAPHITE:
        return true
    default:
        return false
    }
}

// ----------------------
// COMMANDS
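The new IsStandardDataSource helper is a whitelist: only built-in type names can ever become metric keys, so custom datasource names (which could be sensitive) never leave the server and are folded into stats.ds.other.count instead. Note that DS_KAIROSDB, although defined in the constants above, is not matched by the switch, so kairosdb installs count as "other" here. A small table-driven test sketch for the helper (a hypothetical test file, assuming it sits in the same models package as the constants):

package models

import "testing"

func TestIsStandardDataSource(t *testing.T) {
    cases := []struct {
        dsType string
        want   bool
    }{
        {DS_GRAPHITE, true},
        {DS_CLOUDWATCH, true},
        {DS_KAIROSDB, false},            // not in the switch above
        {"my-custom-datasource", false}, // custom names stay out of the report
    }
    for _, c := range cases {
        if got := IsStandardDataSource(c.dsType); got != c.want {
            t.Errorf("IsStandardDataSource(%q) = %v, want %v", c.dsType, got, c.want)
        }
    }
}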
@@ -6,6 +6,15 @@ type SystemStats struct {
    OrgCount int
}

type DataSourceStats struct {
    Count int
    Type  string
}

type GetSystemStatsQuery struct {
    Result *SystemStats
}

type GetDataSourceStatsQuery struct {
    Result []*DataSourceStats
}
@@ -7,6 +7,18 @@ import (

func init() {
    bus.AddHandler("sql", GetSystemStats)
    bus.AddHandler("sql", GetDataSourceStats)
}

func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error {
    var rawSql = `SELECT COUNT(*) as count, type FROM data_source GROUP BY type`
    query.Result = make([]*m.DataSourceStats, 0)
    err := x.Sql(rawSql).Find(&query.Result)
    if err != nil {
        return err
    }

    return err
}

func GetSystemStats(query *m.GetSystemStatsQuery) error {
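Callers are not expected to invoke GetDataSourceStats directly; like sendUsageStats above, they dispatch the query struct on the bus and read Result afterwards, one row per datasource type from the GROUP BY. A minimal consumption sketch (the package name is hypothetical and the import paths are assumed from the repository layout; the printing is purely illustrative):

package usagestats // hypothetical package

import (
    "fmt"

    "github.com/grafana/grafana/pkg/bus"
    m "github.com/grafana/grafana/pkg/models"
)

// printDataSourceStats mirrors how sendUsageStats consumes the new query.
func printDataSourceStats() error {
    dsStats := m.GetDataSourceStatsQuery{}
    if err := bus.Dispatch(&dsStats); err != nil {
        return err
    }
    for _, stat := range dsStats.Result {
        // e.g. "graphite: 3", one line per datasource type
        fmt.Printf("%s: %d\n", stat.Type, stat.Count)
    }
    return nil
}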
@@ -106,7 +106,7 @@ _.each(rangeOptions, function (frame) {
      }
    }
  } else {
    opt.display = 'parse error';
    opt.display = opt.from + ' to ' + opt.to;
    opt.invalid = true;
  }
@@ -14,7 +14,7 @@
<div class="text-center">
  <h4>New title</h4>

  <input type="text" class="input input-fluid" ng-model="clone.title" give-focus="clone" ng-keydown="keyDown($event)">
  <input type="text" class="input input-fluid" ng-model="clone.title" give-focus="true" ng-keydown="keyDown($event)">
  <br>
  <br>
@@ -1,7 +1,7 @@
define([
  'angular',
  'lodash',
  './queryDef',
  './query_def',
],
function (angular, _, queryDef) {
  'use strict';
@@ -3,10 +3,10 @@ define([
  'lodash',
  'moment',
  'kbn',
  './queryBuilder',
  './indexPattern',
  './elasticResponse',
  './queryCtrl',
  './query_builder',
  './index_pattern',
  './elastic_response',
  './query_ctrl',
  './directives'
],
function (angular, _, moment, kbn, ElasticQueryBuilder, IndexPattern, ElasticResponse) {
@@ -1,7 +1,7 @@
define([
  'angular',
  './bucketAgg',
  './metricAgg',
  './bucket_agg',
  './metric_agg',
],
function (angular) {
  'use strict';
@@ -1,6 +1,6 @@
define([
  "lodash",
  "./queryDef"
  "./query_def"
],
function (_, queryDef) {
  'use strict';
@@ -1,7 +1,7 @@
define([
  'angular',
  'lodash',
  './queryDef'
  './query_def'
],
function (angular, _, queryDef) {
  'use strict';
@@ -0,0 +1,74 @@
///<amd-dependency path="../datasource" />
///<amd-dependency path="test/specs/helpers" name="helpers" />

import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
import moment = require('moment');
import angular = require('angular');

declare var helpers: any;

describe('ElasticDatasource', function() {
  var ctx = new helpers.ServiceTestContext();

  beforeEach(angularMocks.module('grafana.services'));
  beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
  beforeEach(ctx.createService('ElasticDatasource'));
  beforeEach(function() {
    ctx.ds = new ctx.service({jsonData: {}});
  });

  describe('When testing datasource with index pattern', function() {
    beforeEach(function() {
      ctx.ds = new ctx.service({
        url: 'http://es.com',
        index: '[asd-]YYYY.MM.DD',
        jsonData: { interval: 'Daily' }
      });
    });

    it('should translate index pattern to current day', function() {
      var requestOptions;
      ctx.backendSrv.datasourceRequest = function(options) {
        requestOptions = options;
        return ctx.$q.when({});
      };

      ctx.ds.testDatasource();
      ctx.$rootScope.$apply();

      var today = moment().format("YYYY.MM.DD");
      expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
    });
  });

  describe('When issuing metric query with interval pattern', function() {
    beforeEach(function() {
      ctx.ds = new ctx.service({
        url: 'http://es.com',
        index: '[asd-]YYYY.MM.DD',
        jsonData: { interval: 'Daily' }
      });
    });

    it('should translate index pattern to days in time range', function() {
      var requestOptions;
      ctx.backendSrv.datasourceRequest = function(options) {
        requestOptions = options;
        return ctx.$q.when({data: {responses: []}});
      };

      ctx.ds.query({
        range: {
          from: moment([2015, 4, 30, 10]),
          to: moment([2015, 5, 1, 10])
        },
        targets: [{ bucketAggs: [], metrics: [] }]
      });

      ctx.$rootScope.$apply();
      var parts = requestOptions.data.split('\n');
      var header = angular.fromJson(parts[0]);
      expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
    });
  });
});
@@ -0,0 +1,414 @@
///<amd-dependency path="../elastic_response" name="ElasticResponse"/>

import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';

declare var ElasticResponse: any;

describe('ElasticResponse', function() {
  var targets;
  var response;
  var result;

  describe('simple query and count', function() {

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'count', id: '1'}],
        bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
      }];
      response = {
        responses: [{
          aggregations: {
            "2": {
              buckets: [
                {
                  doc_count: 10,
                  key: 1000
                },
                {
                  doc_count: 15,
                  key: 2000
                }
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 1 series', function() {
      expect(result.data.length).to.be(1);
      expect(result.data[0].target).to.be('Count');
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].datapoints[0][0]).to.be(10);
      expect(result.data[0].datapoints[0][1]).to.be(1000);
    });

  });

  describe('simple query count & avg aggregation', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
        bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
      }];
      response = {
        responses: [{
          aggregations: {
            "3": {
              buckets: [
                {
                  "2": {value: 88},
                  doc_count: 10,
                  key: 1000
                },
                {
                  "2": {value: 99},
                  doc_count: 15,
                  key: 2000
                }
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', function() {
      expect(result.data.length).to.be(2);
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].datapoints[0][0]).to.be(10);
      expect(result.data[0].datapoints[0][1]).to.be(1000);

      expect(result.data[1].target).to.be("Average value");
      expect(result.data[1].datapoints[0][0]).to.be(88);
      expect(result.data[1].datapoints[1][0]).to.be(99);
    });

  });

  describe('single group by query one metric', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'count', id: '1'}],
        bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
      }];
      response = {
        responses: [{
          aggregations: {
            "2": {
              buckets: [
                {
                  "3": {
                    buckets: [
                      {doc_count: 1, key: 1000},
                      {doc_count: 3, key: 2000}
                    ]
                  },
                  doc_count: 4,
                  key: 'server1',
                },
                {
                  "3": {
                    buckets: [
                      {doc_count: 2, key: 1000},
                      {doc_count: 8, key: 2000}
                    ]
                  },
                  doc_count: 10,
                  key: 'server2',
                },
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', function() {
      expect(result.data.length).to.be(2);
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].target).to.be('server1');
      expect(result.data[1].target).to.be('server2');
    });
  });

  describe('single group by query two metrics', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'count', id: '1'}, {type: 'avg', field: '@value', id: '4'}],
        bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
      }];
      response = {
        responses: [{
          aggregations: {
            "2": {
              buckets: [
                {
                  "3": {
                    buckets: [
                      { "4": {value: 10}, doc_count: 1, key: 1000},
                      { "4": {value: 12}, doc_count: 3, key: 2000}
                    ]
                  },
                  doc_count: 4,
                  key: 'server1',
                },
                {
                  "3": {
                    buckets: [
                      { "4": {value: 20}, doc_count: 1, key: 1000},
                      { "4": {value: 32}, doc_count: 3, key: 2000}
                    ]
                  },
                  doc_count: 10,
                  key: 'server2',
                },
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 4 series', function() {
      expect(result.data.length).to.be(4);
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].target).to.be('server1 Count');
      expect(result.data[1].target).to.be('server1 Average @value');
      expect(result.data[2].target).to.be('server2 Count');
      expect(result.data[3].target).to.be('server2 Average @value');
    });
  });

  describe('with percentiles', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
        bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
      }];
      response = {
        responses: [{
          aggregations: {
            "3": {
              buckets: [
                {
                  "1": {values: {"75": 3.3, "90": 5.5}},
                  doc_count: 10,
                  key: 1000
                },
                {
                  "1": {values: {"75": 2.3, "90": 4.5}},
                  doc_count: 15,
                  key: 2000
                }
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', function() {
      expect(result.data.length).to.be(2);
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].target).to.be('p75');
      expect(result.data[1].target).to.be('p90');
      expect(result.data[0].datapoints[0][0]).to.be(3.3);
      expect(result.data[0].datapoints[0][1]).to.be(1000);
      expect(result.data[1].datapoints[1][0]).to.be(4.5);
    });
  });

  describe('with extended_stats', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
        bucketAggs: [{type: 'terms', field: 'host', id: '3'}, {type: 'date_histogram', id: '4'}],
      }];
      response = {
        responses: [{
          aggregations: {
            "3": {
              buckets: [
                {
                  key: 'server1',
                  "4": {
                    buckets: [{
                      "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
                      doc_count: 10,
                      key: 1000
                    }]
                  }
                },
                {
                  key: 'server2',
                  "4": {
                    buckets: [{
                      "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
                      doc_count: 10,
                      key: 1000
                    }]
                  }
                },
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 4 series', function() {
      expect(result.data.length).to.be(4);
      expect(result.data[0].datapoints.length).to.be(1);
      expect(result.data[0].target).to.be('server1 Max');
      expect(result.data[1].target).to.be('server1 Std Dev Upper');

      expect(result.data[0].datapoints[0][0]).to.be(10.2);
      expect(result.data[1].datapoints[0][0]).to.be(3);
    });
  });

  describe('single group by with alias pattern', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'count', id: '1'}],
        alias: '{{term @host}} {{metric}} and!',
        bucketAggs: [
          {type: 'terms', field: '@host', id: '2'},
          {type: 'date_histogram', field: '@timestamp', id: '3'}
        ],
      }];
      response = {
        responses: [{
          aggregations: {
            "2": {
              buckets: [
                {
                  "3": {
                    buckets: [
                      {doc_count: 1, key: 1000},
                      {doc_count: 3, key: 2000}
                    ]
                  },
                  doc_count: 4,
                  key: 'server1',
                },
                {
                  "3": {
                    buckets: [
                      {doc_count: 2, key: 1000},
                      {doc_count: 8, key: 2000}
                    ]
                  },
                  doc_count: 10,
                  key: 'server2',
                },
              ]
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', function() {
      expect(result.data.length).to.be(2);
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].target).to.be('server1 Count and!');
      expect(result.data[1].target).to.be('server2 Count and!');
    });
  });

  describe('with two filters agg', function() {
    var result;

    beforeEach(function() {
      targets = [{
        refId: 'A',
        metrics: [{type: 'count', id: '1'}],
        bucketAggs: [
          {
            id: '2',
            type: 'filters',
            settings: {
              filters: [
                {query: '@metric:cpu' },
                {query: '@metric:logins.count' },
              ]
            }
          },
          {type: 'date_histogram', field: '@timestamp', id: '3'}
        ],
      }];
      response = {
        responses: [{
          aggregations: {
            "2": {
              buckets: {
                "@metric:cpu": {
                  "3": {
                    buckets: [
                      {doc_count: 1, key: 1000},
                      {doc_count: 3, key: 2000}
                    ]
                  },
                },
                "@metric:logins.count": {
                  "3": {
                    buckets: [
                      {doc_count: 2, key: 1000},
                      {doc_count: 8, key: 2000}
                    ]
                  },
                },
              }
            }
          }
        }]
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', function() {
      expect(result.data.length).to.be(2);
      expect(result.data[0].datapoints.length).to.be(2);
      expect(result.data[0].target).to.be('@metric:cpu');
      expect(result.data[1].target).to.be('@metric:logins.count');
    });
  });

});
@@ -0,0 +1,51 @@
///<amd-dependency path="../index_pattern" name="IndexPattern"/>
///<amd-dependency path="test/specs/helpers" name="helpers" />

import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
import moment = require('moment');

declare var IndexPattern: any;

describe('IndexPattern', function() {

  describe('when getting index for today', function() {
    it('should return correct index name', function() {
      var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
      var expected = 'asd-' + moment().format('YYYY.MM.DD');

      expect(pattern.getIndexForToday()).to.be(expected);
    });
  });

  describe('when getting index list for time range', function() {

    describe('no interval', function() {
      it('should return correct index', function() {
        var pattern = new IndexPattern('my-metrics');
        var from = new Date(2015, 4, 30, 1, 2, 3);
        var to = new Date(2015, 5, 1, 12, 5, 6);
        expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
      });
    });

    describe('daily', function() {

      it('should return correct index list', function() {
        var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
        var from = new Date(1432940523000);
        var to = new Date(1433153106000);

        var expected = [
          'asd-2015.05.29',
          'asd-2015.05.30',
          'asd-2015.05.31',
          'asd-2015.06.01',
        ];

        expect(pattern.getIndexList(from, to)).to.eql(expected);
      });

    });
  });

});
@@ -0,0 +1,123 @@
///<amd-dependency path="../query_builder" name="ElasticQueryBuilder"/>

import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';

declare var ElasticQueryBuilder: any;

describe('ElasticQueryBuilder', function() {
  var builder;

  beforeEach(function() {
    builder = new ElasticQueryBuilder({timeField: '@timestamp'});
  });

  it('with defaults', function() {
    var query = builder.build({
      metrics: [{type: 'Count', id: '0'}],
      timeField: '@timestamp',
      bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '1'}],
    });

    expect(query.query.filtered.filter.bool.must[0].range["@timestamp"].gte).to.be("$timeFrom");
    expect(query.aggs["1"].date_histogram.extended_bounds.min).to.be("$timeFrom");
  });

  it('with raw query', function() {
    var query = builder.build({
      rawQuery: '{"query": "$lucene_query"}',
    });

    expect(query.query).to.be("$lucene_query");
  });

  it('with multiple bucket aggs', function() {
    var query = builder.build({
      metrics: [{type: 'count', id: '1'}],
      timeField: '@timestamp',
      bucketAggs: [
        {type: 'terms', field: '@host', id: '2'},
        {type: 'date_histogram', field: '@timestamp', id: '3'}
      ],
    });

    expect(query.aggs["2"].terms.field).to.be("@host");
    expect(query.aggs["2"].aggs["3"].date_histogram.field).to.be("@timestamp");
  });

  it('with select field', function() {
    var query = builder.build({
      metrics: [{type: 'avg', field: '@value', id: '1'}],
      bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
    }, 100, 1000);

    var aggs = query.aggs["2"].aggs;
    expect(aggs["1"].avg.field).to.be("@value");
  });

  it('with term agg and order by metric agg', function() {
    var query = builder.build({
      metrics: [
        {type: 'count', id: '1'},
        {type: 'avg', field: '@value', id: '5'}
      ],
      bucketAggs: [
        {type: 'terms', field: '@host', settings: {size: 5, order: 'asc', orderBy: '5'}, id: '2' },
        {type: 'date_histogram', field: '@timestamp', id: '3'}
      ],
    }, 100, 1000);

    var firstLevel = query.aggs["2"];
    var secondLevel = firstLevel.aggs["3"];

    expect(firstLevel.aggs["5"].avg.field).to.be("@value");
    expect(secondLevel.aggs["5"].avg.field).to.be("@value");
  });

  it('with metric percentiles', function() {
    var query = builder.build({
      metrics: [
        {
          id: '1',
          type: 'percentiles',
          field: '@load_time',
          settings: {
            percents: [1,2,3,4]
          }
        }
      ],
      bucketAggs: [
        {type: 'date_histogram', field: '@timestamp', id: '3'}
      ],
    }, 100, 1000);

    var firstLevel = query.aggs["3"];

    expect(firstLevel.aggs["1"].percentiles.field).to.be("@load_time");
    expect(firstLevel.aggs["1"].percentiles.percents).to.eql([1,2,3,4]);
  });

  it('with filters aggs', function() {
    var query = builder.build({
      metrics: [{type: 'count', id: '1'}],
      timeField: '@timestamp',
      bucketAggs: [
        {
          id: '2',
          type: 'filters',
          settings: {
            filters: [
              {query: '@metric:cpu' },
              {query: '@metric:logins.count' },
            ]
          }
        },
        {type: 'date_histogram', field: '@timestamp', id: '4'}
      ],
    });

    expect(query.aggs["2"].filters.filters["@metric:cpu"].query.query_string.query).to.be("@metric:cpu");
    expect(query.aggs["2"].filters.filters["@metric:logins.count"].query.query_string.query).to.be("@metric:logins.count");
    expect(query.aggs["2"].aggs["4"].date_histogram.field).to.be("@timestamp");
  });

});
@@ -0,0 +1,30 @@
///<amd-dependency path="../query_ctrl" />
///<amd-dependency path="app/services/uiSegmentSrv" />
///<amd-dependency path="test/specs/helpers" name="helpers" />

import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';

declare var helpers: any;

describe('ElasticQueryCtrl', function() {
  var ctx = new helpers.ControllerTestContext();

  beforeEach(angularMocks.module('grafana.controllers'));
  beforeEach(angularMocks.module('grafana.services'));
  beforeEach(ctx.providePhase());
  beforeEach(ctx.createControllerPhase('ElasticQueryCtrl'));

  beforeEach(function() {
    ctx.scope.target = {};
    ctx.scope.$parent = { get_data: sinon.spy() };

    ctx.scope.datasource = ctx.datasource;
    ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
  });

  describe('init', function() {
    beforeEach(function() {
      ctx.scope.init();
    });
  });
});
@@ -5,9 +5,9 @@ define([
  'config',
  'app/core/utils/datemath',
  './directives',
  './queryCtrl',
  './funcEditor',
  './addGraphiteFunc',
  './query_ctrl',
  './func_editor',
  './add_graphite_func',
],
function (angular, _, $, config, dateMath) {
  'use strict';

@@ -87,6 +87,13 @@ function (_, $) {
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'multiplySeries',
    params: optionalSeriesRefArgs,
    defaultParams: ['#A'],
    category: categories.Calculate,
  });

  addFuncDef({
    name: 'asPercent',
    params: optionalSeriesRefArgs,
public/app/plugins/datasource/graphite/specs/datasource_specs.ts (new file, 120 lines)
@@ -0,0 +1,120 @@
///<amd-dependency path="app/plugins/datasource/graphite/datasource" />
///<amd-dependency path="test/specs/helpers" name="helpers" />

import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
declare var helpers: any;

describe('graphiteDatasource', function() {
  var ctx = new helpers.ServiceTestContext();

  beforeEach(angularMocks.module('grafana.services'));
  beforeEach(ctx.providePhase(['backendSrv']));

  beforeEach(ctx.createService('GraphiteDatasource'));
  beforeEach(function() {
    ctx.ds = new ctx.service({ url: [''] });
  });

  describe('When querying graphite with one target using query editor target spec', function() {
    var query = {
      rangeRaw: { from: 'now-1h', to: 'now' },
      targets: [{ target: 'prod1.count' }, {target: 'prod2.count'}],
      maxDataPoints: 500,
    };

    var results;
    var requestOptions;

    beforeEach(function() {
      ctx.backendSrv.datasourceRequest = function(options) {
        requestOptions = options;
        return ctx.$q.when({data: [{ target: 'prod1.count', datapoints: [[10, 1], [12,1]] }]});
      };

      ctx.ds.query(query).then(function(data) { results = data; });
      ctx.$rootScope.$apply();
    });

    it('should generate the correct query', function() {
      expect(requestOptions.url).to.be('/render');
    });

    it('should query correctly', function() {
      var params = requestOptions.data.split('&');
      expect(params).to.contain('target=prod1.count');
      expect(params).to.contain('target=prod2.count');
      expect(params).to.contain('from=-1h');
      expect(params).to.contain('until=now');
    });

    it('should exclude undefined params', function() {
      var params = requestOptions.data.split('&');
      expect(params).to.not.contain('cacheTimeout=undefined');
    });

    it('should return series list', function() {
      expect(results.data.length).to.be(1);
      expect(results.data[0].target).to.be('prod1.count');
    });

    it('should convert to millisecond resolution', function() {
      expect(results.data[0].datapoints[0][0]).to.be(10);
    });

  });

  describe('building graphite params', function() {

    it('should uri escape targets', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: 'prod1.{test,test2}'}, {target: 'prod2.count'}]
      });
      expect(results).to.contain('target=prod1.%7Btest%2Ctest2%7D');
    });

    it('should replace target placeholder', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: 'series1'}, {target: 'series2'}, {target: 'asPercent(#A,#B)'}]
      });
      expect(results[2]).to.be('target=asPercent(series1%2Cseries2)');
    });

    it('should replace target placeholder for hidden series', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: 'series1', hide: true}, {target: 'sumSeries(#A)', hide: true}, {target: 'asPercent(#A,#B)'}]
      });
      expect(results[0]).to.be('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))'));
    });

    it('should replace target placeholder when nesting query references', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: 'series1'}, {target: 'sumSeries(#A)'}, {target: 'asPercent(#A,#B)'}]
      });
      expect(results[2]).to.be('target=' + encodeURIComponent("asPercent(series1,sumSeries(series1))"));
    });

    it('should fix wrong minute interval parameters', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: "summarize(prod.25m.count, '25m', 'sum')" }]
      });
      expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')"));
    });

    it('should fix wrong month interval parameters', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: "summarize(prod.5M.count, '5M', 'sum')" }]
      });
      expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')"));
    });

    it('should ignore empty targets', function() {
      var results = ctx.ds.buildGraphiteParams({
        targets: [{target: 'series1'}, {target: ''}]
      });
      expect(results.length).to.be(2);
    });

  });

});
public/app/plugins/datasource/graphite/specs/gfunc_specs.ts (new file, 127 lines)
@@ -0,0 +1,127 @@
///<amd-dependency path="app/plugins/datasource/graphite/gfunc" name="gfunc" />

import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';

declare var gfunc: any;

describe('when creating func instance from func names', function() {
  it('should return func instance', function() {
    var func = gfunc.createFuncInstance('sumSeries');
    expect(func).to.be.ok();
    expect(func.def.name).to.equal('sumSeries');
    expect(func.def.params.length).to.equal(5);
    expect(func.def.defaultParams.length).to.equal(1);
  });

  it('should return func instance with shortName', function() {
    var func = gfunc.createFuncInstance('sum');
    expect(func).to.be.ok();
  });

  it('should return func instance from funcDef', function() {
    var func = gfunc.createFuncInstance('sum');
    var func2 = gfunc.createFuncInstance(func.def);
    expect(func2).to.be.ok();
  });

  it('func instance should have text representation', function() {
    var func = gfunc.createFuncInstance('groupByNode');
    func.params[0] = 5;
    func.params[1] = 'avg';
    func.updateText();
    expect(func.text).to.equal("groupByNode(5, avg)");
  });
});

describe('when rendering func instance', function() {

  it('should handle single metric param', function() {
    var func = gfunc.createFuncInstance('sumSeries');
    expect(func.render('hello.metric')).to.equal("sumSeries(hello.metric)");
  });

  it('should include default params if options enable it', function() {
    var func = gfunc.createFuncInstance('scaleToSeconds', { withDefaultParams: true });
    expect(func.render('hello')).to.equal("scaleToSeconds(hello, 1)");
  });

  it('should handle int or interval params with number', function() {
    var func = gfunc.createFuncInstance('movingMedian');
    func.params[0] = '5';
    expect(func.render('hello')).to.equal("movingMedian(hello, 5)");
  });

  it('should handle int or interval params with interval string', function() {
    var func = gfunc.createFuncInstance('movingMedian');
    func.params[0] = '5min';
    expect(func.render('hello')).to.equal("movingMedian(hello, '5min')");
  });

  it('should handle metric param and int param and string param', function() {
    var func = gfunc.createFuncInstance('groupByNode');
    func.params[0] = 5;
    func.params[1] = 'avg';
    expect(func.render('hello.metric')).to.equal("groupByNode(hello.metric, 5, 'avg')");
  });

  it('should handle function with no metric param', function() {
    var func = gfunc.createFuncInstance('randomWalk');
    func.params[0] = 'test';
    expect(func.render(undefined)).to.equal("randomWalk('test')");
  });

  it('should handle function multiple series params', function() {
    var func = gfunc.createFuncInstance('asPercent');
    func.params[0] = '#B';
    expect(func.render('#A')).to.equal("asPercent(#A, #B)");
  });

});

describe('when requesting function categories', function() {
  it('should return function categories', function() {
    var catIndex = gfunc.getCategories();
    expect(catIndex.Special.length).to.be.greaterThan(8);
  });
});

describe('when updating func param', function() {
  it('should update param value and update text representation', function() {
    var func = gfunc.createFuncInstance('summarize', { withDefaultParams: true });
    func.updateParam('1h', 0);
    expect(func.params[0]).to.be('1h');
    expect(func.text).to.be('summarize(1h, sum, false)');
  });

  it('should parse numbers as float', function() {
    var func = gfunc.createFuncInstance('scale');
    func.updateParam('0.001', 0);
    expect(func.params[0]).to.be('0.001');
  });
});

describe('when updating func param with optional second parameter', function() {
  it('should update value and text', function() {
    var func = gfunc.createFuncInstance('aliasByNode');
    func.updateParam('1', 0);
    expect(func.params[0]).to.be('1');
  });

  it('should split text and put value in second param', function() {
    var func = gfunc.createFuncInstance('aliasByNode');
    func.updateParam('4,-5', 0);
    expect(func.params[0]).to.be('4');
    expect(func.params[1]).to.be('-5');
    expect(func.text).to.be('aliasByNode(4, -5)');
  });

  it('should remove second param when empty string is set', function() {
    var func = gfunc.createFuncInstance('aliasByNode');
    func.updateParam('4,-5', 0);
    func.updateParam('', 1);
    expect(func.params[0]).to.be('4');
    expect(func.params[1]).to.be(undefined);
    expect(func.text).to.be('aliasByNode(4)');
  });
});
public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts (new file, 178 lines)
@@ -0,0 +1,178 @@
///<amd-dependency path="app/plugins/datasource/graphite/gfunc" name="gfunc"/>
///<amd-dependency path="app/plugins/datasource/graphite/query_ctrl" />
///<amd-dependency path="app/services/uiSegmentSrv" />
///<amd-dependency path="test/specs/helpers" name="helpers" />

import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';

declare var gfunc: any;
declare var helpers: any;

describe('GraphiteQueryCtrl', function() {
  var ctx = new helpers.ControllerTestContext();

  beforeEach(angularMocks.module('grafana.controllers'));
  beforeEach(angularMocks.module('grafana.services'));
  beforeEach(ctx.providePhase());
  beforeEach(ctx.createControllerPhase('GraphiteQueryCtrl'));

  beforeEach(function() {
    ctx.scope.target = {target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)'};

    ctx.scope.datasource = ctx.datasource;
    ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
  });

  describe('init', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.$digest();
    });

    it('should validate metric key exists', function() {
      expect(ctx.scope.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*');
    });

    it('should delete last segment if no metrics are found', function() {
      expect(ctx.scope.segments[2].value).to.be('select metric');
    });

    it('should parse expression and build function model', function() {
      expect(ctx.scope.functions.length).to.be(2);
    });
  });

  describe('when adding function', function() {
    beforeEach(function() {
      ctx.scope.target.target = 'test.prod.*.count';
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
      ctx.scope.init();
      ctx.scope.$digest();

      ctx.scope.$parent = { get_data: sinon.spy() };
      ctx.scope.addFunction(gfunc.getFuncDef('aliasByNode'));
    });

    it('should add function with correct node number', function() {
      expect(ctx.scope.functions[0].params[0]).to.be(2);
    });

    it('should update target', function() {
      expect(ctx.scope.target.target).to.be('aliasByNode(test.prod.*.count, 2)');
    });

    it('should call get_data', function() {
      expect(ctx.scope.$parent.get_data.called).to.be(true);
    });
  });

  describe('when adding function before any metric segment', function() {
    beforeEach(function() {
      ctx.scope.target.target = '';
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: true}]));
      ctx.scope.init();
      ctx.scope.$digest();

      ctx.scope.$parent = { get_data: sinon.spy() };
      ctx.scope.addFunction(gfunc.getFuncDef('asPercent'));
    });

    it('should add function and remove select metric link', function() {
      expect(ctx.scope.segments.length).to.be(0);
    });
  });

  describe('when initializing target without metric expression and only function', function() {
    beforeEach(function() {
      ctx.scope.target.target = 'asPercent(#A, #B)';
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
      ctx.scope.init();
      ctx.scope.$digest();
      ctx.scope.$parent = { get_data: sinon.spy() };
    });

    it('should not add select metric segment', function() {
      expect(ctx.scope.segments.length).to.be(0);
    });

    it('should add both series refs as params', function() {
      expect(ctx.scope.functions[0].params.length).to.be(2);
    });

  });

  describe('when initializing a target with single param func using variable', function() {
    beforeEach(function() {
      ctx.scope.target.target = 'movingAverage(prod.count, $var)';
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
      ctx.scope.init();
      ctx.scope.$digest();
      ctx.scope.$parent = { get_data: sinon.spy() };
    });

    it('should add 2 segments', function() {
      expect(ctx.scope.segments.length).to.be(2);
    });

    it('should add function param', function() {
      expect(ctx.scope.functions[0].params.length).to.be(1);
    });

  });

  describe('when initializing target without metric expression and function with series-ref', function() {
    beforeEach(function() {
      ctx.scope.target.target = 'asPercent(metric.node.count, #A)';
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
      ctx.scope.init();
      ctx.scope.$digest();
      ctx.scope.$parent = { get_data: sinon.spy() };
    });

    it('should add segments', function() {
      expect(ctx.scope.segments.length).to.be(3);
    });

    it('should have correct func params', function() {
      expect(ctx.scope.functions[0].params.length).to.be(1);
    });
  });

  describe('when getting altSegments and metricFindQuery returns empty array', function() {
    beforeEach(function() {
      ctx.scope.target.target = 'test.count';
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
      ctx.scope.init();
      ctx.scope.getAltSegments(1).then(function(results) {
        ctx.altSegments = results;
      });
      ctx.scope.$digest();
      ctx.scope.$parent = { get_data: sinon.spy() };
    });

    it('should have no segments', function() {
      expect(ctx.altSegments.length).to.be(0);
    });

  });

  describe('targetChanged', function() {
    beforeEach(function() {
      ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
      ctx.scope.init();
      ctx.scope.$digest();

      ctx.scope.$parent = { get_data: sinon.spy() };
      ctx.scope.target.target = '';
      ctx.scope.targetChanged();
    });

    it('should rebuild target after expression model', function() {
      expect(ctx.scope.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)');
    });

    it('should call get_data', function() {
      expect(ctx.scope.$parent.get_data.called).to.be(true);
    });
  });
});
@@ -2,10 +2,10 @@ define([
  'angular',
  'lodash',
  'app/core/utils/datemath',
  './influxSeries',
  './queryBuilder',
  './influx_series',
  './query_builder',
  './directives',
  './queryCtrl',
  './query_ctrl',
],
function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
  'use strict';

@@ -1,7 +1,7 @@
define([
  'angular',
  'lodash',
  './queryBuilder',
  './query_builder',
],
function (angular, _, InfluxQueryBuilder) {
  'use strict';
@@ -0,0 +1,190 @@
///<amd-dependency path="app/plugins/datasource/influxdb/influx_series" name="InfluxSeries"/>

import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';

declare var InfluxSeries: any;

describe('when generating timeseries from influxdb response', function() {

  describe('given multiple fields for series', function() {
    var options = {
      alias: '',
      series: [
        {
          name: 'cpu',
          tags: {app: 'test', server: 'server1'},
          columns: ['time', 'mean', 'max', 'min'],
          values: [[1431946625000, 10, 11, 9], [1431946626000, 20, 21, 19]]
        }
      ]
    };
    describe('and no alias', function() {
      it('should generate multiple datapoints for each column', function() {
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result.length).to.be(3);
        expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
        expect(result[0].datapoints[0][0]).to.be(10);
        expect(result[0].datapoints[0][1]).to.be(1431946625000);
        expect(result[0].datapoints[1][0]).to.be(20);
        expect(result[0].datapoints[1][1]).to.be(1431946626000);

        expect(result[1].target).to.be('cpu.max {app: test, server: server1}');
        expect(result[1].datapoints[0][0]).to.be(11);
        expect(result[1].datapoints[0][1]).to.be(1431946625000);
        expect(result[1].datapoints[1][0]).to.be(21);
        expect(result[1].datapoints[1][1]).to.be(1431946626000);

        expect(result[2].target).to.be('cpu.min {app: test, server: server1}');
        expect(result[2].datapoints[0][0]).to.be(9);
        expect(result[2].datapoints[0][1]).to.be(1431946625000);
        expect(result[2].datapoints[1][0]).to.be(19);
        expect(result[2].datapoints[1][1]).to.be(1431946626000);

      });
    });

    describe('and simple alias', function() {
      it('should use alias', function() {
        options.alias = 'new series';
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result[0].target).to.be('new series');
        expect(result[1].target).to.be('new series');
        expect(result[2].target).to.be('new series');
      });

    });

    describe('and alias patterns', function() {
      it('should replace patterns', function() {
        options.alias = 'alias: $m -> $tag_server ([[measurement]])';
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
        expect(result[1].target).to.be('alias: cpu -> server1 (cpu)');
        expect(result[2].target).to.be('alias: cpu -> server1 (cpu)');
      });

    });
  });

  describe('given measurement with default fieldname', function() {
    var options = { series: [
      {
        name: 'cpu',
        tags: {app: 'test', server: 'server1'},
        columns: ['time', 'value'],
        values: [["2015-05-18T10:57:05Z", 10], ["2015-05-18T10:57:06Z", 12]]
      },
      {
        name: 'cpu',
        tags: {app: 'test2', server: 'server2'},
        columns: ['time', 'value'],
        values: [["2015-05-18T10:57:05Z", 15], ["2015-05-18T10:57:06Z", 16]]
      }
    ]};

    describe('and no alias', function() {

      it('should generate label with no field', function() {
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result[0].target).to.be('cpu {app: test, server: server1}');
        expect(result[1].target).to.be('cpu {app: test2, server: server2}');
      });
    });

  });

  describe('given two series', function() {
    var options = {
      alias: '',
      series: [
        {
          name: 'cpu',
          tags: {app: 'test', server: 'server1'},
          columns: ['time', 'mean'],
          values: [[1431946625000, 10], [1431946626000, 12]]
        },
        {
          name: 'cpu',
          tags: {app: 'test2', server: 'server2'},
          columns: ['time', 'mean'],
          values: [[1431946625000, 15], [1431946626000, 16]]
        }
      ]
    };

    describe('and no alias', function() {

      it('should generate two time series', function() {
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result.length).to.be(2);
        expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
        expect(result[0].datapoints[0][0]).to.be(10);
        expect(result[0].datapoints[0][1]).to.be(1431946625000);
        expect(result[0].datapoints[1][0]).to.be(12);
        expect(result[0].datapoints[1][1]).to.be(1431946626000);

        expect(result[1].target).to.be('cpu.mean {app: test2, server: server2}');
        expect(result[1].datapoints[0][0]).to.be(15);
        expect(result[1].datapoints[0][1]).to.be(1431946625000);
        expect(result[1].datapoints[1][0]).to.be(16);
        expect(result[1].datapoints[1][1]).to.be(1431946626000);
      });
    });

    describe('and simple alias', function() {
      it('should use alias', function() {
        options.alias = 'new series';
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result[0].target).to.be('new series');
      });

    });

    describe('and alias patterns', function() {
      it('should replace patterns', function() {
        options.alias = 'alias: $m -> $tag_server ([[measurement]])';
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
        expect(result[1].target).to.be('alias: cpu -> server2 (cpu)');
      });

    });

  });

  describe('given measurement with dots', function() {
    var options = {
      alias: '',
      series: [
        {
          name: 'app.prod.server1.count',
          tags: {},
          columns: ['time', 'mean'],
          values: [[1431946625000, 10], [1431946626000, 12]]
        }
      ]
    };

    it('should replace patterns', function() {
      options.alias = 'alias: $1 -> [[3]]';
      var series = new InfluxSeries(options);
      var result = series.getTimeSeries();

      expect(result[0].target).to.be('alias: prod -> count');
    });
  });

});
@@ -0,0 +1,186 @@
|
||||
///<amd-dependency path="app/plugins/datasource/influxdb/query_builder" name="InfluxQueryBuilder"/>
|
||||
|
||||
import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
|
||||
|
||||
declare var InfluxQueryBuilder: any;
|
||||
|
||||
describe('InfluxQueryBuilder', function() {
|
||||
|
||||
describe('series with mesurement only', function() {
|
||||
it('should generate correct query', function() {
|
||||
var builder = new InfluxQueryBuilder({
|
||||
measurement: 'cpu',
|
||||
groupBy: [{type: 'time', interval: 'auto'}]
|
||||
});
|
||||
|
||||
var query = builder.build();
|
||||
|
||||
expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('series with math expr and as expr', function() {
|
||||
it('should generate correct query', function() {
|
||||
var builder = new InfluxQueryBuilder({
|
||||
measurement: 'cpu',
|
||||
fields: [{name: 'test', func: 'max', mathExpr: '*2', asExpr: 'new_name'}],
|
||||
groupBy: [{type: 'time', interval: 'auto'}]
|
||||
});
|
||||
|
||||
var query = builder.build();
|
||||
|
||||
expect(query).to.be('SELECT max("test")*2 AS "new_name" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('series with single tag only', function() {
|
||||
it('should generate correct query', function() {
|
||||
var builder = new InfluxQueryBuilder({
|
||||
measurement: 'cpu',
|
||||
groupBy: [{type: 'time', interval: 'auto'}],
|
||||
tags: [{key: 'hostname', value: 'server1'}]
|
||||
});
|
||||
|
||||
var query = builder.build();
|
||||
|
||||
expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND $timeFilter'
|
||||
+ ' GROUP BY time($interval)');
|
||||
});
|
||||
|
||||
it('should switch regex operator with tag value is regex', function() {
|
||||
var builder = new InfluxQueryBuilder({
|
||||
measurement: 'cpu',
|
||||
groupBy: [{type: 'time', interval: 'auto'}],
|
||||
tags: [{key: 'app', value: '/e.*/'}]
|
||||
});
|
||||
|
||||
var query = builder.build();
|
||||
expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "app" =~ /e.*/ AND $timeFilter GROUP BY time($interval)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('series with multiple fields', function() {
|
||||
it('should generate correct query', function() {
|
||||
var builder = new InfluxQueryBuilder({
|
||||
measurement: 'cpu',
|
||||
tags: [],
|
||||
groupBy: [{type: 'time', interval: 'auto'}],
|
||||
        fields: [{ name: 'tx_in', func: 'sum' }, { name: 'tx_out', func: 'mean' }]
      });

      var query = builder.build();
      expect(query).to.be('SELECT sum("tx_in") AS "tx_in", mean("tx_out") AS "tx_out" ' +
                          'FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
    });
  });

  describe('series with multiple tags only', function() {
    it('should generate correct query', function() {
      var builder = new InfluxQueryBuilder({
        measurement: 'cpu',
        groupBy: [{type: 'time', interval: 'auto'}],
        tags: [{key: 'hostname', value: 'server1'}, {key: 'app', value: 'email', condition: "AND"}]
      });

      var query = builder.build();
      expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND "app" = \'email\' AND ' +
                          '$timeFilter GROUP BY time($interval)');
    });
  });

  describe('series with tags OR condition', function() {
    it('should generate correct query', function() {
      var builder = new InfluxQueryBuilder({
        measurement: 'cpu',
        groupBy: [{type: 'time', interval: 'auto'}],
        tags: [{key: 'hostname', value: 'server1'}, {key: 'hostname', value: 'server2', condition: "OR"}]
      });

      var query = builder.build();
      expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' OR "hostname" = \'server2\' AND ' +
                          '$timeFilter GROUP BY time($interval)');
    });
  });

  describe('series with groupByTag', function() {
    it('should generate correct query', function() {
      var builder = new InfluxQueryBuilder({
        measurement: 'cpu',
        tags: [],
        groupBy: [{type: 'time', interval: 'auto'}, {type: 'tag', key: 'host'}],
      });

      var query = builder.build();
      expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter ' +
                          'GROUP BY time($interval), "host"');
    });
  });

  describe('when building explore queries', function() {

    it('should only have measurement condition in tag keys query given query with measurement', function() {
      var builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
      var query = builder.buildExploreQuery('TAG_KEYS');
      expect(query).to.be('SHOW TAG KEYS FROM "cpu"');
    });

    it('should handle regex measurement in tag keys query', function() {
      var builder = new InfluxQueryBuilder({
        measurement: '/.*/',
        tags: []
      });
      var query = builder.buildExploreQuery('TAG_KEYS');
      expect(query).to.be('SHOW TAG KEYS FROM /.*/');
    });

    it('should have no conditions in tag keys query given query with no measurement or tag', function() {
      var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
      var query = builder.buildExploreQuery('TAG_KEYS');
      expect(query).to.be('SHOW TAG KEYS');
    });

    it('should have where condition in tag keys query with tags', function() {
      var builder = new InfluxQueryBuilder({ measurement: '', tags: [{key: 'host', value: 'se1'}] });
      var query = builder.buildExploreQuery('TAG_KEYS');
      expect(query).to.be("SHOW TAG KEYS WHERE \"host\" = 'se1'");
    });

    it('should have no conditions in measurement query for query with no tags', function() {
      var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
      var query = builder.buildExploreQuery('MEASUREMENTS');
      expect(query).to.be('SHOW MEASUREMENTS');
    });

    it('should have where condition in measurement query for query with tags', function() {
      var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'email'}]});
      var query = builder.buildExploreQuery('MEASUREMENTS');
      expect(query).to.be("SHOW MEASUREMENTS WHERE \"app\" = 'email'");
    });

    it('should have where tag name IN filter in tag values query for query with one tag', function() {
      var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'asdsadsad'}]});
      var query = builder.buildExploreQuery('TAG_VALUES', 'app');
      expect(query).to.be('SHOW TAG VALUES WITH KEY = "app"');
    });

    it('should have measurement tag condition and tag name IN filter in tag values query', function() {
      var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}, {key: 'host', value: 'server1'}]});
      var query = builder.buildExploreQuery('TAG_VALUES', 'app');
      expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
    });

    it('should switch to regex operator in tag condition', function() {
      var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'host', value: '/server.*/'}]});
      var query = builder.buildExploreQuery('TAG_VALUES', 'app');
      expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" =~ /server.*/');
    });

    it('should build show field query', function() {
      var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}]});
      var query = builder.buildExploreQuery('FIELDS');
      expect(query).to.be('SHOW FIELD KEYS FROM "cpu"');
    });

  });

});
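Editor's note: the specs above pin down both halves of InfluxQueryBuilder. A minimal usage sketch, inferred purely from the assertions above (the builder itself lives in app/plugins/datasource/influxdb/queryBuilder):

    // Sketch only: option names and outputs are copied from the specs above.
    var builder = new InfluxQueryBuilder({
      measurement: 'cpu',
      tags: [],
      groupBy: [{type: 'time', interval: 'auto'}, {type: 'tag', key: 'host'}]
    });

    builder.build();
    // => 'SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter GROUP BY time($interval), "host"'

    builder.buildExploreQuery('TAG_KEYS');
    // => 'SHOW TAG KEYS FROM "cpu"'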
188
public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts
Normal file
@@ -0,0 +1,188 @@
///<amd-dependency path="app/plugins/datasource/influxdb/query_ctrl"/>
///<amd-dependency path="app/services/uiSegmentSrv" />
///<amd-dependency path="test/specs/helpers" name="helpers" />

import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';

declare var helpers: any;

describe('InfluxDBQueryCtrl', function() {
  var ctx = new helpers.ControllerTestContext();

  beforeEach(angularMocks.module('grafana.controllers'));
  beforeEach(angularMocks.module('grafana.services'));
  beforeEach(ctx.providePhase());
  beforeEach(ctx.createControllerPhase('InfluxQueryCtrl'));

  beforeEach(function() {
    ctx.scope.target = {};
    ctx.scope.$parent = { get_data: sinon.spy() };

    ctx.scope.datasource = ctx.datasource;
    ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
  });

  describe('init', function() {
    beforeEach(function() {
      ctx.scope.init();
    });

    it('should init tagSegments', function() {
      expect(ctx.scope.tagSegments.length).to.be(1);
    });

    it('should init measurementSegment', function() {
      expect(ctx.scope.measurementSegment.value).to.be('select measurement');
    });
  });

  describe('when first tag segment is updated', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
    });

    it('should update tag key', function() {
      expect(ctx.scope.target.tags[0].key).to.be('asd');
      expect(ctx.scope.tagSegments[0].type).to.be('key');
    });

    it('should add tagSegments', function() {
      expect(ctx.scope.tagSegments.length).to.be(3);
    });
  });

  describe('when last tag value segment is updated', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
      ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
    });

    it('should update tag value', function() {
      expect(ctx.scope.target.tags[0].value).to.be('server1');
    });

    it('should set tag operator', function() {
      expect(ctx.scope.target.tags[0].operator).to.be('=');
    });

    it('should add plus button for another filter', function() {
      expect(ctx.scope.tagSegments[3].fake).to.be(true);
    });
  });

  describe('when last tag value segment is updated to regex', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
      ctx.scope.tagSegmentUpdated({value: '/server.*/', type: 'value'}, 2);
    });

    it('should update operator', function() {
      expect(ctx.scope.tagSegments[1].value).to.be('=~');
      expect(ctx.scope.target.tags[0].operator).to.be('=~');
    });
  });

  describe('when second tag key is added', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
      ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
      ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
    });

    it('should update tag key', function() {
      expect(ctx.scope.target.tags[1].key).to.be('key2');
    });

    it('should add AND segment', function() {
      expect(ctx.scope.tagSegments[3].value).to.be('AND');
    });
  });

  describe('when condition is changed', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
      ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
      ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
      ctx.scope.tagSegmentUpdated({value: 'OR', type: 'condition'}, 3);
    });

    it('should update tag condition', function() {
      expect(ctx.scope.target.tags[1].condition).to.be('OR');
    });

    it('should update AND segment', function() {
      expect(ctx.scope.tagSegments[3].value).to.be('OR');
      expect(ctx.scope.tagSegments.length).to.be(7);
    });
  });

  describe('when deleting first tag filter after value is selected', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
      ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
      ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 0);
    });

    it('should remove tags', function() {
      expect(ctx.scope.target.tags.length).to.be(0);
    });

    it('should remove all segments after 2 and replace with plus button', function() {
      expect(ctx.scope.tagSegments.length).to.be(1);
      expect(ctx.scope.tagSegments[0].type).to.be('plus-button');
    });
  });

  describe('when deleting second tag value before second tag value is complete', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
      ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
      ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
      ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
    });

    it('should remove all segments after 2 and replace with plus button', function() {
      expect(ctx.scope.tagSegments.length).to.be(4);
      expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
    });
  });
  describe('when deleting second tag value after second tag filter is complete', function() {
    beforeEach(function() {
      ctx.scope.init();
      ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
      ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
      ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
      ctx.scope.tagSegmentUpdated({value: 'value', type: 'value'}, 6);
      ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
    });

    it('should remove all segments after 2 and replace with plus button', function() {
      expect(ctx.scope.tagSegments.length).to.be(4);
      expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
    });
  });

});
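Editor's note: the controller specs above all exercise one invariant, namely that the flat tagSegments array (key, operator, value, condition, trailing plus-button) stays in sync with target.tags. A hedged illustration of that correspondence, derived only from the assertions above and not from the controller source:

    // After tagSegmentUpdated picks key 'asd', value 'server1', then a second key 'key2':
    ctx.scope.tagSegments;
    // => [ key 'asd', op '=', value 'server1', cond 'AND', key 'key2', op, value/plus-button ]
    //    (7 segments once the condition segment exists, as the specs assert)
    ctx.scope.target.tags;
    // => [ {key: 'asd', operator: '=', value: 'server1'},
    //      {key: 'key2', condition: 'AND' /* or 'OR' after the condition is changed */} ]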
@@ -7,10 +7,15 @@ var it = _global.it;
var sinon = _global.sinon;
var expect = _global.expect;

var angularMocks = {
  module: _global.module,
};

export {
  beforeEach,
  describe,
  it,
  sinon,
  expect
  expect,
  angularMocks,
}
@@ -56,6 +56,11 @@ describe("rangeUtil", () => {
      expect(text).to.be('Last 1 hour')
    });

    it('Date range with rounding ranges', () => {
      var text = rangeUtil.describeTimeRange({from: 'now/d+6h', to: 'now'});
      expect(text).to.be('now/d+6h to now')
    });

    it('Date range with absolute to now', () => {
      var text = rangeUtil.describeTimeRange({from: moment([2014,10,10,2,3,4]), to: 'now'});
      expect(text).to.be('Nov 10, 2014 02:03:04 to a few seconds ago')
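Editor's note: a condensed view of what these new cases assert. describeTimeRange falls back to echoing the raw range expressions when no friendly name applies; inputs and outputs below are copied from the specs.

    rangeUtil.describeTimeRange({from: 'now/d+6h', to: 'now'});
    // => 'now/d+6h to now'
    rangeUtil.describeTimeRange({from: moment([2014,10,10,2,3,4]), to: 'now'});
    // => 'Nov 10, 2014 02:03:04 to a few seconds ago'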
@@ -1,52 +0,0 @@
define([
  'moment',
  'app/plugins/datasource/elasticsearch/indexPattern'
], function(moment, IndexPattern) {
  'use strict';

  describe('IndexPattern', function() {

    describe('when getting index for today', function() {
      it('should return correct index name', function() {
        var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
        var expected = 'asd-' + moment().format('YYYY.MM.DD');

        expect(pattern.getIndexForToday()).to.be(expected);
      });
    });

    describe('when getting index list for time range', function() {

      describe('no interval', function() {
        it('should return correct index', function() {
          var pattern = new IndexPattern('my-metrics');
          var from = new Date(2015, 4, 30, 1, 2, 3);
          var to = new Date(2015, 5, 1, 12, 5, 6);
          expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
        });
      });

      describe('daily', function() {

        it('should return correct index list', function() {
          var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
          var from = new Date(1432940523000);
          var to = new Date(1433153106000);

          var expected = [
            'asd-2015.05.29',
            'asd-2015.05.30',
            'asd-2015.05.31',
            'asd-2015.06.01',
          ];

          expect(pattern.getIndexList(from, to)).to.eql(expected);
        });

      });

    });

  });

});
@@ -1,124 +0,0 @@
define([
  'app/plugins/datasource/elasticsearch/queryBuilder'
], function(ElasticQueryBuilder) {
  'use strict';

  describe('ElasticQueryBuilder', function() {
    var builder;

    beforeEach(function() {
      builder = new ElasticQueryBuilder({timeField: '@timestamp'});
    });

    it('with defaults', function() {
      var query = builder.build({
        metrics: [{type: 'Count', id: '0'}],
        timeField: '@timestamp',
        bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '1'}],
      });

      expect(query.query.filtered.filter.bool.must[0].range["@timestamp"].gte).to.be("$timeFrom");
      expect(query.aggs["1"].date_histogram.extended_bounds.min).to.be("$timeFrom");
    });

    it('with raw query', function() {
      var query = builder.build({
        rawQuery: '{"query": "$lucene_query"}',
      });

      expect(query.query).to.be("$lucene_query");
    });

    it('with multiple bucket aggs', function() {
      var query = builder.build({
        metrics: [{type: 'count', id: '1'}],
        timeField: '@timestamp',
        bucketAggs: [
          {type: 'terms', field: '@host', id: '2'},
          {type: 'date_histogram', field: '@timestamp', id: '3'}
        ],
      });

      expect(query.aggs["2"].terms.field).to.be("@host");
      expect(query.aggs["2"].aggs["3"].date_histogram.field).to.be("@timestamp");
    });

    it('with select field', function() {
      var query = builder.build({
        metrics: [{type: 'avg', field: '@value', id: '1'}],
        bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
      }, 100, 1000);

      var aggs = query.aggs["2"].aggs;
      expect(aggs["1"].avg.field).to.be("@value");
    });

    it('with term agg and order by metric agg', function() {
      var query = builder.build({
        metrics: [
          {type: 'count', id: '1'},
          {type: 'avg', field: '@value', id: '5'}
        ],
        bucketAggs: [
          {type: 'terms', field: '@host', settings: {size: 5, order: 'asc', orderBy: '5'}, id: '2' },
          {type: 'date_histogram', field: '@timestamp', id: '3'}
        ],
      }, 100, 1000);

      var firstLevel = query.aggs["2"];
      var secondLevel = firstLevel.aggs["3"];

      expect(firstLevel.aggs["5"].avg.field).to.be("@value");
      expect(secondLevel.aggs["5"].avg.field).to.be("@value");
    });

    it('with metric percentiles', function() {
      var query = builder.build({
        metrics: [
          {
            id: '1',
            type: 'percentiles',
            field: '@load_time',
            settings: {
              percents: [1,2,3,4]
            }
          }
        ],
        bucketAggs: [
          {type: 'date_histogram', field: '@timestamp', id: '3'}
        ],
      }, 100, 1000);

      var firstLevel = query.aggs["3"];

      expect(firstLevel.aggs["1"].percentiles.field).to.be("@load_time");
      expect(firstLevel.aggs["1"].percentiles.percents).to.eql([1,2,3,4]);
    });

    it('with filters aggs', function() {
      var query = builder.build({
        metrics: [{type: 'count', id: '1'}],
        timeField: '@timestamp',
        bucketAggs: [
          {
            id: '2',
            type: 'filters',
            settings: {
              filters: [
                {query: '@metric:cpu' },
                {query: '@metric:logins.count' },
              ]
            }
          },
          {type: 'date_histogram', field: '@timestamp', id: '4'}
        ],
      });

      expect(query.aggs["2"].filters.filters["@metric:cpu"].query.query_string.query).to.be("@metric:cpu");
      expect(query.aggs["2"].filters.filters["@metric:logins.count"].query.query_string.query).to.be("@metric:logins.count");
      expect(query.aggs["2"].aggs["4"].date_histogram.field).to.be("@timestamp");
    });

  });

});
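Editor's note: the key behaviour these specs pin down is the nesting rule for bucket aggregations. An abridged sketch of the output shape the assertions above imply (not the builder's source):

    // For bucketAggs [terms('2'), date_histogram('3')], each later bucket agg
    // is nested inside the previous one, keyed by its id.
    var expectedShape = {
      aggs: {
        "2": {
          terms: { field: "@host" },                     // first-level bucket agg
          aggs: {
            "3": {
              date_histogram: { field: "@timestamp" }    // nested inside each terms bucket
            }
          }
        }
      }
    };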
@@ -1,32 +0,0 @@
define([
  './helpers',
  'app/plugins/datasource/elasticsearch/queryCtrl',
  'app/services/uiSegmentSrv'
], function(helpers) {
  'use strict';

  describe('ElasticQueryCtrl', function() {
    var ctx = new helpers.ControllerTestContext();

    beforeEach(module('grafana.controllers'));
    beforeEach(module('grafana.services'));
    beforeEach(ctx.providePhase());
    beforeEach(ctx.createControllerPhase('ElasticQueryCtrl'));

    beforeEach(function() {
      ctx.scope.target = {};
      ctx.scope.$parent = { get_data: sinon.spy() };

      ctx.scope.datasource = ctx.datasource;
      ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
    });

    describe('init', function() {
      beforeEach(function() {
        ctx.scope.init();
      });

    });

  });
});
@@ -1,414 +0,0 @@
define([
  'app/plugins/datasource/elasticsearch/elasticResponse',
], function(ElasticResponse) {
  'use strict';

  describe('ElasticResponse', function() {
    var targets;
    var response;
    var result;

    describe('simple query and count', function() {

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}],
          bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: [
                  {
                    doc_count: 10,
                    key: 1000
                  },
                  {
                    doc_count: 15,
                    key: 2000
                  }
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 1 series', function() {
        expect(result.data.length).to.be(1);
        expect(result.data[0].target).to.be('Count');
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].datapoints[0][0]).to.be(10);
        expect(result.data[0].datapoints[0][1]).to.be(1000);
      });

    });

    describe('simple query count & avg aggregation', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
          bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "3": {
                buckets: [
                  {
                    "2": {value: 88},
                    doc_count: 10,
                    key: 1000
                  },
                  {
                    "2": {value: 99},
                    doc_count: 15,
                    key: 2000
                  }
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].datapoints[0][0]).to.be(10);
        expect(result.data[0].datapoints[0][1]).to.be(1000);

        expect(result.data[1].target).to.be("Average value");
        expect(result.data[1].datapoints[0][0]).to.be(88);
        expect(result.data[1].datapoints[1][0]).to.be(99);
      });

    });

    describe('single group by query one metric', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}],
          bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: [
                  {
                    "3": {
                      buckets: [
                        {doc_count: 1, key: 1000},
                        {doc_count: 3, key: 2000}
                      ]
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    "3": {
                      buckets: [
                        {doc_count: 2, key: 1000},
                        {doc_count: 8, key: 2000}
                      ]
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('server1');
        expect(result.data[1].target).to.be('server2');
      });
    });

    describe('single group by query two metrics', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}, {type: 'avg', field: '@value', id: '4'}],
          bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: [
                  {
                    "3": {
                      buckets: [
                        { "4": {value: 10}, doc_count: 1, key: 1000},
                        { "4": {value: 12}, doc_count: 3, key: 2000}
                      ]
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    "3": {
                      buckets: [
                        { "4": {value: 20}, doc_count: 1, key: 1000},
                        { "4": {value: 32}, doc_count: 3, key: 2000}
                      ]
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 4 series', function() {
        expect(result.data.length).to.be(4);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('server1 Count');
        expect(result.data[1].target).to.be('server1 Average @value');
        expect(result.data[2].target).to.be('server2 Count');
        expect(result.data[3].target).to.be('server2 Average @value');
      });
    });

    describe('with percentiles', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
          bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "3": {
                buckets: [
                  {
                    "1": {values: {"75": 3.3, "90": 5.5}},
                    doc_count: 10,
                    key: 1000
                  },
                  {
                    "1": {values: {"75": 2.3, "90": 4.5}},
                    doc_count: 15,
                    key: 2000
                  }
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('p75');
        expect(result.data[1].target).to.be('p90');
        expect(result.data[0].datapoints[0][0]).to.be(3.3);
        expect(result.data[0].datapoints[0][1]).to.be(1000);
        expect(result.data[1].datapoints[1][0]).to.be(4.5);
      });
    });

    describe('with extended_stats', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
          bucketAggs: [{type: 'terms', field: 'host', id: '3'}, {type: 'date_histogram', id: '4'}],
        }];
        response = {
          responses: [{
            aggregations: {
              "3": {
                buckets: [
                  {
                    key: 'server1',
                    "4": {
                      buckets: [{
                        "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
                        doc_count: 10,
                        key: 1000
                      }]
                    }
                  },
                  {
                    key: 'server2',
                    "4": {
                      buckets: [{
                        "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
                        doc_count: 10,
                        key: 1000
                      }]
                    }
                  },
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 4 series', function() {
        expect(result.data.length).to.be(4);
        expect(result.data[0].datapoints.length).to.be(1);
        expect(result.data[0].target).to.be('server1 Max');
        expect(result.data[1].target).to.be('server1 Std Dev Upper');

        expect(result.data[0].datapoints[0][0]).to.be(10.2);
        expect(result.data[1].datapoints[0][0]).to.be(3);
      });
    });

    describe('single group by with alias pattern', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}],
          alias: '{{term @host}} {{metric}} and!',
          bucketAggs: [
            {type: 'terms', field: '@host', id: '2'},
            {type: 'date_histogram', field: '@timestamp', id: '3'}
          ],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: [
                  {
                    "3": {
                      buckets: [
                        {doc_count: 1, key: 1000},
                        {doc_count: 3, key: 2000}
                      ]
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    "3": {
                      buckets: [
                        {doc_count: 2, key: 1000},
                        {doc_count: 8, key: 2000}
                      ]
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                ]
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('server1 Count and!');
        expect(result.data[1].target).to.be('server2 Count and!');
      });
    });

    describe('with two filters agg', function() {
      var result;

      beforeEach(function() {
        targets = [{
          refId: 'A',
          metrics: [{type: 'count', id: '1'}],
          bucketAggs: [
            {
              id: '2',
              type: 'filters',
              settings: {
                filters: [
                  {query: '@metric:cpu' },
                  {query: '@metric:logins.count' },
                ]
              }
            },
            {type: 'date_histogram', field: '@timestamp', id: '3'}
          ],
        }];
        response = {
          responses: [{
            aggregations: {
              "2": {
                buckets: {
                  "@metric:cpu": {
                    "3": {
                      buckets: [
                        {doc_count: 1, key: 1000},
                        {doc_count: 3, key: 2000}
                      ]
                    },
                  },
                  "@metric:logins.count": {
                    "3": {
                      buckets: [
                        {doc_count: 2, key: 1000},
                        {doc_count: 8, key: 2000}
                      ]
                    },
                  },
                }
              }
            }
          }]
        };

        result = new ElasticResponse(targets, response).getTimeSeries();
      });

      it('should return 2 series', function() {
        expect(result.data.length).to.be(2);
        expect(result.data[0].datapoints.length).to.be(2);
        expect(result.data[0].target).to.be('@metric:cpu');
        expect(result.data[1].target).to.be('@metric:logins.count');
      });
    });

  });
});
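Editor's note: taken together, these specs fix the transformation contract: nested bucket aggregations are flattened into one Grafana series per (bucket, metric) pair, and each datapoint is a [value, timestamp] pair. The call pattern, copied from the specs:

    // Call pattern used throughout the specs above.
    var result = new ElasticResponse(targets, response).getTimeSeries();
    // e.g. result.data[0] => { target: 'server1 Count', datapoints: [[1, 1000], [3, 2000]] }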
@@ -1,77 +0,0 @@
define([
  './helpers',
  'moment',
  'angular',
  'app/plugins/datasource/elasticsearch/datasource',
], function(helpers, moment, angular) {
  'use strict';

  describe('ElasticDatasource', function() {
    var ctx = new helpers.ServiceTestContext();

    beforeEach(module('grafana.services'));
    beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
    beforeEach(ctx.createService('ElasticDatasource'));
    beforeEach(function() {
      ctx.ds = new ctx.service({jsonData: {}});
    });

    describe('When testing datasource with index pattern', function() {
      beforeEach(function() {
        ctx.ds = new ctx.service({
          url: 'http://es.com',
          index: '[asd-]YYYY.MM.DD',
          jsonData: { interval: 'Daily' }
        });
      });

      it('should translate index pattern to current day', function() {
        var requestOptions;
        ctx.backendSrv.datasourceRequest = function(options) {
          requestOptions = options;
          return ctx.$q.when({});
        };

        ctx.ds.testDatasource();
        ctx.$rootScope.$apply();

        var today = moment().format("YYYY.MM.DD");
        expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
      });
    });

    describe('When issuing metric query with interval pattern', function() {
      beforeEach(function() {
        ctx.ds = new ctx.service({
          url: 'http://es.com',
          index: '[asd-]YYYY.MM.DD',
          jsonData: { interval: 'Daily' }
        });
      });

      it('should translate index pattern to the days in the query range', function() {
        var requestOptions;
        ctx.backendSrv.datasourceRequest = function(options) {
          requestOptions = options;
          return ctx.$q.when({data: {responses: []}});
        };

        ctx.ds.query({
          range: {
            from: moment([2015, 4, 30, 10]),
            to: moment([2015, 5, 1, 10])
          },
          targets: [{ bucketAggs: [], metrics: [] }]
        });

        ctx.$rootScope.$apply();
        var parts = requestOptions.data.split('\n');
        var header = angular.fromJson(parts[0]);
        expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
      });

    });

  });

});
@@ -1,130 +0,0 @@
define([
  'app/plugins/datasource/graphite/gfunc'
], function(gfunc) {
  'use strict';

  describe('when creating func instance from func names', function() {

    it('should return func instance', function() {
      var func = gfunc.createFuncInstance('sumSeries');
      expect(func).to.be.ok();
      expect(func.def.name).to.equal('sumSeries');
      expect(func.def.params.length).to.equal(5);
      expect(func.def.defaultParams.length).to.equal(1);
    });

    it('should return func instance with shortName', function() {
      var func = gfunc.createFuncInstance('sum');
      expect(func).to.be.ok();
    });

    it('should return func instance from funcDef', function() {
      var func = gfunc.createFuncInstance('sum');
      var func2 = gfunc.createFuncInstance(func.def);
      expect(func2).to.be.ok();
    });

    it('func instance should have text representation', function() {
      var func = gfunc.createFuncInstance('groupByNode');
      func.params[0] = 5;
      func.params[1] = 'avg';
      func.updateText();
      expect(func.text).to.equal("groupByNode(5, avg)");
    });

  });

  describe('when rendering func instance', function() {

    it('should handle single metric param', function() {
      var func = gfunc.createFuncInstance('sumSeries');
      expect(func.render('hello.metric')).to.equal("sumSeries(hello.metric)");
    });

    it('should include default params if options enable it', function() {
      var func = gfunc.createFuncInstance('scaleToSeconds', { withDefaultParams: true });
      expect(func.render('hello')).to.equal("scaleToSeconds(hello, 1)");
    });

    it('should handle int or interval params with number', function() {
      var func = gfunc.createFuncInstance('movingMedian');
      func.params[0] = '5';
      expect(func.render('hello')).to.equal("movingMedian(hello, 5)");
    });

    it('should handle int or interval params with interval string', function() {
      var func = gfunc.createFuncInstance('movingMedian');
      func.params[0] = '5min';
      expect(func.render('hello')).to.equal("movingMedian(hello, '5min')");
    });

    it('should handle metric param and int param and string param', function() {
      var func = gfunc.createFuncInstance('groupByNode');
      func.params[0] = 5;
      func.params[1] = 'avg';
      expect(func.render('hello.metric')).to.equal("groupByNode(hello.metric, 5, 'avg')");
    });

    it('should handle function with no metric param', function() {
      var func = gfunc.createFuncInstance('randomWalk');
      func.params[0] = 'test';
      expect(func.render(undefined)).to.equal("randomWalk('test')");
    });

    it('should handle function multiple series params', function() {
      var func = gfunc.createFuncInstance('asPercent');
      func.params[0] = '#B';
      expect(func.render('#A')).to.equal("asPercent(#A, #B)");
    });

  });

  describe('when requesting function categories', function() {
    it('should return function categories', function() {
      var catIndex = gfunc.getCategories();
      expect(catIndex.Special.length).to.be.greaterThan(8);
    });
  });

  describe('when updating func param', function() {
    it('should update param value and update text representation', function() {
      var func = gfunc.createFuncInstance('summarize', { withDefaultParams: true });
      func.updateParam('1h', 0);
      expect(func.params[0]).to.be('1h');
      expect(func.text).to.be('summarize(1h, sum, false)');
    });

    it('should parse numbers as float', function() {
      var func = gfunc.createFuncInstance('scale');
      func.updateParam('0.001', 0);
      expect(func.params[0]).to.be('0.001');
    });
  });

  describe('when updating func param with optional second parameter', function() {
    it('should update value and text', function() {
      var func = gfunc.createFuncInstance('aliasByNode');
      func.updateParam('1', 0);
      expect(func.params[0]).to.be('1');
    });

    it('should split text and put value in second param', function() {
      var func = gfunc.createFuncInstance('aliasByNode');
      func.updateParam('4,-5', 0);
      expect(func.params[0]).to.be('4');
      expect(func.params[1]).to.be('-5');
      expect(func.text).to.be('aliasByNode(4, -5)');
    });

    it('should remove second param when empty string is set', function() {
      var func = gfunc.createFuncInstance('aliasByNode');
      func.updateParam('4,-5', 0);
      func.updateParam('', 1);
      expect(func.params[0]).to.be('4');
      expect(func.params[1]).to.be(undefined);
      expect(func.text).to.be('aliasByNode(4)');
    });
  });

});
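Editor's note: gfunc is a registry of Graphite function definitions; createFuncInstance pairs a definition with concrete params, and render() serializes the call around a metric expression. A short lifecycle sketch using only calls exercised by the specs above:

    var func = gfunc.createFuncInstance('groupByNode');
    func.params[0] = 5;          // node index
    func.params[1] = 'avg';      // callback, rendered as a quoted string param
    func.updateText();           // refresh func.text => "groupByNode(5, avg)"
    func.render('hello.metric'); // => "groupByNode(hello.metric, 5, 'avg')"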
@@ -1,122 +0,0 @@
define([
  './helpers',
  'app/plugins/datasource/graphite/datasource'
], function(helpers) {
  'use strict';

  describe('graphiteDatasource', function() {
    var ctx = new helpers.ServiceTestContext();

    beforeEach(module('grafana.services'));
    beforeEach(ctx.providePhase(['backendSrv']));

    beforeEach(ctx.createService('GraphiteDatasource'));
    beforeEach(function() {
      ctx.ds = new ctx.service({ url: [''] });
    });

    describe('When querying graphite with one target using query editor target spec', function() {
      var query = {
        rangeRaw: { from: 'now-1h', to: 'now' },
        targets: [{ target: 'prod1.count' }, {target: 'prod2.count'}],
        maxDataPoints: 500,
      };

      var results;
      var requestOptions;

      beforeEach(function() {
        ctx.backendSrv.datasourceRequest = function(options) {
          requestOptions = options;
          return ctx.$q.when({data: [{ target: 'prod1.count', datapoints: [[10, 1], [12,1]] }]});
        };

        ctx.ds.query(query).then(function(data) { results = data; });
        ctx.$rootScope.$apply();
      });

      it('should generate the correct query', function() {
        expect(requestOptions.url).to.be('/render');
      });

      it('should query correctly', function() {
        var params = requestOptions.data.split('&');
        expect(params).to.contain('target=prod1.count');
        expect(params).to.contain('target=prod2.count');
        expect(params).to.contain('from=-1h');
        expect(params).to.contain('until=now');
      });

      it('should exclude undefined params', function() {
        var params = requestOptions.data.split('&');
        expect(params).to.not.contain('cacheTimeout=undefined');
      });

      it('should return series list', function() {
        expect(results.data.length).to.be(1);
        expect(results.data[0].target).to.be('prod1.count');
      });

      it('should convert to millisecond resolution', function() {
        expect(results.data[0].datapoints[0][0]).to.be(10);
      });

    });

    describe('building graphite params', function() {

      it('should uri escape targets', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: 'prod1.{test,test2}'}, {target: 'prod2.count'}]
        });
        expect(results).to.contain('target=prod1.%7Btest%2Ctest2%7D');
      });

      it('should replace target placeholder', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: 'series1'}, {target: 'series2'}, {target: 'asPercent(#A,#B)'}]
        });
        expect(results[2]).to.be('target=asPercent(series1%2Cseries2)');
      });

      it('should replace target placeholder for hidden series', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: 'series1', hide: true}, {target: 'sumSeries(#A)', hide: true}, {target: 'asPercent(#A,#B)'}]
        });
        expect(results[0]).to.be('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))'));
      });

      it('should replace target placeholder when nesting query references', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: 'series1'}, {target: 'sumSeries(#A)'}, {target: 'asPercent(#A,#B)'}]
        });
        expect(results[2]).to.be('target=' + encodeURIComponent("asPercent(series1,sumSeries(series1))"));
      });

      it('should fix wrong minute interval parameters', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: "summarize(prod.25m.count, '25m', 'sum')" }]
        });
        expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')"));
      });

      it('should fix wrong month interval parameters', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: "summarize(prod.5M.count, '5M', 'sum')" }]
        });
        expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')"));
      });

      it('should ignore empty targets', function() {
        var results = ctx.ds.buildGraphiteParams({
          targets: [{target: 'series1'}, {target: ''}]
        });
        expect(results.length).to.be(2);
      });

    });

  });

});
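Editor's note: the interesting behaviour here is the #A/#B reference expansion. A target can reference another row by letter, and buildGraphiteParams inlines the referenced expression (recursively, and even when the referenced row is hidden) before URI-encoding. A condensed restatement of the hidden-series case above:

    // Row A: series1          (hidden)
    // Row B: sumSeries(#A)    (hidden)  -> sumSeries(series1)
    // Row C: asPercent(#A,#B)           -> asPercent(series1,sumSeries(series1))
    var results = ctx.ds.buildGraphiteParams({
      targets: [
        {target: 'series1', hide: true},
        {target: 'sumSeries(#A)', hide: true},
        {target: 'asPercent(#A,#B)'}
      ]
    });
    // results[0] => 'target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))')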
@@ -1,179 +0,0 @@
define([
  './helpers',
  'app/plugins/datasource/graphite/gfunc',
  'app/plugins/datasource/graphite/queryCtrl',
  'app/services/uiSegmentSrv'
], function(helpers, gfunc) {
  'use strict';

  describe('GraphiteQueryCtrl', function() {
    var ctx = new helpers.ControllerTestContext();

    beforeEach(module('grafana.controllers'));
    beforeEach(module('grafana.services'));
    beforeEach(ctx.providePhase());
    beforeEach(ctx.createControllerPhase('GraphiteQueryCtrl'));

    beforeEach(function() {
      ctx.scope.target = {target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)'};

      ctx.scope.datasource = ctx.datasource;
      ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
    });

    describe('init', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.$digest();
      });

      it('should validate metric key exists', function() {
        expect(ctx.scope.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*');
      });

      it('should delete last segment if no metrics are found', function() {
        expect(ctx.scope.segments[2].value).to.be('select metric');
      });

      it('should parse expression and build function model', function() {
        expect(ctx.scope.functions.length).to.be(2);
      });
    });

    describe('when adding function', function() {
      beforeEach(function() {
        ctx.scope.target.target = 'test.prod.*.count';
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
        ctx.scope.init();
        ctx.scope.$digest();

        ctx.scope.$parent = { get_data: sinon.spy() };
        ctx.scope.addFunction(gfunc.getFuncDef('aliasByNode'));
      });

      it('should add function with correct node number', function() {
        expect(ctx.scope.functions[0].params[0]).to.be(2);
      });

      it('should update target', function() {
        expect(ctx.scope.target.target).to.be('aliasByNode(test.prod.*.count, 2)');
      });

      it('should call get_data', function() {
        expect(ctx.scope.$parent.get_data.called).to.be(true);
      });
    });

    describe('when adding function before any metric segment', function() {
      beforeEach(function() {
        ctx.scope.target.target = '';
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: true}]));
        ctx.scope.init();
        ctx.scope.$digest();

        ctx.scope.$parent = { get_data: sinon.spy() };
        ctx.scope.addFunction(gfunc.getFuncDef('asPercent'));
      });

      it('should add function and remove select metric link', function() {
        expect(ctx.scope.segments.length).to.be(0);
      });
    });

    describe('when initializing target without metric expression and only function', function() {
      beforeEach(function() {
        ctx.scope.target.target = 'asPercent(#A, #B)';
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
        ctx.scope.init();
        ctx.scope.$digest();
        ctx.scope.$parent = { get_data: sinon.spy() };
      });

      it('should not add select metric segment', function() {
        expect(ctx.scope.segments.length).to.be(0);
      });

      it('should add both series refs as params', function() {
        expect(ctx.scope.functions[0].params.length).to.be(2);
      });

    });

    describe('when initializing a target with single param func using variable', function() {
      beforeEach(function() {
        ctx.scope.target.target = 'movingAverage(prod.count, $var)';
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
        ctx.scope.init();
        ctx.scope.$digest();
        ctx.scope.$parent = { get_data: sinon.spy() };
      });

      it('should add 2 segments', function() {
        expect(ctx.scope.segments.length).to.be(2);
      });

      it('should add function param', function() {
        expect(ctx.scope.functions[0].params.length).to.be(1);
      });

    });

    describe('when initializing target without metric expression and function with series-ref', function() {
      beforeEach(function() {
        ctx.scope.target.target = 'asPercent(metric.node.count, #A)';
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
        ctx.scope.init();
        ctx.scope.$digest();
        ctx.scope.$parent = { get_data: sinon.spy() };
      });

      it('should add segments', function() {
        expect(ctx.scope.segments.length).to.be(3);
      });

      it('should have correct func params', function() {
        expect(ctx.scope.functions[0].params.length).to.be(1);
      });
    });

    describe('when getting altSegments and metricFindQuery returns empty array', function() {
      beforeEach(function() {
        ctx.scope.target.target = 'test.count';
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
        ctx.scope.init();
        ctx.scope.getAltSegments(1).then(function(results) {
          ctx.altSegments = results;
        });
        ctx.scope.$digest();
        ctx.scope.$parent = { get_data: sinon.spy() };
      });

      it('should have no segments', function() {
        expect(ctx.altSegments.length).to.be(0);
      });

    });

    describe('targetChanged', function() {
      beforeEach(function() {
        ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
        ctx.scope.init();
        ctx.scope.$digest();

        ctx.scope.$parent = { get_data: sinon.spy() };
        ctx.scope.target.target = '';
        ctx.scope.targetChanged();
      });

      it('should rebuild target from expression model', function() {
        expect(ctx.scope.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)');
      });

      it('should call get_data', function() {
        expect(ctx.scope.$parent.get_data.called).to.be(true);
      });
    });

  });
});
@@ -1,187 +0,0 @@
define([
  'app/plugins/datasource/influxdb/queryBuilder'
], function(InfluxQueryBuilder) {
  'use strict';

  describe('InfluxQueryBuilder', function() {

    describe('series with measurement only', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          groupBy: [{type: 'time', interval: 'auto'}]
        });

        var query = builder.build();

        expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
      });
    });

    describe('series with math expr and as expr', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          fields: [{name: 'test', func: 'max', mathExpr: '*2', asExpr: 'new_name'}],
          groupBy: [{type: 'time', interval: 'auto'}]
        });

        var query = builder.build();

        expect(query).to.be('SELECT max("test")*2 AS "new_name" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
      });
    });

    describe('series with single tag only', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          groupBy: [{type: 'time', interval: 'auto'}],
          tags: [{key: 'hostname', value: 'server1'}]
        });

        var query = builder.build();

        expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND $timeFilter'
                            + ' GROUP BY time($interval)');
      });

      it('should switch to regex operator when tag value is regex', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          groupBy: [{type: 'time', interval: 'auto'}],
          tags: [{key: 'app', value: '/e.*/'}]
        });

        var query = builder.build();
        expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "app" =~ /e.*/ AND $timeFilter GROUP BY time($interval)');
      });
    });

    describe('series with multiple fields', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          tags: [],
          groupBy: [{type: 'time', interval: 'auto'}],
          fields: [{ name: 'tx_in', func: 'sum' }, { name: 'tx_out', func: 'mean' }]
        });

        var query = builder.build();
        expect(query).to.be('SELECT sum("tx_in") AS "tx_in", mean("tx_out") AS "tx_out" ' +
                            'FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
      });
    });

    describe('series with multiple tags only', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          groupBy: [{type: 'time', interval: 'auto'}],
          tags: [{key: 'hostname', value: 'server1'}, {key: 'app', value: 'email', condition: "AND"}]
        });

        var query = builder.build();
        expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND "app" = \'email\' AND ' +
                            '$timeFilter GROUP BY time($interval)');
      });
    });

    describe('series with tags OR condition', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          groupBy: [{type: 'time', interval: 'auto'}],
          tags: [{key: 'hostname', value: 'server1'}, {key: 'hostname', value: 'server2', condition: "OR"}]
        });

        var query = builder.build();
        expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' OR "hostname" = \'server2\' AND ' +
                            '$timeFilter GROUP BY time($interval)');
      });
    });

    describe('series with groupByTag', function() {
      it('should generate correct query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: 'cpu',
          tags: [],
          groupBy: [{type: 'time', interval: 'auto'}, {type: 'tag', key: 'host'}],
        });

        var query = builder.build();
        expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter ' +
                            'GROUP BY time($interval), "host"');
      });
    });

    describe('when building explore queries', function() {

      it('should only have measurement condition in tag keys query given query with measurement', function() {
        var builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
        var query = builder.buildExploreQuery('TAG_KEYS');
        expect(query).to.be('SHOW TAG KEYS FROM "cpu"');
      });

      it('should handle regex measurement in tag keys query', function() {
        var builder = new InfluxQueryBuilder({
          measurement: '/.*/',
          tags: []
        });
        var query = builder.buildExploreQuery('TAG_KEYS');
        expect(query).to.be('SHOW TAG KEYS FROM /.*/');
      });

      it('should have no conditions in tag keys query given query with no measurement or tag', function() {
        var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
        var query = builder.buildExploreQuery('TAG_KEYS');
        expect(query).to.be('SHOW TAG KEYS');
      });

      it('should have where condition in tag keys query with tags', function() {
        var builder = new InfluxQueryBuilder({ measurement: '', tags: [{key: 'host', value: 'se1'}] });
        var query = builder.buildExploreQuery('TAG_KEYS');
        expect(query).to.be("SHOW TAG KEYS WHERE \"host\" = 'se1'");
      });

      it('should have no conditions in measurement query for query with no tags', function() {
        var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
        var query = builder.buildExploreQuery('MEASUREMENTS');
        expect(query).to.be('SHOW MEASUREMENTS');
      });

      it('should have where condition in measurement query for query with tags', function() {
        var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'email'}]});
        var query = builder.buildExploreQuery('MEASUREMENTS');
        expect(query).to.be("SHOW MEASUREMENTS WHERE \"app\" = 'email'");
      });

      it('should have where tag name IN filter in tag values query for query with one tag', function() {
        var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'asdsadsad'}]});
        var query = builder.buildExploreQuery('TAG_VALUES', 'app');
        expect(query).to.be('SHOW TAG VALUES WITH KEY = "app"');
      });

      it('should have measurement tag condition and tag name IN filter in tag values query', function() {
        var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}, {key: 'host', value: 'server1'}]});
        var query = builder.buildExploreQuery('TAG_VALUES', 'app');
        expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
      });

      it('should switch to regex operator in tag condition', function() {
        var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'host', value: '/server.*/'}]});
        var query = builder.buildExploreQuery('TAG_VALUES', 'app');
        expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" =~ /server.*/');
      });

      it('should build show field query', function() {
        var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}]});
        var query = builder.buildExploreQuery('FIELDS');
        expect(query).to.be('SHOW FIELD KEYS FROM "cpu"');
      });

    });

  });

});
@@ -1,180 +0,0 @@
define([
  'app/plugins/datasource/influxdb/influxSeries'
], function(InfluxSeries) {
  'use strict';

  describe('when generating timeseries from influxdb response', function() {

    describe('given multiple fields for series', function() {
      var options = { series: [
        {
          name: 'cpu',
          tags: {app: 'test', server: 'server1'},
          columns: ['time', 'mean', 'max', 'min'],
          values: [[1431946625000, 10, 11, 9], [1431946626000, 20, 21, 19]]
        }
      ]};

      describe('and no alias', function() {
        it('should generate multiple datapoints for each column', function() {
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result.length).to.be(3);
          expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
          expect(result[0].datapoints[0][0]).to.be(10);
          expect(result[0].datapoints[0][1]).to.be(1431946625000);
          expect(result[0].datapoints[1][0]).to.be(20);
          expect(result[0].datapoints[1][1]).to.be(1431946626000);

          expect(result[1].target).to.be('cpu.max {app: test, server: server1}');
          expect(result[1].datapoints[0][0]).to.be(11);
          expect(result[1].datapoints[0][1]).to.be(1431946625000);
          expect(result[1].datapoints[1][0]).to.be(21);
          expect(result[1].datapoints[1][1]).to.be(1431946626000);

          expect(result[2].target).to.be('cpu.min {app: test, server: server1}');
          expect(result[2].datapoints[0][0]).to.be(9);
          expect(result[2].datapoints[0][1]).to.be(1431946625000);
          expect(result[2].datapoints[1][0]).to.be(19);
          expect(result[2].datapoints[1][1]).to.be(1431946626000);
        });
      });

      describe('and simple alias', function() {
        it('should use alias', function() {
          options.alias = 'new series';
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result[0].target).to.be('new series');
          expect(result[1].target).to.be('new series');
          expect(result[2].target).to.be('new series');
        });
      });

      describe('and alias patterns', function() {
        it('should replace patterns', function() {
          options.alias = 'alias: $m -> $tag_server ([[measurement]])';
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
          expect(result[1].target).to.be('alias: cpu -> server1 (cpu)');
          expect(result[2].target).to.be('alias: cpu -> server1 (cpu)');
        });
      });
    });

    describe('given measurement with default fieldname', function() {
      var options = { series: [
        {
          name: 'cpu',
          tags: {app: 'test', server: 'server1'},
          columns: ['time', 'value'],
          values: [["2015-05-18T10:57:05Z", 10], ["2015-05-18T10:57:06Z", 12]]
        },
        {
          name: 'cpu',
          tags: {app: 'test2', server: 'server2'},
          columns: ['time', 'value'],
          values: [["2015-05-18T10:57:05Z", 15], ["2015-05-18T10:57:06Z", 16]]
        }
      ]};

      describe('and no alias', function() {
        it('should generate label with no field', function() {
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result[0].target).to.be('cpu {app: test, server: server1}');
          expect(result[1].target).to.be('cpu {app: test2, server: server2}');
        });
      });
    });

    describe('given two series', function() {
      var options = { series: [
        {
          name: 'cpu',
          tags: {app: 'test', server: 'server1'},
          columns: ['time', 'mean'],
          values: [[1431946625000, 10], [1431946626000, 12]]
        },
        {
          name: 'cpu',
          tags: {app: 'test2', server: 'server2'},
          columns: ['time', 'mean'],
          values: [[1431946625000, 15], [1431946626000, 16]]
        }
      ]};

      describe('and no alias', function() {
        it('should generate two time series', function() {
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result.length).to.be(2);
          expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
          expect(result[0].datapoints[0][0]).to.be(10);
          expect(result[0].datapoints[0][1]).to.be(1431946625000);
          expect(result[0].datapoints[1][0]).to.be(12);
          expect(result[0].datapoints[1][1]).to.be(1431946626000);

          expect(result[1].target).to.be('cpu.mean {app: test2, server: server2}');
          expect(result[1].datapoints[0][0]).to.be(15);
          expect(result[1].datapoints[0][1]).to.be(1431946625000);
          expect(result[1].datapoints[1][0]).to.be(16);
          expect(result[1].datapoints[1][1]).to.be(1431946626000);
        });
      });

      describe('and simple alias', function() {
        it('should use alias', function() {
          options.alias = 'new series';
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result[0].target).to.be('new series');
        });
      });

      describe('and alias patterns', function() {
        it('should replace patterns', function() {
          options.alias = 'alias: $m -> $tag_server ([[measurement]])';
          var series = new InfluxSeries(options);
          var result = series.getTimeSeries();

          expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
          expect(result[1].target).to.be('alias: cpu -> server2 (cpu)');
        });
      });
    });

    describe('given measurement with dots', function() {
      var options = { series: [
        {
          name: 'app.prod.server1.count',
          tags: {},
          columns: ['time', 'mean'],
          values: [[1431946625000, 10], [1431946626000, 12]]
        }
      ]};

      it('should replace patterns', function() {
        options.alias = 'alias: $1 -> [[3]]';
        var series = new InfluxSeries(options);
        var result = series.getTimeSeries();

        expect(result[0].target).to.be('alias: prod -> count');
      });
    });
  });
});
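
The alias patterns exercised in the specs above — $m, $tag_server, [[measurement]], and positional indexes such as $1 or [[3]] into a dot-separated measurement name — amount to simple string substitution. The following is a minimal illustrative sketch under those assumptions, not the actual InfluxSeries implementation:

// Illustrative only: expands the alias patterns the specs above assert on.
function renderAlias(alias, measurement, tags) {
  var segments = measurement.split('.');
  return alias
    // $m and [[measurement]] expand to the full measurement name
    .replace(/\$m|\[\[measurement\]\]/g, measurement)
    // $tag_<key> expands to the value of that tag
    .replace(/\$tag_(\w+)/g, function(match, key) { return tags[key]; })
    // $N and [[N]] pick the Nth dot-separated segment of the measurement
    .replace(/\$(\d+)|\[\[(\d+)\]\]/g, function(match, a, b) {
      return segments[parseInt(a || b, 10)];
    });
}

// renderAlias('alias: $m -> $tag_server ([[measurement]])', 'cpu', {server: 'server1'})
//   => 'alias: cpu -> server1 (cpu)'
// renderAlias('alias: $1 -> [[3]]', 'app.prod.server1.count', {})
//   => 'alias: prod -> count'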
@@ -1,188 +0,0 @@
define([
  './helpers',
  'app/plugins/datasource/influxdb/queryCtrl',
  'app/services/uiSegmentSrv'
], function(helpers) {
  'use strict';

  describe('InfluxDBQueryCtrl', function() {
    var ctx = new helpers.ControllerTestContext();

    beforeEach(module('grafana.controllers'));
    beforeEach(module('grafana.services'));
    beforeEach(ctx.providePhase());
    beforeEach(ctx.createControllerPhase('InfluxQueryCtrl'));

    beforeEach(function() {
      ctx.scope.target = {};
      ctx.scope.$parent = { get_data: sinon.spy() };

      ctx.scope.datasource = ctx.datasource;
      ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
    });

    describe('init', function() {
      beforeEach(function() {
        ctx.scope.init();
      });

      it('should init tagSegments', function() {
        expect(ctx.scope.tagSegments.length).to.be(1);
      });

      it('should init measurementSegment', function() {
        expect(ctx.scope.measurementSegment.value).to.be('select measurement');
      });
    });

    describe('when first tag segment is updated', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
      });

      it('should update tag key', function() {
        expect(ctx.scope.target.tags[0].key).to.be('asd');
        expect(ctx.scope.tagSegments[0].type).to.be('key');
      });

      it('should add tagSegments', function() {
        expect(ctx.scope.tagSegments.length).to.be(3);
      });
    });

    describe('when last tag value segment is updated', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
      });

      it('should update tag value', function() {
        expect(ctx.scope.target.tags[0].value).to.be('server1');
      });

      it('should set tag operator', function() {
        expect(ctx.scope.target.tags[0].operator).to.be('=');
      });

      it('should add plus button for another filter', function() {
        expect(ctx.scope.tagSegments[3].fake).to.be(true);
      });
    });

    describe('when last tag value segment is updated to regex', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: '/server.*/', type: 'value'}, 2);
      });

      it('should update operator', function() {
        expect(ctx.scope.tagSegments[1].value).to.be('=~');
        expect(ctx.scope.target.tags[0].operator).to.be('=~');
      });
    });

    describe('when second tag key is added', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
        ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
      });

      it('should update tag key', function() {
        expect(ctx.scope.target.tags[1].key).to.be('key2');
      });

      it('should add AND segment', function() {
        expect(ctx.scope.tagSegments[3].value).to.be('AND');
      });
    });

    describe('when condition is changed', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
        ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
        ctx.scope.tagSegmentUpdated({value: 'OR', type: 'condition'}, 3);
      });

      it('should update tag condition', function() {
        expect(ctx.scope.target.tags[1].condition).to.be('OR');
      });

      it('should update AND segment', function() {
        expect(ctx.scope.tagSegments[3].value).to.be('OR');
        expect(ctx.scope.tagSegments.length).to.be(7);
      });
    });

    describe('when deleting first tag filter after value is selected', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
        ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 0);
      });

      it('should remove tags', function() {
        expect(ctx.scope.target.tags.length).to.be(0);
      });

      it('should remove all segment after 2 and replace with plus button', function() {
        expect(ctx.scope.tagSegments.length).to.be(1);
        expect(ctx.scope.tagSegments[0].type).to.be('plus-button');
      });
    });

    describe('when deleting second tag value before second tag value is complete', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
        ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
        ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
      });

      it('should remove all segment after 2 and replace with plus button', function() {
        expect(ctx.scope.tagSegments.length).to.be(4);
        expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
      });
    });

    describe('when deleting second tag value before second tag value is complete', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
        ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
        ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
      });

      it('should remove all segment after 2 and replace with plus button', function() {
        expect(ctx.scope.tagSegments.length).to.be(4);
        expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
      });
    });

    describe('when deleting second tag value after second tag filter is complete', function() {
      beforeEach(function() {
        ctx.scope.init();
        ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
        ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
        ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
        ctx.scope.tagSegmentUpdated({value: 'value', type: 'value'}, 6);
        ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
      });

      it('should remove all segment after 2 and replace with plus button', function() {
        expect(ctx.scope.tagSegments.length).to.be(4);
        expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
      });
    });

  });
});
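
These specs assert against a flat tagSegments list: key/operator/value triplets joined by condition segments, with a fake 'plus-button' segment marking where the next filter can be started. As a hypothetical illustration (the real controller may carry extra fields), the seven-segment state asserted in 'when condition is changed' would look roughly like:

// Hypothetical shape of scope.tagSegments after the second tag key is added
// and the condition is switched to OR; the trailing value segment remains a
// placeholder until a value is selected.
var tagSegments = [
  {type: 'key',       value: 'asd'},
  {type: 'operator',  value: '='},
  {type: 'value',     value: 'server1'},
  {type: 'condition', value: 'OR'},
  {type: 'key',       value: 'key2'},
  {type: 'operator',  value: '='},
  {type: 'value',     value: 'select tag value'}
];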
@@ -113,7 +113,7 @@ require([
  var specs = [];

  for (var file in window.__karma__.files) {
    if (/base\/test\/specs.*/.test(file)) {
    if (/specs.*/.test(file)) {
      file = file2moduleName(file);
      specs.push(file);
    }
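
This hunk loosens the spec-file filter from /base\/test\/specs.*/ to /specs.*/ so spec modules are picked up regardless of the path Karma serves them from. file2moduleName itself is not shown in this diff; a hypothetical sketch of what such a helper typically does (assumed, not the repo's actual code):

// Hypothetical: convert a Karma-served file path into an AMD module id
// by stripping the '/base/' prefix and the '.js' extension.
function file2moduleName(filePath) {
  return filePath
    .replace(/^\/base\//, '')
    .replace(/\.js$/, '');
}

// file2moduleName('/base/test/specs/influxSeries-specs.js')
//   => 'test/specs/influxSeries-specs'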
@@ -11,6 +11,7 @@ module.exports = function(grunt) {
    'copy:public_to_gen',
    'typescript:build',
    'karma:test',
    'phantomjs',
    'css',
    'htmlmin:build',
    'ngtemplates',
@@ -9,6 +9,7 @@ module.exports = function(grunt) {
    'tslint',
    'clean:gen',
    'copy:public_to_gen',
    'phantomjs',
    'css',
    'typescript:build'
  ]);
36
tasks/options/phantomjs.js
Normal file
@@ -0,0 +1,36 @@
module.exports = function(config, grunt) {
  'use strict';

  grunt.registerTask('phantomjs', 'Copy phantomjs binary from node', function() {

    var dest = './vendor/phantomjs/phantomjs';
    var confDir = './node_modules/karma-phantomjs-launcher/node_modules/phantomjs/lib/';

    if (!grunt.file.exists(dest)) {

      // location.js records where the phantomjs installer placed the binary
      var m = grunt.file.read(confDir + 'location.js');
      var src = /= "([^"]*)"/.exec(m)[1];

      if (!grunt.file.isPathAbsolute(src)) {
        src = confDir + src;
      }

      try {
        // delegate the actual file copy (with executable mode preserved)
        // to a dynamically configured copy target
        grunt.config('copy.phantom_bin', {
          src: src,
          dest: dest,
          options: { mode: true },
        });
        grunt.task.run('copy:phantom_bin');
      } catch (err) {
        grunt.verbose.writeln(err);
        grunt.fail.warn('No working Phantomjs binary available');
      }

    } else {
      grunt.log.writeln('Phantomjs already imported from node');
    }
  });
};
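
Together with the Gruntfile hunks above, the new 'phantomjs' task is added to the build and test task chains so the binary gets copied into vendor/phantomjs as part of the build; it can also be run on its own. Assuming the repo loads task modules from tasks/options/ with the (config, grunt) signature used above, standalone wiring and invocation would look roughly like:

// Hypothetical wiring: register the task, then run `grunt phantomjs`.
// On first run it copies the binary bundled with karma-phantomjs-launcher
// into vendor/phantomjs/phantomjs (now ignored by git); later runs are no-ops.
module.exports = function(grunt) {
  grunt.loadNpmTasks('grunt-contrib-copy'); // provides the copy:phantom_bin target
  require('./tasks/options/phantomjs')({}, grunt);
};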
BIN
vendor/phantomjs/phantomjs
vendored
Binary file not shown.