refactor: moved elasticsearch specs to plugin folder and to typescript

Torkel Ödegaard 2015-09-28 16:28:19 +02:00
parent 7d2646f60a
commit 8a39b32b5c
20 changed files with 702 additions and 709 deletions
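Every spec file below follows the same conversion pattern: the AMD define([...]) wrapper and its callback parameters are replaced by triple-slash amd-dependency directives plus an import of the Mocha/Sinon/expect helpers from test/lib/common, and module paths are renamed from camelCase to snake_case (queryDef becomes query_def, indexPattern becomes index_pattern, and so on). A condensed before/after sketch of that pattern, based on the index-pattern spec in this diff and trimmed to a single test case:

// Before: plain JavaScript AMD spec. The module under test and its
// dependencies arrive as callback arguments; the module path is camelCase.
define([
  'moment',
  'app/plugins/datasource/elasticsearch/indexPattern'
], function(moment, IndexPattern) {
  'use strict';

  describe('IndexPattern', function() {
    it('should return correct index name', function() {
      var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
      expect(pattern.getIndexForToday()).to.be('asd-' + moment().format('YYYY.MM.DD'));
    });
  });
});

// After: TypeScript spec colocated with the plugin, snake_case module path.
// The amd-dependency directive loads the module under test as a global, which
// is surfaced to the compiler via `declare var`; the test helpers are imported
// from the shared test/lib/common module.
///<amd-dependency path="../index_pattern" name="IndexPattern"/>
import {describe, it, expect} from 'test/lib/common';
import moment = require('moment');

declare var IndexPattern: any;

describe('IndexPattern', function() {
  it('should return correct index name', function() {
    var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
    expect(pattern.getIndexForToday()).to.be('asd-' + moment().format('YYYY.MM.DD'));
  });
});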

View File

@@ -1,7 +1,7 @@
 define([
   'angular',
   'lodash',
-  './queryDef',
+  './query_def',
 ],
 function (angular, _, queryDef) {
   'use strict';

View File

@@ -3,10 +3,10 @@ define([
   'lodash',
   'moment',
   'kbn',
-  './queryBuilder',
-  './indexPattern',
-  './elasticResponse',
-  './queryCtrl',
+  './query_builder',
+  './index_pattern',
+  './elastic_response',
+  './query_ctrl',
   './directives'
 ],
 function (angular, _, moment, kbn, ElasticQueryBuilder, IndexPattern, ElasticResponse) {

View File

@@ -1,7 +1,7 @@
 define([
   'angular',
-  './bucketAgg',
-  './metricAgg',
+  './bucket_agg',
+  './metric_agg',
 ],
 function (angular) {
   'use strict';

View File

@@ -1,6 +1,6 @@
 define([
   "lodash",
-  "./queryDef"
+  "./query_def"
 ],
 function (_, queryDef) {
   'use strict';

View File

@@ -1,7 +1,7 @@
 define([
   'angular',
   'lodash',
-  './queryDef'
+  './query_def'
 ],
 function (angular, _, queryDef) {
   'use strict';

View File

@@ -0,0 +1,74 @@
///<amd-dependency path="../datasource" />
///<amd-dependency path="test/specs/helpers" name="helpers" />
import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
import moment = require('moment');
import angular = require('angular');
declare var helpers: any;
describe('ElasticDatasource', function() {
var ctx = new helpers.ServiceTestContext();
beforeEach(angularMocks.module('grafana.services'));
beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
beforeEach(ctx.createService('ElasticDatasource'));
beforeEach(function() {
ctx.ds = new ctx.service({jsonData: {}});
});
describe('When testing datasource with index pattern', function() {
beforeEach(function() {
ctx.ds = new ctx.service({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
jsonData: { interval: 'Daily' }
});
});
it('should translate index pattern to current day', function() {
var requestOptions;
ctx.backendSrv.datasourceRequest = function(options) {
requestOptions = options;
return ctx.$q.when({});
};
ctx.ds.testDatasource();
ctx.$rootScope.$apply();
var today = moment().format("YYYY.MM.DD");
expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
});
});
describe('When issuing metric query with interval pattern', function() {
beforeEach(function() {
ctx.ds = new ctx.service({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
jsonData: { interval: 'Daily' }
});
});
it('should translate index pattern to current day', function() {
var requestOptions;
ctx.backendSrv.datasourceRequest = function(options) {
requestOptions = options;
return ctx.$q.when({data: {responses: []}});
};
ctx.ds.query({
range: {
from: moment([2015, 4, 30, 10]),
to: moment([2015, 5, 1, 10])
},
targets: [{ bucketAggs: [], metrics: [] }]
});
ctx.$rootScope.$apply();
var parts = requestOptions.data.split('\n');
var header = angular.fromJson(parts[0]);
expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
});
});
});

View File

@@ -0,0 +1,414 @@
///<amd-dependency path="../elastic_response" name="ElasticResponse"/>
import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
declare var ElasticResponse: any;
describe('ElasticResponse', function() {
var targets;
var response;
var result;
describe('simple query and count', function() {
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
doc_count: 10,
key: 1000
},
{
doc_count: 15,
key: 2000
}
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 1 series', function() {
expect(result.data.length).to.be(1);
expect(result.data[0].target).to.be('Count');
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0][0]).to.be(10);
expect(result.data[0].datapoints[0][1]).to.be(1000);
});
});
describe('simple query count & avg aggregation', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"3": {
buckets: [
{
"2": {value: 88},
doc_count: 10,
key: 1000
},
{
"2": {value: 99},
doc_count: 15,
key: 2000
}
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0][0]).to.be(10);
expect(result.data[0].datapoints[0][1]).to.be(1000);
expect(result.data[1].target).to.be("Average value");
expect(result.data[1].datapoints[0][0]).to.be(88);
expect(result.data[1].datapoints[1][0]).to.be(99);
});
});
describe('single group by query one metric', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
"3": {
buckets: [
{doc_count: 1, key: 1000},
{doc_count: 3, key: 2000}
]
},
doc_count: 4,
key: 'server1',
},
{
"3": {
buckets: [
{doc_count: 2, key: 1000},
{doc_count: 8, key: 2000}
]
},
doc_count: 10,
key: 'server2',
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1');
expect(result.data[1].target).to.be('server2');
});
});
describe('single group by query two metrics', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}, {type: 'avg', field: '@value', id: '4'}],
bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
"3": {
buckets: [
{ "4": {value: 10}, doc_count: 1, key: 1000},
{ "4": {value: 12}, doc_count: 3, key: 2000}
]
},
doc_count: 4,
key: 'server1',
},
{
"3": {
buckets: [
{ "4": {value: 20}, doc_count: 1, key: 1000},
{ "4": {value: 32}, doc_count: 3, key: 2000}
]
},
doc_count: 10,
key: 'server2',
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', function() {
expect(result.data.length).to.be(4);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1 Count');
expect(result.data[1].target).to.be('server1 Average @value');
expect(result.data[2].target).to.be('server2 Count');
expect(result.data[3].target).to.be('server2 Average @value');
});
});
describe('with percentiles', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"3": {
buckets: [
{
"1": {values: {"75": 3.3, "90": 5.5}},
doc_count: 10,
key: 1000
},
{
"1": {values: {"75": 2.3, "90": 4.5}},
doc_count: 15,
key: 2000
}
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('p75');
expect(result.data[1].target).to.be('p90');
expect(result.data[0].datapoints[0][0]).to.be(3.3);
expect(result.data[0].datapoints[0][1]).to.be(1000);
expect(result.data[1].datapoints[1][0]).to.be(4.5);
});
});
describe('with extended_stats', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
bucketAggs: [{type: 'terms', field: 'host', id: '3'}, {type: 'date_histogram', id: '4'}],
}];
response = {
responses: [{
aggregations: {
"3": {
buckets: [
{
key: 'server1',
"4": {
buckets: [{
"1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
doc_count: 10,
key: 1000
}]
}
},
{
key: 'server2',
"4": {
buckets: [{
"1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
doc_count: 10,
key: 1000
}]
}
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', function() {
expect(result.data.length).to.be(4);
expect(result.data[0].datapoints.length).to.be(1);
expect(result.data[0].target).to.be('server1 Max');
expect(result.data[1].target).to.be('server1 Std Dev Upper');
expect(result.data[0].datapoints[0][0]).to.be(10.2);
expect(result.data[1].datapoints[0][0]).to.be(3);
});
});
describe('single group by with alias pattern', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
alias: '{{term @host}} {{metric}} and!',
bucketAggs: [
{type: 'terms', field: '@host', id: '2'},
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
"3": {
buckets: [
{doc_count: 1, key: 1000},
{doc_count: 3, key: 2000}
]
},
doc_count: 4,
key: 'server1',
},
{
"3": {
buckets: [
{doc_count: 2, key: 1000},
{doc_count: 8, key: 2000}
]
},
doc_count: 10,
key: 'server2',
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1 Count and!');
expect(result.data[1].target).to.be('server2 Count and!');
});
});
describe('with two filters agg', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
bucketAggs: [
{
id: '2',
type: 'filters',
settings: {
filters: [
{query: '@metric:cpu' },
{query: '@metric:logins.count' },
]
}
},
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: {
"@metric:cpu": {
"3": {
buckets: [
{doc_count: 1, key: 1000},
{doc_count: 3, key: 2000}
]
},
},
"@metric:logins.count": {
"3": {
buckets: [
{doc_count: 2, key: 1000},
{doc_count: 8, key: 2000}
]
},
},
}
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('@metric:cpu');
expect(result.data[1].target).to.be('@metric:logins.count');
});
});
});

View File

@@ -0,0 +1,51 @@
///<amd-dependency path="../index_pattern" name="IndexPattern"/>
///<amd-dependency path="test/specs/helpers" name="helpers" />
import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
import moment = require('moment');
declare var IndexPattern: any;
describe('IndexPattern', function() {
describe('when getting index for today', function() {
it('should return correct index name', function() {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
var expected = 'asd-' + moment().format('YYYY.MM.DD');
expect(pattern.getIndexForToday()).to.be(expected);
});
});
describe('when getting index list for time range', function() {
describe('no interval', function() {
it('should return correct index', function() {
var pattern = new IndexPattern('my-metrics');
var from = new Date(2015, 4, 30, 1, 2, 3);
var to = new Date(2015, 5, 1, 12, 5, 6);
expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
});
});
describe('daily', function() {
it('should return correct index list', function() {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
var from = new Date(1432940523000);
var to = new Date(1433153106000);
var expected = [
'asd-2015.05.29',
'asd-2015.05.30',
'asd-2015.05.31',
'asd-2015.06.01',
];
expect(pattern.getIndexList(from, to)).to.eql(expected);
});
});
});
});

View File

@@ -0,0 +1,123 @@
///<amd-dependency path="../query_builder" name="ElasticQueryBuilder"/>
import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
declare var ElasticQueryBuilder: any;
describe('ElasticQueryBuilder', function() {
var builder;
beforeEach(function() {
builder = new ElasticQueryBuilder({timeField: '@timestamp'});
});
it('with defaults', function() {
var query = builder.build({
metrics: [{type: 'Count', id: '0'}],
timeField: '@timestamp',
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '1'}],
});
expect(query.query.filtered.filter.bool.must[0].range["@timestamp"].gte).to.be("$timeFrom");
expect(query.aggs["1"].date_histogram.extended_bounds.min).to.be("$timeFrom");
});
it('with raw query', function() {
var query = builder.build({
rawQuery: '{"query": "$lucene_query"}',
});
expect(query.query).to.be("$lucene_query");
});
it('with multiple bucket aggs', function() {
var query = builder.build({
metrics: [{type: 'count', id: '1'}],
timeField: '@timestamp',
bucketAggs: [
{type: 'terms', field: '@host', id: '2'},
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
});
expect(query.aggs["2"].terms.field).to.be("@host");
expect(query.aggs["2"].aggs["3"].date_histogram.field).to.be("@timestamp");
});
it('with select field', function() {
var query = builder.build({
metrics: [{type: 'avg', field: '@value', id: '1'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
}, 100, 1000);
var aggs = query.aggs["2"].aggs;
expect(aggs["1"].avg.field).to.be("@value");
});
it('with term agg and order by metric agg', function() {
var query = builder.build({
metrics: [
{type: 'count', id: '1'},
{type: 'avg', field: '@value', id: '5'}
],
bucketAggs: [
{type: 'terms', field: '@host', settings: {size: 5, order: 'asc', orderBy: '5'}, id: '2' },
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}, 100, 1000);
var firstLevel = query.aggs["2"];
var secondLevel = firstLevel.aggs["3"];
expect(firstLevel.aggs["5"].avg.field).to.be("@value");
expect(secondLevel.aggs["5"].avg.field).to.be("@value");
});
it('with metric percentiles', function() {
var query = builder.build({
metrics: [
{
id: '1',
type: 'percentiles',
field: '@load_time',
settings: {
percents: [1,2,3,4]
}
}
],
bucketAggs: [
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}, 100, 1000);
var firstLevel = query.aggs["3"];
expect(firstLevel.aggs["1"].percentiles.field).to.be("@load_time");
expect(firstLevel.aggs["1"].percentiles.percents).to.eql([1,2,3,4]);
});
it('with filters aggs', function() {
var query = builder.build({
metrics: [{type: 'count', id: '1'}],
timeField: '@timestamp',
bucketAggs: [
{
id: '2',
type: 'filters',
settings: {
filters: [
{query: '@metric:cpu' },
{query: '@metric:logins.count' },
]
}
},
{type: 'date_histogram', field: '@timestamp', id: '4'}
],
});
expect(query.aggs["2"].filters.filters["@metric:cpu"].query.query_string.query).to.be("@metric:cpu");
expect(query.aggs["2"].filters.filters["@metric:logins.count"].query.query_string.query).to.be("@metric:logins.count");
expect(query.aggs["2"].aggs["4"].date_histogram.field).to.be("@timestamp");
});
});

View File

@@ -0,0 +1,30 @@
///<amd-dependency path="../query_ctrl" />
///<amd-dependency path="app/services/uiSegmentSrv" />
///<amd-dependency path="test/specs/helpers" name="helpers" />
import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
declare var helpers: any;
describe('ElasticQueryCtrl', function() {
var ctx = new helpers.ControllerTestContext();
beforeEach(angularMocks.module('grafana.controllers'));
beforeEach(angularMocks.module('grafana.services'));
beforeEach(ctx.providePhase());
beforeEach(ctx.createControllerPhase('ElasticQueryCtrl'));
beforeEach(function() {
ctx.scope.target = {};
ctx.scope.$parent = { get_data: sinon.spy() };
ctx.scope.datasource = ctx.datasource;
ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
});
describe('init', function() {
beforeEach(function() {
ctx.scope.init();
});
});
});

View File

@@ -1,5 +1,5 @@
///<amd-dependency path="app/plugins/datasource/influxdb/query_builder" name="InfluxQueryBuilder"/>
//
import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
declare var InfluxQueryBuilder: any;

View File

@@ -1,52 +0,0 @@
define([
'moment',
'app/plugins/datasource/elasticsearch/indexPattern'
], function(moment, IndexPattern) {
'use strict';
describe('IndexPattern', function() {
describe('when getting index for today', function() {
it('should return correct index name', function() {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
var expected = 'asd-' + moment().format('YYYY.MM.DD');
expect(pattern.getIndexForToday()).to.be(expected);
});
});
describe('when getting index list for time range', function() {
describe('no interval', function() {
it('should return correct index', function() {
var pattern = new IndexPattern('my-metrics');
var from = new Date(2015, 4, 30, 1, 2, 3);
var to = new Date(2015, 5, 1, 12, 5, 6);
expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
});
});
describe('daily', function() {
it('should return correct index list', function() {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
var from = new Date(1432940523000);
var to = new Date(1433153106000);
var expected = [
'asd-2015.05.29',
'asd-2015.05.30',
'asd-2015.05.31',
'asd-2015.06.01',
];
expect(pattern.getIndexList(from, to)).to.eql(expected);
});
});
});
});
});

View File

@@ -1,124 +0,0 @@
define([
'app/plugins/datasource/elasticsearch/queryBuilder'
], function(ElasticQueryBuilder) {
'use strict';
describe('ElasticQueryBuilder', function() {
var builder;
beforeEach(function() {
builder = new ElasticQueryBuilder({timeField: '@timestamp'});
});
it('with defaults', function() {
var query = builder.build({
metrics: [{type: 'Count', id: '0'}],
timeField: '@timestamp',
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '1'}],
});
expect(query.query.filtered.filter.bool.must[0].range["@timestamp"].gte).to.be("$timeFrom");
expect(query.aggs["1"].date_histogram.extended_bounds.min).to.be("$timeFrom");
});
it('with raw query', function() {
var query = builder.build({
rawQuery: '{"query": "$lucene_query"}',
});
expect(query.query).to.be("$lucene_query");
});
it('with multiple bucket aggs', function() {
var query = builder.build({
metrics: [{type: 'count', id: '1'}],
timeField: '@timestamp',
bucketAggs: [
{type: 'terms', field: '@host', id: '2'},
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
});
expect(query.aggs["2"].terms.field).to.be("@host");
expect(query.aggs["2"].aggs["3"].date_histogram.field).to.be("@timestamp");
});
it('with select field', function() {
var query = builder.build({
metrics: [{type: 'avg', field: '@value', id: '1'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
}, 100, 1000);
var aggs = query.aggs["2"].aggs;
expect(aggs["1"].avg.field).to.be("@value");
});
it('with term agg and order by metric agg', function() {
var query = builder.build({
metrics: [
{type: 'count', id: '1'},
{type: 'avg', field: '@value', id: '5'}
],
bucketAggs: [
{type: 'terms', field: '@host', settings: {size: 5, order: 'asc', orderBy: '5'}, id: '2' },
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}, 100, 1000);
var firstLevel = query.aggs["2"];
var secondLevel = firstLevel.aggs["3"];
expect(firstLevel.aggs["5"].avg.field).to.be("@value");
expect(secondLevel.aggs["5"].avg.field).to.be("@value");
});
it('with metric percentiles', function() {
var query = builder.build({
metrics: [
{
id: '1',
type: 'percentiles',
field: '@load_time',
settings: {
percents: [1,2,3,4]
}
}
],
bucketAggs: [
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}, 100, 1000);
var firstLevel = query.aggs["3"];
expect(firstLevel.aggs["1"].percentiles.field).to.be("@load_time");
expect(firstLevel.aggs["1"].percentiles.percents).to.eql([1,2,3,4]);
});
it('with filters aggs', function() {
var query = builder.build({
metrics: [{type: 'count', id: '1'}],
timeField: '@timestamp',
bucketAggs: [
{
id: '2',
type: 'filters',
settings: {
filters: [
{query: '@metric:cpu' },
{query: '@metric:logins.count' },
]
}
},
{type: 'date_histogram', field: '@timestamp', id: '4'}
],
});
expect(query.aggs["2"].filters.filters["@metric:cpu"].query.query_string.query).to.be("@metric:cpu");
expect(query.aggs["2"].filters.filters["@metric:logins.count"].query.query_string.query).to.be("@metric:logins.count");
expect(query.aggs["2"].aggs["4"].date_histogram.field).to.be("@timestamp");
});
});
});

View File

@@ -1,32 +0,0 @@
define([
'./helpers',
'app/plugins/datasource/elasticsearch/queryCtrl',
'app/services/uiSegmentSrv'
], function(helpers) {
'use strict';
describe('ElasticQueryCtrl', function() {
var ctx = new helpers.ControllerTestContext();
beforeEach(module('grafana.controllers'));
beforeEach(module('grafana.services'));
beforeEach(ctx.providePhase());
beforeEach(ctx.createControllerPhase('ElasticQueryCtrl'));
beforeEach(function() {
ctx.scope.target = {};
ctx.scope.$parent = { get_data: sinon.spy() };
ctx.scope.datasource = ctx.datasource;
ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
});
describe('init', function() {
beforeEach(function() {
ctx.scope.init();
});
});
});
});

View File

@@ -1,414 +0,0 @@
define([
'app/plugins/datasource/elasticsearch/elasticResponse',
], function(ElasticResponse) {
'use strict';
describe('ElasticResponse', function() {
var targets;
var response;
var result;
describe('simple query and count', function() {
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
doc_count: 10,
key: 1000
},
{
doc_count: 15,
key: 2000
}
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 1 series', function() {
expect(result.data.length).to.be(1);
expect(result.data[0].target).to.be('Count');
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0][0]).to.be(10);
expect(result.data[0].datapoints[0][1]).to.be(1000);
});
});
describe('simple query count & avg aggregation', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"3": {
buckets: [
{
"2": {value: 88},
doc_count: 10,
key: 1000
},
{
"2": {value: 99},
doc_count: 15,
key: 2000
}
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0][0]).to.be(10);
expect(result.data[0].datapoints[0][1]).to.be(1000);
expect(result.data[1].target).to.be("Average value");
expect(result.data[1].datapoints[0][0]).to.be(88);
expect(result.data[1].datapoints[1][0]).to.be(99);
});
});
describe('single group by query one metric', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
"3": {
buckets: [
{doc_count: 1, key: 1000},
{doc_count: 3, key: 2000}
]
},
doc_count: 4,
key: 'server1',
},
{
"3": {
buckets: [
{doc_count: 2, key: 1000},
{doc_count: 8, key: 2000}
]
},
doc_count: 10,
key: 'server2',
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1');
expect(result.data[1].target).to.be('server2');
});
});
describe('single group by query two metrics', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}, {type: 'avg', field: '@value', id: '4'}],
bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
"3": {
buckets: [
{ "4": {value: 10}, doc_count: 1, key: 1000},
{ "4": {value: 12}, doc_count: 3, key: 2000}
]
},
doc_count: 4,
key: 'server1',
},
{
"3": {
buckets: [
{ "4": {value: 20}, doc_count: 1, key: 1000},
{ "4": {value: 32}, doc_count: 3, key: 2000}
]
},
doc_count: 10,
key: 'server2',
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', function() {
expect(result.data.length).to.be(4);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1 Count');
expect(result.data[1].target).to.be('server1 Average @value');
expect(result.data[2].target).to.be('server2 Count');
expect(result.data[3].target).to.be('server2 Average @value');
});
});
describe('with percentiles', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
}];
response = {
responses: [{
aggregations: {
"3": {
buckets: [
{
"1": {values: {"75": 3.3, "90": 5.5}},
doc_count: 10,
key: 1000
},
{
"1": {values: {"75": 2.3, "90": 4.5}},
doc_count: 15,
key: 2000
}
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('p75');
expect(result.data[1].target).to.be('p90');
expect(result.data[0].datapoints[0][0]).to.be(3.3);
expect(result.data[0].datapoints[0][1]).to.be(1000);
expect(result.data[1].datapoints[1][0]).to.be(4.5);
});
});
describe('with extended_stats', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
bucketAggs: [{type: 'terms', field: 'host', id: '3'}, {type: 'date_histogram', id: '4'}],
}];
response = {
responses: [{
aggregations: {
"3": {
buckets: [
{
key: 'server1',
"4": {
buckets: [{
"1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
doc_count: 10,
key: 1000
}]
}
},
{
key: 'server2',
"4": {
buckets: [{
"1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
doc_count: 10,
key: 1000
}]
}
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', function() {
expect(result.data.length).to.be(4);
expect(result.data[0].datapoints.length).to.be(1);
expect(result.data[0].target).to.be('server1 Max');
expect(result.data[1].target).to.be('server1 Std Dev Upper');
expect(result.data[0].datapoints[0][0]).to.be(10.2);
expect(result.data[1].datapoints[0][0]).to.be(3);
});
});
describe('single group by with alias pattern', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
alias: '{{term @host}} {{metric}} and!',
bucketAggs: [
{type: 'terms', field: '@host', id: '2'},
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: [
{
"3": {
buckets: [
{doc_count: 1, key: 1000},
{doc_count: 3, key: 2000}
]
},
doc_count: 4,
key: 'server1',
},
{
"3": {
buckets: [
{doc_count: 2, key: 1000},
{doc_count: 8, key: 2000}
]
},
doc_count: 10,
key: 'server2',
},
]
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1 Count and!');
expect(result.data[1].target).to.be('server2 Count and!');
});
});
describe('with two filters agg', function() {
var result;
beforeEach(function() {
targets = [{
refId: 'A',
metrics: [{type: 'count', id: '1'}],
bucketAggs: [
{
id: '2',
type: 'filters',
settings: {
filters: [
{query: '@metric:cpu' },
{query: '@metric:logins.count' },
]
}
},
{type: 'date_histogram', field: '@timestamp', id: '3'}
],
}];
response = {
responses: [{
aggregations: {
"2": {
buckets: {
"@metric:cpu": {
"3": {
buckets: [
{doc_count: 1, key: 1000},
{doc_count: 3, key: 2000}
]
},
},
"@metric:logins.count": {
"3": {
buckets: [
{doc_count: 2, key: 1000},
{doc_count: 8, key: 2000}
]
},
},
}
}
}
}]
};
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('@metric:cpu');
expect(result.data[1].target).to.be('@metric:logins.count');
});
});
});
});

View File

@@ -1,77 +0,0 @@
define([
'./helpers',
'moment',
'angular',
'app/plugins/datasource/elasticsearch/datasource',
], function(helpers, moment, angular) {
'use strict';
describe('ElasticDatasource', function() {
var ctx = new helpers.ServiceTestContext();
beforeEach(module('grafana.services'));
beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
beforeEach(ctx.createService('ElasticDatasource'));
beforeEach(function() {
ctx.ds = new ctx.service({jsonData: {}});
});
describe('When testing datasource with index pattern', function() {
beforeEach(function() {
ctx.ds = new ctx.service({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
jsonData: { interval: 'Daily' }
});
});
it('should translate index pattern to current day', function() {
var requestOptions;
ctx.backendSrv.datasourceRequest = function(options) {
requestOptions = options;
return ctx.$q.when({});
};
ctx.ds.testDatasource();
ctx.$rootScope.$apply();
var today = moment().format("YYYY.MM.DD");
expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
});
});
describe('When issuing metric query with interval pattern', function() {
beforeEach(function() {
ctx.ds = new ctx.service({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
jsonData: { interval: 'Daily' }
});
});
it('should translate index pattern to current day', function() {
var requestOptions;
ctx.backendSrv.datasourceRequest = function(options) {
requestOptions = options;
return ctx.$q.when({data: {responses: []}});
};
ctx.ds.query({
range: {
from: moment([2015, 4, 30, 10]),
to: moment([2015, 5, 1, 10])
},
targets: [{ bucketAggs: [], metrics: [] }]
});
ctx.$rootScope.$apply();
var parts = requestOptions.data.split('\n');
var header = angular.fromJson(parts[0]);
expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
});
});
});
});