diff --git a/.gitignore b/.gitignore
index 8abd0d17cbd..f77072fde1e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,7 @@ awsconfig
/emails/dist
/public_gen
/tmp
+vendor/phantomjs/phantomjs
docs/AWS_S3_BUCKET
docs/GIT_BRANCH
diff --git a/karma.conf.js b/karma.conf.js
index 569cf845b16..f05b7dcc61a 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -25,6 +25,7 @@ module.exports = function(config) {
browsers: ['PhantomJS'],
captureTimeout: 60000,
singleRun: true,
+ autoWatchBatchDelay: 1000,
});
diff --git a/packaging/rpm/init.d/grafana-server b/packaging/rpm/init.d/grafana-server
index 92e88673d74..bb27882f625 100755
--- a/packaging/rpm/init.d/grafana-server
+++ b/packaging/rpm/init.d/grafana-server
@@ -148,7 +148,7 @@ case "$1" in
$0 start
;;
*)
- echo -n "Usage: $0 {start|stop|restart|force-reload|status}"
+ echo "Usage: $0 {start|stop|restart|force-reload|status}"
exit 3
;;
esac
diff --git a/pkg/metrics/report_usage.go b/pkg/metrics/report_usage.go
index c8848fb5371..065c83d3b8f 100644
--- a/pkg/metrics/report_usage.go
+++ b/pkg/metrics/report_usage.go
@@ -36,12 +36,6 @@ func sendUsageStats() {
"metrics": metrics,
}
- statsQuery := m.GetSystemStatsQuery{}
- if err := bus.Dispatch(&statsQuery); err != nil {
- log.Error(3, "Failed to get system stats", err)
- return
- }
-
UsageStats.Each(func(name string, i interface{}) {
switch metric := i.(type) {
case Counter:
@@ -52,11 +46,36 @@ func sendUsageStats() {
}
})
+ statsQuery := m.GetSystemStatsQuery{}
+ if err := bus.Dispatch(&statsQuery); err != nil {
+ log.Error(3, "Failed to get system stats", err)
+ return
+ }
+
metrics["stats.dashboards.count"] = statsQuery.Result.DashboardCount
metrics["stats.users.count"] = statsQuery.Result.UserCount
metrics["stats.orgs.count"] = statsQuery.Result.OrgCount
- out, _ := json.Marshal(report)
+ dsStats := m.GetDataSourceStatsQuery{}
+ if err := bus.Dispatch(&dsStats); err != nil {
+ log.Error(3, "Failed to get datasource stats", err)
+ return
+ }
+
+ // send a counter for each standard data source type,
+ // but bucket any custom data sources under "other",
+ // since their type names could be sensitive information
+ dsOtherCount := 0
+ for _, dsStat := range dsStats.Result {
+ if m.IsStandardDataSource(dsStat.Type) {
+ metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count
+ } else {
+ dsOtherCount += dsStat.Count
+ }
+ }
+ metrics["stats.ds.other.count"] = dsOtherCount
+
+ out, _ := json.MarshalIndent(report, "", " ")
data := bytes.NewBuffer(out)
client := http.Client{Timeout: time.Duration(5 * time.Second)}
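
Aside: the block added above folds per-type data source counts into the usage report, naming only standard types and bucketing everything else under "other". Below is a minimal, self-contained Go sketch of that aggregation (not part of the patch; the sample counts and the "my-custom-ds" type are invented for illustration):

// Minimal sketch of the aggregation done in sendUsageStats above.
// Sample counts and the "my-custom-ds" type are invented for illustration.
package main

import "fmt"

type dataSourceStat struct {
    Type  string
    Count int
}

// mirrors m.IsStandardDataSource from pkg/models/datasource.go below
func isStandardDataSource(dsType string) bool {
    switch dsType {
    case "elasticsearch", "influxdb", "opentsdb", "cloudwatch", "prometheus", "graphite":
        return true
    default:
        return false
    }
}

func main() {
    stats := []dataSourceStat{
        {Type: "graphite", Count: 4},
        {Type: "influxdb", Count: 2},
        {Type: "my-custom-ds", Count: 1}, // custom plugin type: not reported by name
    }

    metrics := map[string]interface{}{}
    dsOtherCount := 0
    for _, dsStat := range stats {
        if isStandardDataSource(dsStat.Type) {
            metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count
        } else {
            dsOtherCount += dsStat.Count
        }
    }
    metrics["stats.ds.other.count"] = dsOtherCount

    fmt.Println(metrics)
    // map[stats.ds.graphite.count:4 stats.ds.influxdb.count:2 stats.ds.other.count:1]
}
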
diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go
index 75e2134c09f..35b623ce30a 100644
--- a/pkg/models/datasource.go
+++ b/pkg/models/datasource.go
@@ -12,6 +12,8 @@ const (
DS_ES = "elasticsearch"
DS_OPENTSDB = "opentsdb"
DS_CLOUDWATCH = "cloudwatch"
+ DS_KAIROSDB = "kairosdb"
+ DS_PROMETHEUS = "prometheus"
DS_ACCESS_DIRECT = "direct"
DS_ACCESS_PROXY = "proxy"
)
@@ -45,6 +47,25 @@ type DataSource struct {
Updated time.Time
}
+func IsStandardDataSource(dsType string) bool {
+ switch dsType {
+ case DS_ES:
+ return true
+ case DS_INFLUXDB:
+ return true
+ case DS_OPENTSDB:
+ return true
+ case DS_CLOUDWATCH:
+ return true
+ case DS_PROMETHEUS:
+ return true
+ case DS_GRAPHITE:
+ return true
+ default:
+ return false
+ }
+}
+
// ----------------------
// COMMANDS
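
Aside: a sketch (not part of the patch) of a table-driven test that could accompany IsStandardDataSource as defined above. Note that the newly added DS_KAIROSDB constant is not listed in the switch, so kairosdb data sources are counted under "other" in the usage report; the "my-custom-ds" entry is a hypothetical custom plugin type.

package models

import "testing"

func TestIsStandardDataSource(t *testing.T) {
    cases := map[string]bool{
        DS_GRAPHITE:    true,
        DS_ES:          true,
        DS_PROMETHEUS:  true,
        DS_KAIROSDB:    false, // defined above, but not treated as standard
        "my-custom-ds": false, // hypothetical custom plugin type
    }
    for dsType, want := range cases {
        if got := IsStandardDataSource(dsType); got != want {
            t.Errorf("IsStandardDataSource(%q) = %v, want %v", dsType, got, want)
        }
    }
}
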
diff --git a/pkg/models/stats.go b/pkg/models/stats.go
index 0d83882e666..6a060137ac7 100644
--- a/pkg/models/stats.go
+++ b/pkg/models/stats.go
@@ -6,6 +6,15 @@ type SystemStats struct {
OrgCount int
}
+type DataSourceStats struct {
+ Count int
+ Type string
+}
+
type GetSystemStatsQuery struct {
Result *SystemStats
}
+
+type GetDataSourceStatsQuery struct {
+ Result []*DataSourceStats
+}
diff --git a/pkg/services/sqlstore/stats.go b/pkg/services/sqlstore/stats.go
index 7995dd43f38..044aa185f19 100644
--- a/pkg/services/sqlstore/stats.go
+++ b/pkg/services/sqlstore/stats.go
@@ -7,6 +7,18 @@ import (
func init() {
bus.AddHandler("sql", GetSystemStats)
+ bus.AddHandler("sql", GetDataSourceStats)
+}
+
+func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error {
+ var rawSql = `SELECT COUNT(*) as count, type FROM data_source GROUP BY type`
+ query.Result = make([]*m.DataSourceStats, 0)
+ err := x.Sql(rawSql).Find(&query.Result)
+ if err != nil {
+ return err
+ }
+
+ return nil
}
func GetSystemStats(query *m.GetSystemStatsQuery) error {
diff --git a/public/app/core/utils/rangeutil.ts b/public/app/core/utils/rangeutil.ts
index b87dd4969a1..1e64fcc0061 100644
--- a/public/app/core/utils/rangeutil.ts
+++ b/public/app/core/utils/rangeutil.ts
@@ -106,7 +106,7 @@ _.each(rangeOptions, function (frame) {
}
}
} else {
- opt.display = 'parse error';
+ opt.display = opt.from + ' to ' + opt.to;
opt.invalid = true;
}
diff --git a/public/app/features/dashboard/partials/saveDashboardAs.html b/public/app/features/dashboard/partials/saveDashboardAs.html
index 9af18591fde..5c069f199e6 100644
--- a/public/app/features/dashboard/partials/saveDashboardAs.html
+++ b/public/app/features/dashboard/partials/saveDashboardAs.html
@@ -14,7 +14,7 @@
New title
-
+
diff --git a/public/app/plugins/datasource/elasticsearch/bucketAgg.js b/public/app/plugins/datasource/elasticsearch/bucket_agg.js
similarity index 99%
rename from public/app/plugins/datasource/elasticsearch/bucketAgg.js
rename to public/app/plugins/datasource/elasticsearch/bucket_agg.js
index 33efa4cbf6a..00bba14af05 100644
--- a/public/app/plugins/datasource/elasticsearch/bucketAgg.js
+++ b/public/app/plugins/datasource/elasticsearch/bucket_agg.js
@@ -1,7 +1,7 @@
define([
'angular',
'lodash',
- './queryDef',
+ './query_def',
],
function (angular, _, queryDef) {
'use strict';
diff --git a/public/app/plugins/datasource/elasticsearch/datasource.js b/public/app/plugins/datasource/elasticsearch/datasource.js
index 5f7d3860cf3..9d6b8e4c834 100644
--- a/public/app/plugins/datasource/elasticsearch/datasource.js
+++ b/public/app/plugins/datasource/elasticsearch/datasource.js
@@ -3,10 +3,10 @@ define([
'lodash',
'moment',
'kbn',
- './queryBuilder',
- './indexPattern',
- './elasticResponse',
- './queryCtrl',
+ './query_builder',
+ './index_pattern',
+ './elastic_response',
+ './query_ctrl',
'./directives'
],
function (angular, _, moment, kbn, ElasticQueryBuilder, IndexPattern, ElasticResponse) {
diff --git a/public/app/plugins/datasource/elasticsearch/directives.js b/public/app/plugins/datasource/elasticsearch/directives.js
index 57656050cab..480ee79ad0a 100644
--- a/public/app/plugins/datasource/elasticsearch/directives.js
+++ b/public/app/plugins/datasource/elasticsearch/directives.js
@@ -1,7 +1,7 @@
define([
'angular',
- './bucketAgg',
- './metricAgg',
+ './bucket_agg',
+ './metric_agg',
],
function (angular) {
'use strict';
diff --git a/public/app/plugins/datasource/elasticsearch/elasticResponse.js b/public/app/plugins/datasource/elasticsearch/elastic_response.js
similarity index 99%
rename from public/app/plugins/datasource/elasticsearch/elasticResponse.js
rename to public/app/plugins/datasource/elasticsearch/elastic_response.js
index 5914e024c44..bfbcb2034ab 100644
--- a/public/app/plugins/datasource/elasticsearch/elasticResponse.js
+++ b/public/app/plugins/datasource/elasticsearch/elastic_response.js
@@ -1,6 +1,6 @@
define([
"lodash",
- "./queryDef"
+ "./query_def"
],
function (_, queryDef) {
'use strict';
diff --git a/public/app/plugins/datasource/elasticsearch/indexPattern.js b/public/app/plugins/datasource/elasticsearch/index_pattern.js
similarity index 100%
rename from public/app/plugins/datasource/elasticsearch/indexPattern.js
rename to public/app/plugins/datasource/elasticsearch/index_pattern.js
diff --git a/public/app/plugins/datasource/elasticsearch/metricAgg.js b/public/app/plugins/datasource/elasticsearch/metric_agg.js
similarity index 99%
rename from public/app/plugins/datasource/elasticsearch/metricAgg.js
rename to public/app/plugins/datasource/elasticsearch/metric_agg.js
index f2edd203e86..fd748a1f8b0 100644
--- a/public/app/plugins/datasource/elasticsearch/metricAgg.js
+++ b/public/app/plugins/datasource/elasticsearch/metric_agg.js
@@ -1,7 +1,7 @@
define([
'angular',
'lodash',
- './queryDef'
+ './query_def'
],
function (angular, _, queryDef) {
'use strict';
diff --git a/public/app/plugins/datasource/elasticsearch/queryBuilder.js b/public/app/plugins/datasource/elasticsearch/query_builder.js
similarity index 100%
rename from public/app/plugins/datasource/elasticsearch/queryBuilder.js
rename to public/app/plugins/datasource/elasticsearch/query_builder.js
diff --git a/public/app/plugins/datasource/elasticsearch/queryCtrl.js b/public/app/plugins/datasource/elasticsearch/query_ctrl.js
similarity index 100%
rename from public/app/plugins/datasource/elasticsearch/queryCtrl.js
rename to public/app/plugins/datasource/elasticsearch/query_ctrl.js
diff --git a/public/app/plugins/datasource/elasticsearch/queryDef.js b/public/app/plugins/datasource/elasticsearch/query_def.js
similarity index 100%
rename from public/app/plugins/datasource/elasticsearch/queryDef.js
rename to public/app/plugins/datasource/elasticsearch/query_def.js
diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
new file mode 100644
index 00000000000..aecb16501b7
--- /dev/null
+++ b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
@@ -0,0 +1,74 @@
+///
+///
+
+import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
+import moment = require('moment');
+import angular = require('angular');
+
+declare var helpers: any;
+
+describe('ElasticDatasource', function() {
+ var ctx = new helpers.ServiceTestContext();
+
+ beforeEach(angularMocks.module('grafana.services'));
+ beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
+ beforeEach(ctx.createService('ElasticDatasource'));
+ beforeEach(function() {
+ ctx.ds = new ctx.service({jsonData: {}});
+ });
+
+ describe('When testing datasource with index pattern', function() {
+ beforeEach(function() {
+ ctx.ds = new ctx.service({
+ url: 'http://es.com',
+ index: '[asd-]YYYY.MM.DD',
+ jsonData: { interval: 'Daily' }
+ });
+ });
+
+ it('should translate index pattern to current day', function() {
+ var requestOptions;
+ ctx.backendSrv.datasourceRequest = function(options) {
+ requestOptions = options;
+ return ctx.$q.when({});
+ };
+
+ ctx.ds.testDatasource();
+ ctx.$rootScope.$apply();
+
+ var today = moment().format("YYYY.MM.DD");
+ expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
+ });
+ });
+
+ describe('When issuing metric query with interval pattern', function() {
+ beforeEach(function() {
+ ctx.ds = new ctx.service({
+ url: 'http://es.com',
+ index: '[asd-]YYYY.MM.DD',
+ jsonData: { interval: 'Daily' }
+ });
+ });
+
+ it('should translate index pattern to a list of indices for the query range', function() {
+ var requestOptions;
+ ctx.backendSrv.datasourceRequest = function(options) {
+ requestOptions = options;
+ return ctx.$q.when({data: {responses: []}});
+ };
+
+ ctx.ds.query({
+ range: {
+ from: moment([2015, 4, 30, 10]),
+ to: moment([2015, 5, 1, 10])
+ },
+ targets: [{ bucketAggs: [], metrics: [] }]
+ });
+
+ ctx.$rootScope.$apply();
+ var parts = requestOptions.data.split('\n');
+ var header = angular.fromJson(parts[0]);
+ expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
+ });
+ });
+});
diff --git a/public/app/plugins/datasource/elasticsearch/specs/elastic_response_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/elastic_response_specs.ts
new file mode 100644
index 00000000000..df810e3a9d9
--- /dev/null
+++ b/public/app/plugins/datasource/elasticsearch/specs/elastic_response_specs.ts
@@ -0,0 +1,414 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var ElasticResponse: any;
+
+describe('ElasticResponse', function() {
+ var targets;
+ var response;
+ var result;
+
+ describe('simple query and count', function() {
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'count', id: '1'}],
+ bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "2": {
+ buckets: [
+ {
+ doc_count: 10,
+ key: 1000
+ },
+ {
+ doc_count: 15,
+ key: 2000
+ }
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 1 series', function() {
+ expect(result.data.length).to.be(1);
+ expect(result.data[0].target).to.be('Count');
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].datapoints[0][0]).to.be(10);
+ expect(result.data[0].datapoints[0][1]).to.be(1000);
+ });
+
+ });
+
+ describe('simple query count & avg aggregation', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
+ bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "3": {
+ buckets: [
+ {
+ "2": {value: 88},
+ doc_count: 10,
+ key: 1000
+ },
+ {
+ "2": {value: 99},
+ doc_count: 15,
+ key: 2000
+ }
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 2 series', function() {
+ expect(result.data.length).to.be(2);
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].datapoints[0][0]).to.be(10);
+ expect(result.data[0].datapoints[0][1]).to.be(1000);
+
+ expect(result.data[1].target).to.be("Average value");
+ expect(result.data[1].datapoints[0][0]).to.be(88);
+ expect(result.data[1].datapoints[1][0]).to.be(99);
+ });
+
+ });
+
+ describe('single group by query one metric', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'count', id: '1'}],
+ bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "2": {
+ buckets: [
+ {
+ "3": {
+ buckets: [
+ {doc_count: 1, key: 1000},
+ {doc_count: 3, key: 2000}
+ ]
+ },
+ doc_count: 4,
+ key: 'server1',
+ },
+ {
+ "3": {
+ buckets: [
+ {doc_count: 2, key: 1000},
+ {doc_count: 8, key: 2000}
+ ]
+ },
+ doc_count: 10,
+ key: 'server2',
+ },
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 2 series', function() {
+ expect(result.data.length).to.be(2);
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].target).to.be('server1');
+ expect(result.data[1].target).to.be('server2');
+ });
+ });
+
+ describe('single group by query two metrics', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'count', id: '1'}, {type: 'avg', field: '@value', id: '4'}],
+ bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "2": {
+ buckets: [
+ {
+ "3": {
+ buckets: [
+ { "4": {value: 10}, doc_count: 1, key: 1000},
+ { "4": {value: 12}, doc_count: 3, key: 2000}
+ ]
+ },
+ doc_count: 4,
+ key: 'server1',
+ },
+ {
+ "3": {
+ buckets: [
+ { "4": {value: 20}, doc_count: 1, key: 1000},
+ { "4": {value: 32}, doc_count: 3, key: 2000}
+ ]
+ },
+ doc_count: 10,
+ key: 'server2',
+ },
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 4 series', function() {
+ expect(result.data.length).to.be(4);
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].target).to.be('server1 Count');
+ expect(result.data[1].target).to.be('server1 Average @value');
+ expect(result.data[2].target).to.be('server2 Count');
+ expect(result.data[3].target).to.be('server2 Average @value');
+ });
+ });
+
+ describe('with percentiles', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
+ bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "3": {
+ buckets: [
+ {
+ "1": {values: {"75": 3.3, "90": 5.5}},
+ doc_count: 10,
+ key: 1000
+ },
+ {
+ "1": {values: {"75": 2.3, "90": 4.5}},
+ doc_count: 15,
+ key: 2000
+ }
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 2 series', function() {
+ expect(result.data.length).to.be(2);
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].target).to.be('p75');
+ expect(result.data[1].target).to.be('p90');
+ expect(result.data[0].datapoints[0][0]).to.be(3.3);
+ expect(result.data[0].datapoints[0][1]).to.be(1000);
+ expect(result.data[1].datapoints[1][0]).to.be(4.5);
+ });
+ });
+
+ describe('with extended_stats', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
+ bucketAggs: [{type: 'terms', field: 'host', id: '3'}, {type: 'date_histogram', id: '4'}],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "3": {
+ buckets: [
+ {
+ key: 'server1',
+ "4": {
+ buckets: [{
+ "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
+ doc_count: 10,
+ key: 1000
+ }]
+ }
+ },
+ {
+ key: 'server2',
+ "4": {
+ buckets: [{
+ "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
+ doc_count: 10,
+ key: 1000
+ }]
+ }
+ },
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 4 series', function() {
+ expect(result.data.length).to.be(4);
+ expect(result.data[0].datapoints.length).to.be(1);
+ expect(result.data[0].target).to.be('server1 Max');
+ expect(result.data[1].target).to.be('server1 Std Dev Upper');
+
+ expect(result.data[0].datapoints[0][0]).to.be(10.2);
+ expect(result.data[1].datapoints[0][0]).to.be(3);
+ });
+ });
+
+ describe('single group by with alias pattern', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'count', id: '1'}],
+ alias: '{{term @host}} {{metric}} and!',
+ bucketAggs: [
+ {type: 'terms', field: '@host', id: '2'},
+ {type: 'date_histogram', field: '@timestamp', id: '3'}
+ ],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "2": {
+ buckets: [
+ {
+ "3": {
+ buckets: [
+ {doc_count: 1, key: 1000},
+ {doc_count: 3, key: 2000}
+ ]
+ },
+ doc_count: 4,
+ key: 'server1',
+ },
+ {
+ "3": {
+ buckets: [
+ {doc_count: 2, key: 1000},
+ {doc_count: 8, key: 2000}
+ ]
+ },
+ doc_count: 10,
+ key: 'server2',
+ },
+ ]
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 2 series', function() {
+ expect(result.data.length).to.be(2);
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].target).to.be('server1 Count and!');
+ expect(result.data[1].target).to.be('server2 Count and!');
+ });
+ });
+
+ describe('with two filters agg', function() {
+ var result;
+
+ beforeEach(function() {
+ targets = [{
+ refId: 'A',
+ metrics: [{type: 'count', id: '1'}],
+ bucketAggs: [
+ {
+ id: '2',
+ type: 'filters',
+ settings: {
+ filters: [
+ {query: '@metric:cpu' },
+ {query: '@metric:logins.count' },
+ ]
+ }
+ },
+ {type: 'date_histogram', field: '@timestamp', id: '3'}
+ ],
+ }];
+ response = {
+ responses: [{
+ aggregations: {
+ "2": {
+ buckets: {
+ "@metric:cpu": {
+ "3": {
+ buckets: [
+ {doc_count: 1, key: 1000},
+ {doc_count: 3, key: 2000}
+ ]
+ },
+ },
+ "@metric:logins.count": {
+ "3": {
+ buckets: [
+ {doc_count: 2, key: 1000},
+ {doc_count: 8, key: 2000}
+ ]
+ },
+ },
+ }
+ }
+ }
+ }]
+ };
+
+ result = new ElasticResponse(targets, response).getTimeSeries();
+ });
+
+ it('should return 2 series', function() {
+ expect(result.data.length).to.be(2);
+ expect(result.data[0].datapoints.length).to.be(2);
+ expect(result.data[0].target).to.be('@metric:cpu');
+ expect(result.data[1].target).to.be('@metric:logins.count');
+ });
+ });
+
+});
diff --git a/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts
new file mode 100644
index 00000000000..8f662bb075f
--- /dev/null
+++ b/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts
@@ -0,0 +1,51 @@
+///
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+import moment = require('moment');
+
+declare var IndexPattern: any;
+
+describe('IndexPattern', function() {
+
+ describe('when getting index for today', function() {
+ it('should return correct index name', function() {
+ var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
+ var expected = 'asd-' + moment().format('YYYY.MM.DD');
+
+ expect(pattern.getIndexForToday()).to.be(expected);
+ });
+ });
+
+ describe('when getting index list for time range', function() {
+
+ describe('no interval', function() {
+ it('should return correct index', function() {
+ var pattern = new IndexPattern('my-metrics');
+ var from = new Date(2015, 4, 30, 1, 2, 3);
+ var to = new Date(2015, 5, 1, 12, 5 , 6);
+ expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
+ });
+ });
+
+ describe('daily', function() {
+
+ it('should return correct index list', function() {
+ var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
+ var from = new Date(1432940523000);
+ var to = new Date(1433153106000);
+
+ var expected = [
+ 'asd-2015.05.29',
+ 'asd-2015.05.30',
+ 'asd-2015.05.31',
+ 'asd-2015.06.01',
+ ];
+
+ expect(pattern.getIndexList(from, to)).to.eql(expected);
+ });
+
+ });
+ });
+
+});
diff --git a/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts
new file mode 100644
index 00000000000..c32b1463ca3
--- /dev/null
+++ b/public/app/plugins/datasource/elasticsearch/specs/query_builder_specs.ts
@@ -0,0 +1,123 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var ElasticQueryBuilder: any;
+
+describe('ElasticQueryBuilder', function() {
+ var builder;
+
+ beforeEach(function() {
+ builder = new ElasticQueryBuilder({timeField: '@timestamp'});
+ });
+
+ it('with defaults', function() {
+ var query = builder.build({
+ metrics: [{type: 'Count', id: '0'}],
+ timeField: '@timestamp',
+ bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '1'}],
+ });
+
+ expect(query.query.filtered.filter.bool.must[0].range["@timestamp"].gte).to.be("$timeFrom");
+ expect(query.aggs["1"].date_histogram.extended_bounds.min).to.be("$timeFrom");
+ });
+
+ it('with raw query', function() {
+ var query = builder.build({
+ rawQuery: '{"query": "$lucene_query"}',
+ });
+
+ expect(query.query).to.be("$lucene_query");
+ });
+
+ it('with multiple bucket aggs', function() {
+ var query = builder.build({
+ metrics: [{type: 'count', id: '1'}],
+ timeField: '@timestamp',
+ bucketAggs: [
+ {type: 'terms', field: '@host', id: '2'},
+ {type: 'date_histogram', field: '@timestamp', id: '3'}
+ ],
+ });
+
+ expect(query.aggs["2"].terms.field).to.be("@host");
+ expect(query.aggs["2"].aggs["3"].date_histogram.field).to.be("@timestamp");
+ });
+
+ it('with select field', function() {
+ var query = builder.build({
+ metrics: [{type: 'avg', field: '@value', id: '1'}],
+ bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
+ }, 100, 1000);
+
+ var aggs = query.aggs["2"].aggs;
+ expect(aggs["1"].avg.field).to.be("@value");
+ });
+
+ it('with term agg and order by metric agg', function() {
+ var query = builder.build({
+ metrics: [
+ {type: 'count', id: '1'},
+ {type: 'avg', field: '@value', id: '5'}
+ ],
+ bucketAggs: [
+ {type: 'terms', field: '@host', settings: {size: 5, order: 'asc', orderBy: '5'}, id: '2' },
+ {type: 'date_histogram', field: '@timestamp', id: '3'}
+ ],
+ }, 100, 1000);
+
+ var firstLevel = query.aggs["2"];
+ var secondLevel = firstLevel.aggs["3"];
+
+ expect(firstLevel.aggs["5"].avg.field).to.be("@value");
+ expect(secondLevel.aggs["5"].avg.field).to.be("@value");
+ });
+
+ it('with metric percentiles', function() {
+ var query = builder.build({
+ metrics: [
+ {
+ id: '1',
+ type: 'percentiles',
+ field: '@load_time',
+ settings: {
+ percents: [1,2,3,4]
+ }
+ }
+ ],
+ bucketAggs: [
+ {type: 'date_histogram', field: '@timestamp', id: '3'}
+ ],
+ }, 100, 1000);
+
+ var firstLevel = query.aggs["3"];
+
+ expect(firstLevel.aggs["1"].percentiles.field).to.be("@load_time");
+ expect(firstLevel.aggs["1"].percentiles.percents).to.eql([1,2,3,4]);
+ });
+
+ it('with filters aggs', function() {
+ var query = builder.build({
+ metrics: [{type: 'count', id: '1'}],
+ timeField: '@timestamp',
+ bucketAggs: [
+ {
+ id: '2',
+ type: 'filters',
+ settings: {
+ filters: [
+ {query: '@metric:cpu' },
+ {query: '@metric:logins.count' },
+ ]
+ }
+ },
+ {type: 'date_histogram', field: '@timestamp', id: '4'}
+ ],
+ });
+
+ expect(query.aggs["2"].filters.filters["@metric:cpu"].query.query_string.query).to.be("@metric:cpu");
+ expect(query.aggs["2"].filters.filters["@metric:logins.count"].query.query_string.query).to.be("@metric:logins.count");
+ expect(query.aggs["2"].aggs["4"].date_histogram.field).to.be("@timestamp");
+ });
+
+});
diff --git a/public/app/plugins/datasource/elasticsearch/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/query_ctrl_specs.ts
new file mode 100644
index 00000000000..d88e8446ead
--- /dev/null
+++ b/public/app/plugins/datasource/elasticsearch/specs/query_ctrl_specs.ts
@@ -0,0 +1,30 @@
+///
+///
+///
+
+import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
+
+declare var helpers: any;
+
+describe('ElasticQueryCtrl', function() {
+ var ctx = new helpers.ControllerTestContext();
+
+ beforeEach(angularMocks.module('grafana.controllers'));
+ beforeEach(angularMocks.module('grafana.services'));
+ beforeEach(ctx.providePhase());
+ beforeEach(ctx.createControllerPhase('ElasticQueryCtrl'));
+
+ beforeEach(function() {
+ ctx.scope.target = {};
+ ctx.scope.$parent = { get_data: sinon.spy() };
+
+ ctx.scope.datasource = ctx.datasource;
+ ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
+ });
+
+ describe('init', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ });
+ });
+});
diff --git a/public/app/plugins/datasource/graphite/addGraphiteFunc.js b/public/app/plugins/datasource/graphite/add_graphite_func.js
similarity index 100%
rename from public/app/plugins/datasource/graphite/addGraphiteFunc.js
rename to public/app/plugins/datasource/graphite/add_graphite_func.js
diff --git a/public/app/plugins/datasource/graphite/datasource.js b/public/app/plugins/datasource/graphite/datasource.js
index d51f9a93280..553a11c2350 100644
--- a/public/app/plugins/datasource/graphite/datasource.js
+++ b/public/app/plugins/datasource/graphite/datasource.js
@@ -5,9 +5,9 @@ define([
'config',
'app/core/utils/datemath',
'./directives',
- './queryCtrl',
- './funcEditor',
- './addGraphiteFunc',
+ './query_ctrl',
+ './func_editor',
+ './add_graphite_func',
],
function (angular, _, $, config, dateMath) {
'use strict';
diff --git a/public/app/plugins/datasource/graphite/funcEditor.js b/public/app/plugins/datasource/graphite/func_editor.js
similarity index 100%
rename from public/app/plugins/datasource/graphite/funcEditor.js
rename to public/app/plugins/datasource/graphite/func_editor.js
diff --git a/public/app/plugins/datasource/graphite/gfunc.js b/public/app/plugins/datasource/graphite/gfunc.js
index 748bd25fb8a..234486a3e1e 100644
--- a/public/app/plugins/datasource/graphite/gfunc.js
+++ b/public/app/plugins/datasource/graphite/gfunc.js
@@ -87,6 +87,13 @@ function (_, $) {
category: categories.Calculate,
});
+ addFuncDef({
+ name: 'multiplySeries',
+ params: optionalSeriesRefArgs,
+ defaultParams: ['#A'],
+ category: categories.Calculate,
+ });
+
addFuncDef({
name: 'asPercent',
params: optionalSeriesRefArgs,
diff --git a/public/app/plugins/datasource/graphite/queryCtrl.js b/public/app/plugins/datasource/graphite/query_ctrl.js
similarity index 100%
rename from public/app/plugins/datasource/graphite/queryCtrl.js
rename to public/app/plugins/datasource/graphite/query_ctrl.js
diff --git a/public/app/plugins/datasource/graphite/specs/datasource_specs.ts b/public/app/plugins/datasource/graphite/specs/datasource_specs.ts
new file mode 100644
index 00000000000..8ba7e35b773
--- /dev/null
+++ b/public/app/plugins/datasource/graphite/specs/datasource_specs.ts
@@ -0,0 +1,120 @@
+///
+///
+
+import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
+declare var helpers: any;
+
+describe('graphiteDatasource', function() {
+ var ctx = new helpers.ServiceTestContext();
+
+ beforeEach(angularMocks.module('grafana.services'));
+ beforeEach(ctx.providePhase(['backendSrv']));
+
+ beforeEach(ctx.createService('GraphiteDatasource'));
+ beforeEach(function() {
+ ctx.ds = new ctx.service({ url: [''] });
+ });
+
+ describe('When querying graphite with one target using query editor target spec', function() {
+ var query = {
+ rangeRaw: { from: 'now-1h', to: 'now' },
+ targets: [{ target: 'prod1.count' }, {target: 'prod2.count'}],
+ maxDataPoints: 500,
+ };
+
+ var results;
+ var requestOptions;
+
+ beforeEach(function() {
+ ctx.backendSrv.datasourceRequest = function(options) {
+ requestOptions = options;
+ return ctx.$q.when({data: [{ target: 'prod1.count', datapoints: [[10, 1], [12,1]] }]});
+ };
+
+ ctx.ds.query(query).then(function(data) { results = data; });
+ ctx.$rootScope.$apply();
+ });
+
+ it('should generate the correct query', function() {
+ expect(requestOptions.url).to.be('/render');
+ });
+
+ it('should query correctly', function() {
+ var params = requestOptions.data.split('&');
+ expect(params).to.contain('target=prod1.count');
+ expect(params).to.contain('target=prod2.count');
+ expect(params).to.contain('from=-1h');
+ expect(params).to.contain('until=now');
+ });
+
+ it('should exclude undefined params', function() {
+ var params = requestOptions.data.split('&');
+ expect(params).to.not.contain('cacheTimeout=undefined');
+ });
+
+ it('should return series list', function() {
+ expect(results.data.length).to.be(1);
+ expect(results.data[0].target).to.be('prod1.count');
+ });
+
+ it('should convert to millisecond resolution', function() {
+ expect(results.data[0].datapoints[0][0]).to.be(10);
+ });
+
+ });
+
+ describe('building graphite params', function() {
+
+ it('should uri escape targets', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: 'prod1.{test,test2}'}, {target: 'prod2.count'}]
+ });
+ expect(results).to.contain('target=prod1.%7Btest%2Ctest2%7D');
+ });
+
+ it('should replace target placeholder', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: 'series1'}, {target: 'series2'}, {target: 'asPercent(#A,#B)'}]
+ });
+ expect(results[2]).to.be('target=asPercent(series1%2Cseries2)');
+ });
+
+ it('should replace target placeholder for hidden series', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: 'series1', hide: true}, {target: 'sumSeries(#A)', hide: true}, {target: 'asPercent(#A,#B)'}]
+ });
+ expect(results[0]).to.be('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))'));
+ });
+
+ it('should replace target placeholder when nesting query references', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: 'series1'}, {target: 'sumSeries(#A)'}, {target: 'asPercent(#A,#B)'}]
+ });
+ expect(results[2]).to.be('target=' + encodeURIComponent("asPercent(series1,sumSeries(series1))"));
+ });
+
+ it('should fix wrong minute interval parameters', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: "summarize(prod.25m.count, '25m', 'sum')" }]
+ });
+ expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')"));
+ });
+
+ it('should fix wrong month interval parameters', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: "summarize(prod.5M.count, '5M', 'sum')" }]
+ });
+ expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')"));
+ });
+
+ it('should ignore empty targets', function() {
+ var results = ctx.ds.buildGraphiteParams({
+ targets: [{target: 'series1'}, {target: ''}]
+ });
+ expect(results.length).to.be(2);
+ });
+
+ });
+
+});
+
diff --git a/public/app/plugins/datasource/graphite/specs/gfunc_specs.ts b/public/app/plugins/datasource/graphite/specs/gfunc_specs.ts
new file mode 100644
index 00000000000..ec8c5dbe1ad
--- /dev/null
+++ b/public/app/plugins/datasource/graphite/specs/gfunc_specs.ts
@@ -0,0 +1,127 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var gfunc: any;
+
+describe('when creating func instance from func names', function() {
+ it('should return func instance', function() {
+ var func = gfunc.createFuncInstance('sumSeries');
+ expect(func).to.be.ok();
+ expect(func.def.name).to.equal('sumSeries');
+ expect(func.def.params.length).to.equal(5);
+ expect(func.def.defaultParams.length).to.equal(1);
+ });
+
+ it('should return func instance with shortName', function() {
+ var func = gfunc.createFuncInstance('sum');
+ expect(func).to.be.ok();
+ });
+
+ it('should return func instance from funcDef', function() {
+ var func = gfunc.createFuncInstance('sum');
+ var func2 = gfunc.createFuncInstance(func.def);
+ expect(func2).to.be.ok();
+ });
+
+ it('func instance should have text representation', function() {
+ var func = gfunc.createFuncInstance('groupByNode');
+ func.params[0] = 5;
+ func.params[1] = 'avg';
+ func.updateText();
+ expect(func.text).to.equal("groupByNode(5, avg)");
+ });
+});
+
+describe('when rendering func instance', function() {
+
+ it('should handle single metric param', function() {
+ var func = gfunc.createFuncInstance('sumSeries');
+ expect(func.render('hello.metric')).to.equal("sumSeries(hello.metric)");
+ });
+
+ it('should include default params if options enable it', function() {
+ var func = gfunc.createFuncInstance('scaleToSeconds', { withDefaultParams: true });
+ expect(func.render('hello')).to.equal("scaleToSeconds(hello, 1)");
+ });
+
+ it('should handle int or interval params with number', function() {
+ var func = gfunc.createFuncInstance('movingMedian');
+ func.params[0] = '5';
+ expect(func.render('hello')).to.equal("movingMedian(hello, 5)");
+ });
+
+ it('should handle int or interval params with interval string', function() {
+ var func = gfunc.createFuncInstance('movingMedian');
+ func.params[0] = '5min';
+ expect(func.render('hello')).to.equal("movingMedian(hello, '5min')");
+ });
+
+ it('should handle metric param and int param and string param', function() {
+ var func = gfunc.createFuncInstance('groupByNode');
+ func.params[0] = 5;
+ func.params[1] = 'avg';
+ expect(func.render('hello.metric')).to.equal("groupByNode(hello.metric, 5, 'avg')");
+ });
+
+ it('should handle function with no metric param', function() {
+ var func = gfunc.createFuncInstance('randomWalk');
+ func.params[0] = 'test';
+ expect(func.render(undefined)).to.equal("randomWalk('test')");
+ });
+
+ it('should handle function with multiple series params', function() {
+ var func = gfunc.createFuncInstance('asPercent');
+ func.params[0] = '#B';
+ expect(func.render('#A')).to.equal("asPercent(#A, #B)");
+ });
+
+});
+
+describe('when requesting function categories', function() {
+ it('should return function categories', function() {
+ var catIndex = gfunc.getCategories();
+ expect(catIndex.Special.length).to.be.greaterThan(8);
+ });
+});
+
+describe('when updating func param', function() {
+ it('should update param value and update text representation', function() {
+ var func = gfunc.createFuncInstance('summarize', { withDefaultParams: true });
+ func.updateParam('1h', 0);
+ expect(func.params[0]).to.be('1h');
+ expect(func.text).to.be('summarize(1h, sum, false)');
+ });
+
+ it('should parse numbers as float', function() {
+ var func = gfunc.createFuncInstance('scale');
+ func.updateParam('0.001', 0);
+ expect(func.params[0]).to.be('0.001');
+ });
+});
+
+describe('when updating func param with optional second parameter', function() {
+ it('should update value and text', function() {
+ var func = gfunc.createFuncInstance('aliasByNode');
+ func.updateParam('1', 0);
+ expect(func.params[0]).to.be('1');
+ });
+
+ it('should split text and put value in second param', function() {
+ var func = gfunc.createFuncInstance('aliasByNode');
+ func.updateParam('4,-5', 0);
+ expect(func.params[0]).to.be('4');
+ expect(func.params[1]).to.be('-5');
+ expect(func.text).to.be('aliasByNode(4, -5)');
+ });
+
+ it('should remove second param when empty string is set', function() {
+ var func = gfunc.createFuncInstance('aliasByNode');
+ func.updateParam('4,-5', 0);
+ func.updateParam('', 1);
+ expect(func.params[0]).to.be('4');
+ expect(func.params[1]).to.be(undefined);
+ expect(func.text).to.be('aliasByNode(4)');
+ });
+});
+
diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts
new file mode 100644
index 00000000000..0b12ea051e0
--- /dev/null
+++ b/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts
@@ -0,0 +1,178 @@
+///
+///
+///
+///
+
+import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
+
+declare var gfunc: any;
+declare var helpers: any;
+
+describe('GraphiteQueryCtrl', function() {
+ var ctx = new helpers.ControllerTestContext();
+
+ beforeEach(angularMocks.module('grafana.controllers'));
+ beforeEach(angularMocks.module('grafana.services'));
+ beforeEach(ctx.providePhase());
+ beforeEach(ctx.createControllerPhase('GraphiteQueryCtrl'));
+
+ beforeEach(function() {
+ ctx.scope.target = {target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)'};
+
+ ctx.scope.datasource = ctx.datasource;
+ ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
+ });
+
+ describe('init', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.$digest();
+ });
+
+ it('should validate metric key exists', function() {
+ expect(ctx.scope.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*');
+ });
+
+ it('should delete last segment if no metrics are found', function() {
+ expect(ctx.scope.segments[2].value).to.be('select metric');
+ });
+
+ it('should parse expression and build function model', function() {
+ expect(ctx.scope.functions.length).to.be(2);
+ });
+ });
+
+ describe('when adding function', function() {
+ beforeEach(function() {
+ ctx.scope.target.target = 'test.prod.*.count';
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
+ ctx.scope.init();
+ ctx.scope.$digest();
+
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ ctx.scope.addFunction(gfunc.getFuncDef('aliasByNode'));
+ });
+
+ it('should add function with correct node number', function() {
+ expect(ctx.scope.functions[0].params[0]).to.be(2);
+ });
+
+ it('should update target', function() {
+ expect(ctx.scope.target.target).to.be('aliasByNode(test.prod.*.count, 2)');
+ });
+
+ it('should call get_data', function() {
+ expect(ctx.scope.$parent.get_data.called).to.be(true);
+ });
+ });
+
+ describe('when adding function before any metric segment', function() {
+ beforeEach(function() {
+ ctx.scope.target.target = '';
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: true}]));
+ ctx.scope.init();
+ ctx.scope.$digest();
+
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ ctx.scope.addFunction(gfunc.getFuncDef('asPercent'));
+ });
+
+ it('should add function and remove select metric link', function() {
+ expect(ctx.scope.segments.length).to.be(0);
+ });
+ });
+
+ describe('when initializing target without metric expression and only a function', function() {
+ beforeEach(function() {
+ ctx.scope.target.target = 'asPercent(#A, #B)';
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
+ ctx.scope.init();
+ ctx.scope.$digest();
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ });
+
+ it('should not add select metric segment', function() {
+ expect(ctx.scope.segments.length).to.be(0);
+ });
+
+ it('should add both series refs as params', function() {
+ expect(ctx.scope.functions[0].params.length).to.be(2);
+ });
+
+ });
+
+ describe('when initializing a target with single param func using variable', function() {
+ beforeEach(function() {
+ ctx.scope.target.target = 'movingAverage(prod.count, $var)';
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
+ ctx.scope.init();
+ ctx.scope.$digest();
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ });
+
+ it('should add 2 segments', function() {
+ expect(ctx.scope.segments.length).to.be(2);
+ });
+
+ it('should add function param', function() {
+ expect(ctx.scope.functions[0].params.length).to.be(1);
+ });
+
+ });
+
+ describe('when initializing target without metric expression and a function with series-ref', function() {
+ beforeEach(function() {
+ ctx.scope.target.target = 'asPercent(metric.node.count, #A)';
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
+ ctx.scope.init();
+ ctx.scope.$digest();
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ });
+
+ it('should add segments', function() {
+ expect(ctx.scope.segments.length).to.be(3);
+ });
+
+ it('should have correct func params', function() {
+ expect(ctx.scope.functions[0].params.length).to.be(1);
+ });
+ });
+
+ describe('when getting altSegments and metricFindQuery returns empty array', function() {
+ beforeEach(function() {
+ ctx.scope.target.target = 'test.count';
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
+ ctx.scope.init();
+ ctx.scope.getAltSegments(1).then(function(results) {
+ ctx.altSegments = results;
+ });
+ ctx.scope.$digest();
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ });
+
+ it('should have no segments', function() {
+ expect(ctx.altSegments.length).to.be(0);
+ });
+
+ });
+
+ describe('targetChanged', function() {
+ beforeEach(function() {
+ ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
+ ctx.scope.init();
+ ctx.scope.$digest();
+
+ ctx.scope.$parent = { get_data: sinon.spy() };
+ ctx.scope.target.target = '';
+ ctx.scope.targetChanged();
+ });
+
+ it('should rebuild target from expression model', function() {
+ expect(ctx.scope.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)');
+ });
+
+ it('should call get_data', function() {
+ expect(ctx.scope.$parent.get_data.called).to.be(true);
+ });
+ });
+});
diff --git a/public/app/plugins/datasource/influxdb/datasource.js b/public/app/plugins/datasource/influxdb/datasource.js
index b9ae9069323..d6a5ccc37ad 100644
--- a/public/app/plugins/datasource/influxdb/datasource.js
+++ b/public/app/plugins/datasource/influxdb/datasource.js
@@ -2,10 +2,10 @@ define([
'angular',
'lodash',
'app/core/utils/datemath',
- './influxSeries',
- './queryBuilder',
+ './influx_series',
+ './query_builder',
'./directives',
- './queryCtrl',
+ './query_ctrl',
],
function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
'use strict';
diff --git a/public/app/plugins/datasource/influxdb/influxSeries.js b/public/app/plugins/datasource/influxdb/influx_series.js
similarity index 100%
rename from public/app/plugins/datasource/influxdb/influxSeries.js
rename to public/app/plugins/datasource/influxdb/influx_series.js
diff --git a/public/app/plugins/datasource/influxdb/queryBuilder.js b/public/app/plugins/datasource/influxdb/query_builder.js
similarity index 100%
rename from public/app/plugins/datasource/influxdb/queryBuilder.js
rename to public/app/plugins/datasource/influxdb/query_builder.js
diff --git a/public/app/plugins/datasource/influxdb/queryCtrl.js b/public/app/plugins/datasource/influxdb/query_ctrl.js
similarity index 99%
rename from public/app/plugins/datasource/influxdb/queryCtrl.js
rename to public/app/plugins/datasource/influxdb/query_ctrl.js
index 5df07d678b4..75dd972fbaa 100644
--- a/public/app/plugins/datasource/influxdb/queryCtrl.js
+++ b/public/app/plugins/datasource/influxdb/query_ctrl.js
@@ -1,7 +1,7 @@
define([
'angular',
'lodash',
- './queryBuilder',
+ './query_builder',
],
function (angular, _, InfluxQueryBuilder) {
'use strict';
diff --git a/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts b/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts
new file mode 100644
index 00000000000..c8c127ed759
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb/specs/influx_series_specs.ts
@@ -0,0 +1,190 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var InfluxSeries: any;
+
+describe('when generating timeseries from influxdb response', function() {
+
+ describe('given multiple fields for series', function() {
+ var options = {
+ alias: '',
+ series: [
+ {
+ name: 'cpu',
+ tags: {app: 'test', server: 'server1'},
+ columns: ['time', 'mean', 'max', 'min'],
+ values: [[1431946625000, 10, 11, 9], [1431946626000, 20, 21, 19]]
+ }
+ ]
+ };
+ describe('and no alias', function() {
+ it('should generate multiple datapoints for each column', function() {
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result.length).to.be(3);
+ expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
+ expect(result[0].datapoints[0][0]).to.be(10);
+ expect(result[0].datapoints[0][1]).to.be(1431946625000);
+ expect(result[0].datapoints[1][0]).to.be(20);
+ expect(result[0].datapoints[1][1]).to.be(1431946626000);
+
+ expect(result[1].target).to.be('cpu.max {app: test, server: server1}');
+ expect(result[1].datapoints[0][0]).to.be(11);
+ expect(result[1].datapoints[0][1]).to.be(1431946625000);
+ expect(result[1].datapoints[1][0]).to.be(21);
+ expect(result[1].datapoints[1][1]).to.be(1431946626000);
+
+ expect(result[2].target).to.be('cpu.min {app: test, server: server1}');
+ expect(result[2].datapoints[0][0]).to.be(9);
+ expect(result[2].datapoints[0][1]).to.be(1431946625000);
+ expect(result[2].datapoints[1][0]).to.be(19);
+ expect(result[2].datapoints[1][1]).to.be(1431946626000);
+
+ });
+ });
+
+ describe('and simple alias', function() {
+ it('should use alias', function() {
+ options.alias = 'new series';
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result[0].target).to.be('new series');
+ expect(result[1].target).to.be('new series');
+ expect(result[2].target).to.be('new series');
+ });
+
+ });
+
+ describe('and alias patterns', function() {
+ it('should replace patterns', function() {
+ options.alias = 'alias: $m -> $tag_server ([[measurement]])';
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
+ expect(result[1].target).to.be('alias: cpu -> server1 (cpu)');
+ expect(result[2].target).to.be('alias: cpu -> server1 (cpu)');
+ });
+
+ });
+ });
+ describe('given measurement with default fieldname', function() {
+ var options = { series: [
+ {
+ name: 'cpu',
+ tags: {app: 'test', server: 'server1'},
+ columns: ['time', 'value'],
+ values: [["2015-05-18T10:57:05Z", 10], ["2015-05-18T10:57:06Z", 12]]
+ },
+ {
+ name: 'cpu',
+ tags: {app: 'test2', server: 'server2'},
+ columns: ['time', 'value'],
+ values: [["2015-05-18T10:57:05Z", 15], ["2015-05-18T10:57:06Z", 16]]
+ }
+ ]};
+
+ describe('and no alias', function() {
+
+ it('should generate label with no field', function() {
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result[0].target).to.be('cpu {app: test, server: server1}');
+ expect(result[1].target).to.be('cpu {app: test2, server: server2}');
+ });
+ });
+
+ });
+ describe('given two series', function() {
+ var options = {
+ alias: '',
+ series: [
+ {
+ name: 'cpu',
+ tags: {app: 'test', server: 'server1'},
+ columns: ['time', 'mean'],
+ values: [[1431946625000, 10], [1431946626000, 12]]
+ },
+ {
+ name: 'cpu',
+ tags: {app: 'test2', server: 'server2'},
+ columns: ['time', 'mean'],
+ values: [[1431946625000, 15], [1431946626000, 16]]
+ }
+ ]
+ };
+
+ describe('and no alias', function() {
+
+ it('should generate two time series', function() {
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result.length).to.be(2);
+ expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
+ expect(result[0].datapoints[0][0]).to.be(10);
+ expect(result[0].datapoints[0][1]).to.be(1431946625000);
+ expect(result[0].datapoints[1][0]).to.be(12);
+ expect(result[0].datapoints[1][1]).to.be(1431946626000);
+
+ expect(result[1].target).to.be('cpu.mean {app: test2, server: server2}');
+ expect(result[1].datapoints[0][0]).to.be(15);
+ expect(result[1].datapoints[0][1]).to.be(1431946625000);
+ expect(result[1].datapoints[1][0]).to.be(16);
+ expect(result[1].datapoints[1][1]).to.be(1431946626000);
+ });
+ });
+
+ describe('and simple alias', function() {
+ it('should use alias', function() {
+ options.alias = 'new series';
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result[0].target).to.be('new series');
+ });
+
+ });
+
+ describe('and alias patterns', function() {
+ it('should replace patterns', function() {
+ options.alias = 'alias: $m -> $tag_server ([[measurement]])';
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
+ expect(result[1].target).to.be('alias: cpu -> server2 (cpu)');
+ });
+
+ });
+
+ });
+
+ describe('given measurement with dots', function() {
+ var options = {
+ alias: '',
+ series: [
+ {
+ name: 'app.prod.server1.count',
+ tags: {},
+ columns: ['time', 'mean'],
+ values: [[1431946625000, 10], [1431946626000, 12]]
+ }
+ ]
+ };
+
+ it('should replace patterns', function() {
+ options.alias = 'alias: $1 -> [[3]]';
+ var series = new InfluxSeries(options);
+ var result = series.getTimeSeries();
+
+ expect(result[0].target).to.be('alias: prod -> count');
+ });
+ });
+
+});
+
diff --git a/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts
new file mode 100644
index 00000000000..65a2f453385
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb/specs/query_builder_specs.ts
@@ -0,0 +1,186 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var InfluxQueryBuilder: any;
+
+describe('InfluxQueryBuilder', function() {
+
+ describe('series with measurement only', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ groupBy: [{type: 'time', interval: 'auto'}]
+ });
+
+ var query = builder.build();
+
+ expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
+ });
+ });
+
+ describe('series with math expr and as expr', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ fields: [{name: 'test', func: 'max', mathExpr: '*2', asExpr: 'new_name'}],
+ groupBy: [{type: 'time', interval: 'auto'}]
+ });
+
+ var query = builder.build();
+
+ expect(query).to.be('SELECT max("test")*2 AS "new_name" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
+ });
+ });
+
+ describe('series with single tag only', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ groupBy: [{type: 'time', interval: 'auto'}],
+ tags: [{key: 'hostname', value: 'server1'}]
+ });
+
+ var query = builder.build();
+
+ expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND $timeFilter'
+ + ' GROUP BY time($interval)');
+ });
+
+ it('should switch to regex operator when tag value is regex', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ groupBy: [{type: 'time', interval: 'auto'}],
+ tags: [{key: 'app', value: '/e.*/'}]
+ });
+
+ var query = builder.build();
+ expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "app" =~ /e.*/ AND $timeFilter GROUP BY time($interval)');
+ });
+ });
+
+ describe('series with multiple fields', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ tags: [],
+ groupBy: [{type: 'time', interval: 'auto'}],
+ fields: [{ name: 'tx_in', func: 'sum' }, { name: 'tx_out', func: 'mean' }]
+ });
+
+ var query = builder.build();
+ expect(query).to.be('SELECT sum("tx_in") AS "tx_in", mean("tx_out") AS "tx_out" ' +
+ 'FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
+ });
+ });
+
+ describe('series with multiple tags only', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ groupBy: [{type: 'time', interval: 'auto'}],
+ tags: [{key: 'hostname', value: 'server1'}, {key: 'app', value: 'email', condition: "AND"}]
+ });
+
+ var query = builder.build();
+ expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND "app" = \'email\' AND ' +
+ '$timeFilter GROUP BY time($interval)');
+ });
+ });
+
+ describe('series with tags OR condition', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ groupBy: [{type: 'time', interval: 'auto'}],
+ tags: [{key: 'hostname', value: 'server1'}, {key: 'hostname', value: 'server2', condition: "OR"}]
+ });
+
+ var query = builder.build();
+ expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' OR "hostname" = \'server2\' AND ' +
+ '$timeFilter GROUP BY time($interval)');
+ });
+ });
+
+ describe('series with groupByTag', function() {
+ it('should generate correct query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: 'cpu',
+ tags: [],
+ groupBy: [{type: 'time', interval: 'auto'}, {type: 'tag', key: 'host'}],
+ });
+
+ var query = builder.build();
+ expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter ' +
+ 'GROUP BY time($interval), "host"');
+ });
+ });
+
+ describe('when building explore queries', function() {
+
+ it('should only have measurement condition in tag keys query given query with measurement', function() {
+ var builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
+ var query = builder.buildExploreQuery('TAG_KEYS');
+ expect(query).to.be('SHOW TAG KEYS FROM "cpu"');
+ });
+
+ it('should handle regex measurement in tag keys query', function() {
+ var builder = new InfluxQueryBuilder({
+ measurement: '/.*/',
+ tags: []
+ });
+ var query = builder.buildExploreQuery('TAG_KEYS');
+ expect(query).to.be('SHOW TAG KEYS FROM /.*/');
+ });
+
+ it('should have no conditions in tag keys query given query with no measurement or tag', function() {
+ var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
+ var query = builder.buildExploreQuery('TAG_KEYS');
+ expect(query).to.be('SHOW TAG KEYS');
+ });
+
+ it('should have where condition in tag keys query with tags', function() {
+ var builder = new InfluxQueryBuilder({ measurement: '', tags: [{key: 'host', value: 'se1'}] });
+ var query = builder.buildExploreQuery('TAG_KEYS');
+ expect(query).to.be("SHOW TAG KEYS WHERE \"host\" = 'se1'");
+ });
+
+ it('should have no conditions in measurement query for query with no tags', function() {
+ var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
+ var query = builder.buildExploreQuery('MEASUREMENTS');
+ expect(query).to.be('SHOW MEASUREMENTS');
+ });
+
+ it('should have where condition in measurement query for query with tags', function() {
+ var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'email'}]});
+ var query = builder.buildExploreQuery('MEASUREMENTS');
+ expect(query).to.be("SHOW MEASUREMENTS WHERE \"app\" = 'email'");
+ });
+
+ it('should have where tag name IN filter in tag values query for query with one tag', function() {
+ var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'asdsadsad'}]});
+ var query = builder.buildExploreQuery('TAG_VALUES', 'app');
+ expect(query).to.be('SHOW TAG VALUES WITH KEY = "app"');
+ });
+
+ it('should have measurement tag condition and tag name IN filter in tag values query', function() {
+ var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}, {key: 'host', value: 'server1'}]});
+ var query = builder.buildExploreQuery('TAG_VALUES', 'app');
+ expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
+ });
+
+ it('should switch to regex operator in tag condition', function() {
+ var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'host', value: '/server.*/'}]});
+ var query = builder.buildExploreQuery('TAG_VALUES', 'app');
+ expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" =~ /server.*/');
+ });
+
+ it('should build show field query', function() {
+ var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}]});
+ var query = builder.buildExploreQuery('FIELDS');
+ expect(query).to.be('SHOW FIELD KEYS FROM "cpu"');
+ });
+
+ });
+
+});
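Editor's note: the query-builder specs above pin down one rendering rule worth calling out: plain tag values are single-quoted and compared with `=`, while values wrapped in slashes are treated as regexes and compared with `=~`, both in the data queries and in the explore (`SHOW ...`) queries. The sketch below is illustrative only and is not the actual queryBuilder source; the helper name and types are assumptions, but the rendering behaviour matches the expectations asserted above.

```typescript
// Illustrative sketch of the tag-rendering rule the specs above assert.
// renderTagCondition is a hypothetical helper, not part of the real queryBuilder.
interface TagFilter {
  key: string;
  value: string;
  condition?: string; // 'AND' | 'OR'; joins this filter to the previous one
}

function renderTagCondition(tag: TagFilter): string {
  // Values wrapped in slashes are regexes: use =~ and keep them unquoted.
  const isRegex = /^\/.*\/$/.test(tag.value);
  const operator = isRegex ? '=~' : '=';
  const value = isRegex ? tag.value : "'" + tag.value + "'";
  return '"' + tag.key + '" ' + operator + ' ' + value;
}

// renderTagCondition({key: 'hostname', value: 'server1'})  -> "hostname" = 'server1'
// renderTagCondition({key: 'host', value: '/server.*/'})   -> "host" =~ /server.*/
```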
diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts
new file mode 100644
index 00000000000..ee02338c3da
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts
@@ -0,0 +1,188 @@
+///
+///
+///
+
+import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
+
+declare var helpers: any;
+
+describe('InfluxDBQueryCtrl', function() {
+ var ctx = new helpers.ControllerTestContext();
+
+ beforeEach(angularMocks.module('grafana.controllers'));
+ beforeEach(angularMocks.module('grafana.services'));
+ beforeEach(ctx.providePhase());
+ beforeEach(ctx.createControllerPhase('InfluxQueryCtrl'));
+
+ beforeEach(function() {
+ ctx.scope.target = {};
+ ctx.scope.$parent = { get_data: sinon.spy() };
+
+ ctx.scope.datasource = ctx.datasource;
+ ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
+ });
+
+ describe('init', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ });
+
+ it('should init tagSegments', function() {
+ expect(ctx.scope.tagSegments.length).to.be(1);
+ });
+
+ it('should init measurementSegment', function() {
+ expect(ctx.scope.measurementSegment.value).to.be('select measurement');
+ });
+ });
+
+ describe('when first tag segment is updated', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
+ });
+
+ it('should update tag key', function() {
+ expect(ctx.scope.target.tags[0].key).to.be('asd');
+ expect(ctx.scope.tagSegments[0].type).to.be('key');
+ });
+
+ it('should add tagSegments', function() {
+ expect(ctx.scope.tagSegments.length).to.be(3);
+ });
+ });
+
+ describe('when last tag value segment is updated', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ });
+
+ it('should update tag value', function() {
+ expect(ctx.scope.target.tags[0].value).to.be('server1');
+ });
+
+ it('should set tag operator', function() {
+ expect(ctx.scope.target.tags[0].operator).to.be('=');
+ });
+
+ it('should add plus button for another filter', function() {
+ expect(ctx.scope.tagSegments[3].fake).to.be(true);
+ });
+ });
+
+ describe('when last tag value segment is updated to regex', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
+ ctx.scope.tagSegmentUpdated({value: '/server.*/', type: 'value'}, 2);
+ });
+
+ it('should update operator', function() {
+ expect(ctx.scope.tagSegments[1].value).to.be('=~');
+ expect(ctx.scope.target.tags[0].operator).to.be('=~');
+ });
+ });
+
+ describe('when second tag key is added', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
+ });
+
+ it('should update tag key', function() {
+ expect(ctx.scope.target.tags[1].key).to.be('key2');
+ });
+
+ it('should add AND segment', function() {
+ expect(ctx.scope.tagSegments[3].value).to.be('AND');
+ });
+ });
+
+ describe('when condition is changed', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
+ ctx.scope.tagSegmentUpdated({value: 'OR', type: 'condition'}, 3);
+ });
+
+ it('should update tag condition', function() {
+ expect(ctx.scope.target.tags[1].condition).to.be('OR');
+ });
+
+ it('should update AND segment', function() {
+ expect(ctx.scope.tagSegments[3].value).to.be('OR');
+ expect(ctx.scope.tagSegments.length).to.be(7);
+ });
+ });
+
+ describe('when deleting first tag filter after value is selected', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 0);
+ });
+
+ it('should remove tags', function() {
+ expect(ctx.scope.target.tags.length).to.be(0);
+ });
+
+ it('should remove all segment after 2 and replace with plus button', function() {
+ expect(ctx.scope.tagSegments.length).to.be(1);
+ expect(ctx.scope.tagSegments[0].type).to.be('plus-button');
+ });
+ });
+
+ describe('when deleting second tag value before second tag value is complete', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
+ ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
+ });
+
+ it('should remove all segment after 2 and replace with plus button', function() {
+ expect(ctx.scope.tagSegments.length).to.be(4);
+ expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
+ });
+ });
+
+ describe('when deleting second tag value before second tag value is complete', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
+ ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
+ });
+
+ it('should remove all segment after 2 and replace with plus button', function() {
+ expect(ctx.scope.tagSegments.length).to.be(4);
+ expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
+ });
+ });
+
+ describe('when deleting second tag value after second tag filter is complete', function() {
+ beforeEach(function() {
+ ctx.scope.init();
+ ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
+ ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
+ ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
+ ctx.scope.tagSegmentUpdated({value: 'value', type: 'value'}, 6);
+ ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
+ });
+
+ it('should remove all segment after 2 and replace with plus button', function() {
+ expect(ctx.scope.tagSegments.length).to.be(4);
+ expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
+ });
+ });
+
+});
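Editor's note: the controller specs above index directly into `ctx.scope.tagSegments`, so the expected layout is worth keeping in mind while reading them. From the assertions, each tag filter occupies a key/operator/value triple, complete filters are joined by a condition segment (AND/OR), and until a second key is added the slot after the first filter is a fake plus-button segment. The snippet below is only a reading aid derived from those assertions, not code from the controller.

```typescript
// Segment layout implied by the assertions above (illustrative only):
// index:    0        1          2         3            4       5          6
const segmentRoles = [
  'key', 'operator', 'value',   // first filter, e.g. asd = server1
  'condition',                  // 'AND' or 'OR' once a second key is added
  'key', 'operator', 'value',   // second filter
];
// With only one complete filter, index 3 is instead a fake plus-button
// segment, which is why tagSegments[3].fake is expected to be true above.
```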
diff --git a/public/test/lib/common.ts b/public/test/lib/common.ts
index 53f50301d42..523a97c2c03 100644
--- a/public/test/lib/common.ts
+++ b/public/test/lib/common.ts
@@ -7,10 +7,15 @@ var it = _global.it;
var sinon = _global.sinon;
var expect = _global.expect;
+var angularMocks = {
+ module: _global.module,
+};
+
export {
beforeEach,
describe,
it,
sinon,
- expect
+ expect,
+ angularMocks,
}
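Editor's note: the `angularMocks` wrapper gives the TypeScript specs a named export for angular-mocks' global `module()` function instead of an untyped global (avoiding a clash with TypeScript's own `module` keyword is a likely motivation, though the patch does not say so). Usage mirrors the import already shown in query_ctrl_specs.ts above; a minimal sketch with a placeholder controller name:

```typescript
// Minimal usage sketch of the new export ('SomeCtrl' is a placeholder name).
import {describe, beforeEach, angularMocks} from 'test/lib/common';

describe('SomeCtrl', function() {
  beforeEach(angularMocks.module('grafana.controllers'));
  beforeEach(angularMocks.module('grafana.services'));
});
```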
diff --git a/public/test/specs/core/utils/rangeutil_specs.ts b/public/test/specs/core/utils/rangeutil_specs.ts
index 5ff1639a204..8816a15f73d 100644
--- a/public/test/specs/core/utils/rangeutil_specs.ts
+++ b/public/test/specs/core/utils/rangeutil_specs.ts
@@ -56,6 +56,11 @@ describe("rangeUtil", () => {
expect(text).to.be('Last 1 hour')
});
+ it('Date range with rounding ranges', () => {
+ var text = rangeUtil.describeTimeRange({from: 'now/d+6h', to: 'now'});
+ expect(text).to.be('now/d+6h to now')
+ });
+
it('Date range with absolute to now', () => {
var text = rangeUtil.describeTimeRange({from: moment([2014,10,10,2,3,4]), to: 'now'});
expect(text).to.be('Nov 10, 2014 02:03:04 to a few seconds ago')
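Editor's note: the new case documents that range expressions containing rounding/offset math (`now/d+6h`) are not translated into friendly text; `describeTimeRange` falls back to echoing the raw expressions. A minimal usage sketch, with the import path assumed from the spec's location:

```typescript
import * as rangeUtil from 'app/core/utils/rangeutil'; // path assumed

// Plain relative ranges get friendly text, rounded/offset ones are echoed back:
rangeUtil.describeTimeRange({from: 'now-1h',   to: 'now'}); // "Last 1 hour"
rangeUtil.describeTimeRange({from: 'now/d+6h', to: 'now'}); // "now/d+6h to now"
```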
diff --git a/public/test/specs/elasticsearch-indexPattern-specs.js b/public/test/specs/elasticsearch-indexPattern-specs.js
deleted file mode 100644
index 7a93c7be3a1..00000000000
--- a/public/test/specs/elasticsearch-indexPattern-specs.js
+++ /dev/null
@@ -1,52 +0,0 @@
-define([
- 'moment',
- 'app/plugins/datasource/elasticsearch/indexPattern'
-], function(moment, IndexPattern) {
- 'use strict';
-
- describe('IndexPattern', function() {
-
- describe('when getting index for today', function() {
- it('should return correct index name', function() {
- var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
- var expected = 'asd-' + moment().format('YYYY.MM.DD');
-
- expect(pattern.getIndexForToday()).to.be(expected);
- });
- });
-
- describe('when getting index list for time range', function() {
-
- describe('no interval', function() {
- it('should return correct index', function() {
- var pattern = new IndexPattern('my-metrics');
- var from = new Date(2015, 4, 30, 1, 2, 3);
- var to = new Date(2015, 5, 1, 12, 5 , 6);
- expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
- });
- });
-
- describe('daily', function() {
-
- it('should return correct index list', function() {
- var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
- var from = new Date(1432940523000);
- var to = new Date(1433153106000);
-
- var expected = [
- 'asd-2015.05.29',
- 'asd-2015.05.30',
- 'asd-2015.05.31',
- 'asd-2015.06.01',
- ];
-
- expect(pattern.getIndexList(from, to)).to.eql(expected);
- });
-
- });
-
- });
-
- });
-
-});
diff --git a/public/test/specs/elasticsearch-querybuilder-specs.js b/public/test/specs/elasticsearch-querybuilder-specs.js
deleted file mode 100644
index 467d3706e03..00000000000
--- a/public/test/specs/elasticsearch-querybuilder-specs.js
+++ /dev/null
@@ -1,124 +0,0 @@
-define([
- 'app/plugins/datasource/elasticsearch/queryBuilder'
-], function(ElasticQueryBuilder) {
- 'use strict';
-
- describe('ElasticQueryBuilder', function() {
- var builder;
-
- beforeEach(function() {
- builder = new ElasticQueryBuilder({timeField: '@timestamp'});
- });
-
- it('with defaults', function() {
- var query = builder.build({
- metrics: [{type: 'Count', id: '0'}],
- timeField: '@timestamp',
- bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '1'}],
- });
-
- expect(query.query.filtered.filter.bool.must[0].range["@timestamp"].gte).to.be("$timeFrom");
- expect(query.aggs["1"].date_histogram.extended_bounds.min).to.be("$timeFrom");
- });
-
- it('with raw query', function() {
- var query = builder.build({
- rawQuery: '{"query": "$lucene_query"}',
- });
-
- expect(query.query).to.be("$lucene_query");
- });
-
- it('with multiple bucket aggs', function() {
- var query = builder.build({
- metrics: [{type: 'count', id: '1'}],
- timeField: '@timestamp',
- bucketAggs: [
- {type: 'terms', field: '@host', id: '2'},
- {type: 'date_histogram', field: '@timestamp', id: '3'}
- ],
- });
-
- expect(query.aggs["2"].terms.field).to.be("@host");
- expect(query.aggs["2"].aggs["3"].date_histogram.field).to.be("@timestamp");
- });
-
- it('with select field', function() {
- var query = builder.build({
- metrics: [{type: 'avg', field: '@value', id: '1'}],
- bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
- }, 100, 1000);
-
- var aggs = query.aggs["2"].aggs;
- expect(aggs["1"].avg.field).to.be("@value");
- });
-
- it('with term agg and order by metric agg', function() {
- var query = builder.build({
- metrics: [
- {type: 'count', id: '1'},
- {type: 'avg', field: '@value', id: '5'}
- ],
- bucketAggs: [
- {type: 'terms', field: '@host', settings: {size: 5, order: 'asc', orderBy: '5'}, id: '2' },
- {type: 'date_histogram', field: '@timestamp', id: '3'}
- ],
- }, 100, 1000);
-
- var firstLevel = query.aggs["2"];
- var secondLevel = firstLevel.aggs["3"];
-
- expect(firstLevel.aggs["5"].avg.field).to.be("@value");
- expect(secondLevel.aggs["5"].avg.field).to.be("@value");
- });
-
- it('with metric percentiles', function() {
- var query = builder.build({
- metrics: [
- {
- id: '1',
- type: 'percentiles',
- field: '@load_time',
- settings: {
- percents: [1,2,3,4]
- }
- }
- ],
- bucketAggs: [
- {type: 'date_histogram', field: '@timestamp', id: '3'}
- ],
- }, 100, 1000);
-
- var firstLevel = query.aggs["3"];
-
- expect(firstLevel.aggs["1"].percentiles.field).to.be("@load_time");
- expect(firstLevel.aggs["1"].percentiles.percents).to.eql([1,2,3,4]);
- });
-
- it('with filters aggs', function() {
- var query = builder.build({
- metrics: [{type: 'count', id: '1'}],
- timeField: '@timestamp',
- bucketAggs: [
- {
- id: '2',
- type: 'filters',
- settings: {
- filters: [
- {query: '@metric:cpu' },
- {query: '@metric:logins.count' },
- ]
- }
- },
- {type: 'date_histogram', field: '@timestamp', id: '4'}
- ],
- });
-
- expect(query.aggs["2"].filters.filters["@metric:cpu"].query.query_string.query).to.be("@metric:cpu");
- expect(query.aggs["2"].filters.filters["@metric:logins.count"].query.query_string.query).to.be("@metric:logins.count");
- expect(query.aggs["2"].aggs["4"].date_histogram.field).to.be("@timestamp");
- });
-
- });
-
-});
diff --git a/public/test/specs/elasticsearch-queryctrl-specs.js b/public/test/specs/elasticsearch-queryctrl-specs.js
deleted file mode 100644
index 3cda8e23350..00000000000
--- a/public/test/specs/elasticsearch-queryctrl-specs.js
+++ /dev/null
@@ -1,32 +0,0 @@
-define([
- './helpers',
- 'app/plugins/datasource/elasticsearch/queryCtrl',
- 'app/services/uiSegmentSrv'
-], function(helpers) {
- 'use strict';
-
- describe('ElasticQueryCtrl', function() {
- var ctx = new helpers.ControllerTestContext();
-
- beforeEach(module('grafana.controllers'));
- beforeEach(module('grafana.services'));
- beforeEach(ctx.providePhase());
- beforeEach(ctx.createControllerPhase('ElasticQueryCtrl'));
-
- beforeEach(function() {
- ctx.scope.target = {};
- ctx.scope.$parent = { get_data: sinon.spy() };
-
- ctx.scope.datasource = ctx.datasource;
- ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
- });
-
- describe('init', function() {
- beforeEach(function() {
- ctx.scope.init();
- });
-
- });
-
- });
-});
diff --git a/public/test/specs/elasticsearch-response-specs.js b/public/test/specs/elasticsearch-response-specs.js
deleted file mode 100644
index fe465df3129..00000000000
--- a/public/test/specs/elasticsearch-response-specs.js
+++ /dev/null
@@ -1,414 +0,0 @@
-define([
- 'app/plugins/datasource/elasticsearch/elasticResponse',
-], function(ElasticResponse) {
- 'use strict';
-
- describe('ElasticResponse', function() {
- var targets;
- var response;
- var result;
-
- describe('simple query and count', function() {
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'count', id: '1'}],
- bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '2'}],
- }];
- response = {
- responses: [{
- aggregations: {
- "2": {
- buckets: [
- {
- doc_count: 10,
- key: 1000
- },
- {
- doc_count: 15,
- key: 2000
- }
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 1 series', function() {
- expect(result.data.length).to.be(1);
- expect(result.data[0].target).to.be('Count');
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].datapoints[0][0]).to.be(10);
- expect(result.data[0].datapoints[0][1]).to.be(1000);
- });
-
- });
-
- describe('simple query count & avg aggregation', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'count', id: '1'}, {type: 'avg', field: 'value', id: '2'}],
- bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
- }];
- response = {
- responses: [{
- aggregations: {
- "3": {
- buckets: [
- {
- "2": {value: 88},
- doc_count: 10,
- key: 1000
- },
- {
- "2": {value: 99},
- doc_count: 15,
- key: 2000
- }
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 2 series', function() {
- expect(result.data.length).to.be(2);
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].datapoints[0][0]).to.be(10);
- expect(result.data[0].datapoints[0][1]).to.be(1000);
-
- expect(result.data[1].target).to.be("Average value");
- expect(result.data[1].datapoints[0][0]).to.be(88);
- expect(result.data[1].datapoints[1][0]).to.be(99);
- });
-
- });
-
- describe('single group by query one metric', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'count', id: '1'}],
- bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
- }];
- response = {
- responses: [{
- aggregations: {
- "2": {
- buckets: [
- {
- "3": {
- buckets: [
- {doc_count: 1, key: 1000},
- {doc_count: 3, key: 2000}
- ]
- },
- doc_count: 4,
- key: 'server1',
- },
- {
- "3": {
- buckets: [
- {doc_count: 2, key: 1000},
- {doc_count: 8, key: 2000}
- ]
- },
- doc_count: 10,
- key: 'server2',
- },
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 2 series', function() {
- expect(result.data.length).to.be(2);
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].target).to.be('server1');
- expect(result.data[1].target).to.be('server2');
- });
- });
-
- describe('single group by query two metrics', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'count', id: '1'}, {type: 'avg', field: '@value', id: '4'}],
- bucketAggs: [{type: 'terms', field: 'host', id: '2'}, {type: 'date_histogram', field: '@timestamp', id: '3'}],
- }];
- response = {
- responses: [{
- aggregations: {
- "2": {
- buckets: [
- {
- "3": {
- buckets: [
- { "4": {value: 10}, doc_count: 1, key: 1000},
- { "4": {value: 12}, doc_count: 3, key: 2000}
- ]
- },
- doc_count: 4,
- key: 'server1',
- },
- {
- "3": {
- buckets: [
- { "4": {value: 20}, doc_count: 1, key: 1000},
- { "4": {value: 32}, doc_count: 3, key: 2000}
- ]
- },
- doc_count: 10,
- key: 'server2',
- },
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 2 series', function() {
- expect(result.data.length).to.be(4);
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].target).to.be('server1 Count');
- expect(result.data[1].target).to.be('server1 Average @value');
- expect(result.data[2].target).to.be('server2 Count');
- expect(result.data[3].target).to.be('server2 Average @value');
- });
- });
-
- describe('with percentiles ', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'percentiles', settings: {percents: [75, 90]}, id: '1'}],
- bucketAggs: [{type: 'date_histogram', field: '@timestamp', id: '3'}],
- }];
- response = {
- responses: [{
- aggregations: {
- "3": {
- buckets: [
- {
- "1": {values: {"75": 3.3, "90": 5.5}},
- doc_count: 10,
- key: 1000
- },
- {
- "1": {values: {"75": 2.3, "90": 4.5}},
- doc_count: 15,
- key: 2000
- }
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 2 series', function() {
- expect(result.data.length).to.be(2);
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].target).to.be('p75');
- expect(result.data[1].target).to.be('p90');
- expect(result.data[0].datapoints[0][0]).to.be(3.3);
- expect(result.data[0].datapoints[0][1]).to.be(1000);
- expect(result.data[1].datapoints[1][0]).to.be(4.5);
- });
- });
-
- describe('with extended_stats', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'extended_stats', meta: {max: true, std_deviation_bounds_upper: true}, id: '1'}],
- bucketAggs: [{type: 'terms', field: 'host', id: '3'}, {type: 'date_histogram', id: '4'}],
- }];
- response = {
- responses: [{
- aggregations: {
- "3": {
- buckets: [
- {
- key: 'server1',
- "4": {
- buckets: [{
- "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
- doc_count: 10,
- key: 1000
- }]
- }
- },
- {
- key: 'server2',
- "4": {
- buckets: [{
- "1": {max: 10.2, min: 5.5, std_deviation_bounds: {upper: 3, lower: -2}},
- doc_count: 10,
- key: 1000
- }]
- }
- },
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 4 series', function() {
- expect(result.data.length).to.be(4);
- expect(result.data[0].datapoints.length).to.be(1);
- expect(result.data[0].target).to.be('server1 Max');
- expect(result.data[1].target).to.be('server1 Std Dev Upper');
-
- expect(result.data[0].datapoints[0][0]).to.be(10.2);
- expect(result.data[1].datapoints[0][0]).to.be(3);
- });
- });
-
- describe('single group by with alias pattern', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'count', id: '1'}],
- alias: '{{term @host}} {{metric}} and!',
- bucketAggs: [
- {type: 'terms', field: '@host', id: '2'},
- {type: 'date_histogram', field: '@timestamp', id: '3'}
- ],
- }];
- response = {
- responses: [{
- aggregations: {
- "2": {
- buckets: [
- {
- "3": {
- buckets: [
- {doc_count: 1, key: 1000},
- {doc_count: 3, key: 2000}
- ]
- },
- doc_count: 4,
- key: 'server1',
- },
- {
- "3": {
- buckets: [
- {doc_count: 2, key: 1000},
- {doc_count: 8, key: 2000}
- ]
- },
- doc_count: 10,
- key: 'server2',
- },
- ]
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 2 series', function() {
- expect(result.data.length).to.be(2);
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].target).to.be('server1 Count and!');
- expect(result.data[1].target).to.be('server2 Count and!');
- });
- });
-
- describe('with two filters agg', function() {
- var result;
-
- beforeEach(function() {
- targets = [{
- refId: 'A',
- metrics: [{type: 'count', id: '1'}],
- bucketAggs: [
- {
- id: '2',
- type: 'filters',
- settings: {
- filters: [
- {query: '@metric:cpu' },
- {query: '@metric:logins.count' },
- ]
- }
- },
- {type: 'date_histogram', field: '@timestamp', id: '3'}
- ],
- }];
- response = {
- responses: [{
- aggregations: {
- "2": {
- buckets: {
- "@metric:cpu": {
- "3": {
- buckets: [
- {doc_count: 1, key: 1000},
- {doc_count: 3, key: 2000}
- ]
- },
- },
- "@metric:logins.count": {
- "3": {
- buckets: [
- {doc_count: 2, key: 1000},
- {doc_count: 8, key: 2000}
- ]
- },
- },
- }
- }
- }
- }]
- };
-
- result = new ElasticResponse(targets, response).getTimeSeries();
- });
-
- it('should return 2 series', function() {
- expect(result.data.length).to.be(2);
- expect(result.data[0].datapoints.length).to.be(2);
- expect(result.data[0].target).to.be('@metric:cpu');
- expect(result.data[1].target).to.be('@metric:logins.count');
- });
- });
-
- });
-});
diff --git a/public/test/specs/elasticsearch-specs.js b/public/test/specs/elasticsearch-specs.js
deleted file mode 100644
index 784108a2fd0..00000000000
--- a/public/test/specs/elasticsearch-specs.js
+++ /dev/null
@@ -1,77 +0,0 @@
-define([
- './helpers',
- 'moment',
- 'angular',
- 'app/plugins/datasource/elasticsearch/datasource',
-], function(helpers, moment, angular) {
- 'use strict';
-
- describe('ElasticDatasource', function() {
- var ctx = new helpers.ServiceTestContext();
-
- beforeEach(module('grafana.services'));
- beforeEach(ctx.providePhase(['templateSrv', 'backendSrv']));
- beforeEach(ctx.createService('ElasticDatasource'));
- beforeEach(function() {
- ctx.ds = new ctx.service({jsonData: {}});
- });
-
- describe('When testing datasource with index pattern', function() {
- beforeEach(function() {
- ctx.ds = new ctx.service({
- url: 'http://es.com',
- index: '[asd-]YYYY.MM.DD',
- jsonData: { interval: 'Daily' }
- });
- });
-
- it('should translate index pattern to current day', function() {
- var requestOptions;
- ctx.backendSrv.datasourceRequest = function(options) {
- requestOptions = options;
- return ctx.$q.when({});
- };
-
- ctx.ds.testDatasource();
- ctx.$rootScope.$apply();
-
- var today = moment().format("YYYY.MM.DD");
- expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
- });
- });
-
- describe('When issueing metric query with interval pattern', function() {
- beforeEach(function() {
- ctx.ds = new ctx.service({
- url: 'http://es.com',
- index: '[asd-]YYYY.MM.DD',
- jsonData: { interval: 'Daily' }
- });
- });
-
- it('should translate index pattern to current day', function() {
- var requestOptions;
- ctx.backendSrv.datasourceRequest = function(options) {
- requestOptions = options;
- return ctx.$q.when({data: {responses: []}});
- };
-
- ctx.ds.query({
- range: {
- from: moment([2015, 4, 30, 10]),
- to: moment([2015, 5, 1, 10])
- },
- targets: [{ bucketAggs: [], metrics: [] }]
- });
-
- ctx.$rootScope.$apply();
- var parts = requestOptions.data.split('\n');
- var header = angular.fromJson(parts[0]);
- expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
- });
-
- });
-
- });
-
-});
diff --git a/public/test/specs/gfunc-specs.js b/public/test/specs/gfunc-specs.js
deleted file mode 100644
index 15096536de5..00000000000
--- a/public/test/specs/gfunc-specs.js
+++ /dev/null
@@ -1,130 +0,0 @@
-define([
- 'app/plugins/datasource/graphite/gfunc'
-], function(gfunc) {
- 'use strict';
-
- describe('when creating func instance from func names', function() {
-
- it('should return func instance', function() {
- var func = gfunc.createFuncInstance('sumSeries');
- expect(func).to.be.ok();
- expect(func.def.name).to.equal('sumSeries');
- expect(func.def.params.length).to.equal(5);
- expect(func.def.defaultParams.length).to.equal(1);
- });
-
- it('should return func instance with shortName', function() {
- var func = gfunc.createFuncInstance('sum');
- expect(func).to.be.ok();
- });
-
- it('should return func instance from funcDef', function() {
- var func = gfunc.createFuncInstance('sum');
- var func2 = gfunc.createFuncInstance(func.def);
- expect(func2).to.be.ok();
- });
-
- it('func instance should have text representation', function() {
- var func = gfunc.createFuncInstance('groupByNode');
- func.params[0] = 5;
- func.params[1] = 'avg';
- func.updateText();
- expect(func.text).to.equal("groupByNode(5, avg)");
- });
-
- });
-
- describe('when rendering func instance', function() {
-
- it('should handle single metric param', function() {
- var func = gfunc.createFuncInstance('sumSeries');
- expect(func.render('hello.metric')).to.equal("sumSeries(hello.metric)");
- });
-
- it('should include default params if options enable it', function() {
- var func = gfunc.createFuncInstance('scaleToSeconds', { withDefaultParams: true });
- expect(func.render('hello')).to.equal("scaleToSeconds(hello, 1)");
- });
-
- it('should handle int or interval params with number', function() {
- var func = gfunc.createFuncInstance('movingMedian');
- func.params[0] = '5';
- expect(func.render('hello')).to.equal("movingMedian(hello, 5)");
- });
-
- it('should handle int or interval params with interval string', function() {
- var func = gfunc.createFuncInstance('movingMedian');
- func.params[0] = '5min';
- expect(func.render('hello')).to.equal("movingMedian(hello, '5min')");
- });
-
- it('should handle metric param and int param and string param', function() {
- var func = gfunc.createFuncInstance('groupByNode');
- func.params[0] = 5;
- func.params[1] = 'avg';
- expect(func.render('hello.metric')).to.equal("groupByNode(hello.metric, 5, 'avg')");
- });
-
- it('should handle function with no metric param', function() {
- var func = gfunc.createFuncInstance('randomWalk');
- func.params[0] = 'test';
- expect(func.render(undefined)).to.equal("randomWalk('test')");
- });
-
- it('should handle function multiple series params', function() {
- var func = gfunc.createFuncInstance('asPercent');
- func.params[0] = '#B';
- expect(func.render('#A')).to.equal("asPercent(#A, #B)");
- });
-
- });
-
- describe('when requesting function categories', function() {
- it('should return function categories', function() {
- var catIndex = gfunc.getCategories();
- expect(catIndex.Special.length).to.be.greaterThan(8);
- });
- });
-
- describe('when updating func param', function() {
- it('should update param value and update text representation', function() {
- var func = gfunc.createFuncInstance('summarize', { withDefaultParams: true });
- func.updateParam('1h', 0);
- expect(func.params[0]).to.be('1h');
- expect(func.text).to.be('summarize(1h, sum, false)');
- });
-
- it('should parse numbers as float', function() {
- var func = gfunc.createFuncInstance('scale');
- func.updateParam('0.001', 0);
- expect(func.params[0]).to.be('0.001');
- });
- });
-
- describe('when updating func param with optional second parameter', function() {
- it('should update value and text', function() {
- var func = gfunc.createFuncInstance('aliasByNode');
- func.updateParam('1', 0);
- expect(func.params[0]).to.be('1');
- });
-
- it('should slit text and put value in second param', function() {
- var func = gfunc.createFuncInstance('aliasByNode');
- func.updateParam('4,-5', 0);
- expect(func.params[0]).to.be('4');
- expect(func.params[1]).to.be('-5');
- expect(func.text).to.be('aliasByNode(4, -5)');
- });
-
- it('should remove second param when empty string is set', function() {
- var func = gfunc.createFuncInstance('aliasByNode');
- func.updateParam('4,-5', 0);
- func.updateParam('', 1);
- expect(func.params[0]).to.be('4');
- expect(func.params[1]).to.be(undefined);
- expect(func.text).to.be('aliasByNode(4)');
- });
- });
-
-});
-
diff --git a/public/test/specs/graphiteDatasource-specs.js b/public/test/specs/graphiteDatasource-specs.js
deleted file mode 100644
index 3aacff95efd..00000000000
--- a/public/test/specs/graphiteDatasource-specs.js
+++ /dev/null
@@ -1,122 +0,0 @@
-define([
- './helpers',
- 'app/plugins/datasource/graphite/datasource'
-], function(helpers) {
- 'use strict';
-
- describe('graphiteDatasource', function() {
- var ctx = new helpers.ServiceTestContext();
-
- beforeEach(module('grafana.services'));
- beforeEach(ctx.providePhase(['backendSrv']));
-
- beforeEach(ctx.createService('GraphiteDatasource'));
- beforeEach(function() {
- ctx.ds = new ctx.service({ url: [''] });
- });
-
- describe('When querying influxdb with one target using query editor target spec', function() {
- var query = {
- rangeRaw: { from: 'now-1h', to: 'now' },
- targets: [{ target: 'prod1.count' }, {target: 'prod2.count'}],
- maxDataPoints: 500,
- };
-
- var results;
- var requestOptions;
-
- beforeEach(function() {
- ctx.backendSrv.datasourceRequest = function(options) {
- requestOptions = options;
- return ctx.$q.when({data: [{ target: 'prod1.count', datapoints: [[10, 1], [12,1]] }]});
- };
-
- ctx.ds.query(query).then(function(data) { results = data; });
- ctx.$rootScope.$apply();
- });
-
- it('should generate the correct query', function() {
- expect(requestOptions.url).to.be('/render');
- });
-
- it('should query correctly', function() {
- var params = requestOptions.data.split('&');
- expect(params).to.contain('target=prod1.count');
- expect(params).to.contain('target=prod2.count');
- expect(params).to.contain('from=-1h');
- expect(params).to.contain('until=now');
- });
-
- it('should exclude undefined params', function() {
- var params = requestOptions.data.split('&');
- expect(params).to.not.contain('cacheTimeout=undefined');
- });
-
- it('should return series list', function() {
- expect(results.data.length).to.be(1);
- expect(results.data[0].target).to.be('prod1.count');
- });
-
- it('should convert to millisecond resolution', function() {
- expect(results.data[0].datapoints[0][0]).to.be(10);
- });
-
- });
-
- describe('building graphite params', function() {
-
- it('should uri escape targets', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: 'prod1.{test,test2}'}, {target: 'prod2.count'}]
- });
- expect(results).to.contain('target=prod1.%7Btest%2Ctest2%7D');
- });
-
- it('should replace target placeholder', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: 'series1'}, {target: 'series2'}, {target: 'asPercent(#A,#B)'}]
- });
- expect(results[2]).to.be('target=asPercent(series1%2Cseries2)');
- });
-
- it('should replace target placeholder for hidden series', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: 'series1', hide: true}, {target: 'sumSeries(#A)', hide: true}, {target: 'asPercent(#A,#B)'}]
- });
- expect(results[0]).to.be('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))'));
- });
-
- it('should replace target placeholder when nesting query references', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: 'series1'}, {target: 'sumSeries(#A)'}, {target: 'asPercent(#A,#B)'}]
- });
- expect(results[2]).to.be('target=' + encodeURIComponent("asPercent(series1,sumSeries(series1))"));
- });
-
- it('should fix wrong minute interval parameters', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: "summarize(prod.25m.count, '25m', 'sum')" }]
- });
- expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')"));
- });
-
- it('should fix wrong month interval parameters', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: "summarize(prod.5M.count, '5M', 'sum')" }]
- });
- expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')"));
- });
-
- it('should ignore empty targets', function() {
- var results = ctx.ds.buildGraphiteParams({
- targets: [{target: 'series1'}, {target: ''}]
- });
- expect(results.length).to.be(2);
- });
-
- });
-
- });
-
-});
-
diff --git a/public/test/specs/graphiteTargetCtrl-specs.js b/public/test/specs/graphiteTargetCtrl-specs.js
deleted file mode 100644
index 42d97ea5fa2..00000000000
--- a/public/test/specs/graphiteTargetCtrl-specs.js
+++ /dev/null
@@ -1,179 +0,0 @@
-define([
- './helpers',
- 'app/plugins/datasource/graphite/gfunc',
- 'app/plugins/datasource/graphite/queryCtrl',
- 'app/services/uiSegmentSrv'
-], function(helpers, gfunc) {
- 'use strict';
-
- describe('GraphiteQueryCtrl', function() {
- var ctx = new helpers.ControllerTestContext();
-
- beforeEach(module('grafana.controllers'));
- beforeEach(module('grafana.services'));
- beforeEach(ctx.providePhase());
- beforeEach(ctx.createControllerPhase('GraphiteQueryCtrl'));
-
- beforeEach(function() {
- ctx.scope.target = {target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)'};
-
- ctx.scope.datasource = ctx.datasource;
- ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
- });
-
- describe('init', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.$digest();
- });
-
- it('should validate metric key exists', function() {
- expect(ctx.scope.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*');
- });
-
- it('should delete last segment if no metrics are found', function() {
- expect(ctx.scope.segments[2].value).to.be('select metric');
- });
-
- it('should parse expression and build function model', function() {
- expect(ctx.scope.functions.length).to.be(2);
- });
- });
-
- describe('when adding function', function() {
- beforeEach(function() {
- ctx.scope.target.target = 'test.prod.*.count';
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
- ctx.scope.init();
- ctx.scope.$digest();
-
- ctx.scope.$parent = { get_data: sinon.spy() };
- ctx.scope.addFunction(gfunc.getFuncDef('aliasByNode'));
- });
-
- it('should add function with correct node number', function() {
- expect(ctx.scope.functions[0].params[0]).to.be(2);
- });
-
- it('should update target', function() {
- expect(ctx.scope.target.target).to.be('aliasByNode(test.prod.*.count, 2)');
- });
-
- it('should call get_data', function() {
- expect(ctx.scope.$parent.get_data.called).to.be(true);
- });
- });
-
- describe('when adding function before any metric segment', function() {
- beforeEach(function() {
- ctx.scope.target.target = '';
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: true}]));
- ctx.scope.init();
- ctx.scope.$digest();
-
- ctx.scope.$parent = { get_data: sinon.spy() };
- ctx.scope.addFunction(gfunc.getFuncDef('asPercent'));
- });
-
- it('should add function and remove select metric link', function() {
- expect(ctx.scope.segments.length).to.be(0);
- });
- });
-
- describe('when initalizing target without metric expression and only function', function() {
- beforeEach(function() {
- ctx.scope.target.target = 'asPercent(#A, #B)';
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
- ctx.scope.init();
- ctx.scope.$digest();
- ctx.scope.$parent = { get_data: sinon.spy() };
- });
-
- it('should not add select metric segment', function() {
- expect(ctx.scope.segments.length).to.be(0);
- });
-
- it('should add both series refs as params', function() {
- expect(ctx.scope.functions[0].params.length).to.be(2);
- });
-
- });
-
- describe('when initializing a target with single param func using variable', function() {
- beforeEach(function() {
- ctx.scope.target.target = 'movingAverage(prod.count, $var)';
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
- ctx.scope.init();
- ctx.scope.$digest();
- ctx.scope.$parent = { get_data: sinon.spy() };
- });
-
- it('should add 2 segments', function() {
- expect(ctx.scope.segments.length).to.be(2);
- });
-
- it('should add function param', function() {
- expect(ctx.scope.functions[0].params.length).to.be(1);
- });
-
- });
-
- describe('when initalizing target without metric expression and function with series-ref', function() {
- beforeEach(function() {
- ctx.scope.target.target = 'asPercent(metric.node.count, #A)';
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
- ctx.scope.init();
- ctx.scope.$digest();
- ctx.scope.$parent = { get_data: sinon.spy() };
- });
-
- it('should add segments', function() {
- expect(ctx.scope.segments.length).to.be(3);
- });
-
- it('should have correct func params', function() {
- expect(ctx.scope.functions[0].params.length).to.be(1);
- });
- });
-
- describe('when getting altSegments and metricFindQuery retuns empty array', function() {
- beforeEach(function() {
- ctx.scope.target.target = 'test.count';
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([]));
- ctx.scope.init();
- ctx.scope.getAltSegments(1).then(function(results) {
- ctx.altSegments = results;
- });
- ctx.scope.$digest();
- ctx.scope.$parent = { get_data: sinon.spy() };
- });
-
- it('should have no segments', function() {
- expect(ctx.altSegments.length).to.be(0);
- });
-
- });
-
- describe('targetChanged', function() {
- beforeEach(function() {
- ctx.scope.datasource.metricFindQuery.returns(ctx.$q.when([{expandable: false}]));
- ctx.scope.init();
- ctx.scope.$digest();
-
- ctx.scope.$parent = { get_data: sinon.spy() };
- ctx.scope.target.target = '';
- ctx.scope.targetChanged();
- });
-
- it('should rebuld target after expression model', function() {
- expect(ctx.scope.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)');
- });
-
- it('should call get_data', function() {
- expect(ctx.scope.$parent.get_data.called).to.be(true);
- });
- });
-
-
- });
-});
diff --git a/public/test/specs/influx09-querybuilder-specs.js b/public/test/specs/influx09-querybuilder-specs.js
deleted file mode 100644
index 7dd4f4b4ad9..00000000000
--- a/public/test/specs/influx09-querybuilder-specs.js
+++ /dev/null
@@ -1,187 +0,0 @@
-define([
- 'app/plugins/datasource/influxdb/queryBuilder'
-], function(InfluxQueryBuilder) {
- 'use strict';
-
- describe('InfluxQueryBuilder', function() {
-
- describe('series with mesurement only', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- groupBy: [{type: 'time', interval: 'auto'}]
- });
-
- var query = builder.build();
-
- expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
- });
- });
-
- describe('series with math expr and as expr', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- fields: [{name: 'test', func: 'max', mathExpr: '*2', asExpr: 'new_name'}],
- groupBy: [{type: 'time', interval: 'auto'}]
- });
-
- var query = builder.build();
-
- expect(query).to.be('SELECT max("test")*2 AS "new_name" FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
- });
- });
-
- describe('series with single tag only', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- groupBy: [{type: 'time', interval: 'auto'}],
- tags: [{key: 'hostname', value: 'server1'}]
- });
-
- var query = builder.build();
-
- expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND $timeFilter'
- + ' GROUP BY time($interval)');
- });
-
- it('should switch regex operator with tag value is regex', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- groupBy: [{type: 'time', interval: 'auto'}],
- tags: [{key: 'app', value: '/e.*/'}]
- });
-
- var query = builder.build();
- expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "app" =~ /e.*/ AND $timeFilter GROUP BY time($interval)');
- });
- });
-
- describe('series with multiple fields', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- tags: [],
- groupBy: [{type: 'time', interval: 'auto'}],
- fields: [{ name: 'tx_in', func: 'sum' }, { name: 'tx_out', func: 'mean' }]
- });
-
- var query = builder.build();
- expect(query).to.be('SELECT sum("tx_in") AS "tx_in", mean("tx_out") AS "tx_out" ' +
- 'FROM "cpu" WHERE $timeFilter GROUP BY time($interval)');
- });
- });
-
- describe('series with multiple tags only', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- groupBy: [{type: 'time', interval: 'auto'}],
- tags: [{key: 'hostname', value: 'server1'}, {key: 'app', value: 'email', condition: "AND"}]
- });
-
- var query = builder.build();
- expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' AND "app" = \'email\' AND ' +
- '$timeFilter GROUP BY time($interval)');
- });
- });
-
- describe('series with tags OR condition', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- groupBy: [{type: 'time', interval: 'auto'}],
- tags: [{key: 'hostname', value: 'server1'}, {key: 'hostname', value: 'server2', condition: "OR"}]
- });
-
- var query = builder.build();
- expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE "hostname" = \'server1\' OR "hostname" = \'server2\' AND ' +
- '$timeFilter GROUP BY time($interval)');
- });
- });
-
- describe('series with groupByTag', function() {
- it('should generate correct query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: 'cpu',
- tags: [],
- groupBy: [{type: 'time', interval: 'auto'}, {type: 'tag', key: 'host'}],
- });
-
- var query = builder.build();
- expect(query).to.be('SELECT mean("value") AS "value" FROM "cpu" WHERE $timeFilter ' +
- 'GROUP BY time($interval), "host"');
- });
- });
-
- describe('when building explore queries', function() {
-
- it('should only have measurement condition in tag keys query given query with measurement', function() {
- var builder = new InfluxQueryBuilder({ measurement: 'cpu', tags: [] });
- var query = builder.buildExploreQuery('TAG_KEYS');
- expect(query).to.be('SHOW TAG KEYS FROM "cpu"');
- });
-
- it('should handle regex measurement in tag keys query', function() {
- var builder = new InfluxQueryBuilder({
- measurement: '/.*/',
- tags: []
- });
- var query = builder.buildExploreQuery('TAG_KEYS');
- expect(query).to.be('SHOW TAG KEYS FROM /.*/');
- });
-
- it('should have no conditions in tags keys query given query with no measurement or tag', function() {
- var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
- var query = builder.buildExploreQuery('TAG_KEYS');
- expect(query).to.be('SHOW TAG KEYS');
- });
-
- it('should have where condition in tag keys query with tags', function() {
- var builder = new InfluxQueryBuilder({ measurement: '', tags: [{key: 'host', value: 'se1'}] });
- var query = builder.buildExploreQuery('TAG_KEYS');
- expect(query).to.be("SHOW TAG KEYS WHERE \"host\" = 'se1'");
- });
-
- it('should have no conditions in measurement query for query with no tags', function() {
- var builder = new InfluxQueryBuilder({ measurement: '', tags: [] });
- var query = builder.buildExploreQuery('MEASUREMENTS');
- expect(query).to.be('SHOW MEASUREMENTS');
- });
-
- it('should have where condition in measurement query for query with tags', function() {
- var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'email'}]});
- var query = builder.buildExploreQuery('MEASUREMENTS');
- expect(query).to.be("SHOW MEASUREMENTS WHERE \"app\" = 'email'");
- });
-
- it('should have where tag name IN filter in tag values query for query with one tag', function() {
- var builder = new InfluxQueryBuilder({measurement: '', tags: [{key: 'app', value: 'asdsadsad'}]});
- var query = builder.buildExploreQuery('TAG_VALUES', 'app');
- expect(query).to.be('SHOW TAG VALUES WITH KEY = "app"');
- });
-
- it('should have measurement tag condition and tag name IN filter in tag values query', function() {
- var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}, {key: 'host', value: 'server1'}]});
- var query = builder.buildExploreQuery('TAG_VALUES', 'app');
- expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" = \'server1\'');
- });
-
- it('should switch to regex operator in tag condition', function() {
- var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'host', value: '/server.*/'}]});
- var query = builder.buildExploreQuery('TAG_VALUES', 'app');
- expect(query).to.be('SHOW TAG VALUES FROM "cpu" WITH KEY = "app" WHERE "host" =~ /server.*/');
- });
-
- it('should build show field query', function() {
- var builder = new InfluxQueryBuilder({measurement: 'cpu', tags: [{key: 'app', value: 'email'}]});
- var query = builder.buildExploreQuery('FIELDS');
- expect(query).to.be('SHOW FIELD KEYS FROM "cpu"');
- });
-
- });
-
- });
-
-});
diff --git a/public/test/specs/influxSeries-specs.js b/public/test/specs/influxSeries-specs.js
deleted file mode 100644
index 0c6d786392d..00000000000
--- a/public/test/specs/influxSeries-specs.js
+++ /dev/null
@@ -1,180 +0,0 @@
-define([
- 'app/plugins/datasource/influxdb/influxSeries'
-], function(InfluxSeries) {
- 'use strict';
-
- describe('when generating timeseries from influxdb response', function() {
-
- describe('given multiple fields for series', function() {
- var options = { series: [
- {
- name: 'cpu',
- tags: {app: 'test', server: 'server1'},
- columns: ['time', 'mean', 'max', 'min'],
- values: [[1431946625000, 10, 11, 9], [1431946626000, 20, 21, 19]]
- }
- ]};
- describe('and no alias', function() {
- it('should generate multiple datapoints for each column', function() {
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result.length).to.be(3);
- expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
- expect(result[0].datapoints[0][0]).to.be(10);
- expect(result[0].datapoints[0][1]).to.be(1431946625000);
- expect(result[0].datapoints[1][0]).to.be(20);
- expect(result[0].datapoints[1][1]).to.be(1431946626000);
-
- expect(result[1].target).to.be('cpu.max {app: test, server: server1}');
- expect(result[1].datapoints[0][0]).to.be(11);
- expect(result[1].datapoints[0][1]).to.be(1431946625000);
- expect(result[1].datapoints[1][0]).to.be(21);
- expect(result[1].datapoints[1][1]).to.be(1431946626000);
-
- expect(result[2].target).to.be('cpu.min {app: test, server: server1}');
- expect(result[2].datapoints[0][0]).to.be(9);
- expect(result[2].datapoints[0][1]).to.be(1431946625000);
- expect(result[2].datapoints[1][0]).to.be(19);
- expect(result[2].datapoints[1][1]).to.be(1431946626000);
-
- });
- });
-
- describe('and simple alias', function() {
- it('should use alias', function() {
- options.alias = 'new series';
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result[0].target).to.be('new series');
- expect(result[1].target).to.be('new series');
- expect(result[2].target).to.be('new series');
- });
-
- });
-
- describe('and alias patterns', function() {
- it('should replace patterns', function() {
- options.alias = 'alias: $m -> $tag_server ([[measurement]])';
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
- expect(result[1].target).to.be('alias: cpu -> server1 (cpu)');
- expect(result[2].target).to.be('alias: cpu -> server1 (cpu)');
- });
-
- });
- });
- describe('given measurement with default fieldname', function() {
- var options = { series: [
- {
- name: 'cpu',
- tags: {app: 'test', server: 'server1'},
- columns: ['time', 'value'],
- values: [["2015-05-18T10:57:05Z", 10], ["2015-05-18T10:57:06Z", 12]]
- },
- {
- name: 'cpu',
- tags: {app: 'test2', server: 'server2'},
- columns: ['time', 'value'],
- values: [["2015-05-18T10:57:05Z", 15], ["2015-05-18T10:57:06Z", 16]]
- }
- ]};
-
- describe('and no alias', function() {
-
- it('should generate label with no field', function() {
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result[0].target).to.be('cpu {app: test, server: server1}');
- expect(result[1].target).to.be('cpu {app: test2, server: server2}');
- });
- });
-
- });
- describe('given two series', function() {
- var options = { series: [
- {
- name: 'cpu',
- tags: {app: 'test', server: 'server1'},
- columns: ['time', 'mean'],
- values: [[1431946625000, 10], [1431946626000, 12]]
- },
- {
- name: 'cpu',
- tags: {app: 'test2', server: 'server2'},
- columns: ['time', 'mean'],
- values: [[1431946625000, 15], [1431946626000, 16]]
- }
- ]};
-
- describe('and no alias', function() {
-
- it('should generate two time series', function() {
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result.length).to.be(2);
- expect(result[0].target).to.be('cpu.mean {app: test, server: server1}');
- expect(result[0].datapoints[0][0]).to.be(10);
- expect(result[0].datapoints[0][1]).to.be(1431946625000);
- expect(result[0].datapoints[1][0]).to.be(12);
- expect(result[0].datapoints[1][1]).to.be(1431946626000);
-
- expect(result[1].target).to.be('cpu.mean {app: test2, server: server2}');
- expect(result[1].datapoints[0][0]).to.be(15);
- expect(result[1].datapoints[0][1]).to.be(1431946625000);
- expect(result[1].datapoints[1][0]).to.be(16);
- expect(result[1].datapoints[1][1]).to.be(1431946626000);
- });
- });
-
- describe('and simple alias', function() {
- it('should use alias', function() {
- options.alias = 'new series';
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result[0].target).to.be('new series');
- });
-
- });
-
- describe('and alias patterns', function() {
- it('should replace patterns', function() {
- options.alias = 'alias: $m -> $tag_server ([[measurement]])';
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result[0].target).to.be('alias: cpu -> server1 (cpu)');
- expect(result[1].target).to.be('alias: cpu -> server2 (cpu)');
- });
-
- });
-
- });
-
- describe('given measurement with dots', function() {
- var options = { series: [
- {
- name: 'app.prod.server1.count',
- tags: {},
- columns: ['time', 'mean'],
- values: [[1431946625000, 10], [1431946626000, 12]]
- }
- ]};
-
- it('should replace patterns', function() {
- options.alias = 'alias: $1 -> [[3]]';
- var series = new InfluxSeries(options);
- var result = series.getTimeSeries();
-
- expect(result[0].target).to.be('alias: prod -> count');
- });
- });
- });
-
-});
diff --git a/public/test/specs/influxdbQueryCtrl-specs.js b/public/test/specs/influxdbQueryCtrl-specs.js
deleted file mode 100644
index 2d05a032fd6..00000000000
--- a/public/test/specs/influxdbQueryCtrl-specs.js
+++ /dev/null
@@ -1,188 +0,0 @@
-define([
- './helpers',
- 'app/plugins/datasource/influxdb/queryCtrl',
- 'app/services/uiSegmentSrv'
-], function(helpers) {
- 'use strict';
-
- describe('InfluxDBQueryCtrl', function() {
- var ctx = new helpers.ControllerTestContext();
-
- beforeEach(module('grafana.controllers'));
- beforeEach(module('grafana.services'));
- beforeEach(ctx.providePhase());
- beforeEach(ctx.createControllerPhase('InfluxQueryCtrl'));
-
- beforeEach(function() {
- ctx.scope.target = {};
- ctx.scope.$parent = { get_data: sinon.spy() };
-
- ctx.scope.datasource = ctx.datasource;
- ctx.scope.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([]));
- });
-
- describe('init', function() {
- beforeEach(function() {
- ctx.scope.init();
- });
-
- it('should init tagSegments', function() {
- expect(ctx.scope.tagSegments.length).to.be(1);
- });
-
- it('should init measurementSegment', function() {
- expect(ctx.scope.measurementSegment.value).to.be('select measurement');
- });
- });
-
- describe('when first tag segment is updated', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
- });
-
- it('should update tag key', function() {
- expect(ctx.scope.target.tags[0].key).to.be('asd');
- expect(ctx.scope.tagSegments[0].type).to.be('key');
- });
-
- it('should add tagSegments', function() {
- expect(ctx.scope.tagSegments.length).to.be(3);
- });
- });
-
- describe('when last tag value segment is updated', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
- ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
- });
-
- it('should update tag value', function() {
- expect(ctx.scope.target.tags[0].value).to.be('server1');
- });
-
- it('should set tag operator', function() {
- expect(ctx.scope.target.tags[0].operator).to.be('=');
- });
-
- it('should add plus button for another filter', function() {
- expect(ctx.scope.tagSegments[3].fake).to.be(true);
- });
- });
-
- describe('when last tag value segment is updated to regex', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button'}, 0);
- ctx.scope.tagSegmentUpdated({value: '/server.*/', type: 'value'}, 2);
- });
-
- it('should update operator', function() {
- expect(ctx.scope.tagSegments[1].value).to.be('=~');
- expect(ctx.scope.target.tags[0].operator).to.be('=~');
- });
- });
-
- describe('when second tag key is added', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
- ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
- ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
- });
-
- it('should update tag key', function() {
- expect(ctx.scope.target.tags[1].key).to.be('key2');
- });
-
- it('should add AND segment', function() {
- expect(ctx.scope.tagSegments[3].value).to.be('AND');
- });
- });
-
- describe('when condition is changed', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
- ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
- ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
- ctx.scope.tagSegmentUpdated({value: 'OR', type: 'condition'}, 3);
- });
-
- it('should update tag condition', function() {
- expect(ctx.scope.target.tags[1].condition).to.be('OR');
- });
-
- it('should update AND segment', function() {
- expect(ctx.scope.tagSegments[3].value).to.be('OR');
- expect(ctx.scope.tagSegments.length).to.be(7);
- });
- });
-
- describe('when deleting first tag filter after value is selected', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
- ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
- ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 0);
- });
-
- it('should remove tags', function() {
- expect(ctx.scope.target.tags.length).to.be(0);
- });
-
- it('should remove all segment after 2 and replace with plus button', function() {
- expect(ctx.scope.tagSegments.length).to.be(1);
- expect(ctx.scope.tagSegments[0].type).to.be('plus-button');
- });
- });
-
- describe('when deleting second tag value before second tag value is complete', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
- ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
- ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
- ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
- });
-
- it('should remove all segment after 2 and replace with plus button', function() {
- expect(ctx.scope.tagSegments.length).to.be(4);
- expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
- });
- });
-
- describe('when deleting second tag value after second tag filter is complete', function() {
- beforeEach(function() {
- ctx.scope.init();
- ctx.scope.tagSegmentUpdated({value: 'asd', type: 'plus-button' }, 0);
- ctx.scope.tagSegmentUpdated({value: 'server1', type: 'value'}, 2);
- ctx.scope.tagSegmentUpdated({value: 'key2', type: 'plus-button'}, 3);
- ctx.scope.tagSegmentUpdated({value: 'value', type: 'value'}, 6);
- ctx.scope.tagSegmentUpdated(ctx.scope.removeTagFilterSegment, 4);
- });
-
- it('should remove all segment after 2 and replace with plus button', function() {
- expect(ctx.scope.tagSegments.length).to.be(4);
- expect(ctx.scope.tagSegments[3].type).to.be('plus-button');
- });
- });
-
- });
-});
diff --git a/public/test/test-main.js b/public/test/test-main.js
index 4c5d63ede2e..5a7f1eeb437 100644
--- a/public/test/test-main.js
+++ b/public/test/test-main.js
@@ -113,7 +113,7 @@ require([
var specs = [];
for (var file in window.__karma__.files) {
- if (/base\/test\/specs.*/.test(file)) {
+ if (/specs.*/.test(file)) {
file = file2moduleName(file);
specs.push(file);
}
diff --git a/tasks/build_task.js b/tasks/build_task.js
index 0ca987ddf38..364c3dc797c 100644
--- a/tasks/build_task.js
+++ b/tasks/build_task.js
@@ -11,6 +11,7 @@ module.exports = function(grunt) {
'copy:public_to_gen',
'typescript:build',
'karma:test',
+ 'phantomjs',
'css',
'htmlmin:build',
'ngtemplates',
diff --git a/tasks/default_task.js b/tasks/default_task.js
index 03f1b667941..8c11631d0e4 100644
--- a/tasks/default_task.js
+++ b/tasks/default_task.js
@@ -9,6 +9,7 @@ module.exports = function(grunt) {
'tslint',
'clean:gen',
'copy:public_to_gen',
+ 'phantomjs',
'css',
'typescript:build'
]);
diff --git a/tasks/options/phantomjs.js b/tasks/options/phantomjs.js
new file mode 100644
index 00000000000..7eac7cb36ac
--- /dev/null
+++ b/tasks/options/phantomjs.js
@@ -0,0 +1,36 @@
+module.exports = function(config, grunt) {
+ 'use strict';
+
+ grunt.registerTask('phantomjs', 'Copy phantomjs binary from node', function() {
+
+ var dest = './vendor/phantomjs/phantomjs';
+ var confDir = './node_modules/karma-phantomjs-launcher/node_modules/phantomjs/lib/';
+
+ if (!grunt.file.exists(dest)) {
+
+ // read the binary path that the phantomjs npm module records in lib/location.js
+ var m = grunt.file.read(confDir + 'location.js');
+ var src = /= "([^"]*)"/.exec(m)[1];
+
+ if (!grunt.file.isPathAbsolute(src)) {
+ src = confDir + src;
+ }
+
+ try {
+ // copy the binary into vendor/, preserving its executable mode
+ grunt.config('copy.phantom_bin', {
+ src: src,
+ dest: dest,
+ options: { mode: true },
+ });
+ grunt.task.run('copy:phantom_bin');
+ } catch (err) {
+ grunt.verbose.writeln(err);
+ grunt.fail.warn('No working Phantomjs binary available');
+ }
+
+ } else {
+ grunt.log.writeln('Phantomjs already imported from node');
+ }
+ });
+};
diff --git a/vendor/phantomjs/phantomjs b/vendor/phantomjs/phantomjs
deleted file mode 100755
index e249c26674d..00000000000
Binary files a/vendor/phantomjs/phantomjs and /dev/null differ