changed var to const 2 (#13068)

* changed var to const

* fixed typo created in last commit

* added an "or empty object" fallback to options in prometheus/datasource
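
The last bullet describes a defensive default rather than a rename. The prometheus/datasource hunk is not shown in this excerpt, so the sketch below only illustrates the general "or empty object" pattern; the fetchMetrics function and QueryOptions type are made up for the example:

// Illustrative sketch only, not the actual Grafana change; names are hypothetical.
interface QueryOptions {
  interval?: string;
}

function fetchMetrics(query: string, options?: QueryOptions): string {
  // Fall back to an empty object so property reads never dereference undefined.
  const opts: QueryOptions = options || {};
  return query + '&interval=' + (opts.interval || '1m');
}

// Works even when the caller omits options entirely.
console.log(fetchMetrics('up'));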
Author: Patrick O'Carroll
Date: 2018-08-29 14:27:29 +02:00
Committed by: Torkel Ödegaard
Parent: 5e0d0c5816
Commit: a702603e7b
64 changed files with 667 additions and 666 deletions


@@ -33,10 +33,10 @@ export class ElasticResponse {
break;
}
- var firstBucket = esAgg.buckets[0];
- var percentiles = firstBucket[metric.id].values;
+ const firstBucket = esAgg.buckets[0];
+ const percentiles = firstBucket[metric.id].values;
- for (var percentileName in percentiles) {
+ for (const percentileName in percentiles) {
newSeries = {
datapoints: [],
metric: 'p' + percentileName,
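
This first hunk already shows the whole shape of the commit: single-assignment locals and for...in bindings move to const, while counters that are reassigned (the for (i = 0; ...) and for (var i = 0; ...) loops left untouched in the hunks below) keep var. A standalone sketch of that distinction, with made-up data, not code from the commit:

// Sketch only; mirrors the rule applied in the diff.
const buckets = [{ values: { '75': 12, '95': 42 } }, { values: { '75': 3, '95': 7 } }];

// A for...in loop creates a fresh binding on each iteration, so const is valid here.
for (const percentileName in buckets[0].values) {
  console.log('p' + percentileName);
}

// The counter is reassigned by i++, so it cannot be const; this commit leaves it as var.
for (var i = 0; i < buckets.length; i++) {
  console.log(buckets[i].values['95']);
}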
@@ -46,7 +46,7 @@ export class ElasticResponse {
for (i = 0; i < esAgg.buckets.length; i++) {
bucket = esAgg.buckets[i];
- var values = bucket[metric.id].values;
+ const values = bucket[metric.id].values;
newSeries.datapoints.push([values[percentileName], bucket.key]);
}
seriesList.push(newSeries);
@@ -55,7 +55,7 @@ export class ElasticResponse {
break;
}
case 'extended_stats': {
- for (var statName in metric.meta) {
+ for (const statName in metric.meta) {
if (!metric.meta[statName]) {
continue;
}
@@ -69,7 +69,7 @@ export class ElasticResponse {
for (i = 0; i < esAgg.buckets.length; i++) {
bucket = esAgg.buckets[i];
- var stats = bucket[metric.id];
+ const stats = bucket[metric.id];
// add stats that are in nested obj to top level obj
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
@@ -141,12 +141,12 @@ export class ElasticResponse {
break;
}
case 'extended_stats': {
- for (var statName in metric.meta) {
+ for (const statName in metric.meta) {
if (!metric.meta[statName]) {
continue;
}
- var stats = bucket[metric.id];
+ const stats = bucket[metric.id];
// add stats that are in nested obj to top level obj
stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
@@ -178,7 +178,7 @@ export class ElasticResponse {
// need to recurise down the nested buckets to build series
processBuckets(aggs, target, seriesList, table, props, depth) {
var bucket, aggDef, esAgg, aggId;
- var maxDepth = target.bucketAggs.length - 1;
+ const maxDepth = target.bucketAggs.length - 1;
for (aggId in aggs) {
aggDef = _.find(target.bucketAggs, { id: aggId });
@@ -195,7 +195,7 @@ export class ElasticResponse {
this.processAggregationDocs(esAgg, aggDef, target, table, props);
}
} else {
- for (var nameIndex in esAgg.buckets) {
+ for (const nameIndex in esAgg.buckets) {
bucket = esAgg.buckets[nameIndex];
props = _.clone(props);
if (bucket.key !== void 0) {
@@ -225,10 +225,10 @@ export class ElasticResponse {
var metricName = this.getMetricName(series.metric);
if (target.alias) {
- var regex = /\{\{([\s\S]+?)\}\}/g;
+ const regex = /\{\{([\s\S]+?)\}\}/g;
return target.alias.replace(regex, function(match, g1, g2) {
- var group = g1 || g2;
+ const group = g1 || g2;
if (group.indexOf('term ') === 0) {
return series.props[group.substring(5)];
@@ -248,7 +248,7 @@ export class ElasticResponse {
}
if (series.field && queryDef.isPipelineAgg(series.metric)) {
- var appliedAgg = _.find(target.metrics, { id: series.field });
+ const appliedAgg = _.find(target.metrics, { id: series.field });
if (appliedAgg) {
metricName += ' ' + queryDef.describeMetric(appliedAgg);
} else {
@@ -258,13 +258,13 @@ export class ElasticResponse {
metricName += ' ' + series.field;
}
- var propKeys = _.keys(series.props);
+ const propKeys = _.keys(series.props);
if (propKeys.length === 0) {
return metricName;
}
var name = '';
- for (var propName in series.props) {
+ for (const propName in series.props) {
name += series.props[propName] + ' ';
}
@@ -276,16 +276,16 @@ export class ElasticResponse {
}
nameSeries(seriesList, target) {
- var metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
+ const metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
for (var i = 0; i < seriesList.length; i++) {
- var series = seriesList[i];
+ const series = seriesList[i];
series.target = this.getSeriesName(series, target, metricTypeCount);
}
}
processHits(hits, seriesList) {
- var series = {
+ const series = {
target: 'docs',
type: 'docs',
datapoints: [],
@@ -318,13 +318,13 @@ export class ElasticResponse {
}
trimDatapoints(aggregations, target) {
- var histogram = _.find(target.bucketAggs, { type: 'date_histogram' });
+ const histogram = _.find(target.bucketAggs, { type: 'date_histogram' });
- var shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
+ const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
if (shouldDropFirstAndLast) {
- var trim = histogram.settings.trimEdges;
- for (var prop in aggregations) {
- var points = aggregations[prop];
+ const trim = histogram.settings.trimEdges;
+ for (const prop in aggregations) {
+ const points = aggregations[prop];
if (points.datapoints.length > trim * 2) {
points.datapoints = points.datapoints.slice(trim, points.datapoints.length - trim);
}
@@ -333,7 +333,7 @@ export class ElasticResponse {
}
getErrorFromElasticResponse(response, err) {
- var result: any = {};
+ const result: any = {};
result.data = JSON.stringify(err, null, 4);
if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
result.message = err.root_cause[0].reason;
@@ -349,10 +349,10 @@ export class ElasticResponse {
}
getTimeSeries() {
- var seriesList = [];
+ const seriesList = [];
for (var i = 0; i < this.response.responses.length; i++) {
- var response = this.response.responses[i];
+ const response = this.response.responses[i];
if (response.error) {
throw this.getErrorFromElasticResponse(this.response, response.error);
}
@@ -362,10 +362,10 @@ export class ElasticResponse {
}
if (response.aggregations) {
- var aggregations = response.aggregations;
- var target = this.targets[i];
- var tmpSeriesList = [];
- var table = new TableModel();
+ const aggregations = response.aggregations;
+ const target = this.targets[i];
+ const tmpSeriesList = [];
+ const table = new TableModel();
this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
this.trimDatapoints(tmpSeriesList, target);