Elasticsearch: Migrate frontend query builder tests to backend (#59578)

* WIP: Migrated first batch of tests

* Migrate more tests

* Migrate rest of the tests

* Update comments

* Update

* Update

* Remove newline
This commit is contained in:
Ivana Huckova 2022-12-01 12:09:12 +01:00 committed by GitHub
parent 0a9b238d39
commit a2ed586576
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 680 additions and 21 deletions

View File

@ -5486,14 +5486,7 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "3"]
],
"public/app/plugins/datasource/elasticsearch/QueryBuilder.test.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
[0, 0, 0, "Unexpected any. Specify a different type.", "4"],
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
[0, 0, 0, "Unexpected any. Specify a different type.", "6"],
[0, 0, 0, "Unexpected any. Specify a different type.", "7"]
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"public/app/plugins/datasource/elasticsearch/QueryBuilder.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],

View File

@ -40,6 +40,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, dateHistogramAgg.ExtendedBounds.Max, toMs)
})
t.Run("Should clean settings from null values (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "1" }],
"metrics": [{"type": "avg", "id": "0", "settings": {"missing": "null", "script": "1" } }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
secondLevel := firstLevel.Aggregation.Aggs[0]
require.Equal(t, secondLevel.Aggregation.Aggregation.(*es.MetricAggregation).Settings["script"], "1")
// FIXME: This is a bug in the implementation, missing is set to "null" instead of being removed
// require.Equal(t, secondLevel.Aggregation.Aggregation.(*es.MetricAggregation).Settings["missing"], nil)
})
t.Run("With multiple bucket aggs", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -82,6 +98,30 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, secondLevel.Aggregation.Aggregation.(*es.MetricAggregation).Field, "@value")
})
t.Run("With term agg and order by term (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
"field": "@host",
"id": "2",
"settings": { "size": "5", "order": "asc", "orderBy": "_term" }
},
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [
{"type": "count", "id": "1" },
{"type": "avg", "field": "@value", "id": "5" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Aggregation.Aggregation.(*es.TermsAggregation).Order["_key"], "asc")
})
t.Run("With term agg and order by metric agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -106,10 +146,12 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
avgAggOrderBy := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, avgAggOrderBy.Key, "5")
require.Equal(t, avgAggOrderBy.Aggregation.Type, "avg")
require.Equal(t, avgAggOrderBy.Aggregation.Aggregation.(*es.MetricAggregation).Field, "@value")
avgAgg := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggs[0]
require.Equal(t, avgAgg.Key, "5")
require.Equal(t, avgAgg.Aggregation.Type, "avg")
require.Equal(t, avgAgg.Aggregation.Aggregation.(*es.MetricAggregation).Field, "@value")
})
t.Run("With term agg and order by count metric agg", func(t *testing.T) {
@ -136,6 +178,33 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, termsAgg.Order["_count"], "asc")
})
t.Run("With term agg and order by count agg (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [
{"type": "count", "id": "1" },
{"type": "avg", "field": "@value", "id": "5" }
],
"bucketAggs": [
{
"type": "terms",
"field": "@host",
"id": "2",
"settings": { "size": "5", "order": "asc", "orderBy": "1" }
},
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Order["_count"], "asc")
require.NotEqual(t, firstLevel.Aggregation.Aggs[0].Key, "1")
})
t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -157,8 +226,12 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
sr := c.multisearchRequests[0].Requests[0]
orderByAgg := sr.Aggs[0].Aggregation.Aggs[0]
secondLevel := orderByAgg.Aggregation.Aggregation
require.Equal(t, orderByAgg.Key, "1")
require.Equal(t, orderByAgg.Aggregation.Type, "percentiles")
require.Equal(t, orderByAgg.Aggregation.Aggregation.(*es.MetricAggregation).Field, "@value")
require.Equal(t, secondLevel.(*es.MetricAggregation).Field, "@value")
})
t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
@ -181,9 +254,14 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
orderByAgg := sr.Aggs[0].Aggregation.Aggs[0]
firstLevel := sr.Aggs[0]
orderByAgg := firstLevel.Aggregation.Aggs[0]
secondLevel := orderByAgg.Aggregation.Aggregation
require.Equal(t, orderByAgg.Key, "1")
require.Equal(t, orderByAgg.Aggregation.Type, "extended_stats")
require.Equal(t, orderByAgg.Aggregation.Aggregation.(*es.MetricAggregation).Field, "@value")
require.Equal(t, secondLevel.(*es.MetricAggregation).Field, "@value")
})
t.Run("With term agg and order by term", func(t *testing.T) {
@ -213,6 +291,32 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, termsAgg.Order["_key"], "asc")
})
t.Run("With term agg and valid min_doc_count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
"field": "@host",
"id": "2",
"settings": { "min_doc_count": "1" }
},
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [
{"type": "count", "id": "1" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
// FIXME: This is a bug in the current implementation. The min_doc_count is not set.
// termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
// require.Equal(t, termsAgg.MinDocCount, "1")
})
t.Run("With metric percentiles", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -238,6 +342,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, percentilesAgg.Key, "1")
require.Equal(t, percentilesAgg.Aggregation.Type, "percentiles")
metricAgg := percentilesAgg.Aggregation.Aggregation.(*es.MetricAggregation)
require.Equal(t, metricAgg.Field, "@load_time")
percents := metricAgg.Settings["percents"].([]interface{})
require.Len(t, percents, 4)
require.Equal(t, percents[0], "1")
@ -277,7 +382,38 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
})
t.Run("With raw document metric", func(t *testing.T) {
t.Run("With filters aggs and empty label (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
"type": "filters",
"settings": {
"filters": [ { "query": "@metric:cpu", "label": "" }, { "query": "@metric:logins.count", "label": "" } ]
}
},
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
filtersAgg := sr.Aggs[0]
require.Equal(t, filtersAgg.Key, "2")
require.Equal(t, filtersAgg.Aggregation.Type, "filters")
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
require.Equal(t, fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, "@metric:cpu")
require.Equal(t, fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, "@metric:logins.count")
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, dateHistogramAgg.Key, "4")
require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
})
t.Run("With raw document metric size", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -290,6 +426,19 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, sr.Size, 500)
})
t.Run("With raw document metric query (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_document", "settings": {} }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
// FIXME: { _doc: { order: 'desc' } } is missing
// sr := c.multisearchRequests[0].Requests[0]
// require.Equal(t, sr, `{"docvalue_fields":["@timestamp"],"query":{"bool":{"filter":{"range":{"@timestamp":{"format":"epoch_millis","gte":1526406600000,"lte":1526406900000}}}}},"script_fields":{},"size":500,"sort":[{"@timestamp":{"order":"desc","unmapped_type":"boolean"}}, {"_doc": {"order": "desc"}}]}`)
})
t.Run("With raw document metric size set", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -328,6 +477,29 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, hAgg.FixedInterval, "$__interval_msms")
require.Equal(t, hAgg.MinDocCount, 2)
t.Run("Should not include time_zone if not present in the query model (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
"type": "date_histogram",
"field": "@timestamp",
"settings": {
"min_doc_count": "1"
}
}
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
dateHistogram := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
require.Empty(t, dateHistogram.TimeZone)
})
t.Run("Should not include time_zone when timeZone is utc", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -370,8 +542,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
deteHistogram := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
require.Equal(t, deteHistogram.TimeZone, "America/Los_Angeles")
dateHistogram := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
require.Equal(t, dateHistogram.TimeZone, "America/Los_Angeles")
})
})
@ -402,6 +574,32 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, *hAgg.Missing, 5)
})
t.Run("With histogram (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "3",
"type": "histogram",
"field": "bytes",
"settings": { "interval": 10, "min_doc_count": 2 }
}
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "3")
require.Equal(t, firstLevel.Aggregation.Type, "histogram")
hAgg := firstLevel.Aggregation.Aggregation.(*es.HistogramAgg)
require.Equal(t, hAgg.Field, "bytes")
require.Equal(t, hAgg.Interval, 10)
require.Equal(t, hAgg.MinDocCount, 2)
})
t.Run("With geo hash grid agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -427,7 +625,46 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, ghGridAgg.Precision, 3)
})
t.Run("With moving average (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
"metrics": [
{ "id": "3", "type": "sum", "field": "@value" },
{
"id": "2",
"type": "moving_avg",
"field": "3"
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
// FIXME: Currently this is 1 as movingAvg is completely missing. We have only sum.
// require.Equal(t, len(firstLevel.Aggregation.Aggs), 2)
sumAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, sumAgg.Key, "3")
require.Equal(t, sumAgg.Aggregation.Type, "sum")
mAgg := sumAgg.Aggregation.Aggregation.(*es.MetricAggregation)
require.Equal(t, mAgg.Field, "@value")
// FIXME: This is currently fully missing
// in the test below with pipelineAgg it is working as expected
// movingAvgAgg := firstLevel.Aggregation.Aggs[1]
// require.Equal(t, movingAvgAgg.Key, "2")
// require.Equal(t, movingAvgAgg.Aggregation.Type, "moving_avg")
// pl := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
// require.Equal(t, pl.BucketPath, "3")
})
t.Run("With moving average", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -465,7 +702,41 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, pl.BucketPath, "3")
})
t.Run("With moving average doc count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
"metrics": [
{ "id": "3", "type": "count"},
{
"id": "2",
"type": "moving_avg",
"field": "3"
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
// FIXME: Currently, movingAvg is completely missing
// in the test below with pipelineAgg it is working as expected
// require.Len(t, firstLevel.Aggregation.Aggs, 1)
// movingAvgAgg := firstLevel.Aggregation.Aggs[0]
// require.Equal(t, movingAvgAgg.Key, "2")
// require.Equal(t, movingAvgAgg.Aggregation.Type, "moving_avg")
// pl := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
// require.Equal(t, pl.BucketPath, "_count")
})
t.Run("With moving average doc count", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -497,7 +768,46 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, pl.BucketPath, "_count")
})
t.Run("With broken moving average (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [
{ "id": "3", "type": "sum", "field": "@value" },
{
"id": "2",
"type": "moving_avg",
"field": "3"
},
{
"id": "4",
"type": "moving_avg"
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "3")
// FIXME: Currently, movingAvg is completely missing
// in the test below with pipelineAgg it is working as expected
// require.Len(t, firstLevel.Aggregation.Aggs, 2)
sumAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, sumAgg.Key, "3")
// movingAvgAgg := firstLevel.Aggregation.Aggs[1]
// require.Equal(t, movingAvgAgg.Key, "2")
// plAgg := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
// require.Equal(t, plAgg.BucketPath, "3")
})
t.Run("With broken moving average", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -533,6 +843,30 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, plAgg.BucketPath, "3")
})
t.Run("With top_metrics (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [
{ "id": "2", "type": "top_metrics", "settings": { "order": "desc", "orderBy": "@timestamp", "metrics": ["@value"]} }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "3")
secondLevel := firstLevel.Aggregation.Aggs[0]
require.Equal(t, secondLevel.Key, "2")
require.Equal(t, secondLevel.Aggregation.Type, "top_metrics")
topMetricsBytes, _ := json.Marshal(firstLevel.Aggregation.Aggs[0].Aggregation.Aggregation)
require.Equal(t, string(topMetricsBytes), `{"metrics":[{"field":"@value"}],"size":"1","sort":[{"@timestamp":"desc"}]}`)
})
t.Run("With cumulative sum", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
@ -669,6 +1003,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With derivative doc count", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -697,7 +1032,39 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, plAgg.BucketPath, "_count")
})
t.Run("With derivative doc count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
"metrics": [
{ "id": "3", "type": "count" },
{
"id": "2",
"type": "derivative",
"field": "3"
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
// FIXME: This is currently fully missing
// in the test above with pipelineAgg it is working as expected
// derivativeAgg := firstLevel.Aggregation.Aggs[0]
// require.Equal(t, derivativeAgg.Key, "2")
// plAgg := derivativeAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
// require.Equal(t, plAgg.BucketPath, "_count")
})
t.Run("With serial_diff", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -709,7 +1076,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{
"id": "2",
"type": "serial_diff",
"pipelineAgg": "3"
"pipelineAgg": "3",
"settings": { "lag": "5" }
}
]
}`, from, to, 15*time.Second)
@ -724,6 +1092,39 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, serialDiffAgg.Key, "2")
plAgg := serialDiffAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "3")
require.Equal(t, plAgg.Settings["lag"], 5.)
})
t.Run("With serial_diff (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [
{ "id": "3", "type": "max", "field": "@value" },
{
"id": "2",
"type": "serial_diff",
"field": "3",
"settings": { "lag": "5" }
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "3")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
// FIXME: This is currently fully missing
// in the test above with pipelineAgg it is working as expected
// serialDiffAgg := firstLevel.Aggregation.Aggs[1]
// require.Equal(t, serialDiffAgg.Key, "2")
// plAgg := serialDiffAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
// require.Equal(t, plAgg.BucketPath, "3")
// require.Equal(t, plAgg.Settings["lag"], "5")
})
t.Run("With serial_diff doc count", func(t *testing.T) {
@ -756,6 +1157,43 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With bucket_script", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
"metrics": [
{ "id": "1", "type": "sum", "field": "@value" },
{ "id": "3", "type": "max", "field": "@value" },
{
"id": "4",
"type": "bucket_script",
"pipelineVariables": [
{ "name": "var1", "pipelineAgg": "1" },
{ "name": "var2", "pipelineAgg": "3" }
],
"settings": { "script": "params.var1 * params.var2" }
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
bucketScriptAgg := firstLevel.Aggregation.Aggs[2]
require.Equal(t, bucketScriptAgg.Key, "4")
plAgg := bucketScriptAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath.(map[string]interface{}), map[string]interface{}{
"var1": "1",
"var2": "3",
})
})
t.Run("With bucket_script (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -825,6 +1263,100 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
"var1": "_count",
})
})
t.Run("With bucket_script doc count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
"metrics": [
{ "id": "3", "type": "count"},
{
"id": "4",
"type": "bucket_script",
"pipelineVariables": [
{ "name": "var1", "pipelineAgg": "3" }
],
"settings": { "script": "params.var1 * 1000" }
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
bucketScriptAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, bucketScriptAgg.Key, "4")
plAgg := bucketScriptAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath.(map[string]interface{}), map[string]interface{}{
"var1": "_count",
})
})
t.Run("With lucene query should add query_string filter when query is not empty (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"query": "foo"
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
filter := sr.Query.Bool.Filters[1].(*es.QueryStringFilter)
require.Equal(t, filter.Query, "foo")
require.Equal(t, filter.AnalyzeWildcard, true)
})
t.Run("With lucene query should add query_string filter when query is not empty (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"query": "foo"
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
filter := sr.Query.Bool.Filters[1].(*es.QueryStringFilter)
require.Equal(t, filter.Query, "foo")
require.Equal(t, filter.AnalyzeWildcard, true)
})
// FIXME
// Log query is not implemented with defaults
// t.Run("With log query should return query with defaults (from frontend tests)", func(t *testing.T) {
// c := newFakeClient()
// _, err := executeTsdbQuery(c, `{
// "timeField": "@timestamp",
// "metrics": { "type": "logs", "id": "1"}
// }`, from, to, 15*time.Second)
// require.NoError(t, err)
// sr := c.multisearchRequests[0].Requests[0]
// require.Equal(t, sr.Size, 500)
// rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter)
// require.Equal(t, rangeFilter.Key, c.timeField)
// require.Equal(t, rangeFilter.Lte, toMs)
// require.Equal(t, rangeFilter.Gte, fromMs)
// require.Equal(t, rangeFilter.Format, es.DateFormatEpochMS)
// sort, _ := json.Marshal(sr.Sort)
// require.Equal(t, string(sort), `"sort":[{"@timestamp":{"order":"desc","unmapped_type":"boolean"}},{"_doc":{"order":"desc"}}]`)
// firstLevel := sr.Aggs[0]
// require.Equal(t, firstLevel.Key, "1")
// require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
// hAgg := firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg)
// require.Equal(t, hAgg.ExtendedBounds.Max, toMs)
// require.Equal(t, hAgg.ExtendedBounds.Min, fromMs)
// require.Equal(t, hAgg.Field, "@timestamp")
// require.Equal(t, hAgg.Format, es.DateFormatEpochMS)
// require.Equal(t, hAgg.FixedInterval, "$__interval_msms")
// require.Equal(t, hAgg.MinDocCount, 0)
// })
})
}
@ -832,7 +1364,50 @@ func TestSettingsCasting(t *testing.T) {
from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
t.Run("Correctly casts values in moving_avg (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [
{ "type": "avg", "id" : "2" },
{
"type": "moving_avg",
"id" : "3",
"pipelineAgg": "2",
"settings": {
"window": "5",
"model": "holt_winters",
"predict": "10",
"settings": {
"alpha": "1",
"beta": "2",
"gamma": "3",
"period": "4"
}
}
}
],
"bucketAggs": [{"type": "date_histogram", "field": "@timestamp", "id": "1"}]
}`, from, to, 15*time.Second)
require.NoError(t, err)
// FIXME
// This is working correctly if instead of field we use pipelineAgg
// sr := c.multisearchRequests[0].Requests[0]
// movingAvgSettings := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggregation.(*es.PipelineAggregation).Settings
// assert.Equal(t, movingAvgSettings["window"], 5)
// assert.Equal(t, movingAvgSettings["predict"], 10)
// modelSettings := movingAvgSettings["settings"].(map[string]interface{})
// assert.Equal(t, modelSettings["alpha"], 1)
// assert.Equal(t, modelSettings["beta"], 2)
// assert.Equal(t, modelSettings["gamma"], 3)
// assert.Equal(t, modelSettings["period"], 4)
})
t.Run("Correctly transforms moving_average settings", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -876,7 +1451,34 @@ func TestSettingsCasting(t *testing.T) {
assert.Equal(t, 4., modelSettings["period"])
})
t.Run("Correctly transforms serial_diff settings (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "1" }
],
"metrics": [
{ "id": "2", "type": "avg" },
{
"id": "3",
"type": "serial_diff",
"field": "2",
"settings": {
"lag": "1"
}
}
]
}`, from, to, 15*time.Second)
assert.Nil(t, err)
// FIXME: This fails, but if we add pipelineAgg it works
// sr := c.multisearchRequests[0].Requests[0]
// serialDiffSettings := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggregation.(*es.PipelineAggregation).Settings
// assert.Equal(t, serialDiffSettings["lag"], 1.)
})
t.Run("Correctly transforms serial_diff settings", func(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@ -1042,6 +1644,57 @@ func TestSettingsCasting(t *testing.T) {
assert.Equal(t, "my_script", oldFormatAggSettings["script"])
})
})
t.Run("Field property (from frontend tests)", func(t *testing.T) {
t.Run("Should use timeField from datasource when not specified", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"metrics": [{ "type": "count", "id": "1" }],
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "id": "2", "settings": { "min_doc_count": "1" } }
]
}`, from, to, 15*time.Second)
assert.Nil(t, err)
// FIXME: This should be @timestamp, but Field is empty
// sr := c.multisearchRequests[0].Requests[0]
// dateHistogramAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
// assert.Equal(t, dateHistogramAgg.Field, "@timestamp")
})
t.Run("Should use field from bucket agg when specified", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"metrics": [{ "type": "count", "id": "1" }],
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "id": "2", "field": "@time", "settings": { "min_doc_count": "1" } }
]
}`, from, to, 15*time.Second)
assert.Nil(t, err)
sr := c.multisearchRequests[0].Requests[0]
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
assert.Equal(t, dateHistogramAgg.Field, "@time")
})
t.Run("Should use fixed_interval", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"metrics": [{ "type": "count", "id": "1" }],
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "id": "2", "field": "@time", "settings": { "min_doc_count": "1", "interval": "1d" } }
]
}`, from, to, 15*time.Second)
assert.Nil(t, err)
sr := c.multisearchRequests[0].Requests[0]
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
assert.Equal(t, dateHistogramAgg.FixedInterval, "1d")
})
})
}
type fakeClient struct {

View File

@ -1,5 +1,5 @@
import { ElasticQueryBuilder } from './QueryBuilder';
import { ElasticsearchQuery } from './types';
import { ElasticsearchQuery, TermsQuery } from './types';
describe('ElasticQueryBuilder', () => {
const builder = new ElasticQueryBuilder({ timeField: '@timestamp' }); // es2
@ -266,7 +266,7 @@ describe('ElasticQueryBuilder', () => {
refId: 'A',
metrics: [{ type: 'raw_document', id: '1', settings: {} }],
timeField: '@timestamp',
bucketAggs: [] as any[],
bucketAggs: [],
};
const query = builder.build(target);
@ -587,6 +587,8 @@ describe('ElasticQueryBuilder', () => {
expect(firstLevel.histogram.min_doc_count).toBe('2');
});
// This test wasn't migrated, as adhoc variables are going to be interpolated before
// Or we need to add this to backend query builder (TBD)
it('with adhoc filters', () => {
const query = builder.build(
{
@ -616,12 +618,17 @@ describe('ElasticQueryBuilder', () => {
});
describe('getTermsQuery', () => {
function testGetTermsQuery(queryDef: any) {
function testGetTermsQuery(queryDef: TermsQuery) {
const query = builder.getTermsQuery(queryDef);
return query.aggs['1'].terms.order;
}
function checkSort(order: any, expected: string) {
function checkSort(
order: {
[key: string]: string;
},
expected: string
) {
expect(order._term).toBeUndefined();
expect(order._key).toBe(expected);
}
@ -665,7 +672,9 @@ describe('ElasticQueryBuilder', () => {
it('should not add query_string filter when query is empty', () => {
const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc' });
expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
expect(
query.query.bool.filter.find((filter: object) => Object.keys(filter).includes('query_string'))
).toBeFalsy();
});
});
});
@ -683,7 +692,9 @@ describe('ElasticQueryBuilder', () => {
it('should not add query_string filter when query is empty', () => {
const query = builder.build({ refId: 'A' });
expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
expect(
query.query.bool.filter.find((filter: object) => Object.keys(filter).includes('query_string'))
).toBeFalsy();
});
});
@ -705,7 +716,7 @@ describe('ElasticQueryBuilder', () => {
{ _doc: { order: 'desc' } },
]);
const expectedAggs: any = {
const expectedAggs = {
// FIXME: It's pretty weak to include this '1' in the test as it's not part of what we are testing here and
// might change as a cause of unrelated changes
1: {
@ -736,7 +747,9 @@ describe('ElasticQueryBuilder', () => {
it('should not add query_string filter when query is empty', () => {
const query = builder.getLogsQuery({ refId: 'A' }, 500);
expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
expect(
query.query.bool.filter.find((filter: object) => Object.keys(filter).includes('query_string'))
).toBeFalsy();
});
});