elastic: backend: removed unused time-field (#61362)

* elastic: backend: removed unused time-field

* improve comment formatting

Co-authored-by: Ivana Huckova <30407135+ivanahuckova@users.noreply.github.com>

Gábor Farkas 2023-01-12 16:52:32 +01:00 committed by GitHub
parent 91322bebb5
commit 57425234c9
15 changed files with 6 additions and 127 deletions


@ -8,7 +8,6 @@ import (
// Query represents the time series query model of the datasource
type Query struct {
TimeField string `json:"timeField"`
RawQuery string `json:"query"`
BucketAggs []*BucketAgg `json:"bucketAggs"`
Metrics []*MetricAgg `json:"metrics"`


@ -12,10 +12,10 @@ func parseQuery(tsdbQuery []backend.DataQuery) ([]*Query, error) {
if err != nil {
return nil, err
}
timeField, err := model.Get("timeField").String()
if err != nil {
return nil, err
}
// We had a string field named `timeField` in the past; it is not used anymore.
// Please do not create a new field with that name, to avoid potential problems with old, persisted queries.
rawQuery := model.Get("query").MustString()
bucketAggs, err := parseBucketAggs(model)
if err != nil {
@ -30,7 +30,6 @@ func parseQuery(tsdbQuery []backend.DataQuery) ([]*Query, error) {
interval := q.Interval
queries = append(queries, &Query{
TimeField: timeField,
RawQuery: rawQuery,
BucketAggs: bucketAggs,
Metrics: metrics,
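
The comment added above is the whole compatibility story: old, persisted queries may still carry a `timeField` key, and the backend now simply never reads it. Below is a minimal, standalone sketch of why those old queries keep parsing — it uses the standard `encoding/json` package and a trimmed stand-in for the Query model rather than the datasource's own parser shown in the diff, so the names here are illustrative only:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative stand-in for the trimmed Query model; only the fields needed
// for this sketch are included.
type Query struct {
	RawQuery   string            `json:"query"`
	BucketAggs []json.RawMessage `json:"bucketAggs"`
}

func main() {
	// An old, persisted query that still contains the legacy "timeField" key.
	legacy := []byte(`{"timeField": "@timestamp", "query": "@metric:cpu", "bucketAggs": []}`)

	var q Query
	// encoding/json ignores keys that have no matching struct field, so the
	// legacy "timeField" entry is dropped silently and parsing still succeeds.
	if err := json.Unmarshal(legacy, &q); err != nil {
		fmt.Println("unexpected error:", err)
		return
	}
	fmt.Printf("query=%q\n", q.RawQuery) // query="@metric:cpu"
}
```

This is presumably also why the comment warns against reintroducing a field with that name: a new `timeField` field would start picking up those old persisted values again, possibly with a different meaning.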


@ -10,7 +10,6 @@ func TestParseQuery(t *testing.T) {
t.Run("Test parse query", func(t *testing.T) {
t.Run("Should be able to parse query", func(t *testing.T) {
body := `{
"timeField": "@timestamp",
"query": "@metric:cpu",
"alias": "{{@hostname}} {{metric}}",
"interval": "10m",
@ -67,7 +66,6 @@ func TestParseQuery(t *testing.T) {
q := queries[0]
require.Equal(t, q.TimeField, "@timestamp")
require.Equal(t, q.RawQuery, "@metric:cpu")
require.Equal(t, q.Alias, "{{@hostname}} {{metric}}")
require.Equal(t, q.Interval.String(), "10s")


@ -79,25 +79,21 @@ func TestRefIdMatching(t *testing.T) {
query := []byte(`
[
{
"timeField": "t",
"refId": "COUNT_GROUPBY_DATE_HISTOGRAM",
"metrics": [{ "type": "count", "id": "c_1" }],
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "c_2" }]
},
{
"timeField": "t",
"refId": "COUNT_GROUPBY_HISTOGRAM",
"metrics": [{ "type": "count", "id": "h_3" }],
"bucketAggs": [{ "type": "histogram", "field": "bytes", "id": "h_4" }]
},
{
"timeField": "t",
"refId": "RAW_DOC",
"metrics": [{ "type": "raw_document", "id": "r_5" }],
"bucketAggs": []
},
{
"timeField": "t",
"refId": "PERCENTILE",
"metrics": [
{
@ -109,7 +105,6 @@ func TestRefIdMatching(t *testing.T) {
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "p_3" }]
},
{
"timeField": "t",
"refId": "EXTENDEDSTATS",
"metrics": [
{
@ -124,7 +119,6 @@ func TestRefIdMatching(t *testing.T) {
]
},
{
"timeField": "t",
"refId": "D",
"metrics": [{ "type": "raw_data", "id": "6" }],
"bucketAggs": []
@ -268,7 +262,6 @@ func TestSimpleQueryReturns1Frame(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
@ -313,7 +306,6 @@ func TestSimpleQueryCountAndAvg(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "type": "count", "id": "1" },
{ "type": "avg", "field": "value", "id": "2" }
@ -369,7 +361,6 @@ func TestSimpleGroupBy1Metric2Frames(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [
{ "type": "terms", "field": "host", "id": "2" },
@ -431,7 +422,6 @@ func TestSimpleGroupBy2Metrics4Frames(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "type": "count", "id": "1" },
{ "type": "avg", "field": "@value", "id": "4" }
@ -497,7 +487,6 @@ func TestPercentiles2Frames(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{
"type": "percentiles",
@ -559,7 +548,6 @@ func TestExtendedStats4Frames(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{
"type": "extended_stats",
@ -642,7 +630,6 @@ func TestTopMetrics2Frames(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{
"type": "top_metrics",
@ -733,7 +720,6 @@ func TestSingleGroupWithAliasPattern3Frames(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "count", "id": "1" }],
"alias": "{{term @host}} {{metric}} and {{not_exist}} {{@host}}",
"bucketAggs": [
@ -807,7 +793,6 @@ func TestHistogramSimple(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [{ "type": "histogram", "field": "bytes", "id": "3" }]
}
@ -864,7 +849,6 @@ func TestHistogramWith2FiltersAgg(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [
{
@ -930,7 +914,6 @@ func TestTrimEdges(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "type": "avg", "id": "1", "field": "@value" },
{ "type": "count", "id": "3" }
@ -981,7 +964,6 @@ func TestTermsAggWithoutDateHistogram(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "type": "avg", "id": "1", "field": "@value" },
{ "type": "count", "id": "3" }
@ -1038,7 +1020,6 @@ func TestPercentilesWithoutDateHistogram(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{
"type": "percentiles",
@ -1110,7 +1091,6 @@ func TestMultipleMetricsOfTheSameType(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "type": "avg", "id": "1", "field": "test" },
{ "type": "avg", "id": "2", "field": "test2" }
@ -1161,7 +1141,6 @@ func TestRawDocumentQuery(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "raw_document", "id": "1" }],
"bucketAggs": []
}
@ -1222,7 +1201,6 @@ func TestBucketScript(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "id": "1", "type": "sum", "field": "@value" },
{ "id": "3", "type": "max", "field": "@value" },
@ -1297,7 +1275,6 @@ func TestTwoBucketScripts(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [
{ "id": "1", "type": "sum", "field": "@value" },
{ "id": "3", "type": "max", "field": "@value" },
@ -1384,7 +1361,6 @@ func TestRawData(t *testing.T) {
[
{
"refId": "A",
"timeField": "t",
"metrics": [{ "type": "raw_data", "id": "1" }],
"bucketAggs": []
}
@ -1441,8 +1417,7 @@ func TestLogsAndCount(t *testing.T) {
}
],
"key": "Q-1561369883389-0.7611823271062786-0",
"query": "hello AND message",
"timeField": "@timestamp"
"query": "hello AND message"
}
]
`)
@ -1652,8 +1627,7 @@ func TestLogsEmptyResponse(t *testing.T) {
}
],
"key": "Q-1561369883389-0.7611823271062786-0",
"query": "hello AND message",
"timeField": "@timestamp"
"query": "hello AND message"
}
]
`)


@ -21,7 +21,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Simple query and count", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }]
}`,
@ -68,7 +67,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Simple query count & avg aggregation", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count", "id": "1" }, {"type": "avg", "field": "value", "id": "2" }],
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
}`,
@ -127,7 +125,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Single group by query one metric", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [
{ "type": "terms", "field": "host", "id": "2" },
@ -190,7 +187,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Single group by query two metrics", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count", "id": "1" }, { "type": "avg", "field": "@value", "id": "4" }],
"bucketAggs": [
{ "type": "terms", "field": "host", "id": "2" },
@ -276,7 +272,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With percentiles", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "percentiles", "settings": { "percents": [75, 90] }, "id": "1" }],
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }]
}`,
@ -333,7 +328,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With extended stats", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "extended_stats", "meta": { "max": true, "std_deviation_bounds_upper": true, "std_deviation_bounds_lower": true }, "id": "1" }],
"bucketAggs": [
{ "type": "terms", "field": "host", "id": "3" },
@ -447,7 +441,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Single group by with alias pattern", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"alias": "{{term @host}} {{metric}} and {{not_exist}} {{@host}}",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [
@ -527,7 +520,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Histogram response", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [{ "type": "histogram", "field": "bytes", "id": "3" }]
}`,
@ -557,7 +549,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With two filters agg", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count", "id": "1" }],
"bucketAggs": [
{
@ -623,7 +614,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With drop first and last aggregation (numeric)", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
"bucketAggs": [
{
@ -692,7 +682,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With drop first and last aggregation (string)", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
"bucketAggs": [
{
@ -761,7 +750,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Larger trimEdges value", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "count" }],
"bucketAggs": [
{
@ -808,7 +796,6 @@ func TestResponseParser(t *testing.T) {
t.Run("No group by time", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
"bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }]
}`,
@ -859,7 +846,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Multiple metrics of same type", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [{ "type": "avg", "field": "test", "id": "1" }, { "type": "avg", "field": "test2", "id": "2" }],
"bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }]
}`,
@ -906,7 +892,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With bucket_script", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [
{ "id": "1", "type": "sum", "field": "@value" },
{ "id": "3", "type": "max", "field": "@value" },
@ -984,7 +969,6 @@ func TestResponseParser(t *testing.T) {
t.Run("Terms with two bucket_script", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [
{ "id": "1", "type": "sum", "field": "@value" },
{ "id": "3", "type": "max", "field": "@value" },
@ -1061,7 +1045,6 @@ func TestResponseParser(t *testing.T) {
t.Run("With top_metrics", func(t *testing.T) {
targets := map[string]string{
"A": `{
"timeField": "@timestamp",
"metrics": [
{
"type": "top_metrics",


@ -14,7 +14,6 @@
},
"alias": "",
"bucketAggs": [],
"timeField": "testtime",
"key": "Q-ee8fea91-a4c4-4ded-9827-b362476a4083-0",
"datasourceId": 39,
"intervalMs": 1000,


@ -45,7 +45,6 @@
],
"query": "",
"refId": "a",
"timeField": "testtime",
"intervalMs": 200,
"maxDataPoints": 779
}


@ -14,7 +14,6 @@
"field": "testtime"
}
],
"timeField": "testtime",
"intervalMs": 1000,
"interval": 1000000000,
"maxDataPoints": 814
@ -35,7 +34,6 @@
"field": "testtime"
}
],
"timeField": "testtime",
"intervalMs": 1000,
"interval": 1000000000,
"maxDataPoints": 814


@ -39,7 +39,6 @@
],
"query": "",
"refId": "a",
"timeField": "testtime",
"window": ""
}
]


@ -17,7 +17,6 @@
},
"alias": "",
"bucketAggs": [],
"timeField": "testtime",
"key": "Q-ee8fea91-a4c4-4ded-9827-b362476a4083-0",
"datasourceId": 39,
"intervalMs": 2000,


@ -17,7 +17,6 @@
},
"alias": "",
"bucketAggs": [],
"timeField": "testtime",
"key": "Q-ee8fea91-a4c4-4ded-9827-b362476a4083-0",
"datasourceId": 39,
"intervalMs": 2000,


@ -45,7 +45,6 @@
],
"query": "",
"refId": "a",
"timeField": "testtime",
"intervalMs": 200,
"maxDataPoints": 779
}


@ -14,7 +14,6 @@
"field": "testtime"
}
],
"timeField": "testtime",
"intervalMs": 30000,
"maxDataPoints": 814
},
@ -34,7 +33,6 @@
"field": "testtime"
}
],
"timeField": "testtime",
"intervalMs": 30000,
"maxDataPoints": 814
}


@ -39,7 +39,6 @@
],
"query": "",
"refId": "a",
"timeField": "testtime",
"window": ""
}
]


@ -21,7 +21,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With defaults", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
"metrics": [{"type": "count", "id": "0" }]
}`, from, to)
@ -41,7 +40,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("Should clean settings from null values (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "1" }],
"metrics": [{"type": "avg", "id": "0", "settings": {"missing": "null", "script": "1" } }]
}`, from, to)
@ -56,7 +54,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With multiple bucket aggs", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "terms", "field": "@host", "id": "2", "settings": { "size": "0", "order": "asc" } },
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
@ -78,7 +75,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With select field", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
@ -98,7 +94,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by term (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -122,7 +117,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by metric agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -154,7 +148,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by count metric agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -178,7 +171,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by count agg (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [
{"type": "count", "id": "1" },
{"type": "avg", "field": "@value", "id": "5" }
@ -205,7 +197,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -234,7 +225,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -264,7 +254,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and order by term", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -291,7 +280,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With term agg and valid min_doc_count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
@ -317,7 +305,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With metric percentiles", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
@ -351,7 +338,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With filters aggs", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
@ -382,7 +368,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With filters aggs and empty label (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
@ -413,7 +398,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With raw document metric size", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_document", "settings": {} }]
}`, from, to)
@ -426,7 +410,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With raw document metric query (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_document", "settings": {} }]
}`, from, to)
@ -448,7 +431,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With raw data metric query (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_data", "settings": {} }]
}`, from, to)
@ -470,7 +452,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With raw document metric size set", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_document", "settings": { "size": 1337 } }]
}`, from, to)
@ -483,7 +464,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With date histogram agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
@ -508,7 +488,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("Should not include time_zone if not present in the query model (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
@ -531,7 +510,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("Should not include time_zone when timeZone is utc", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
@ -554,7 +532,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("Should include time_zone when timeZone is not utc", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
@ -578,7 +555,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With histogram agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "3",
@ -605,7 +581,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With histogram (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "3",
@ -631,7 +606,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With geo hash grid agg", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "3",
@ -656,7 +630,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With moving average (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -691,7 +664,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With moving average", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -729,7 +701,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With moving average doc count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -760,7 +731,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With moving average doc count", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -792,7 +762,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With broken moving average (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
@ -828,7 +797,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With broken moving average", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "5" }
],
@ -864,7 +832,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With top_metrics (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
@ -888,7 +855,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With cumulative sum", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -926,7 +892,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With cumulative sum doc count", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -958,7 +923,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With broken cumulative sum", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "5" }
],
@ -994,7 +958,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With derivative", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1024,7 +987,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1053,7 +1015,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With derivative doc count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1083,7 +1044,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1114,7 +1074,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With serial_diff (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
@ -1144,7 +1103,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With serial_diff doc count", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1173,7 +1131,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With bucket_script", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
@ -1210,7 +1167,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With bucket_script (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1247,7 +1203,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With bucket_script doc count", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
@ -1281,7 +1236,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With bucket_script doc count (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
@ -1315,7 +1269,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With lucene query should add query_string filter when query is not empty (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"query": "foo",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_data", "settings": {} }]
@ -1330,7 +1283,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With lucene query should add query_string filter when query is not empty (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"query": "foo",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_data", "settings": {} }]
@ -1345,7 +1297,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With log query should return query with defaults (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [{ "type": "logs", "id": "1"}]
}`, from, to)
require.NoError(t, err)
@ -1378,7 +1329,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With log query with limit should return query with correct size", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [{ "type": "logs", "id": "1", "settings": { "limit": 1000 }}]
}`, from, to)
require.NoError(t, err)
@ -1389,7 +1339,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With log query should return highlight properties", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [{ "type": "logs", "id": "1" }]
}`, from, to)
require.NoError(t, err)
@ -1407,7 +1356,6 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
t.Run("With invalid query should return error", (func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"query": "foo",
}`, from, to)
require.Error(t, err)
@ -1422,7 +1370,6 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Correctly casts values in moving_avg (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"metrics": [
{ "type": "avg", "id" : "2" },
{
@ -1463,7 +1410,6 @@ func TestSettingsCasting(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
@ -1507,7 +1453,6 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Correctly transforms serial_diff settings (from frontend tests)", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "1" }
],
@ -1533,7 +1478,6 @@ func TestSettingsCasting(t *testing.T) {
// This test is with pipelineAgg and is passing. Same test without pipelineAgg is failing.
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
@ -1562,7 +1506,6 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Correctly transforms date_histogram settings", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "date_histogram",
@ -1597,7 +1540,6 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Correctly uses already int min_doc_count", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "date_histogram",
@ -1633,7 +1575,6 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Uses fixed_interval", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "date_histogram",
@ -1662,7 +1603,6 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Correctly handles scripts", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
@ -1702,7 +1642,6 @@ func TestSettingsCasting(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"metrics": [{ "type": "count", "id": "1" }],
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "id": "2", "settings": { "min_doc_count": "1" } }
]
@ -1718,7 +1657,6 @@ func TestSettingsCasting(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"metrics": [{ "type": "count", "id": "1" }],
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "id": "2", "field": "@time", "settings": { "min_doc_count": "1" } }
]
@ -1734,7 +1672,6 @@ func TestSettingsCasting(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"metrics": [{ "type": "count", "id": "1" }],
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "id": "2", "field": "@time", "settings": { "min_doc_count": "1", "interval": "1d" } }
]