diff --git a/.betterer.results b/.betterer.results
index 16d6dde1d23..8bdf9034c93 100644
--- a/.betterer.results
+++ b/.betterer.results
@@ -77,7 +77,7 @@ exports[`no enzyme tests`] = {
   "public/app/plugins/datasource/cloudwatch/components/ConfigEditor.test.tsx:4057721851": [
     [1, 19, 13, "RegExp match", "2409514259"]
   ],
-  "public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx:3481855642": [
+  "public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx:4128034878": [
     [0, 26, 13, "RegExp match", "2409514259"]
   ],
   "public/app/plugins/datasource/influxdb/components/ConfigEditor.test.tsx:57753101": [
diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go
index cbd018af976..d77a2f945ce 100644
--- a/pkg/tsdb/elasticsearch/client/client.go
+++ b/pkg/tsdb/elasticsearch/client/client.go
@@ -48,7 +48,6 @@ var newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *D
 // Client represents a client which can interact with elasticsearch api
 type Client interface {
-	GetVersion() *semver.Version
 	GetTimeField() string
 	GetMinInterval(queryInterval string) (time.Duration, error)
 	ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
@@ -74,7 +73,6 @@ var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider
 		ctx:                ctx,
 		httpClientProvider: httpClientProvider,
 		ds:                 ds,
-		version:            ds.ESVersion,
 		timeField:          ds.TimeField,
 		indices:            indices,
 		timeRange:          timeRange,
@@ -85,17 +83,12 @@ type baseClientImpl struct {
 	ctx                context.Context
 	httpClientProvider httpclient.Provider
 	ds                 *DatasourceInfo
-	version            *semver.Version
 	timeField          string
 	indices            []string
 	timeRange          backend.TimeRange
 	debugEnabled       bool
 }
 
-func (c *baseClientImpl) GetVersion() *semver.Version {
-	return c.version
-}
-
 func (c *baseClientImpl) GetTimeField() string {
 	return c.timeField
 }
@@ -281,20 +274,6 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
 			interval: searchReq.Interval,
 		}
 
-		if c.version.Major() < 5 {
-			mr.header["search_type"] = "count"
-		} else {
-			allowedVersionRange, _ := semver.NewConstraint(">=5.6.0, <7.0.0")
-
-			if allowedVersionRange.Check(c.version) {
-				maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
-				if maxConcurrentShardRequests == 0 {
-					maxConcurrentShardRequests = 256
-				}
-				mr.header["max_concurrent_shard_requests"] = maxConcurrentShardRequests
-			}
-		}
-
 		multiRequests = append(multiRequests, &mr)
 	}
 
@@ -304,17 +283,13 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
 func (c *baseClientImpl) getMultiSearchQueryParameters() string {
 	var qs []string
 
-	if c.version.Major() >= 7 {
-		maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
-		if maxConcurrentShardRequests == 0 {
-			maxConcurrentShardRequests = 5
-		}
-		qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", maxConcurrentShardRequests))
+	maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
+	if maxConcurrentShardRequests == 0 {
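+		// 0 means the option is unset in the datasource config; fall back to
+		// Elasticsearch's own default of 5 concurrent shard requests.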
+		maxConcurrentShardRequests = 5
 	}
+	qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", maxConcurrentShardRequests))
 
-	allowedFrozenIndicesVersionRange, _ := semver.NewConstraint(">=6.6.0")
-
-	if (allowedFrozenIndicesVersionRange.Check(c.version)) && c.ds.IncludeFrozen && c.ds.XPack {
+	if c.ds.IncludeFrozen && c.ds.XPack {
 		qs = append(qs, "ignore_throttled=false")
 	}
 
@@ -322,7 +297,7 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
 
 func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
-	return NewMultiSearchRequestBuilder(c.GetVersion())
+	return NewMultiSearchRequestBuilder()
 }
 
 func (c *baseClientImpl) EnableDebug() {
diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go
index 102cde6043e..fb57703fcc1 100644
--- a/pkg/tsdb/elasticsearch/client/client_test.go
+++ b/pkg/tsdb/elasticsearch/client/client_test.go
@@ -18,247 +18,8 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-func TestNewClient(t *testing.T) {
-	t.Run("When using legacy version numbers", func(t *testing.T) {
-		t.Run("When version 2 should return v2 client", func(t *testing.T) {
-			version, err := semver.NewVersion("2.0.0")
-			require.NoError(t, err)
-			ds := &DatasourceInfo{
-				ESVersion: version,
-				TimeField: "@timestamp",
-			}
-
-			c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-			require.NoError(t, err)
-			assert.Equal(t, "2.0.0", c.GetVersion().String())
-		})
-
-		t.Run("When version 5 should return v5 client", func(t *testing.T) {
-			version, err := semver.NewVersion("5.0.0")
-			require.NoError(t, err)
-			ds := &DatasourceInfo{
-				ESVersion: version,
-				TimeField: "@timestamp",
-			}
-
-			c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-			require.NoError(t, err)
-			assert.Equal(t, "5.0.0", c.GetVersion().String())
-		})
-
-		t.Run("When version 56 should return v5.6 client", func(t *testing.T) {
-			version, err := semver.NewVersion("5.6.0")
-			require.NoError(t, err)
-			ds := &DatasourceInfo{
-				ESVersion: version,
-				TimeField: "@timestamp",
-			}
-
-			c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-			require.NoError(t, err)
-			assert.Equal(t, "5.6.0", c.GetVersion().String())
-		})
-
-		t.Run("When version 60 should return v6.0 client", func(t *testing.T) {
-			version, err := semver.NewVersion("6.0.0")
-			require.NoError(t, err)
-			ds := &DatasourceInfo{
-				ESVersion: version,
-				TimeField: "@timestamp",
-			}
-
-			c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-			require.NoError(t, err)
-			assert.Equal(t, "6.0.0", c.GetVersion().String())
-		})
-
-		t.Run("When version 70 should return v7.0 client", func(t *testing.T) {
-			version, err := semver.NewVersion("7.0.0")
-			require.NoError(t, err)
-			ds := &DatasourceInfo{
-				ESVersion: version,
-				TimeField: "@timestamp",
-			}
-
-			c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-			require.NoError(t, err)
-			assert.Equal(t, "7.0.0", c.GetVersion().String())
-		})
-	})
-
-	t.Run("When version is a valid semver string should create a client", func(t *testing.T) {
-		version, err := semver.NewVersion("7.2.4")
-		require.NoError(t, err)
-		ds := &DatasourceInfo{
-			ESVersion: version,
-			TimeField: "@timestamp",
-		}
-
-		c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-		require.NoError(t, err)
-		assert.Equal(t, version.String(), c.GetVersion().String())
-	})
-}
-
 func TestClient_ExecuteMultisearch(t *testing.T) {
-	version, err := semver.NewVersion("2.0.0")
-	require.NoError(t, err)
-	httpClientScenario(t, "Given a fake http client and a v2.x client with response", &DatasourceInfo{
-		Database:  "[metrics-]YYYY.MM.DD",
-		ESVersion: version,
-		TimeField: "@timestamp",
-		Interval:  "Daily",
-	}, func(sc *scenarioContext) {
-		sc.responseBody = `{
-			"responses": [
-				{
-					"hits": { "hits": [], "max_score": 0, "total": 4656 },
-					"status": 200
-				}
-			]
-		}`
-
-		ms, err := createMultisearchForTest(t, sc.client)
-		require.NoError(t, err)
-		res, err := sc.client.ExecuteMultisearch(ms)
-		require.NoError(t, err)
-
-		require.NotNil(t, sc.request)
-		assert.Equal(t, http.MethodPost, sc.request.Method)
-		assert.Equal(t, "/_msearch", sc.request.URL.Path)
-
-		require.NotNil(t, sc.requestBody)
-		headerBytes, err := sc.requestBody.ReadBytes('\n')
-		require.NoError(t, err)
-		bodyBytes := sc.requestBody.Bytes()
-
-		jHeader, err := simplejson.NewJson(headerBytes)
-		require.NoError(t, err)
-
-		jBody, err := simplejson.NewJson(bodyBytes)
-		require.NoError(t, err)
-
-		assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
-		assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
-		assert.Equal(t, "count", jHeader.Get("search_type").MustString())
-		assert.Empty(t, jHeader.Get("max_concurrent_shard_requests"))
-
-		assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
-
-		assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
-
-		assert.Equal(t, 200, res.Status)
-		require.Len(t, res.Responses, 1)
-	})
-
-	version, err = semver.NewVersion("5.0.0")
-	require.NoError(t, err)
-	httpClientScenario(t, "Given a fake http client and a v5.x client with response", &DatasourceInfo{
-		Database:                   "[metrics-]YYYY.MM.DD",
-		ESVersion:                  version,
-		TimeField:                  "@timestamp",
-		Interval:                   "Daily",
-		MaxConcurrentShardRequests: 100,
-	}, func(sc *scenarioContext) {
-		sc.responseBody = `{
-			"responses": [
-				{
-					"hits": { "hits": [], "max_score": 0, "total": 4656 },
-					"status": 200
-				}
-			]
-		}`
-
-		ms, err := createMultisearchForTest(t, sc.client)
-		require.NoError(t, err)
-		res, err := sc.client.ExecuteMultisearch(ms)
-		require.NoError(t, err)
-
-		require.NotNil(t, sc.request)
-		assert.Equal(t, http.MethodPost, sc.request.Method)
-		assert.Equal(t, "/_msearch", sc.request.URL.Path)
-
-		require.NotNil(t, sc.requestBody)
-
-		headerBytes, err := sc.requestBody.ReadBytes('\n')
-		require.NoError(t, err)
-		bodyBytes := sc.requestBody.Bytes()
-
-		jHeader, err := simplejson.NewJson(headerBytes)
-		require.NoError(t, err)
-
-		jBody, err := simplejson.NewJson(bodyBytes)
-		require.NoError(t, err)
-
-		assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
-		assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
-		assert.Equal(t, "query_then_fetch", jHeader.Get("search_type").MustString())
-		assert.Empty(t, jHeader.Get("max_concurrent_shard_requests"))
-
-		assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
-
-		assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
-
-		assert.Equal(t, 200, res.Status)
-		require.Len(t, res.Responses, 1)
-	})
-
-	version, err = semver.NewVersion("5.6.0")
-	require.NoError(t, err)
-	httpClientScenario(t, "Given a fake http client and a v5.6 client with response", &DatasourceInfo{
-		Database:                   "[metrics-]YYYY.MM.DD",
-		ESVersion:                  version,
-		TimeField:                  "@timestamp",
-		Interval:                   "Daily",
-		MaxConcurrentShardRequests: 100,
-		IncludeFrozen:              true,
-		XPack:                      true,
-	}, func(sc *scenarioContext) {
-		sc.responseBody = `{
-			"responses": [
-				{
-					"hits": { "hits": [], "max_score": 0, "total": 4656 },
-					"status": 200
-				}
-			]
-		}`
-
-		ms, err := createMultisearchForTest(t, sc.client)
-		require.NoError(t, err)
-		res, err := sc.client.ExecuteMultisearch(ms)
-		require.NoError(t, err)
-
-		require.NotNil(t, sc.request)
-		assert.Equal(t, http.MethodPost, sc.request.Method)
-		assert.Equal(t, "/_msearch", sc.request.URL.Path)
-		assert.NotContains(t, sc.request.URL.RawQuery, "ignore_throttled=")
-
-		require.NotNil(t, sc.requestBody)
-
-		headerBytes, err := sc.requestBody.ReadBytes('\n')
-		require.NoError(t, err)
-		bodyBytes := sc.requestBody.Bytes()
-
-		jHeader, err := simplejson.NewJson(headerBytes)
-		require.NoError(t, err)
-
-		jBody, err := simplejson.NewJson(bodyBytes)
-		require.NoError(t, err)
-
-		assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
-		assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
-		assert.Equal(t, "query_then_fetch", jHeader.Get("search_type").MustString())
-		assert.Equal(t, 100, jHeader.Get("max_concurrent_shard_requests").MustInt())
-
-		assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
-
-		assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
-
-		assert.Equal(t, 200, res.Status)
-		require.Len(t, res.Responses, 1)
-	})
-
-	version, err = semver.NewVersion("7.0.0")
+	version, err := semver.NewVersion("8.0.0")
 	require.NoError(t, err)
 	httpClientScenario(t, "Given a fake http client and a v7.0 client with response", &DatasourceInfo{
 		Database: "[metrics-]YYYY.MM.DD",
diff --git a/pkg/tsdb/elasticsearch/client/search_request.go b/pkg/tsdb/elasticsearch/client/search_request.go
index 24eae62df6a..226152ff4a3 100644
--- a/pkg/tsdb/elasticsearch/client/search_request.go
+++ b/pkg/tsdb/elasticsearch/client/search_request.go
@@ -3,13 +3,11 @@ package es
 import (
 	"strings"
 
-	"github.com/Masterminds/semver"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
 )
 
 // SearchRequestBuilder represents a builder which can build a search request
 type SearchRequestBuilder struct {
-	version     *semver.Version
 	interval    intervalv2.Interval
 	index       string
 	size        int
@@ -20,9 +18,8 @@ type SearchRequestBuilder struct {
 }
 
 // NewSearchRequestBuilder create a new search request builder
-func NewSearchRequestBuilder(version *semver.Version, interval intervalv2.Interval) *SearchRequestBuilder {
+func NewSearchRequestBuilder(interval intervalv2.Interval) *SearchRequestBuilder {
 	builder := &SearchRequestBuilder{
-		version:     version,
 		interval:    interval,
 		sort:        make(map[string]interface{}),
 		customProps: make(map[string]interface{}),
@@ -87,13 +84,7 @@ func (b *SearchRequestBuilder) SortDesc(field, unmappedType string) *SearchReque
 
 // AddDocValueField adds a doc value field to the search request
 func (b *SearchRequestBuilder) AddDocValueField(field string) *SearchRequestBuilder {
-	// fields field not supported on version >= 5
-	if b.version.Major() < 5 {
-		b.customProps["fields"] = []string{"*", "_source"}
-		b.customProps["fielddata_fields"] = []string{field}
-	} else {
-		b.customProps["docvalue_fields"] = []string{field}
-	}
+	b.customProps["docvalue_fields"] = []string{field}
 
 	b.customProps["script_fields"] = make(map[string]interface{})
 
@@ -110,27 +101,24 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder {
 
 // Agg initiate and returns a new aggregation builder
 func (b *SearchRequestBuilder) Agg() AggBuilder {
-	aggBuilder := newAggBuilder(b.version)
+	aggBuilder := newAggBuilder()
 	b.aggBuilders = append(b.aggBuilders, aggBuilder)
 	return aggBuilder
 }
 
 // MultiSearchRequestBuilder represents a builder which can build a multi search request
 type MultiSearchRequestBuilder struct {
-	version         *semver.Version
 	requestBuilders []*SearchRequestBuilder
 }
 
 // NewMultiSearchRequestBuilder creates a new multi search request builder
-func NewMultiSearchRequestBuilder(version *semver.Version) *MultiSearchRequestBuilder {
-	return &MultiSearchRequestBuilder{
-		version: version,
-	}
+func NewMultiSearchRequestBuilder() *MultiSearchRequestBuilder {
+	return &MultiSearchRequestBuilder{}
 }
 
 // Search initiates and returns a new search request builder
 func (m *MultiSearchRequestBuilder) Search(interval intervalv2.Interval) *SearchRequestBuilder {
-	b := NewSearchRequestBuilder(m.version, interval)
+	b := NewSearchRequestBuilder(interval)
 	m.requestBuilders = append(m.requestBuilders, b)
 	return b
 }
@@ -273,13 +261,11 @@ type AggBuilder interface {
 type aggBuilderImpl struct {
 	AggBuilder
 	aggDefs []*aggDef
-	version *semver.Version
 }
 
-func newAggBuilder(version *semver.Version) *aggBuilderImpl {
+func newAggBuilder() *aggBuilderImpl {
 	return &aggBuilderImpl{
 		aggDefs: make([]*aggDef, 0),
-		version: version,
 	}
 }
 
@@ -317,7 +303,7 @@ func (b *aggBuilderImpl) Histogram(key, field string, fn func(a *HistogramAgg, b
 	})
 
 	if fn != nil {
-		builder := newAggBuilder(b.version)
+		builder := newAggBuilder()
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
@@ -337,7 +323,7 @@ func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogr
 	})
 
 	if fn != nil {
-		builder := newAggBuilder(b.version)
+		builder := newAggBuilder()
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
@@ -360,12 +346,12 @@ func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b
 	})
 
 	if fn != nil {
-		builder := newAggBuilder(b.version)
+		builder := newAggBuilder()
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
 
-	if b.version.Major() >= 6 && len(innerAgg.Order) > 0 {
+	if len(innerAgg.Order) > 0 {
 		if orderBy, exists := innerAgg.Order[termsOrderTerm]; exists {
 			innerAgg.Order["_key"] = orderBy
 			delete(innerAgg.Order, termsOrderTerm)
@@ -386,7 +372,7 @@ func (b *aggBuilderImpl) Filters(key string, fn func(a *FiltersAggregation, b Ag
 		Aggregation: innerAgg,
 	})
 	if fn != nil {
-		builder := newAggBuilder(b.version)
+		builder := newAggBuilder()
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
@@ -407,7 +393,7 @@ func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAg
 	})
 
 	if fn != nil {
-		builder := newAggBuilder(b.version)
+		builder := newAggBuilder()
 		aggDef.builders = append(aggDef.builders, builder)
 		fn(innerAgg, builder)
 	}
diff --git a/pkg/tsdb/elasticsearch/client/search_request_test.go b/pkg/tsdb/elasticsearch/client/search_request_test.go
index 98f99b42f9a..502b05d96f2 100644
--- a/pkg/tsdb/elasticsearch/client/search_request_test.go
+++ b/pkg/tsdb/elasticsearch/client/search_request_test.go
@@ -5,7 +5,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/Masterminds/semver"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
 
@@ -16,8 +15,7 @@ func TestSearchRequest(t *testing.T) {
 	timeField := "@timestamp"
 
 	setup := func() *SearchRequestBuilder {
-		version5, _ := semver.NewVersion("5.0.0")
-		return NewSearchRequestBuilder(version5, intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
+		return NewSearchRequestBuilder(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
 	}
 
 	t.Run("When building search request", func(t *testing.T) {
@@ -398,65 +396,11 @@ func TestSearchRequest(t *testing.T) {
 			})
 		})
 	})
-
-	t.Run("Given new search request builder for es version 2", func(t *testing.T) {
-		version2, _ := semver.NewVersion("2.0.0")
-		b := NewSearchRequestBuilder(version2, intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
-
-		t.Run("When adding doc value field", func(t *testing.T) {
-			b.AddDocValueField(timeField)
-
-			t.Run("should set correct props", func(t *testing.T) {
-				fields, ok := b.customProps["fields"].([]string)
-				require.True(t, ok)
-				require.Equal(t, 2, len(fields))
-				require.Equal(t, "*", fields[0])
-				require.Equal(t, "_source", fields[1])
-
-				scriptFields, ok := b.customProps["script_fields"].(map[string]interface{})
-				require.True(t, ok)
-				require.Equal(t, 0, len(scriptFields))
-
-				fieldDataFields, ok := b.customProps["fielddata_fields"].([]string)
-				require.True(t, ok)
-				require.Equal(t, 1, len(fieldDataFields))
-				require.Equal(t, timeField, fieldDataFields[0])
-			})
-
-			t.Run("When building search request", func(t *testing.T) {
-				sr, err := b.Build()
-				require.Nil(t, err)
-
-				t.Run("When marshal to JSON should generate correct json", func(t *testing.T) {
-					body, err := json.Marshal(sr)
-					require.Nil(t, err)
-					json, err := simplejson.NewJson(body)
-					require.Nil(t, err)
-
-					scriptFields, err := json.Get("script_fields").Map()
-					require.Nil(t, err)
-					require.Equal(t, 0, len(scriptFields))
-
-					fields, err := json.Get("fields").StringArray()
-					require.Nil(t, err)
-					require.Equal(t, 2, len(fields))
-					require.Equal(t, "*", fields[0])
-					require.Equal(t, "_source", fields[1])
-
-					fieldDataFields, err := json.Get("fielddata_fields").StringArray()
-					require.Nil(t, err)
-					require.Equal(t, 1, len(fieldDataFields))
-					require.Equal(t, timeField, fieldDataFields[0])
-				})
-			})
-		})
-	})
 }
 
 func TestMultiSearchRequest(t *testing.T) {
 	t.Run("When adding one search request", func(t *testing.T) {
-		version2, _ := semver.NewVersion("2.0.0")
-		b := NewMultiSearchRequestBuilder(version2)
+		b := NewMultiSearchRequestBuilder()
 		b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
 
 		t.Run("When building search request should contain one search request", func(t *testing.T) {
@@ -467,8 +411,7 @@ func TestMultiSearchRequest(t *testing.T) {
 	})
 
 	t.Run("When adding two search requests", func(t *testing.T) {
-		version2, _ := semver.NewVersion("2.0.0")
-		b := NewMultiSearchRequestBuilder(version2)
+		b := NewMultiSearchRequestBuilder()
 		b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
 		b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
 
diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go
index 29bef578882..dfb192b5da2 100644
--- a/pkg/tsdb/elasticsearch/time_series_query.go
+++ b/pkg/tsdb/elasticsearch/time_series_query.go
@@ -6,7 +6,6 @@ import (
 	"strconv"
 	"time"
 
-	"github.com/Masterminds/semver"
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
@@ -144,7 +143,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
 				}
 
 				aggBuilder.Pipeline(m.ID, m.Type, bucketPaths, func(a *es.PipelineAggregation) {
-					a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
+					a.Settings = m.generateSettingsForDSL()
 				})
 			} else {
 				continue
@@ -165,7 +164,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
 					}
 
 					aggBuilder.Pipeline(m.ID, m.Type, bucketPath, func(a *es.PipelineAggregation) {
-						a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
+						a.Settings = m.generateSettingsForDSL()
 					})
 				}
 			} else {
@@ -174,7 +173,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
 			}
 		} else {
 			aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
-				a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
+				a.Settings = m.generateSettingsForDSL()
 			})
 		}
 	}
@@ -199,7 +198,7 @@ func setIntPath(settings *simplejson.Json, path ...string) {
 }
 
 // Casts values to float when required by Elastic's query DSL
-func (metricAggregation MetricAgg) generateSettingsForDSL(version *semver.Version) map[string]interface{} {
+func (metricAggregation MetricAgg) generateSettingsForDSL() map[string]interface{} {
 	switch metricAggregation.Type {
 	case "moving_avg":
 		setFloatPath(metricAggregation.Settings, "window")
@@ -219,14 +218,8 @@ func (metricAggregation MetricAgg) generateSettingsForDSL(version *semver.Versio
 			scriptValue, err = metricAggregation.Settings.GetPath("script", "inline").String()
 		}
 
-		constraint, _ := semver.NewConstraint(">=5.6.0")
-
 		if err == nil {
-			if constraint.Check(version) {
-				metricAggregation.Settings.SetPath([]string{"script"}, scriptValue)
-			} else {
-				metricAggregation.Settings.SetPath([]string{"script"}, map[string]interface{}{"inline": scriptValue})
-			}
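+			// The plain-string script form works on every still-supported version;
+			// the {"inline": ...} wrapper was only needed before ES 5.6.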
+			metricAggregation.Settings.SetPath([]string{"script"}, scriptValue)
 		}
 	}
diff --git a/pkg/tsdb/elasticsearch/time_series_query_test.go b/pkg/tsdb/elasticsearch/time_series_query_test.go
index a1572f35c83..a5a26bbd2ed 100644
--- a/pkg/tsdb/elasticsearch/time_series_query_test.go
+++ b/pkg/tsdb/elasticsearch/time_series_query_test.go
@@ -5,7 +5,6 @@ import (
 	"testing"
 	"time"
 
-	"github.com/Masterminds/semver"
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
 	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
@@ -20,8 +19,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 	toMs := to.UnixNano() / int64(time.Millisecond)
 
 	t.Run("Test execute time series query", func(t *testing.T) {
-		t.Run("With defaults on es 2", func(t *testing.T) {
-			c := newFakeClient("2.0.0")
+		t.Run("With defaults", func(t *testing.T) {
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
@@ -41,23 +40,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			require.Equal(t, dateHistogramAgg.ExtendedBounds.Max, toMs)
 		})
 
-		t.Run("With defaults on es 5", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
-			_, err := executeTsdbQuery(c, `{
-				"timeField": "@timestamp",
-				"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
-				"metrics": [{"type": "count", "id": "0" }]
-			}`, from, to, 15*time.Second)
-			require.NoError(t, err)
-			sr := c.multisearchRequests[0].Requests[0]
-			require.Equal(t, sr.Query.Bool.Filters[0].(*es.RangeFilter).Key, c.timeField)
-			require.Equal(t, sr.Aggs[0].Key, "2")
-			require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Min, fromMs)
-			require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Max, toMs)
-		})
-
 		t.Run("With multiple bucket aggs", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -79,7 +63,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With select field", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -99,7 +83,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With term agg and order by metric agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -129,7 +113,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With term agg and order by count metric agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -153,7 +137,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -178,7 +162,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -203,34 +187,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With term agg and order by term", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
-			_, err := executeTsdbQuery(c, `{
-				"timeField": "@timestamp",
-				"bucketAggs": [
-					{
-						"type": "terms",
-						"field": "@host",
-						"id": "2",
-						"settings": { "size": "5", "order": "asc", "orderBy": "_term" }
-					},
-					{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
-				],
-				"metrics": [
-					{"type": "count", "id": "1" },
-					{"type": "avg", "field": "@value", "id": "5" }
-				]
-			}`, from, to, 15*time.Second)
-			require.NoError(t, err)
-			sr := c.multisearchRequests[0].Requests[0]
-
-			firstLevel := sr.Aggs[0]
-			require.Equal(t, firstLevel.Key, "2")
-			termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
-			require.Equal(t, termsAgg.Order["_term"], "asc")
-		})
-
-		t.Run("With term agg and order by term with es6.x", func(t *testing.T) {
-			c := newFakeClient("6.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -257,7 +214,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With metric percentiles", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -289,39 +246,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			require.Equal(t, percents[3], "4")
 		})
 
-		t.Run("With filters aggs on es 2", func(t *testing.T) {
-			c := newFakeClient("2.0.0")
-			_, err := executeTsdbQuery(c, `{
-				"timeField": "@timestamp",
-				"bucketAggs": [
-					{
-						"id": "2",
-						"type": "filters",
-						"settings": {
-							"filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ]
-						}
-					},
-					{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
-				],
-				"metrics": [{"type": "count", "id": "1" }]
-			}`, from, to, 15*time.Second)
-			require.NoError(t, err)
-			sr := c.multisearchRequests[0].Requests[0]
-
-			filtersAgg := sr.Aggs[0]
-			require.Equal(t, filtersAgg.Key, "2")
-			require.Equal(t, filtersAgg.Aggregation.Type, "filters")
-			fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
-			require.Equal(t, fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, "@metric:cpu")
-			require.Equal(t, fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, "@metric:logins.count")
-
-			dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
-			require.Equal(t, dateHistogramAgg.Key, "4")
-			require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
-		})
-
-		t.Run("With filters aggs on es 5", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+		t.Run("With filters aggs", func(t *testing.T) {
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -352,7 +278,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With raw document metric", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [],
@@ -365,7 +291,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With raw document metric size set", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [],
@@ -378,7 +304,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With date histogram agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -403,7 +329,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			require.Equal(t, hAgg.MinDocCount, 2)
 
 			t.Run("Should not include time_zone when timeZone is utc", func(t *testing.T) {
-				c := newFakeClient("7.0.0")
+				c := newFakeClient()
 				_, err := executeTsdbQuery(c, `{
 					"timeField": "@timestamp",
 					"bucketAggs": [
@@ -426,7 +352,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			})
 
 			t.Run("Should include time_zone when timeZone is not utc", func(t *testing.T) {
-				c := newFakeClient("7.0.0")
+				c := newFakeClient()
 				_, err := executeTsdbQuery(c, `{
 					"timeField": "@timestamp",
 					"bucketAggs": [
@@ -450,7 +376,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With histogram agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -477,7 +403,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With geo hash grid agg", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -502,7 +428,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With moving average", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -540,7 +466,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With moving average doc count", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -572,7 +498,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With broken moving average", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -608,7 +534,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With cumulative sum", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -646,7 +572,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With cumulative sum doc count", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -678,7 +604,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With broken cumulative sum", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -714,7 +640,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With derivative", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -743,7 +669,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With derivative doc count", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -772,7 +698,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With serial_diff", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -801,7 +727,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With serial_diff doc count", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -830,7 +756,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With bucket_script", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -867,7 +793,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 		})
 
 		t.Run("With bucket_script doc count", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -907,7 +833,7 @@ func TestSettingsCasting(t *testing.T) {
 	to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
 
 	t.Run("Correctly transforms moving_average settings", func(t *testing.T) {
-		c := newFakeClient("5.0.0")
+		c := newFakeClient()
 		_, err := executeTsdbQuery(c, `{
 			"timeField": "@timestamp",
 			"bucketAggs": [
@@ -951,7 +877,7 @@ func TestSettingsCasting(t *testing.T) {
 	})
 
 	t.Run("Correctly transforms serial_diff settings", func(t *testing.T) {
-		c := newFakeClient("5.0.0")
+		c := newFakeClient()
 		_, err := executeTsdbQuery(c, `{
 			"timeField": "@timestamp",
 			"bucketAggs": [
@@ -980,7 +906,7 @@ func TestSettingsCasting(t *testing.T) {
 
 	t.Run("Date Histogram Settings", func(t *testing.T) {
 		t.Run("Correctly transforms date_histogram settings", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -1015,7 +941,7 @@ func TestSettingsCasting(t *testing.T) {
 		})
 
 		t.Run("Correctly uses already int min_doc_count", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -1051,7 +977,7 @@ func TestSettingsCasting(t *testing.T) {
 
 	t.Run("interval parameter", func(t *testing.T) {
 		t.Run("Uses fixed_interval", func(t *testing.T) {
-			c := newFakeClient("7.10.0")
+			c := newFakeClient()
 			_, err := executeTsdbQuery(c, `{
 				"timeField": "@timestamp",
 				"bucketAggs": [
@@ -1079,45 +1005,8 @@ func TestSettingsCasting(t *testing.T) {
 	})
 
 	t.Run("Inline Script", func(t *testing.T) {
-		t.Run("Correctly handles scripts for ES < 5.6", func(t *testing.T) {
-			c := newFakeClient("5.0.0")
-			_, err := executeTsdbQuery(c, `{
-				"timeField": "@timestamp",
-				"bucketAggs": [
-					{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
-				],
-				"metrics": [
-					{
-						"id": "1",
-						"type": "avg",
-						"settings": {
-							"script": "my_script"
-						}
-					},
-					{
-						"id": "3",
-						"type": "avg",
-						"settings": {
-							"script": {
"inline": "my_script" - } - } - } - ] - }`, from, to, 15*time.Second) - - assert.Nil(t, err) - sr := c.multisearchRequests[0].Requests[0] - - newFormatAggSettings := sr.Aggs[0].Aggregation.Aggs[0].Aggregation.Aggregation.(*es.MetricAggregation).Settings - oldFormatAggSettings := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggregation.(*es.MetricAggregation).Settings - - assert.Equal(t, map[string]interface{}{"inline": "my_script"}, newFormatAggSettings["script"]) - assert.Equal(t, map[string]interface{}{"inline": "my_script"}, oldFormatAggSettings["script"]) - }) - - t.Run("Correctly handles scripts for ES >= 5.6", func(t *testing.T) { - c := newFakeClient("5.6.0") + t.Run("Correctly handles scripts", func(t *testing.T) { + c := newFakeClient() _, err := executeTsdbQuery(c, `{ "timeField": "@timestamp", "bucketAggs": [ @@ -1156,7 +1045,6 @@ func TestSettingsCasting(t *testing.T) { } type fakeClient struct { - version *semver.Version timeField string multiSearchResponse *es.MultiSearchResponse multiSearchError error @@ -1164,10 +1052,8 @@ type fakeClient struct { multisearchRequests []*es.MultiSearchRequest } -func newFakeClient(versionString string) *fakeClient { - version, _ := semver.NewVersion(versionString) +func newFakeClient() *fakeClient { return &fakeClient{ - version: version, timeField: "@timestamp", multisearchRequests: make([]*es.MultiSearchRequest, 0), multiSearchResponse: &es.MultiSearchResponse{}, @@ -1176,10 +1062,6 @@ func newFakeClient(versionString string) *fakeClient { func (c *fakeClient) EnableDebug() {} -func (c *fakeClient) GetVersion() *semver.Version { - return c.version -} - func (c *fakeClient) GetTimeField() string { return c.timeField } @@ -1194,7 +1076,7 @@ func (c *fakeClient) ExecuteMultisearch(r *es.MultiSearchRequest) (*es.MultiSear } func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder { - c.builder = es.NewMultiSearchRequestBuilder(c.version) + c.builder = es.NewMultiSearchRequestBuilder() return c.builder } diff --git a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.test.tsx b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.test.tsx index df7b812ed98..4b3d407ff5b 100644 --- a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.test.tsx +++ b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.test.tsx @@ -87,7 +87,7 @@ describe('Metric Editor', () => { }); describe('Top Metrics Aggregation', () => { - const setupTopMetricsScreen = (esVersion: string, xpack: boolean) => { + const setupTopMetricsScreen = (xpack: boolean) => { const query: ElasticsearchQuery = { refId: 'A', query: '', @@ -97,6 +97,8 @@ describe('Metric Editor', () => { const getFields: ElasticDatasource['getFields'] = jest.fn(() => from([[]])); + const esVersion = '7.7.0'; + const wrapper = ({ children }: { children?: ReactNode }) => ( { }); }; - it('Should include top metrics aggregation when esVersion is 77 and X-Pack is enabled', () => { - setupTopMetricsScreen('7.7.0', true); + it('Should include top metrics aggregation when X-Pack is enabled', () => { + setupTopMetricsScreen(true); expect(screen.getByText('Top Metrics')).toBeInTheDocument(); }); - it('Should NOT include top metrics aggregation where esVersion is 77 and X-Pack is disabled', () => { - setupTopMetricsScreen('7.7.0', false); - expect(screen.queryByText('Top Metrics')).toBe(null); - }); - - 
-    it('Should NOT include top metrics aggregation when esVersion is 70 and X-Pack is enabled', () => {
-      setupTopMetricsScreen('7.0.0', true);
+    it('Should NOT include top metrics aggregation when X-Pack is disabled', () => {
+      setupTopMetricsScreen(false);
       expect(screen.queryByText('Top Metrics')).toBe(null);
     });
   });
diff --git a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/utils.ts b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/utils.ts
index e90b80f6c71..d79c187dc22 100644
--- a/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/utils.ts
+++ b/public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/utils.ts
@@ -113,7 +113,7 @@ export const metricAggregationConfig: MetricsConfiguration = {
     label: 'Moving Average',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0 <8.0.0',
+    versionRange: '<8.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@@ -136,14 +136,12 @@ export const metricAggregationConfig: MetricsConfiguration = {
     supportsMissing: false,
     hasMeta: false,
     hasSettings: true,
-    versionRange: '>=7.0.0',
     defaults: {},
   },
   derivative: {
     label: 'Derivative',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@@ -155,7 +153,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     label: 'Serial Difference',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0',
    supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@@ -171,7 +168,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     label: 'Cumulative Sum',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@@ -185,7 +181,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     isPipelineAgg: true,
     supportsMissing: false,
     supportsMultipleBucketPaths: true,
-    versionRange: '>=2.0.0',
     hasSettings: true,
     supportsInlineScript: false,
     hasMeta: false,
@@ -250,7 +245,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     supportsMultipleBucketPaths: false,
     hasSettings: true,
     supportsInlineScript: false,
-    versionRange: '>=7.7.0',
     hasMeta: false,
     defaults: {
       settings: {
@@ -261,7 +255,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
   rate: {
     label: 'Rate',
     xpack: true,
-    versionRange: '>=7.10.0',
     requiresField: true,
     isPipelineAgg: false,
     supportsMissing: false,
diff --git a/public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx b/public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx
index ce6f7a4c2b5..10b4ad53b00 100644
--- a/public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx
+++ b/public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx
@@ -35,7 +35,7 @@ describe('ConfigEditor', () => {
         onOptionsChange={(options) => {
           expect(options.jsonData.esVersion).toBe('5.0.0');
           expect(options.jsonData.timeField).toBe('@timestamp');
-          expect(options.jsonData.maxConcurrentShardRequests).toBe(256);
+          expect(options.jsonData.maxConcurrentShardRequests).toBe(5);
         }}
         options={options}
       />
diff --git a/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.test.tsx b/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.test.tsx
index 265f8051dc0..83f06bde4da 100644
--- a/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.test.tsx
+++ b/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.test.tsx
@@ -7,15 +7,10 @@ import { createDefaultConfigOptions } from './mocks';
 
 describe('ElasticDetails', () => {
   describe('Max concurrent Shard Requests', () => {
-    it('should render "Max concurrent Shard Requests" if version >= 5.6.0', () => {
-      render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '5.6.0' })} />);
+    it('should render "Max concurrent Shard Requests"', () => {
+      render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '8.2.0' })} />);
       expect(screen.getByLabelText('Max concurrent Shard Requests')).toBeInTheDocument();
     });
-
-    it('should not render "Max concurrent Shard Requests" if version < 5.6.0', () => {
-      render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '5.0.0' })} />);
-      expect(screen.queryByLabelText('Max concurrent Shard Requests')).not.toBeInTheDocument();
-    });
   });
 
   it('should change database on interval change when not set explicitly', async () => {
@@ -51,31 +46,29 @@ describe('ElasticDetails', () => {
   });
 
   describe('version change', () => {
-    const testCases = [{ version: '7.10+', maxConcurrentShardRequests: 6, expectedMaxConcurrentShardRequests: 6 }];
+    const tc = { version: '7.10+', maxConcurrentShardRequests: 6, expectedMaxConcurrentShardRequests: 6 };
 
-    testCases.forEach((tc) => {
-      const onChangeMock = jest.fn();
-      it(`sets maxConcurrentShardRequests=${tc.expectedMaxConcurrentShardRequests} if version=${tc.version},`, async () => {
-        render(
-          <ElasticDetails onChange={onChangeMock} value={createDefaultConfigOptions({ maxConcurrentShardRequests: tc.maxConcurrentShardRequests })} />
-        );
+    const onChangeMock = jest.fn();
+    it(`sets maxConcurrentShardRequests=${tc.expectedMaxConcurrentShardRequests} if version=${tc.version},`, async () => {
+      render(
+        <ElasticDetails onChange={onChangeMock} value={createDefaultConfigOptions({ maxConcurrentShardRequests: tc.maxConcurrentShardRequests })} />
+      );
 
-        const selectEl = screen.getByLabelText('ElasticSearch version');
+      const selectEl = screen.getByLabelText('ElasticSearch version');
 
-        await selectEvent.select(selectEl, tc.version, { container: document.body });
+      await selectEvent.select(selectEl, tc.version, { container: document.body });
 
-        expect(onChangeMock).toHaveBeenCalledWith(
-          expect.objectContaining({
-            jsonData: expect.objectContaining({ maxConcurrentShardRequests: tc.expectedMaxConcurrentShardRequests }),
-          })
-        );
-      });
+      expect(onChangeMock).toHaveBeenCalledWith(
+        expect.objectContaining({
+          jsonData: expect.objectContaining({ maxConcurrentShardRequests: tc.expectedMaxConcurrentShardRequests }),
+        })
+      );
     });
   });
 });
diff --git a/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.tsx b/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.tsx
index 731225a7c3d..7d6fc367655 100644
--- a/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.tsx
+++ b/public/app/plugins/datasource/elasticsearch/configuration/ElasticDetails.tsx
@@ -1,5 +1,5 @@
 import React from 'react';
-import { gte, lt, valid } from 'semver';
+import { valid } from 'semver';
 
 import { DataSourceSettings, SelectableValue } from '@grafana/data';
 import { FieldSet, InlineField, Input, Select, InlineSwitch } from '@grafana/ui';
@@ -82,8 +82,7 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
           options={[customOption, ...esVersions].filter(isTruthy)}
           onChange={(option) => {
             const maxConcurrentShardRequests = getMaxConcurrenShardRequestOrDefault(
-              value.jsonData.maxConcurrentShardRequests,
-              option.value!
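+              // The fallback default no longer depends on the selected version.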
+              value.jsonData.maxConcurrentShardRequests
             );
             onChange({
               ...value,
@@ -99,16 +98,14 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
         />
-      {gte(value.jsonData.esVersion, '5.6.0') && (
-        <InlineField label="Max concurrent Shard Requests" labelWidth={29}>
-          <Input value={value.jsonData.maxConcurrentShardRequests || ''} onChange={jsonDataChangeHandler('maxConcurrentShardRequests', value, onChange)} />
-        </InlineField>
-      )}
+      <InlineField label="Max concurrent Shard Requests" labelWidth={29}>
+        <Input value={value.jsonData.maxConcurrentShardRequests || ''} onChange={jsonDataChangeHandler('maxConcurrentShardRequests', value, onChange)} />
+      </InlineField>
         />
-      {gte(value.jsonData.esVersion, '6.6.0') && value.jsonData.xpack && (
+      {value.jsonData.xpack && (
diff --git a/public/app/plugins/datasource/elasticsearch/datasource.ts b/public/app/plugins/datasource/elasticsearch/datasource.ts
--- a/public/app/plugins/datasource/elasticsearch/datasource.ts
+++ b/public/app/plugins/datasource/elasticsearch/datasource.ts
-    if (satisfies(this.esVersion, '>=5.6.0 <7.0.0')) {
-      queryHeader['max_concurrent_shard_requests'] = this.maxConcurrentShardRequests;
-    }
-
     return JSON.stringify(queryHeader);
   }
@@ -519,13 +508,8 @@
     return text;
   }
 
-  /**
-   * This method checks to ensure the user is running a 5.0+ cluster. This is
-   * necessary bacause the query being used for the getLogRowContext relies on the
-   * search_after feature.
-   */
   showContextToggle(): boolean {
-    return gte(this.esVersion, '5.0.0');
+    return true;
   }
 
   getLogRowContext = async (row: LogRowModel, options?: RowContextOptions): Promise<{ data: DataFrame[] }> => {
@@ -687,7 +671,7 @@
 
     const esQuery = JSON.stringify(queryObj);
 
-    const searchType = queryObj.size === 0 && lt(this.esVersion, '5.0.0') ? 'count' : 'query_then_fetch';
+    const searchType = 'query_then_fetch';
 
     const header = this.getQueryHeader(searchType, options.range.from, options.range.to);
     payload += header + '\n';
@@ -804,15 +788,8 @@
 
     if (index && index.mappings) {
       const mappings = index.mappings;
 
-      if (lt(this.esVersion, '7.0.0')) {
-        for (const typeName in mappings) {
-          const properties = mappings[typeName].properties;
-          getFieldsRecursively(properties);
-        }
-      } else {
-        const properties = mappings.properties;
-        getFieldsRecursively(properties);
-      }
+      const properties = mappings.properties;
+      getFieldsRecursively(properties);
     }
   }
 
@@ -825,7 +802,7 @@
   }
 
   getTerms(queryDef: TermsQuery, range = getDefaultTimeRange()): Observable<MetricFindValue[]> {
-    const searchType = gte(this.esVersion, '5.0.0') ? 'query_then_fetch' : 'count';
+    const searchType = 'query_then_fetch';
     const header = this.getQueryHeader(searchType, range.from, range.to);
     let esQuery = JSON.stringify(this.queryBuilder.getTermsQuery(queryDef));
@@ -855,11 +832,11 @@
   getMultiSearchUrl() {
     const searchParams = new URLSearchParams();
 
-    if (gte(this.esVersion, '7.0.0') && this.maxConcurrentShardRequests) {
+    if (this.maxConcurrentShardRequests) {
       searchParams.append('max_concurrent_shard_requests', `${this.maxConcurrentShardRequests}`);
     }
 
-    if (gte(this.esVersion, '6.6.0') && this.xpack && this.includeFrozen) {
+    if (this.xpack && this.includeFrozen) {
       searchParams.append('ignore_throttled', 'false');
     }
 
diff --git a/public/app/plugins/datasource/elasticsearch/query_builder.ts b/public/app/plugins/datasource/elasticsearch/query_builder.ts
index 3d9e9af82db..3dd0f15156b 100644
--- a/public/app/plugins/datasource/elasticsearch/query_builder.ts
+++ b/public/app/plugins/datasource/elasticsearch/query_builder.ts
@@ -1,5 +1,3 @@
-import { gte, lt } from 'semver';
-
 import { InternalTimeZones } from '@grafana/data';
 
 import {
@@ -23,11 +21,9 @@ import { convertOrderByToMetricId, getScriptValue } from './utils';
 
 export class ElasticQueryBuilder {
   timeField: string;
-  esVersion: string;
 
-  constructor(options: { timeField: string; esVersion: string }) {
+  constructor(options: { timeField: string }) {
     this.timeField = options.timeField;
-    this.esVersion = options.esVersion;
   }
 
   getRangeFilter() {
@@ -54,7 +50,7 @@ export class ElasticQueryBuilder {
 
     if (aggDef.settings.orderBy !== void 0) {
       queryNode.terms.order = {};
 
-      if (aggDef.settings.orderBy === '_term' && gte(this.esVersion, '6.0.0')) {
+      if (aggDef.settings.orderBy === '_term') {
         queryNode.terms.order['_key'] = aggDef.settings.order;
       } else {
         queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;
@@ -153,11 +149,6 @@ export class ElasticQueryBuilder {
       },
     ];
 
-    // fields field not supported on ES 5.x
-    if (lt(this.esVersion, '5.0.0')) {
-      query.fields = ['*', '_source'];
-    }
-
     query.script_fields = {};
     return query;
   }
@@ -415,13 +406,7 @@ export class ElasticQueryBuilder {
   }
 
   private buildScript(script: string) {
-    if (gte(this.esVersion, '5.6.0')) {
-      return script;
-    }
-
-    return {
-      inline: script,
-    };
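+    // Every supported version accepts the script as a plain string.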
+    return script;
   }
 
   private toNumber(stringValue: unknown): unknown | number {
@@ -480,7 +465,7 @@ export class ElasticQueryBuilder {
     switch (orderBy) {
       case 'key':
       case 'term':
-        const keyname = gte(this.esVersion, '6.0.0') ? '_key' : '_term';
+        const keyname = '_key';
         query.aggs['1'].terms.order[keyname] = order;
         break;
       case 'doc_count':
diff --git a/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts b/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
index 0c3ab8d612a..e38b3c5fef6 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/query_builder.test.ts
@@ -1,822 +1,793 @@
-import { gte, lt } from 'semver';
-
 import { ElasticQueryBuilder } from '../query_builder';
 import { ElasticsearchQuery } from '../types';
 
 describe('ElasticQueryBuilder', () => {
-  const builder = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '2.0.0' });
-  const builder5x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '5.0.0' });
-  const builder56 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '5.6.0' });
-  const builder6x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '6.0.0' });
-  const builder7x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '7.0.0' });
-  const builder77 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '7.7.0' });
-  const builder8 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '8.0.0' });
+  const builder = new ElasticQueryBuilder({ timeField: '@timestamp' }); // es2
 
-  const allBuilders = [builder, builder5x, builder56, builder6x, builder7x, builder77, builder8];
+  it('should return query with defaults', () => {
+    const query = builder.build({
+      refId: 'A',
+      metrics: [{ type: 'count', id: '0' }],
+      timeField: '@timestamp',
+      bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
+    });
 
-  allBuilders.forEach((builder) => {
-    describe(`version ${builder.esVersion}`, () => {
-      it('should return query with defaults', () => {
-        const query = builder.build({
-          refId: 'A',
-          metrics: [{ type: 'count', id: '0' }],
-          timeField: '@timestamp',
-          bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
-        });
+    expect(query.query.bool.filter[0].range['@timestamp'].gte).toBe('$timeFrom');
+    expect(query.aggs['1'].date_histogram.extended_bounds.min).toBe('$timeFrom');
+  });
 
-        expect(query.query.bool.filter[0].range['@timestamp'].gte).toBe('$timeFrom');
-        expect(query.aggs['1'].date_histogram.extended_bounds.min).toBe('$timeFrom');
-      });
+  it('should clean settings from null values', () => {
+    const query = builder.build({
+      refId: 'A',
+      // The following `missing: null as any` is because previous versions of the DS were
+      // storing null in the query model when inputting an empty string,
+      // which were then removed in the query builder.
+      // The new version doesn't store empty strings at all. This test ensures backward compatibility.
+      metrics: [{ type: 'avg', id: '0', settings: { missing: null as any, script: '1' } }],
+      timeField: '@timestamp',
+      bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
+    });
 
-      it('should clean settings from null values', () => {
-        const query = builder.build({
-          refId: 'A',
-          // The following `missing: null as any` is because previous versions of the DS where
-          // storing null in the query model when inputting an empty string,
-          // which were then removed in the query builder.
-          // The new version doesn't store empty strings at all. This tests ensures backward compatinility.
-          metrics: [{ type: 'avg', id: '0', settings: { missing: null as any, script: '1' } }],
-          timeField: '@timestamp',
-          bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
-        });
+    expect(query.aggs['1'].aggs['0'].avg.missing).not.toBeDefined();
+    expect(query.aggs['1'].aggs['0'].avg.script).toBeDefined();
+  });
 
-        expect(query.aggs['1'].aggs['0'].avg.missing).not.toBeDefined();
-        expect(query.aggs['1'].aggs['0'].avg.script).toBeDefined();
-      });
+  it('with multiple bucket aggs', () => {
+    const query = builder.build({
+      refId: 'A',
+      metrics: [{ type: 'count', id: '1' }],
+      timeField: '@timestamp',
+      bucketAggs: [
+        { type: 'terms', field: '@host', id: '2' },
+        { type: 'date_histogram', field: '@timestamp', id: '3' },
+      ],
+    });
 
-      it('with multiple bucket aggs', () => {
-        const query = builder.build({
-          refId: 'A',
-          metrics: [{ type: 'count', id: '1' }],
-          timeField: '@timestamp',
-          bucketAggs: [
-            { type: 'terms', field: '@host', id: '2' },
-            { type: 'date_histogram', field: '@timestamp', id: '3' },
-          ],
-        });
+    expect(query.aggs['2'].terms.field).toBe('@host');
+    expect(query.aggs['2'].aggs['3'].date_histogram.field).toBe('@timestamp');
+  });
 
-        expect(query.aggs['2'].terms.field).toBe('@host');
-        expect(query.aggs['2'].aggs['3'].date_histogram.field).toBe('@timestamp');
-      });
+  it('with select field', () => {
+    const query = builder.build(
+      {
+        refId: 'A',
+        metrics: [{ type: 'avg', field: '@value', id: '1' }],
+        bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
+      },
+      100
+    );
 
-      it('with select field', () => {
-        const query = builder.build(
+    const aggs = query.aggs['2'].aggs;
+    expect(aggs['1'].avg.field).toBe('@value');
+  });
+
+  it('term agg and order by term', () => {
+    const target: ElasticsearchQuery = {
+      refId: 'A',
+      metrics: [
+        { type: 'count', id: '1' },
+        { type: 'avg', field: '@value', id: '5' },
+      ],
+      bucketAggs: [
+        {
+          type: 'terms',
+          field: '@host',
+          settings: { size: '5', order: 'asc', orderBy: '_term' },
+          id: '2',
+        },
+        { type: 'date_histogram', field: '@timestamp', id: '3' },
+      ],
+    };
+
+    const query = builder.build(target, 100);
+    const firstLevel = query.aggs['2'];
+
+    expect(firstLevel.terms.order._key).toBe('asc');
+  });
+
+  it('with term agg and order by metric agg', () => {
+    const query = builder.build(
+      {
+        refId: 'A',
+        metrics: [
+          { type: 'count', id: '1' },
+          { type: 'avg', field: '@value', id: '5' },
+        ],
+        bucketAggs: [
           {
-            refId: 'A',
-            metrics: [{ type: 'avg', field: '@value', id: '1' }],
-            bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
+            type: 'terms',
+            field: '@host',
+            settings: { size: '5', order: 'asc', orderBy: '5' },
+            id: '2',
           },
-          100
-        );
+          { type: 'date_histogram', field: '@timestamp', id: '3' },
+        ],
+      },
+      100
+    );
 
-        const aggs = query.aggs['2'].aggs;
-        expect(aggs['1'].avg.field).toBe('@value');
-      });
+    const firstLevel = query.aggs['2'];
+    const secondLevel = firstLevel.aggs['3'];
 
-      it('term agg and order by term', () => {
-        const target: ElasticsearchQuery = {
-          refId: 'A',
-          metrics: [
-            { type: 'count', id: '1' },
-            { type: 'avg', field: '@value', id: '5' },
-          ],
-          bucketAggs: [
-            {
-              type: 'terms',
-              field: '@host',
-              settings: { size: '5', order: 'asc', orderBy: '_term' },
-              id: '2',
-            },
-            { type: 'date_histogram', field: '@timestamp', id: '3' },
-          ],
-        };
-
-        const query = builder.build(target, 100);
-        const firstLevel = query.aggs['2'];
-
-        if (gte(builder.esVersion, '6.0.0')) {
-          expect(firstLevel.terms.order._key).toBe('asc');
-        } else {
-          expect(firstLevel.terms.order._term).toBe('asc');
-        }
-      });
+    expect(firstLevel.aggs['5'].avg.field).toBe('@value');
+    expect(secondLevel.aggs['5'].avg.field).toBe('@value');
+  });
+
+  it('with term agg and order by count agg', () => {
+    const query = builder.build(
+      {
+        refId: 'A',
+        metrics: [
+          { type: 'count', id: '1' },
'avg', field: '@value', id: '5' }, + ], + bucketAggs: [ + { + type: 'terms', + field: '@host', + settings: { size: '5', order: 'asc', orderBy: '1' }, + id: '2', + }, + { type: 'date_histogram', field: '@timestamp', id: '3' }, + ], + }, + 100 + ); + + expect(query.aggs['2'].terms.order._count).toEqual('asc'); + expect(query.aggs['2'].aggs).not.toHaveProperty('1'); + }); + + it('with term agg and order by extended_stats agg', () => { + const query = builder.build( + { + refId: 'A', + metrics: [{ type: 'extended_stats', id: '1', field: '@value', meta: { std_deviation: true } }], + bucketAggs: [ + { + type: 'terms', + field: '@host', + settings: { size: '5', order: 'asc', orderBy: '1[std_deviation]' }, + id: '2', + }, + { type: 'date_histogram', field: '@timestamp', id: '3' }, + ], + }, + 100 + ); + + const firstLevel = query.aggs['2']; + const secondLevel = firstLevel.aggs['3']; + + expect(firstLevel.aggs['1'].extended_stats.field).toBe('@value'); + expect(secondLevel.aggs['1'].extended_stats.field).toBe('@value'); + }); + + it('with term agg and order by percentiles agg', () => { + const query = builder.build( + { + refId: 'A', + metrics: [{ type: 'percentiles', id: '1', field: '@value', settings: { percents: ['95', '99'] } }], + bucketAggs: [ + { + type: 'terms', + field: '@host', + settings: { size: '5', order: 'asc', orderBy: '1[95.0]' }, + id: '2', + }, + { type: 'date_histogram', field: '@timestamp', id: '3' }, + ], + }, + 100 + ); + + const firstLevel = query.aggs['2']; + const secondLevel = firstLevel.aggs['3']; + + expect(firstLevel.aggs['1'].percentiles.field).toBe('@value'); + expect(secondLevel.aggs['1'].percentiles.field).toBe('@value'); + }); + + it('with term agg and valid min_doc_count', () => { + const query = builder.build( + { + refId: 'A', + metrics: [{ type: 'count', id: '1' }], + bucketAggs: [ + { + type: 'terms', + field: '@host', + settings: { min_doc_count: '1' }, + id: '2', + }, + { type: 'date_histogram', field: '@timestamp', id: '3' }, + ], + }, + 100 + ); + + const firstLevel = query.aggs['2']; + expect(firstLevel.terms.min_doc_count).toBe(1); + }); + + it('with term agg and variable as min_doc_count', () => { + const query = builder.build( + { + refId: 'A', + metrics: [{ type: 'count', id: '1' }], + bucketAggs: [ + { + type: 'terms', + field: '@host', + settings: { min_doc_count: '$min_doc_count' }, + id: '2', + }, + { type: 'date_histogram', field: '@timestamp', id: '3' }, + ], + }, + 100 + ); + + const firstLevel = query.aggs['2']; + expect(firstLevel.terms.min_doc_count).toBe('$min_doc_count'); + }); + + it('with metric percentiles', () => { + const percents = ['1', '2', '3', '4']; + const field = '@load_time'; + + const query = builder.build( + { + refId: 'A', + metrics: [ + { + id: '1', + type: 'percentiles', + field, + settings: { + percents, }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }; + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }, + 100 + ); - const query = builder.build(target, 100); - const firstLevel = query.aggs['2']; + const firstLevel = query.aggs['3']; - if (gte(builder.esVersion, '6.0.0')) { - expect(firstLevel.terms.order._key).toBe('asc'); - } else { - expect(firstLevel.terms.order._term).toBe('asc'); - } - }); + expect(firstLevel.aggs['1'].percentiles.field).toBe(field); + expect(firstLevel.aggs['1'].percentiles.percents).toEqual(percents); + }); - it('with term agg and order by metric agg', () => { - const query = builder.build( - { - refId: 'A', - metrics: [ - { type: 
'count', id: '1' }, - { type: 'avg', field: '@value', id: '5' }, - ], - bucketAggs: [ - { - type: 'terms', - field: '@host', - settings: { size: '5', order: 'asc', orderBy: '5' }, - id: '2', - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, + it('with filters aggs', () => { + const query = builder.build({ + refId: 'A', + metrics: [{ type: 'count', id: '1' }], + timeField: '@timestamp', + bucketAggs: [ + { + id: '2', + type: 'filters', + settings: { + filters: [ + { query: '@metric:cpu', label: '' }, + { query: '@metric:logins.count', label: '' }, ], }, - 100 - ); + }, + { type: 'date_histogram', field: '@timestamp', id: '4' }, + ], + }); - const firstLevel = query.aggs['2']; - const secondLevel = firstLevel.aggs['3']; + expect(query.aggs['2'].filters.filters['@metric:cpu'].query_string.query).toBe('@metric:cpu'); + expect(query.aggs['2'].filters.filters['@metric:logins.count'].query_string.query).toBe('@metric:logins.count'); + expect(query.aggs['2'].aggs['4'].date_histogram.field).toBe('@timestamp'); + }); - expect(firstLevel.aggs['5'].avg.field).toBe('@value'); - expect(secondLevel.aggs['5'].avg.field).toBe('@value'); - }); + it('should return correct query for raw_document metric', () => { + const target: ElasticsearchQuery = { + refId: 'A', + metrics: [{ type: 'raw_document', id: '1', settings: {} }], + timeField: '@timestamp', + bucketAggs: [] as any[], + }; - it('with term agg and order by count agg', () => { - const query = builder.build( - { - refId: 'A', - metrics: [ - { type: 'count', id: '1' }, - { type: 'avg', field: '@value', id: '5' }, - ], - bucketAggs: [ - { - type: 'terms', - field: '@host', - settings: { size: '5', order: 'asc', orderBy: '1' }, - id: '2', - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - 100 - ); - - expect(query.aggs['2'].terms.order._count).toEqual('asc'); - expect(query.aggs['2'].aggs).not.toHaveProperty('1'); - }); - - it('with term agg and order by extended_stats agg', () => { - const query = builder.build( - { - refId: 'A', - metrics: [{ type: 'extended_stats', id: '1', field: '@value', meta: { std_deviation: true } }], - bucketAggs: [ - { - type: 'terms', - field: '@host', - settings: { size: '5', order: 'asc', orderBy: '1[std_deviation]' }, - id: '2', - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - 100 - ); - - const firstLevel = query.aggs['2']; - const secondLevel = firstLevel.aggs['3']; - - expect(firstLevel.aggs['1'].extended_stats.field).toBe('@value'); - expect(secondLevel.aggs['1'].extended_stats.field).toBe('@value'); - }); - - it('with term agg and order by percentiles agg', () => { - const query = builder.build( - { - refId: 'A', - metrics: [{ type: 'percentiles', id: '1', field: '@value', settings: { percents: ['95', '99'] } }], - bucketAggs: [ - { - type: 'terms', - field: '@host', - settings: { size: '5', order: 'asc', orderBy: '1[95.0]' }, - id: '2', - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - 100 - ); - - const firstLevel = query.aggs['2']; - const secondLevel = firstLevel.aggs['3']; - - expect(firstLevel.aggs['1'].percentiles.field).toBe('@value'); - expect(secondLevel.aggs['1'].percentiles.field).toBe('@value'); - }); - - it('with term agg and valid min_doc_count', () => { - const query = builder.build( - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [ - { - type: 'terms', - field: '@host', - settings: { min_doc_count: '1' }, - id: '2', - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - 
], - }, - 100 - ); - - const firstLevel = query.aggs['2']; - expect(firstLevel.terms.min_doc_count).toBe(1); - }); - - it('with term agg and variable as min_doc_count', () => { - const query = builder.build( - { - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - bucketAggs: [ - { - type: 'terms', - field: '@host', - settings: { min_doc_count: '$min_doc_count' }, - id: '2', - }, - { type: 'date_histogram', field: '@timestamp', id: '3' }, - ], - }, - 100 - ); - - const firstLevel = query.aggs['2']; - expect(firstLevel.terms.min_doc_count).toBe('$min_doc_count'); - }); - - it('with metric percentiles', () => { - const percents = ['1', '2', '3', '4']; - const field = '@load_time'; - - const query = builder.build( - { - refId: 'A', - metrics: [ - { - id: '1', - type: 'percentiles', - field, - settings: { - percents, + const query = builder.build(target); + expect(query).toMatchObject({ + size: 500, + query: { + bool: { + filter: [ + { + range: { + '@timestamp': { + format: 'epoch_millis', + gte: '$timeFrom', + lte: '$timeTo', }, }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }, + ], + }, + }, + sort: [{ '@timestamp': { order: 'desc', unmapped_type: 'boolean' } }, { _doc: { order: 'desc' } }], + script_fields: {}, + }); + }); + + it('should set query size from settings when raw_documents', () => { + const query = builder.build({ + refId: 'A', + metrics: [{ type: 'raw_document', id: '1', settings: { size: '1337' } }], + timeField: '@timestamp', + bucketAggs: [], + }); + + expect(query.size).toBe(1337); + }); + + it('with moving average', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'sum', + field: '@value', + }, + { + id: '2', + type: 'moving_avg', + field: '3', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }); + + const firstLevel = query.aggs['3']; + + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined); + expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('3'); + }); + + it('with moving average doc count', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'count', + }, + { + id: '2', + type: 'moving_avg', + field: '3', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '4' }], + }); + + const firstLevel = query.aggs['4']; + + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined); + expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('_count'); + }); + + it('with broken moving average', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'sum', + field: '@value', + }, + { + id: '2', + type: 'moving_avg', + field: '3', + }, + { + id: '4', + type: 'moving_avg', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }); + + const firstLevel = query.aggs['3']; + + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined); + expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('3'); + expect(firstLevel.aggs['4']).toBe(undefined); + }); + + it('with top_metrics', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '2', + type: 'top_metrics', + settings: { + order: 'desc', + orderBy: '@timestamp', + metrics: ['@value'], }, - 100 - ); + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }); - const 
firstLevel = query.aggs['3']; + const firstLevel = query.aggs['3']; - expect(firstLevel.aggs['1'].percentiles.field).toBe(field); - expect(firstLevel.aggs['1'].percentiles.percents).toEqual(percents); - }); + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].top_metrics).not.toBe(undefined); + expect(firstLevel.aggs['2'].top_metrics.metrics).not.toBe(undefined); + expect(firstLevel.aggs['2'].top_metrics.size).not.toBe(undefined); + expect(firstLevel.aggs['2'].top_metrics.sort).not.toBe(undefined); + expect(firstLevel.aggs['2'].top_metrics.metrics.length).toBe(1); + expect(firstLevel.aggs['2'].top_metrics.metrics).toEqual([{ field: '@value' }]); + expect(firstLevel.aggs['2'].top_metrics.sort).toEqual([{ '@timestamp': 'desc' }]); + expect(firstLevel.aggs['2'].top_metrics.size).toBe(1); + }); - it('with filters aggs', () => { - const query = builder.build({ - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - timeField: '@timestamp', - bucketAggs: [ - { - id: '2', - type: 'filters', - settings: { - filters: [ - { query: '@metric:cpu', label: '' }, - { query: '@metric:logins.count', label: '' }, - ], - }, - }, - { type: 'date_histogram', field: '@timestamp', id: '4' }, - ], - }); + it('with derivative', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'sum', + field: '@value', + }, + { + id: '2', + type: 'derivative', + field: '3', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }); - expect(query.aggs['2'].filters.filters['@metric:cpu'].query_string.query).toBe('@metric:cpu'); - expect(query.aggs['2'].filters.filters['@metric:logins.count'].query_string.query).toBe('@metric:logins.count'); - expect(query.aggs['2'].aggs['4'].date_histogram.field).toBe('@timestamp'); - }); + const firstLevel = query.aggs['3']; - it('should return correct query for raw_document metric', () => { - const target: ElasticsearchQuery = { - refId: 'A', - metrics: [{ type: 'raw_document', id: '1', settings: {} }], - timeField: '@timestamp', - bucketAggs: [] as any[], - }; + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].derivative).not.toBe(undefined); + expect(firstLevel.aggs['2'].derivative.buckets_path).toBe('3'); + }); - const query = builder.build(target); - expect(query).toMatchObject({ - size: 500, - query: { - bool: { - filter: [ - { - range: { - '@timestamp': { - format: 'epoch_millis', - gte: '$timeFrom', - lte: '$timeTo', - }, - }, - }, - ], - }, + it('with derivative doc count', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'count', + }, + { + id: '2', + type: 'derivative', + field: '3', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '4' }], + }); + + const firstLevel = query.aggs['4']; + + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].derivative).not.toBe(undefined); + expect(firstLevel.aggs['2'].derivative.buckets_path).toBe('_count'); + }); + + it('with serial_diff', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'max', + field: '@value', + }, + { + id: '2', + type: 'serial_diff', + field: '3', + settings: { + lag: '5', }, - sort: [{ '@timestamp': { order: 'desc', unmapped_type: 'boolean' } }, { _doc: { order: 'desc' } }], - script_fields: {}, - }); - }); + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }); - it('should set query size from settings when raw_documents', () => { 
- const query = builder.build({ - refId: 'A', - metrics: [{ type: 'raw_document', id: '1', settings: { size: '1337' } }], - timeField: '@timestamp', - bucketAggs: [], - }); + const firstLevel = query.aggs['3']; - expect(query.size).toBe(1337); - }); + expect(firstLevel.aggs['2']).not.toBe(undefined); + expect(firstLevel.aggs['2'].serial_diff).not.toBe(undefined); + expect(firstLevel.aggs['2'].serial_diff.buckets_path).toBe('3'); + expect(firstLevel.aggs['2'].serial_diff.lag).toBe(5); + }); - it('with moving average', () => { - const query = builder.build({ - refId: 'A', - metrics: [ + it('with bucket_script', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '1', + type: 'sum', + field: '@value', + }, + { + id: '3', + type: 'max', + field: '@value', + }, + { + id: '4', + pipelineVariables: [ { - id: '3', - type: 'sum', - field: '@value', + name: 'var1', + pipelineAgg: '1', }, { - id: '2', - type: 'moving_avg', - field: '3', + name: 'var2', + pipelineAgg: '3', }, ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }); - - const firstLevel = query.aggs['3']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined); - expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('3'); - }); - - it('with moving average doc count', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '3', - type: 'count', - }, - { - id: '2', - type: 'moving_avg', - field: '3', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '4' }], - }); - - const firstLevel = query.aggs['4']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined); - expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('_count'); - }); - - it('with broken moving average', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '3', - type: 'sum', - field: '@value', - }, - { - id: '2', - type: 'moving_avg', - field: '3', - }, - { - id: '4', - type: 'moving_avg', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }); - - const firstLevel = query.aggs['3']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined); - expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('3'); - expect(firstLevel.aggs['4']).toBe(undefined); - }); - - it('with top_metrics', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '2', - type: 'top_metrics', - settings: { - order: 'desc', - orderBy: '@timestamp', - metrics: ['@value'], - }, - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }); - - const firstLevel = query.aggs['3']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].top_metrics).not.toBe(undefined); - expect(firstLevel.aggs['2'].top_metrics.metrics).not.toBe(undefined); - expect(firstLevel.aggs['2'].top_metrics.size).not.toBe(undefined); - expect(firstLevel.aggs['2'].top_metrics.sort).not.toBe(undefined); - expect(firstLevel.aggs['2'].top_metrics.metrics.length).toBe(1); - expect(firstLevel.aggs['2'].top_metrics.metrics).toEqual([{ field: '@value' }]); - expect(firstLevel.aggs['2'].top_metrics.sort).toEqual([{ '@timestamp': 'desc' }]); - expect(firstLevel.aggs['2'].top_metrics.size).toBe(1); - }); - - it('with derivative', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '3', - type: 'sum', - 
field: '@value', - }, - { - id: '2', - type: 'derivative', - field: '3', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }); - - const firstLevel = query.aggs['3']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].derivative).not.toBe(undefined); - expect(firstLevel.aggs['2'].derivative.buckets_path).toBe('3'); - }); - - it('with derivative doc count', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '3', - type: 'count', - }, - { - id: '2', - type: 'derivative', - field: '3', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '4' }], - }); - - const firstLevel = query.aggs['4']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].derivative).not.toBe(undefined); - expect(firstLevel.aggs['2'].derivative.buckets_path).toBe('_count'); - }); - - it('with serial_diff', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '3', - type: 'max', - field: '@value', - }, - { - id: '2', - type: 'serial_diff', - field: '3', - settings: { - lag: '5', - }, - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], - }); - - const firstLevel = query.aggs['3']; - - expect(firstLevel.aggs['2']).not.toBe(undefined); - expect(firstLevel.aggs['2'].serial_diff).not.toBe(undefined); - expect(firstLevel.aggs['2'].serial_diff.buckets_path).toBe('3'); - expect(firstLevel.aggs['2'].serial_diff.lag).toBe(5); - }); - - it('with bucket_script', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '1', - type: 'sum', - field: '@value', - }, - { - id: '3', - type: 'max', - field: '@value', - }, - { - id: '4', - pipelineVariables: [ - { - name: 'var1', - pipelineAgg: '1', - }, - { - name: 'var2', - pipelineAgg: '3', - }, - ], - settings: { - script: 'params.var1 * params.var2', - }, - type: 'bucket_script', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], - }); - - const firstLevel = query.aggs['2']; - - expect(firstLevel.aggs['4']).not.toBe(undefined); - expect(firstLevel.aggs['4'].bucket_script).not.toBe(undefined); - expect(firstLevel.aggs['4'].bucket_script.buckets_path).toMatchObject({ var1: '1', var2: '3' }); - }); - - it('with bucket_script doc count', () => { - const query = builder.build({ - refId: 'A', - metrics: [ - { - id: '3', - type: 'count', - }, - { - id: '4', - pipelineVariables: [ - { - name: 'var1', - pipelineAgg: '3', - }, - ], - settings: { - script: 'params.var1 * 1000', - }, - type: 'bucket_script', - }, - ], - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], - }); - - const firstLevel = query.aggs['2']; - - expect(firstLevel.aggs['4']).not.toBe(undefined); - expect(firstLevel.aggs['4'].bucket_script).not.toBe(undefined); - expect(firstLevel.aggs['4'].bucket_script.buckets_path).toMatchObject({ var1: '_count' }); - }); - - it('with histogram', () => { - const query = builder.build({ - refId: 'A', - metrics: [{ id: '1', type: 'count' }], - bucketAggs: [ - { - type: 'histogram', - field: 'bytes', - id: '3', - settings: { - interval: '10', - min_doc_count: '2', - }, - }, - ], - }); - - const firstLevel = query.aggs['3']; - expect(firstLevel.histogram.field).toBe('bytes'); - expect(firstLevel.histogram.interval).toBe('10'); - expect(firstLevel.histogram.min_doc_count).toBe('2'); - }); - - it('with adhoc filters', () => { - const query = builder.build( - { - refId: 'A', - metrics: [{ type: 'count', id: '0' }], - 
timeField: '@timestamp', - bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + settings: { + script: 'params.var1 * params.var2', }, - [ - { key: 'key1', operator: '=', value: 'value1' }, - { key: 'key2', operator: '=', value: 'value2' }, - { key: 'key2', operator: '!=', value: 'value2' }, - { key: 'key3', operator: '<', value: 'value3' }, - { key: 'key4', operator: '>', value: 'value4' }, - { key: 'key5', operator: '=~', value: 'value5' }, - { key: 'key6', operator: '!~', value: 'value6' }, - ] - ); + type: 'bucket_script', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], + }); - expect(query.query.bool.must[0].match_phrase['key1'].query).toBe('value1'); - expect(query.query.bool.must[1].match_phrase['key2'].query).toBe('value2'); - expect(query.query.bool.must_not[0].match_phrase['key2'].query).toBe('value2'); - expect(query.query.bool.filter[1].range['key3'].lt).toBe('value3'); - expect(query.query.bool.filter[2].range['key4'].gt).toBe('value4'); - expect(query.query.bool.filter[3].regexp['key5']).toBe('value5'); - expect(query.query.bool.filter[4].bool.must_not.regexp['key6']).toBe('value6'); - }); + const firstLevel = query.aggs['2']; - describe('getTermsQuery', () => { - function testGetTermsQuery(queryDef: any) { - const query = builder.getTermsQuery(queryDef); - return query.aggs['1'].terms.order; - } + expect(firstLevel.aggs['4']).not.toBe(undefined); + expect(firstLevel.aggs['4'].bucket_script).not.toBe(undefined); + expect(firstLevel.aggs['4'].bucket_script.buckets_path).toMatchObject({ var1: '1', var2: '3' }); + }); - function checkSort(order: any, expected: string) { - if (lt(builder.esVersion, '6.0.0')) { - expect(order._term).toBe(expected); - expect(order._key).toBeUndefined(); - } else { - expect(order._term).toBeUndefined(); - expect(order._key).toBe(expected); - } - } - - it('should set correct default sorting', () => { - const order = testGetTermsQuery({}); - checkSort(order, 'asc'); - expect(order._count).toBeUndefined(); - }); - - it('should set correct explicit sorting', () => { - const order = testGetTermsQuery({ order: 'desc' }); - checkSort(order, 'desc'); - expect(order._count).toBeUndefined(); - }); - - it('getTermsQuery(orderBy:doc_count) should set desc sorting on _count', () => { - const query = builder.getTermsQuery({ orderBy: 'doc_count' }); - expect(query.aggs['1'].terms.order._term).toBeUndefined(); - expect(query.aggs['1'].terms.order._key).toBeUndefined(); - expect(query.aggs['1'].terms.order._count).toBe('desc'); - }); - - it('getTermsQuery(orderBy:doc_count, order:asc) should set asc sorting on _count', () => { - const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc' }); - expect(query.aggs['1'].terms.order._term).toBeUndefined(); - expect(query.aggs['1'].terms.order._key).toBeUndefined(); - expect(query.aggs['1'].terms.order._count).toBe('asc'); - }); - - describe('lucene query', () => { - it('should add query_string filter when query is not empty', () => { - const luceneQuery = 'foo'; - const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc', query: luceneQuery }); - - expect(query.query.bool.filter).toContainEqual({ - query_string: { analyze_wildcard: true, query: luceneQuery }, - }); - }); - - it('should not add query_string filter when query is empty', () => { - const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc' }); - - expect( - query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string')) - 
).toBeFalsy(); - }); - }); - }); - - describe('lucene query', () => { - it('should add query_string filter when query is not empty', () => { - const luceneQuery = 'foo'; - const query = builder.build({ refId: 'A', query: luceneQuery }); - - expect(query.query.bool.filter).toContainEqual({ - query_string: { analyze_wildcard: true, query: luceneQuery }, - }); - }); - - it('should not add query_string filter when query is empty', () => { - const query = builder.build({ refId: 'A' }); - - expect( - query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string')) - ).toBeFalsy(); - }); - }); - - describe('getLogsQuery', () => { - it('should return query with defaults', () => { - const query = builder.getLogsQuery({ refId: 'A' }, 500, null); - - expect(query.size).toEqual(500); - - const expectedQuery = { - bool: { - filter: [{ range: { '@timestamp': { gte: '$timeFrom', lte: '$timeTo', format: 'epoch_millis' } } }], + it('with bucket_script doc count', () => { + const query = builder.build({ + refId: 'A', + metrics: [ + { + id: '3', + type: 'count', + }, + { + id: '4', + pipelineVariables: [ + { + name: 'var1', + pipelineAgg: '3', }, - }; - expect(query.query).toEqual(expectedQuery); + ], + settings: { + script: 'params.var1 * 1000', + }, + type: 'bucket_script', + }, + ], + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }], + }); - expect(query.sort).toEqual([ - { '@timestamp': { order: 'desc', unmapped_type: 'boolean' } }, - { _doc: { order: 'desc' } }, - ]); + const firstLevel = query.aggs['2']; - const expectedAggs: any = { - // FIXME: It's pretty weak to include this '1' in the test as it's not part of what we are testing here and - // might change as a cause of unrelated changes - 1: { - aggs: {}, - date_histogram: { - extended_bounds: { max: '$timeTo', min: '$timeFrom' }, - field: '@timestamp', - format: 'epoch_millis', - fixed_interval: '$__interval', - min_doc_count: 0, - }, - }, - }; + expect(firstLevel.aggs['4']).not.toBe(undefined); + expect(firstLevel.aggs['4'].bucket_script).not.toBe(undefined); + expect(firstLevel.aggs['4'].bucket_script.buckets_path).toMatchObject({ var1: '_count' }); + }); - expect(query.aggs).toMatchObject(expectedAggs); - }); + it('with histogram', () => { + const query = builder.build({ + refId: 'A', + metrics: [{ id: '1', type: 'count' }], + bucketAggs: [ + { + type: 'histogram', + field: 'bytes', + id: '3', + settings: { + interval: '10', + min_doc_count: '2', + }, + }, + ], + }); - describe('lucene query', () => { - it('should add query_string filter when query is not empty', () => { - const luceneQuery = 'foo'; - const query = builder.getLogsQuery({ refId: 'A', query: luceneQuery }, 500, null); + const firstLevel = query.aggs['3']; + expect(firstLevel.histogram.field).toBe('bytes'); + expect(firstLevel.histogram.interval).toBe('10'); + expect(firstLevel.histogram.min_doc_count).toBe('2'); + }); - expect(query.query.bool.filter).toContainEqual({ - query_string: { analyze_wildcard: true, query: luceneQuery }, - }); - }); + it('with adhoc filters', () => { + const query = builder.build( + { + refId: 'A', + metrics: [{ type: 'count', id: '0' }], + timeField: '@timestamp', + bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }], + }, + [ + { key: 'key1', operator: '=', value: 'value1' }, + { key: 'key2', operator: '=', value: 'value2' }, + { key: 'key2', operator: '!=', value: 'value2' }, + { key: 'key3', operator: '<', value: 'value3' }, + { key: 'key4', operator: '>', value: 'value4' }, + { 
key: 'key5', operator: '=~', value: 'value5' }, + { key: 'key6', operator: '!~', value: 'value6' }, + ] + ); - it('should not add query_string filter when query is empty', () => { - const query = builder.getLogsQuery({ refId: 'A' }, 500, null); + expect(query.query.bool.must[0].match_phrase['key1'].query).toBe('value1'); + expect(query.query.bool.must[1].match_phrase['key2'].query).toBe('value2'); + expect(query.query.bool.must_not[0].match_phrase['key2'].query).toBe('value2'); + expect(query.query.bool.filter[1].range['key3'].lt).toBe('value3'); + expect(query.query.bool.filter[2].range['key4'].gt).toBe('value4'); + expect(query.query.bool.filter[3].regexp['key5']).toBe('value5'); + expect(query.query.bool.filter[4].bool.must_not.regexp['key6']).toBe('value6'); + }); - expect( - query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string')) - ).toBeFalsy(); - }); - }); + describe('getTermsQuery', () => { + function testGetTermsQuery(queryDef: any) { + const query = builder.getTermsQuery(queryDef); + return query.aggs['1'].terms.order; + } - it('with adhoc filters', () => { - // TODO: Types for AdHocFilters - const adhocFilters = [ - { key: 'key1', operator: '=', value: 'value1' }, - { key: 'key2', operator: '!=', value: 'value2' }, - { key: 'key3', operator: '<', value: 'value3' }, - { key: 'key4', operator: '>', value: 'value4' }, - { key: 'key5', operator: '=~', value: 'value5' }, - { key: 'key6', operator: '!~', value: 'value6' }, - ]; - const query = builder.getLogsQuery({ refId: 'A' }, 500, adhocFilters); + function checkSort(order: any, expected: string) { + expect(order._term).toBeUndefined(); + expect(order._key).toBe(expected); + } - expect(query.query.bool.must[0].match_phrase['key1'].query).toBe('value1'); - expect(query.query.bool.must_not[0].match_phrase['key2'].query).toBe('value2'); - expect(query.query.bool.filter[1].range['key3'].lt).toBe('value3'); - expect(query.query.bool.filter[2].range['key4'].gt).toBe('value4'); - expect(query.query.bool.filter[3].regexp['key5']).toBe('value5'); - expect(query.query.bool.filter[4].bool.must_not.regexp['key6']).toBe('value6'); + it('should set correct default sorting', () => { + const order = testGetTermsQuery({}); + checkSort(order, 'asc'); + expect(order._count).toBeUndefined(); + }); + + it('should set correct explicit sorting', () => { + const order = testGetTermsQuery({ order: 'desc' }); + checkSort(order, 'desc'); + expect(order._count).toBeUndefined(); + }); + + it('getTermsQuery(orderBy:doc_count) should set desc sorting on _count', () => { + const query = builder.getTermsQuery({ orderBy: 'doc_count' }); + expect(query.aggs['1'].terms.order._term).toBeUndefined(); + expect(query.aggs['1'].terms.order._key).toBeUndefined(); + expect(query.aggs['1'].terms.order._count).toBe('desc'); + }); + + it('getTermsQuery(orderBy:doc_count, order:asc) should set asc sorting on _count', () => { + const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc' }); + expect(query.aggs['1'].terms.order._term).toBeUndefined(); + expect(query.aggs['1'].terms.order._key).toBeUndefined(); + expect(query.aggs['1'].terms.order._count).toBe('asc'); + }); + + describe('lucene query', () => { + it('should add query_string filter when query is not empty', () => { + const luceneQuery = 'foo'; + const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc', query: luceneQuery }); + + expect(query.query.bool.filter).toContainEqual({ + query_string: { analyze_wildcard: true, query: luceneQuery }, }); }); + + 
it('should not add query_string filter when query is empty', () => {
+        const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc' });
+
+        expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
+      });
+    });
+  });
+
+  describe('lucene query', () => {
+    it('should add query_string filter when query is not empty', () => {
+      const luceneQuery = 'foo';
+      const query = builder.build({ refId: 'A', query: luceneQuery });
+
+      expect(query.query.bool.filter).toContainEqual({
+        query_string: { analyze_wildcard: true, query: luceneQuery },
+      });
+    });
+
+    it('should not add query_string filter when query is empty', () => {
+      const query = builder.build({ refId: 'A' });
+
+      expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
+    });
+  });
+
+  describe('getLogsQuery', () => {
+    it('should return query with defaults', () => {
+      const query = builder.getLogsQuery({ refId: 'A' }, 500, null);
+
+      expect(query.size).toEqual(500);
+
+      const expectedQuery = {
+        bool: {
+          filter: [{ range: { '@timestamp': { gte: '$timeFrom', lte: '$timeTo', format: 'epoch_millis' } } }],
+        },
+      };
+      expect(query.query).toEqual(expectedQuery);
+
+      expect(query.sort).toEqual([
+        { '@timestamp': { order: 'desc', unmapped_type: 'boolean' } },
+        { _doc: { order: 'desc' } },
+      ]);
+
+      const expectedAggs: any = {
+        // FIXME: It's pretty weak to include this '1' in the test as it's not part of what we are testing here and
+        // might change as a result of unrelated changes
+        1: {
+          aggs: {},
+          date_histogram: {
+            extended_bounds: { max: '$timeTo', min: '$timeFrom' },
+            field: '@timestamp',
+            format: 'epoch_millis',
+            fixed_interval: '$__interval',
+            min_doc_count: 0,
+          },
+        },
+      };
+
+      expect(query.aggs).toMatchObject(expectedAggs);
+    });
+
+    describe('lucene query', () => {
+      it('should add query_string filter when query is not empty', () => {
+        const luceneQuery = 'foo';
+        const query = builder.getLogsQuery({ refId: 'A', query: luceneQuery }, 500, null);
+
+        expect(query.query.bool.filter).toContainEqual({
+          query_string: { analyze_wildcard: true, query: luceneQuery },
+        });
+      });
+
+      it('should not add query_string filter when query is empty', () => {
+        const query = builder.getLogsQuery({ refId: 'A' }, 500, null);
+
+        expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
+      });
+    });
+
+    it('with adhoc filters', () => {
+      // TODO: Types for AdHocFilters
+      const adhocFilters = [
+        { key: 'key1', operator: '=', value: 'value1' },
+        { key: 'key2', operator: '!=', value: 'value2' },
+        { key: 'key3', operator: '<', value: 'value3' },
+        { key: 'key4', operator: '>', value: 'value4' },
+        { key: 'key5', operator: '=~', value: 'value5' },
+        { key: 'key6', operator: '!~', value: 'value6' },
+      ];
+      const query = builder.getLogsQuery({ refId: 'A' }, 500, adhocFilters);
+
+      expect(query.query.bool.must[0].match_phrase['key1'].query).toBe('value1');
+      expect(query.query.bool.must_not[0].match_phrase['key2'].query).toBe('value2');
+      expect(query.query.bool.filter[1].range['key3'].lt).toBe('value3');
+      expect(query.query.bool.filter[2].range['key4'].gt).toBe('value4');
+      expect(query.query.bool.filter[3].regexp['key5']).toBe('value5');
+      expect(query.query.bool.filter[4].bool.must_not.regexp['key6']).toBe('value6');
     });
   });
 
   describe('Value casting for settings', () => {
     it('correctly casts values in moving_avg ', () => {
-      const query = builder7x.build({
+      const 
query = builder.build({ refId: 'A', metrics: [ { type: 'avg', id: '2' }, @@ -852,7 +823,7 @@ describe('ElasticQueryBuilder', () => { }); it('correctly casts values in serial_diff ', () => { - const query = builder7x.build({ + const query = builder.build({ refId: 'A', metrics: [ { type: 'avg', id: '2' }, @@ -954,28 +925,8 @@ describe('ElasticQueryBuilder', () => { expect(query.aggs['2'].date_histogram.field).toBe('@time'); }); - describe('interval parameter', () => { - it('should use fixed_interval', () => { - const query = builder77.build({ - refId: 'A', - metrics: [{ type: 'count', id: '1' }], - timeField: '@timestamp', - bucketAggs: [ - { - type: 'date_histogram', - id: '2', - field: '@time', - settings: { min_doc_count: '1', interval: '1d' }, - }, - ], - }); - - expect(query.aggs['2'].date_histogram.fixed_interval).toBe('1d'); - }); - }); - - it('should use fixed_interval if Elasticsearch version >=8.0.0', () => { - const query = builder8.build({ + it('should use fixed_interval', () => { + const query = builder.build({ refId: 'A', metrics: [{ type: 'count', id: '1' }], timeField: '@timestamp', diff --git a/public/app/plugins/datasource/elasticsearch/utils.test.ts b/public/app/plugins/datasource/elasticsearch/utils.test.ts index 22f396b3c18..d5b3ff298ff 100644 --- a/public/app/plugins/datasource/elasticsearch/utils.test.ts +++ b/public/app/plugins/datasource/elasticsearch/utils.test.ts @@ -1,4 +1,4 @@ -import { removeEmpty } from './utils'; +import { removeEmpty, coerceESVersion } from './utils'; describe('removeEmpty', () => { it('Should remove all empty', () => { @@ -33,4 +33,25 @@ describe('removeEmpty', () => { expect(removeEmpty(original)).toStrictEqual(expectedResult); }); + + it('should correctly coerce the version info', () => { + // valid string + expect(coerceESVersion('8.1.3')).toBe('8.1.3'); + + // invalid string + expect(coerceESVersion('haha')).toBe('5.0.0'); + + // known number + expect(coerceESVersion(2)).toBe('2.0.0'); + expect(coerceESVersion(5)).toBe('5.0.0'); + expect(coerceESVersion(56)).toBe('5.6.0'); + expect(coerceESVersion(60)).toBe('6.0.0'); + expect(coerceESVersion(70)).toBe('7.0.0'); + + // unknown number + expect(coerceESVersion(42)).toBe('5.0.0'); + + // undefined + expect(coerceESVersion(undefined)).toBe('5.0.0'); + }); }); diff --git a/public/app/plugins/datasource/elasticsearch/utils.ts b/public/app/plugins/datasource/elasticsearch/utils.ts index e83e936655d..dba0377475c 100644 --- a/public/app/plugins/datasource/elasticsearch/utils.ts +++ b/public/app/plugins/datasource/elasticsearch/utils.ts @@ -95,11 +95,11 @@ export const getScriptValue = (metric: MetricAggregationWithInlineScript) => (typeof metric.settings?.script === 'object' ? metric.settings?.script?.inline : metric.settings?.script) || ''; /** - * Coerces the a version string/number to a valid semver string. + * Coerces the version to a valid semver string. * It takes care of also converting from the legacy format (numeric) to the new one. * @param version */ -export const coerceESVersion = (version: string | number): string => { +export const coerceESVersion = (version: string | number | undefined): string => { if (typeof version === 'string') { return valid(version) || '5.0.0'; }
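The utils.ts hunk above cuts off after the string branch of `coerceESVersion`, but the new tests in utils.test.ts pin down the rest of its behavior: known legacy numeric codes map to fixed semver strings, while unknown numbers, invalid strings, and `undefined` all fall back to `'5.0.0'`. A minimal sketch of the complete function consistent with those expectations could look like the following; the numeric switch arms are inferred from the test cases, not copied from the source:

```ts
import { valid } from 'semver';

// Sketch of the full coerceESVersion. The string branch is verbatim from the
// hunk above; the numeric mapping is an assumption reconstructed from the
// legacy version codes the tests exercise (2, 5, 56, 60, 70).
export const coerceESVersion = (version: string | number | undefined): string => {
  if (typeof version === 'string') {
    // Valid semver strings pass through unchanged; anything else falls back.
    return valid(version) || '5.0.0';
  }

  // Legacy numeric format: map the known version codes to semver strings.
  switch (version) {
    case 2:
      return '2.0.0';
    case 56:
      return '5.6.0';
    case 60:
      return '6.0.0';
    case 70:
      return '7.0.0';
    case 5:
    default:
      // Unknown numbers and undefined coerce to the minimum supported version.
      return '5.0.0';
  }
};
```

Widening the parameter to `string | number | undefined` matches the new tests' `coerceESVersion(undefined)` case, so datasources saved without an explicit version still coerce to a usable default instead of throwing.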