Elasticsearch: remove already-disabled code (#52932)

* elastic: removed unused code from go-part

* elastic: removed unused code from js-part
This commit is contained in:
Gábor Farkas 2022-08-22 16:25:20 +02:00 committed by GitHub
parent 9db9ebce02
commit 35253a909d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 880 additions and 1431 deletions

View File

@ -77,7 +77,7 @@ exports[`no enzyme tests`] = {
"public/app/plugins/datasource/cloudwatch/components/ConfigEditor.test.tsx:4057721851": [
[1, 19, 13, "RegExp match", "2409514259"]
],
"public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx:3481855642": [
"public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx:4128034878": [
[0, 26, 13, "RegExp match", "2409514259"]
],
"public/app/plugins/datasource/influxdb/components/ConfigEditor.test.tsx:57753101": [

View File

@ -48,7 +48,6 @@ var newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *D
// Client represents a client which can interact with elasticsearch api
type Client interface {
GetVersion() *semver.Version
GetTimeField() string
GetMinInterval(queryInterval string) (time.Duration, error)
ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
@ -74,7 +73,6 @@ var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider
ctx: ctx,
httpClientProvider: httpClientProvider,
ds: ds,
version: ds.ESVersion,
timeField: ds.TimeField,
indices: indices,
timeRange: timeRange,
@ -85,17 +83,12 @@ type baseClientImpl struct {
ctx context.Context
httpClientProvider httpclient.Provider
ds *DatasourceInfo
version *semver.Version
timeField string
indices []string
timeRange backend.TimeRange
debugEnabled bool
}
// GetVersion returns the semver version stored on the client
// (populated from ds.ESVersion when the client is constructed).
func (c *baseClientImpl) GetVersion() *semver.Version {
return c.version
}
// GetTimeField returns the datasource's configured time field name
// (populated from ds.TimeField when the client is constructed).
func (c *baseClientImpl) GetTimeField() string {
return c.timeField
}
@ -281,20 +274,6 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
interval: searchReq.Interval,
}
if c.version.Major() < 5 {
mr.header["search_type"] = "count"
} else {
allowedVersionRange, _ := semver.NewConstraint(">=5.6.0, <7.0.0")
if allowedVersionRange.Check(c.version) {
maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
if maxConcurrentShardRequests == 0 {
maxConcurrentShardRequests = 256
}
mr.header["max_concurrent_shard_requests"] = maxConcurrentShardRequests
}
}
multiRequests = append(multiRequests, &mr)
}
@ -304,17 +283,13 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
func (c *baseClientImpl) getMultiSearchQueryParameters() string {
var qs []string
if c.version.Major() >= 7 {
maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
if maxConcurrentShardRequests == 0 {
maxConcurrentShardRequests = 5
}
qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", maxConcurrentShardRequests))
maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
if maxConcurrentShardRequests == 0 {
maxConcurrentShardRequests = 5
}
qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", maxConcurrentShardRequests))
allowedFrozenIndicesVersionRange, _ := semver.NewConstraint(">=6.6.0")
if (allowedFrozenIndicesVersionRange.Check(c.version)) && c.ds.IncludeFrozen && c.ds.XPack {
if c.ds.IncludeFrozen && c.ds.XPack {
qs = append(qs, "ignore_throttled=false")
}
@ -322,7 +297,7 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
}
func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
return NewMultiSearchRequestBuilder(c.GetVersion())
return NewMultiSearchRequestBuilder()
}
func (c *baseClientImpl) EnableDebug() {

View File

@ -18,247 +18,8 @@ import (
"github.com/stretchr/testify/require"
)
func TestNewClient(t *testing.T) {
t.Run("When using legacy version numbers", func(t *testing.T) {
t.Run("When version 2 should return v2 client", func(t *testing.T) {
version, err := semver.NewVersion("2.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "2.0.0", c.GetVersion().String())
})
t.Run("When version 5 should return v5 client", func(t *testing.T) {
version, err := semver.NewVersion("5.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "5.0.0", c.GetVersion().String())
})
t.Run("When version 56 should return v5.6 client", func(t *testing.T) {
version, err := semver.NewVersion("5.6.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "5.6.0", c.GetVersion().String())
})
t.Run("When version 60 should return v6.0 client", func(t *testing.T) {
version, err := semver.NewVersion("6.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "6.0.0", c.GetVersion().String())
})
t.Run("When version 70 should return v7.0 client", func(t *testing.T) {
version, err := semver.NewVersion("7.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "7.0.0", c.GetVersion().String())
})
})
t.Run("When version is a valid semver string should create a client", func(t *testing.T) {
version, err := semver.NewVersion("7.2.4")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, version.String(), c.GetVersion().String())
})
}
func TestClient_ExecuteMultisearch(t *testing.T) {
version, err := semver.NewVersion("2.0.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v2.x client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
{
"hits": { "hits": [], "max_score": 0, "total": 4656 },
"status": 200
}
]
}`
ms, err := createMultisearchForTest(t, sc.client)
require.NoError(t, err)
res, err := sc.client.ExecuteMultisearch(ms)
require.NoError(t, err)
require.NotNil(t, sc.request)
assert.Equal(t, http.MethodPost, sc.request.Method)
assert.Equal(t, "/_msearch", sc.request.URL.Path)
require.NotNil(t, sc.requestBody)
headerBytes, err := sc.requestBody.ReadBytes('\n')
require.NoError(t, err)
bodyBytes := sc.requestBody.Bytes()
jHeader, err := simplejson.NewJson(headerBytes)
require.NoError(t, err)
jBody, err := simplejson.NewJson(bodyBytes)
require.NoError(t, err)
assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
assert.Equal(t, "count", jHeader.Get("search_type").MustString())
assert.Empty(t, jHeader.Get("max_concurrent_shard_requests"))
assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
assert.Equal(t, 200, res.Status)
require.Len(t, res.Responses, 1)
})
version, err = semver.NewVersion("5.0.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v5.x client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
MaxConcurrentShardRequests: 100,
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
{
"hits": { "hits": [], "max_score": 0, "total": 4656 },
"status": 200
}
]
}`
ms, err := createMultisearchForTest(t, sc.client)
require.NoError(t, err)
res, err := sc.client.ExecuteMultisearch(ms)
require.NoError(t, err)
require.NotNil(t, sc.request)
assert.Equal(t, http.MethodPost, sc.request.Method)
assert.Equal(t, "/_msearch", sc.request.URL.Path)
require.NotNil(t, sc.requestBody)
headerBytes, err := sc.requestBody.ReadBytes('\n')
require.NoError(t, err)
bodyBytes := sc.requestBody.Bytes()
jHeader, err := simplejson.NewJson(headerBytes)
require.NoError(t, err)
jBody, err := simplejson.NewJson(bodyBytes)
require.NoError(t, err)
assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
assert.Equal(t, "query_then_fetch", jHeader.Get("search_type").MustString())
assert.Empty(t, jHeader.Get("max_concurrent_shard_requests"))
assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
assert.Equal(t, 200, res.Status)
require.Len(t, res.Responses, 1)
})
version, err = semver.NewVersion("5.6.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v5.6 client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
MaxConcurrentShardRequests: 100,
IncludeFrozen: true,
XPack: true,
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
{
"hits": { "hits": [], "max_score": 0, "total": 4656 },
"status": 200
}
]
}`
ms, err := createMultisearchForTest(t, sc.client)
require.NoError(t, err)
res, err := sc.client.ExecuteMultisearch(ms)
require.NoError(t, err)
require.NotNil(t, sc.request)
assert.Equal(t, http.MethodPost, sc.request.Method)
assert.Equal(t, "/_msearch", sc.request.URL.Path)
assert.NotContains(t, sc.request.URL.RawQuery, "ignore_throttled=")
require.NotNil(t, sc.requestBody)
headerBytes, err := sc.requestBody.ReadBytes('\n')
require.NoError(t, err)
bodyBytes := sc.requestBody.Bytes()
jHeader, err := simplejson.NewJson(headerBytes)
require.NoError(t, err)
jBody, err := simplejson.NewJson(bodyBytes)
require.NoError(t, err)
assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
assert.Equal(t, "query_then_fetch", jHeader.Get("search_type").MustString())
assert.Equal(t, 100, jHeader.Get("max_concurrent_shard_requests").MustInt())
assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
assert.Equal(t, 200, res.Status)
require.Len(t, res.Responses, 1)
})
version, err = semver.NewVersion("7.0.0")
version, err := semver.NewVersion("8.0.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v7.0 client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",

View File

@ -3,13 +3,11 @@ package es
import (
"strings"
"github.com/Masterminds/semver"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
// SearchRequestBuilder represents a builder which can build a search request
type SearchRequestBuilder struct {
version *semver.Version
interval intervalv2.Interval
index string
size int
@ -20,9 +18,8 @@ type SearchRequestBuilder struct {
}
// NewSearchRequestBuilder create a new search request builder
func NewSearchRequestBuilder(version *semver.Version, interval intervalv2.Interval) *SearchRequestBuilder {
func NewSearchRequestBuilder(interval intervalv2.Interval) *SearchRequestBuilder {
builder := &SearchRequestBuilder{
version: version,
interval: interval,
sort: make(map[string]interface{}),
customProps: make(map[string]interface{}),
@ -87,13 +84,7 @@ func (b *SearchRequestBuilder) SortDesc(field, unmappedType string) *SearchReque
// AddDocValueField adds a doc value field to the search request
func (b *SearchRequestBuilder) AddDocValueField(field string) *SearchRequestBuilder {
// fields field not supported on version >= 5
if b.version.Major() < 5 {
b.customProps["fields"] = []string{"*", "_source"}
b.customProps["fielddata_fields"] = []string{field}
} else {
b.customProps["docvalue_fields"] = []string{field}
}
b.customProps["docvalue_fields"] = []string{field}
b.customProps["script_fields"] = make(map[string]interface{})
@ -110,27 +101,24 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder {
// Agg initiate and returns a new aggregation builder
func (b *SearchRequestBuilder) Agg() AggBuilder {
aggBuilder := newAggBuilder(b.version)
aggBuilder := newAggBuilder()
b.aggBuilders = append(b.aggBuilders, aggBuilder)
return aggBuilder
}
// MultiSearchRequestBuilder represents a builder which can build a multi search request
type MultiSearchRequestBuilder struct {
version *semver.Version
requestBuilders []*SearchRequestBuilder
}
// NewMultiSearchRequestBuilder creates a new multi search request builder
func NewMultiSearchRequestBuilder(version *semver.Version) *MultiSearchRequestBuilder {
return &MultiSearchRequestBuilder{
version: version,
}
func NewMultiSearchRequestBuilder() *MultiSearchRequestBuilder {
return &MultiSearchRequestBuilder{}
}
// Search initiates and returns a new search request builder
func (m *MultiSearchRequestBuilder) Search(interval intervalv2.Interval) *SearchRequestBuilder {
b := NewSearchRequestBuilder(m.version, interval)
b := NewSearchRequestBuilder(interval)
m.requestBuilders = append(m.requestBuilders, b)
return b
}
@ -273,13 +261,11 @@ type AggBuilder interface {
type aggBuilderImpl struct {
AggBuilder
aggDefs []*aggDef
version *semver.Version
}
func newAggBuilder(version *semver.Version) *aggBuilderImpl {
func newAggBuilder() *aggBuilderImpl {
return &aggBuilderImpl{
aggDefs: make([]*aggDef, 0),
version: version,
}
}
@ -317,7 +303,7 @@ func (b *aggBuilderImpl) Histogram(key, field string, fn func(a *HistogramAgg, b
})
if fn != nil {
builder := newAggBuilder(b.version)
builder := newAggBuilder()
aggDef.builders = append(aggDef.builders, builder)
fn(innerAgg, builder)
}
@ -337,7 +323,7 @@ func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogr
})
if fn != nil {
builder := newAggBuilder(b.version)
builder := newAggBuilder()
aggDef.builders = append(aggDef.builders, builder)
fn(innerAgg, builder)
}
@ -360,12 +346,12 @@ func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b
})
if fn != nil {
builder := newAggBuilder(b.version)
builder := newAggBuilder()
aggDef.builders = append(aggDef.builders, builder)
fn(innerAgg, builder)
}
if b.version.Major() >= 6 && len(innerAgg.Order) > 0 {
if len(innerAgg.Order) > 0 {
if orderBy, exists := innerAgg.Order[termsOrderTerm]; exists {
innerAgg.Order["_key"] = orderBy
delete(innerAgg.Order, termsOrderTerm)
@ -386,7 +372,7 @@ func (b *aggBuilderImpl) Filters(key string, fn func(a *FiltersAggregation, b Ag
Aggregation: innerAgg,
})
if fn != nil {
builder := newAggBuilder(b.version)
builder := newAggBuilder()
aggDef.builders = append(aggDef.builders, builder)
fn(innerAgg, builder)
}
@ -407,7 +393,7 @@ func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAg
})
if fn != nil {
builder := newAggBuilder(b.version)
builder := newAggBuilder()
aggDef.builders = append(aggDef.builders, builder)
fn(innerAgg, builder)
}

View File

@ -5,7 +5,6 @@ import (
"testing"
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
@ -16,8 +15,7 @@ func TestSearchRequest(t *testing.T) {
timeField := "@timestamp"
setup := func() *SearchRequestBuilder {
version5, _ := semver.NewVersion("5.0.0")
return NewSearchRequestBuilder(version5, intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
return NewSearchRequestBuilder(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
}
t.Run("When building search request", func(t *testing.T) {
@ -398,65 +396,11 @@ func TestSearchRequest(t *testing.T) {
})
})
})
t.Run("Given new search request builder for es version 2", func(t *testing.T) {
version2, _ := semver.NewVersion("2.0.0")
b := NewSearchRequestBuilder(version2, intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
t.Run("When adding doc value field", func(t *testing.T) {
b.AddDocValueField(timeField)
t.Run("should set correct props", func(t *testing.T) {
fields, ok := b.customProps["fields"].([]string)
require.True(t, ok)
require.Equal(t, 2, len(fields))
require.Equal(t, "*", fields[0])
require.Equal(t, "_source", fields[1])
scriptFields, ok := b.customProps["script_fields"].(map[string]interface{})
require.True(t, ok)
require.Equal(t, 0, len(scriptFields))
fieldDataFields, ok := b.customProps["fielddata_fields"].([]string)
require.True(t, ok)
require.Equal(t, 1, len(fieldDataFields))
require.Equal(t, timeField, fieldDataFields[0])
})
t.Run("When building search request", func(t *testing.T) {
sr, err := b.Build()
require.Nil(t, err)
t.Run("When marshal to JSON should generate correct json", func(t *testing.T) {
body, err := json.Marshal(sr)
require.Nil(t, err)
json, err := simplejson.NewJson(body)
require.Nil(t, err)
scriptFields, err := json.Get("script_fields").Map()
require.Nil(t, err)
require.Equal(t, 0, len(scriptFields))
fields, err := json.Get("fields").StringArray()
require.Nil(t, err)
require.Equal(t, 2, len(fields))
require.Equal(t, "*", fields[0])
require.Equal(t, "_source", fields[1])
fieldDataFields, err := json.Get("fielddata_fields").StringArray()
require.Nil(t, err)
require.Equal(t, 1, len(fieldDataFields))
require.Equal(t, timeField, fieldDataFields[0])
})
})
})
})
}
func TestMultiSearchRequest(t *testing.T) {
t.Run("When adding one search request", func(t *testing.T) {
version2, _ := semver.NewVersion("2.0.0")
b := NewMultiSearchRequestBuilder(version2)
b := NewMultiSearchRequestBuilder()
b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
t.Run("When building search request should contain one search request", func(t *testing.T) {
@ -467,8 +411,7 @@ func TestMultiSearchRequest(t *testing.T) {
})
t.Run("When adding two search requests", func(t *testing.T) {
version2, _ := semver.NewVersion("2.0.0")
b := NewMultiSearchRequestBuilder(version2)
b := NewMultiSearchRequestBuilder()
b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})

View File

@ -6,7 +6,6 @@ import (
"strconv"
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
@ -144,7 +143,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
}
aggBuilder.Pipeline(m.ID, m.Type, bucketPaths, func(a *es.PipelineAggregation) {
a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
a.Settings = m.generateSettingsForDSL()
})
} else {
continue
@ -165,7 +164,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
}
aggBuilder.Pipeline(m.ID, m.Type, bucketPath, func(a *es.PipelineAggregation) {
a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
a.Settings = m.generateSettingsForDSL()
})
}
} else {
@ -174,7 +173,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
}
} else {
aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
a.Settings = m.generateSettingsForDSL()
})
}
}
@ -199,7 +198,7 @@ func setIntPath(settings *simplejson.Json, path ...string) {
}
// Casts values to float when required by Elastic's query DSL
func (metricAggregation MetricAgg) generateSettingsForDSL(version *semver.Version) map[string]interface{} {
func (metricAggregation MetricAgg) generateSettingsForDSL() map[string]interface{} {
switch metricAggregation.Type {
case "moving_avg":
setFloatPath(metricAggregation.Settings, "window")
@ -219,14 +218,8 @@ func (metricAggregation MetricAgg) generateSettingsForDSL(version *semver.Versio
scriptValue, err = metricAggregation.Settings.GetPath("script", "inline").String()
}
constraint, _ := semver.NewConstraint(">=5.6.0")
if err == nil {
if constraint.Check(version) {
metricAggregation.Settings.SetPath([]string{"script"}, scriptValue)
} else {
metricAggregation.Settings.SetPath([]string{"script"}, map[string]interface{}{"inline": scriptValue})
}
metricAggregation.Settings.SetPath([]string{"script"}, scriptValue)
}
}

View File

@ -5,7 +5,6 @@ import (
"testing"
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
@ -20,8 +19,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
toMs := to.UnixNano() / int64(time.Millisecond)
t.Run("Test execute time series query", func(t *testing.T) {
t.Run("With defaults on es 2", func(t *testing.T) {
c := newFakeClient("2.0.0")
t.Run("With defaults", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
@ -41,23 +40,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, dateHistogramAgg.ExtendedBounds.Max, toMs)
})
t.Run("With defaults on es 5", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
"metrics": [{"type": "count", "id": "0" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
require.Equal(t, sr.Query.Bool.Filters[0].(*es.RangeFilter).Key, c.timeField)
require.Equal(t, sr.Aggs[0].Key, "2")
require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Min, fromMs)
require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Max, toMs)
})
t.Run("With multiple bucket aggs", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -79,7 +63,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With select field", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -99,7 +83,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With term agg and order by metric agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -129,7 +113,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With term agg and order by count metric agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -153,7 +137,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -178,7 +162,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -203,34 +187,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With term agg and order by term", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "terms",
"field": "@host",
"id": "2",
"settings": { "size": "5", "order": "asc", "orderBy": "_term" }
},
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
],
"metrics": [
{"type": "count", "id": "1" },
{"type": "avg", "field": "@value", "id": "5" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Order["_term"], "asc")
})
t.Run("With term agg and order by term with es6.x", func(t *testing.T) {
c := newFakeClient("6.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -257,7 +214,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With metric percentiles", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -289,39 +246,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, percents[3], "4")
})
t.Run("With filters aggs on es 2", func(t *testing.T) {
c := newFakeClient("2.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"id": "2",
"type": "filters",
"settings": {
"filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ]
}
},
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
filtersAgg := sr.Aggs[0]
require.Equal(t, filtersAgg.Key, "2")
require.Equal(t, filtersAgg.Aggregation.Type, "filters")
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
require.Equal(t, fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, "@metric:cpu")
require.Equal(t, fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, "@metric:logins.count")
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, dateHistogramAgg.Key, "4")
require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
})
t.Run("With filters aggs on es 5", func(t *testing.T) {
c := newFakeClient("5.0.0")
t.Run("With filters aggs", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -352,7 +278,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With raw document metric", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
@ -365,7 +291,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With raw document metric size set", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
@ -378,7 +304,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With date histogram agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -403,7 +329,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, hAgg.MinDocCount, 2)
t.Run("Should not include time_zone when timeZone is utc", func(t *testing.T) {
c := newFakeClient("7.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -426,7 +352,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("Should include time_zone when timeZone is not utc", func(t *testing.T) {
c := newFakeClient("7.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -450,7 +376,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With histogram agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -477,7 +403,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With geo hash grid agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -502,7 +428,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With moving average", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -540,7 +466,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With moving average doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -572,7 +498,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With broken moving average", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -608,7 +534,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With cumulative sum", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -646,7 +572,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With cumulative sum doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -678,7 +604,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With broken cumulative sum", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -714,7 +640,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With derivative", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -743,7 +669,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With derivative doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -772,7 +698,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With serial_diff", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -801,7 +727,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With serial_diff doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -830,7 +756,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With bucket_script", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -867,7 +793,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
})
t.Run("With bucket_script doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -907,7 +833,7 @@ func TestSettingsCasting(t *testing.T) {
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
t.Run("Correctly transforms moving_average settings", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -951,7 +877,7 @@ func TestSettingsCasting(t *testing.T) {
})
t.Run("Correctly transforms serial_diff settings", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -980,7 +906,7 @@ func TestSettingsCasting(t *testing.T) {
t.Run("Date Histogram Settings", func(t *testing.T) {
t.Run("Correctly transforms date_histogram settings", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -1015,7 +941,7 @@ func TestSettingsCasting(t *testing.T) {
})
t.Run("Correctly uses already int min_doc_count", func(t *testing.T) {
c := newFakeClient("5.0.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -1051,7 +977,7 @@ func TestSettingsCasting(t *testing.T) {
t.Run("interval parameter", func(t *testing.T) {
t.Run("Uses fixed_interval", func(t *testing.T) {
c := newFakeClient("7.10.0")
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -1079,45 +1005,8 @@ func TestSettingsCasting(t *testing.T) {
})
t.Run("Inline Script", func(t *testing.T) {
t.Run("Correctly handles scripts for ES < 5.6", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
],
"metrics": [
{
"id": "1",
"type": "avg",
"settings": {
"script": "my_script"
}
},
{
"id": "3",
"type": "avg",
"settings": {
"script": {
"inline": "my_script"
}
}
}
]
}`, from, to, 15*time.Second)
assert.Nil(t, err)
sr := c.multisearchRequests[0].Requests[0]
newFormatAggSettings := sr.Aggs[0].Aggregation.Aggs[0].Aggregation.Aggregation.(*es.MetricAggregation).Settings
oldFormatAggSettings := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggregation.(*es.MetricAggregation).Settings
assert.Equal(t, map[string]interface{}{"inline": "my_script"}, newFormatAggSettings["script"])
assert.Equal(t, map[string]interface{}{"inline": "my_script"}, oldFormatAggSettings["script"])
})
t.Run("Correctly handles scripts for ES >= 5.6", func(t *testing.T) {
c := newFakeClient("5.6.0")
t.Run("Correctly handles scripts", func(t *testing.T) {
c := newFakeClient()
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
@ -1156,7 +1045,6 @@ func TestSettingsCasting(t *testing.T) {
}
type fakeClient struct {
version *semver.Version
timeField string
multiSearchResponse *es.MultiSearchResponse
multiSearchError error
@ -1164,10 +1052,8 @@ type fakeClient struct {
multisearchRequests []*es.MultiSearchRequest
}
func newFakeClient(versionString string) *fakeClient {
version, _ := semver.NewVersion(versionString)
func newFakeClient() *fakeClient {
return &fakeClient{
version: version,
timeField: "@timestamp",
multisearchRequests: make([]*es.MultiSearchRequest, 0),
multiSearchResponse: &es.MultiSearchResponse{},
@ -1176,10 +1062,6 @@ func newFakeClient(versionString string) *fakeClient {
func (c *fakeClient) EnableDebug() {}
func (c *fakeClient) GetVersion() *semver.Version {
return c.version
}
func (c *fakeClient) GetTimeField() string {
return c.timeField
}
@ -1194,7 +1076,7 @@ func (c *fakeClient) ExecuteMultisearch(r *es.MultiSearchRequest) (*es.MultiSear
}
func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder {
c.builder = es.NewMultiSearchRequestBuilder(c.version)
c.builder = es.NewMultiSearchRequestBuilder()
return c.builder
}

View File

@ -87,7 +87,7 @@ describe('Metric Editor', () => {
});
describe('Top Metrics Aggregation', () => {
const setupTopMetricsScreen = (esVersion: string, xpack: boolean) => {
const setupTopMetricsScreen = (xpack: boolean) => {
const query: ElasticsearchQuery = {
refId: 'A',
query: '',
@ -97,6 +97,8 @@ describe('Metric Editor', () => {
const getFields: ElasticDatasource['getFields'] = jest.fn(() => from([[]]));
const esVersion = '7.7.0';
const wrapper = ({ children }: { children?: ReactNode }) => (
<ElasticsearchProvider
datasource={{ getFields, esVersion, xpack } as ElasticDatasource}
@ -116,18 +118,13 @@ describe('Metric Editor', () => {
});
};
it('Should include top metrics aggregation when esVersion is 77 and X-Pack is enabled', () => {
setupTopMetricsScreen('7.7.0', true);
it('Should include top metrics aggregation when X-Pack is enabled', () => {
setupTopMetricsScreen(true);
expect(screen.getByText('Top Metrics')).toBeInTheDocument();
});
it('Should NOT include top metrics aggregation where esVersion is 77 and X-Pack is disabled', () => {
setupTopMetricsScreen('7.7.0', false);
expect(screen.queryByText('Top Metrics')).toBe(null);
});
it('Should NOT include top metrics aggregation when esVersion is 70 and X-Pack is enabled', () => {
setupTopMetricsScreen('7.0.0', true);
it('Should NOT include top metrics aggregation where X-Pack is disabled', () => {
setupTopMetricsScreen(false);
expect(screen.queryByText('Top Metrics')).toBe(null);
});
});

View File

@ -113,7 +113,7 @@ export const metricAggregationConfig: MetricsConfiguration = {
label: 'Moving Average',
requiresField: true,
isPipelineAgg: true,
versionRange: '>=2.0.0 <8.0.0',
versionRange: '<8.0.0',
supportsMissing: false,
supportsMultipleBucketPaths: false,
hasSettings: true,
@ -136,14 +136,12 @@ export const metricAggregationConfig: MetricsConfiguration = {
supportsMissing: false,
hasMeta: false,
hasSettings: true,
versionRange: '>=7.0.0',
defaults: {},
},
derivative: {
label: 'Derivative',
requiresField: true,
isPipelineAgg: true,
versionRange: '>=2.0.0',
supportsMissing: false,
supportsMultipleBucketPaths: false,
hasSettings: true,
@ -155,7 +153,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
label: 'Serial Difference',
requiresField: true,
isPipelineAgg: true,
versionRange: '>=2.0.0',
supportsMissing: false,
supportsMultipleBucketPaths: false,
hasSettings: true,
@ -171,7 +168,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
label: 'Cumulative Sum',
requiresField: true,
isPipelineAgg: true,
versionRange: '>=2.0.0',
supportsMissing: false,
supportsMultipleBucketPaths: false,
hasSettings: true,
@ -185,7 +181,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
isPipelineAgg: true,
supportsMissing: false,
supportsMultipleBucketPaths: true,
versionRange: '>=2.0.0',
hasSettings: true,
supportsInlineScript: false,
hasMeta: false,
@ -250,7 +245,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
supportsMultipleBucketPaths: false,
hasSettings: true,
supportsInlineScript: false,
versionRange: '>=7.7.0',
hasMeta: false,
defaults: {
settings: {
@ -261,7 +255,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
rate: {
label: 'Rate',
xpack: true,
versionRange: '>=7.10.0',
requiresField: true,
isPipelineAgg: false,
supportsMissing: false,

View File

@ -35,7 +35,7 @@ describe('ConfigEditor', () => {
onOptionsChange={(options) => {
expect(options.jsonData.esVersion).toBe('5.0.0');
expect(options.jsonData.timeField).toBe('@timestamp');
expect(options.jsonData.maxConcurrentShardRequests).toBe(256);
expect(options.jsonData.maxConcurrentShardRequests).toBe(5);
}}
options={options}
/>

View File

@ -7,15 +7,10 @@ import { createDefaultConfigOptions } from './mocks';
describe('ElasticDetails', () => {
describe('Max concurrent Shard Requests', () => {
it('should render "Max concurrent Shard Requests" if version >= 5.6.0', () => {
render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '5.6.0' })} />);
it('should render "Max concurrent Shard Requests" ', () => {
render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '8.2.0' })} />);
expect(screen.getByLabelText('Max concurrent Shard Requests')).toBeInTheDocument();
});
it('should not render "Max concurrent Shard Requests" if version < 5.6.0', () => {
render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '5.0.0' })} />);
expect(screen.queryByLabelText('Max concurrent Shard Requests')).not.toBeInTheDocument();
});
});
it('should change database on interval change when not set explicitly', async () => {
@ -51,31 +46,29 @@ describe('ElasticDetails', () => {
});
describe('version change', () => {
const testCases = [{ version: '7.10+', maxConcurrentShardRequests: 6, expectedMaxConcurrentShardRequests: 6 }];
const tc = { version: '7.10+', maxConcurrentShardRequests: 6, expectedMaxConcurrentShardRequests: 6 };
testCases.forEach((tc) => {
const onChangeMock = jest.fn();
it(`sets maxConcurrentShardRequests=${tc.expectedMaxConcurrentShardRequests} if version=${tc.version},`, async () => {
render(
<ElasticDetails
onChange={onChangeMock}
value={createDefaultConfigOptions({
maxConcurrentShardRequests: tc.maxConcurrentShardRequests,
esVersion: '7.0.0',
})}
/>
);
const onChangeMock = jest.fn();
it(`sets maxConcurrentShardRequests=${tc.expectedMaxConcurrentShardRequests} if version=${tc.version},`, async () => {
render(
<ElasticDetails
onChange={onChangeMock}
value={createDefaultConfigOptions({
maxConcurrentShardRequests: tc.maxConcurrentShardRequests,
esVersion: '7.0.0',
})}
/>
);
const selectEl = screen.getByLabelText('ElasticSearch version');
const selectEl = screen.getByLabelText('ElasticSearch version');
await selectEvent.select(selectEl, tc.version, { container: document.body });
await selectEvent.select(selectEl, tc.version, { container: document.body });
expect(onChangeMock).toHaveBeenCalledWith(
expect.objectContaining({
jsonData: expect.objectContaining({ maxConcurrentShardRequests: tc.expectedMaxConcurrentShardRequests }),
})
);
});
expect(onChangeMock).toHaveBeenCalledWith(
expect.objectContaining({
jsonData: expect.objectContaining({ maxConcurrentShardRequests: tc.expectedMaxConcurrentShardRequests }),
})
);
});
});
});

View File

@ -1,5 +1,5 @@
import React from 'react';
import { gte, lt, valid } from 'semver';
import { valid } from 'semver';
import { DataSourceSettings, SelectableValue } from '@grafana/data';
import { FieldSet, InlineField, Input, Select, InlineSwitch } from '@grafana/ui';
@ -82,8 +82,7 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
options={[customOption, ...esVersions].filter(isTruthy)}
onChange={(option) => {
const maxConcurrentShardRequests = getMaxConcurrenShardRequestOrDefault(
value.jsonData.maxConcurrentShardRequests,
option.value!
value.jsonData.maxConcurrentShardRequests
);
onChange({
...value,
@ -99,16 +98,14 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
/>
</InlineField>
{gte(value.jsonData.esVersion, '5.6.0') && (
<InlineField label="Max concurrent Shard Requests" labelWidth={26}>
<Input
id="es_config_shardRequests"
value={value.jsonData.maxConcurrentShardRequests || ''}
onChange={jsonDataChangeHandler('maxConcurrentShardRequests', value, onChange)}
width={24}
/>
</InlineField>
)}
<InlineField label="Max concurrent Shard Requests" labelWidth={26}>
<Input
id="es_config_shardRequests"
value={value.jsonData.maxConcurrentShardRequests || ''}
onChange={jsonDataChangeHandler('maxConcurrentShardRequests', value, onChange)}
width={24}
/>
</InlineField>
<InlineField
label="Min time interval"
@ -139,7 +136,7 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
/>
</InlineField>
{gte(value.jsonData.esVersion, '6.6.0') && value.jsonData.xpack && (
{value.jsonData.xpack && (
<InlineField label="Include Frozen Indices" labelWidth={26}>
<InlineSwitch
id="es_config_frozenIndices"
@ -224,18 +221,14 @@ const intervalHandler =
}
};
function getMaxConcurrenShardRequestOrDefault(maxConcurrentShardRequests: number | undefined, version: string): number {
if (maxConcurrentShardRequests === 5 && lt(version, '7.0.0')) {
return 256;
}
if (maxConcurrentShardRequests === 256 && gte(version, '7.0.0')) {
function getMaxConcurrenShardRequestOrDefault(maxConcurrentShardRequests: number | undefined): number {
if (maxConcurrentShardRequests === 256) {
return 5;
}
return maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(version);
return maxConcurrentShardRequests || defaultMaxConcurrentShardRequests();
}
export function defaultMaxConcurrentShardRequests(version: string) {
return gte(version, '7.0.0') ? 5 : 256;
export function defaultMaxConcurrentShardRequests() {
return 5;
}

View File

@ -18,8 +18,7 @@ export const coerceOptions = (
...options.jsonData,
timeField: options.jsonData.timeField || '@timestamp',
esVersion,
maxConcurrentShardRequests:
options.jsonData.maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(esVersion),
maxConcurrentShardRequests: options.jsonData.maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(),
logMessageField: options.jsonData.logMessageField || '',
logLevelField: options.jsonData.logLevelField || '',
includeFrozen: options.jsonData.includeFrozen ?? false,

View File

@ -1,7 +1,6 @@
import { cloneDeep, find, first as _first, isNumber, isObject, isString, map as _map } from 'lodash';
import { generate, lastValueFrom, Observable, of, throwError } from 'rxjs';
import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty } from 'rxjs/operators';
import { gte, lt, satisfies } from 'semver';
import {
DataFrame,
@ -113,7 +112,6 @@ export class ElasticDatasource
this.maxConcurrentShardRequests = settingsData.maxConcurrentShardRequests;
this.queryBuilder = new ElasticQueryBuilder({
timeField: this.timeField,
esVersion: this.esVersion,
});
this.logMessageField = settingsData.logMessageField || '';
this.logLevelField = settingsData.logLevelField || '';
@ -297,11 +295,6 @@ export class ElasticDatasource
size: 10000,
};
// fields field not supported on ES 5.x
if (lt(this.esVersion, '5.0.0')) {
data['fields'] = [timeField, '_source'];
}
const header: any = {
search_type: 'query_then_fetch',
ignore_unavailable: true,
@ -461,10 +454,6 @@ export class ElasticDatasource
index: this.indexPattern.getIndexList(timeFrom, timeTo),
};
if (satisfies(this.esVersion, '>=5.6.0 <7.0.0')) {
queryHeader['max_concurrent_shard_requests'] = this.maxConcurrentShardRequests;
}
return JSON.stringify(queryHeader);
}
@ -519,13 +508,8 @@ export class ElasticDatasource
return text;
}
/**
* This method checks to ensure the user is running a 5.0+ cluster. This is
* necessary bacause the query being used for the getLogRowContext relies on the
* search_after feature.
*/
showContextToggle(): boolean {
return gte(this.esVersion, '5.0.0');
return true;
}
getLogRowContext = async (row: LogRowModel, options?: RowContextOptions): Promise<{ data: DataFrame[] }> => {
@ -687,7 +671,7 @@ export class ElasticDatasource
const esQuery = JSON.stringify(queryObj);
const searchType = queryObj.size === 0 && lt(this.esVersion, '5.0.0') ? 'count' : 'query_then_fetch';
const searchType = 'query_then_fetch';
const header = this.getQueryHeader(searchType, options.range.from, options.range.to);
payload += header + '\n';
@ -804,15 +788,8 @@ export class ElasticDatasource
if (index && index.mappings) {
const mappings = index.mappings;
if (lt(this.esVersion, '7.0.0')) {
for (const typeName in mappings) {
const properties = mappings[typeName].properties;
getFieldsRecursively(properties);
}
} else {
const properties = mappings.properties;
getFieldsRecursively(properties);
}
const properties = mappings.properties;
getFieldsRecursively(properties);
}
}
@ -825,7 +802,7 @@ export class ElasticDatasource
}
getTerms(queryDef: TermsQuery, range = getDefaultTimeRange()): Observable<MetricFindValue[]> {
const searchType = gte(this.esVersion, '5.0.0') ? 'query_then_fetch' : 'count';
const searchType = 'query_then_fetch';
const header = this.getQueryHeader(searchType, range.from, range.to);
let esQuery = JSON.stringify(this.queryBuilder.getTermsQuery(queryDef));
@ -855,11 +832,11 @@ export class ElasticDatasource
getMultiSearchUrl() {
const searchParams = new URLSearchParams();
if (gte(this.esVersion, '7.0.0') && this.maxConcurrentShardRequests) {
if (this.maxConcurrentShardRequests) {
searchParams.append('max_concurrent_shard_requests', `${this.maxConcurrentShardRequests}`);
}
if (gte(this.esVersion, '6.6.0') && this.xpack && this.includeFrozen) {
if (this.xpack && this.includeFrozen) {
searchParams.append('ignore_throttled', 'false');
}

View File

@ -1,5 +1,3 @@
import { gte, lt } from 'semver';
import { InternalTimeZones } from '@grafana/data';
import {
@ -23,11 +21,9 @@ import { convertOrderByToMetricId, getScriptValue } from './utils';
export class ElasticQueryBuilder {
timeField: string;
esVersion: string;
constructor(options: { timeField: string; esVersion: string }) {
constructor(options: { timeField: string }) {
this.timeField = options.timeField;
this.esVersion = options.esVersion;
}
getRangeFilter() {
@ -54,7 +50,7 @@ export class ElasticQueryBuilder {
if (aggDef.settings.orderBy !== void 0) {
queryNode.terms.order = {};
if (aggDef.settings.orderBy === '_term' && gte(this.esVersion, '6.0.0')) {
if (aggDef.settings.orderBy === '_term') {
queryNode.terms.order['_key'] = aggDef.settings.order;
} else {
queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;
@ -153,11 +149,6 @@ export class ElasticQueryBuilder {
},
];
// fields field not supported on ES 5.x
if (lt(this.esVersion, '5.0.0')) {
query.fields = ['*', '_source'];
}
query.script_fields = {};
return query;
}
@ -415,13 +406,7 @@ export class ElasticQueryBuilder {
}
private buildScript(script: string) {
if (gte(this.esVersion, '5.6.0')) {
return script;
}
return {
inline: script,
};
return script;
}
private toNumber(stringValue: unknown): unknown | number {
@ -480,7 +465,7 @@ export class ElasticQueryBuilder {
switch (orderBy) {
case 'key':
case 'term':
const keyname = gte(this.esVersion, '6.0.0') ? '_key' : '_term';
const keyname = '_key';
query.aggs['1'].terms.order[keyname] = order;
break;
case 'doc_count':

View File

@ -1,4 +1,4 @@
import { removeEmpty } from './utils';
import { removeEmpty, coerceESVersion } from './utils';
describe('removeEmpty', () => {
it('Should remove all empty', () => {
@ -33,4 +33,25 @@ describe('removeEmpty', () => {
expect(removeEmpty(original)).toStrictEqual(expectedResult);
});
it('should correctly coerce the version info', () => {
// valid string
expect(coerceESVersion('8.1.3')).toBe('8.1.3');
// invalid string
expect(coerceESVersion('haha')).toBe('5.0.0');
// known number
expect(coerceESVersion(2)).toBe('2.0.0');
expect(coerceESVersion(5)).toBe('5.0.0');
expect(coerceESVersion(56)).toBe('5.6.0');
expect(coerceESVersion(60)).toBe('6.0.0');
expect(coerceESVersion(70)).toBe('7.0.0');
// unknown number
expect(coerceESVersion(42)).toBe('5.0.0');
// undefined
expect(coerceESVersion(undefined)).toBe('5.0.0');
});
});

View File

@ -95,11 +95,11 @@ export const getScriptValue = (metric: MetricAggregationWithInlineScript) =>
(typeof metric.settings?.script === 'object' ? metric.settings?.script?.inline : metric.settings?.script) || '';
/**
* Coerces the a version string/number to a valid semver string.
* Coerces the version to a valid semver string.
* It takes care of also converting from the legacy format (numeric) to the new one.
* @param version
*/
export const coerceESVersion = (version: string | number): string => {
export const coerceESVersion = (version: string | number | undefined): string => {
if (typeof version === 'string') {
return valid(version) || '5.0.0';
}