Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)

Elasticsearch: remove already-disabled code (#52932)

* elastic: removed unused code from go-part
* elastic: removed unused code from js-part

Parent: 9db9ebce02
Commit: 35253a909d
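
The change is mechanical: every Elasticsearch-version handle in the Go data source (the *semver.Version fields, the GetVersion() accessor, and the version-gated branches) is deleted, and the JS side drops its version-dependent test variants and narrows the versionRange gating. As orientation before the hunks, here is a minimal sketch (not part of the commit) of how the Go builder chain reads once the version argument is gone; the names come straight from the signatures changed below:

    package main

    import (
        "time"

        es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
        "github.com/grafana/grafana/pkg/tsdb/intervalv2"
    )

    func main() {
        // Before: es.NewMultiSearchRequestBuilder(version) and
        // NewSearchRequestBuilder(version, interval); after: no version anywhere.
        ms := es.NewMultiSearchRequestBuilder()
        sb := ms.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
        // Always sets docvalue_fields now; the pre-5.x fielddata_fields branch is gone.
        sb.AddDocValueField("@timestamp")
        _, _ = sb.Build()
    }
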
@@ -77,7 +77,7 @@ exports[`no enzyme tests`] = {
   "public/app/plugins/datasource/cloudwatch/components/ConfigEditor.test.tsx:4057721851": [
     [1, 19, 13, "RegExp match", "2409514259"]
   ],
-  "public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx:3481855642": [
+  "public/app/plugins/datasource/elasticsearch/configuration/ConfigEditor.test.tsx:4128034878": [
     [0, 26, 13, "RegExp match", "2409514259"]
   ],
   "public/app/plugins/datasource/influxdb/components/ConfigEditor.test.tsx:57753101": [
@@ -48,7 +48,6 @@ var newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *D
 
 // Client represents a client which can interact with elasticsearch api
 type Client interface {
-    GetVersion() *semver.Version
     GetTimeField() string
     GetMinInterval(queryInterval string) (time.Duration, error)
     ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error)
@@ -74,7 +73,6 @@ var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider
        ctx:                ctx,
        httpClientProvider: httpClientProvider,
        ds:                 ds,
-       version:            ds.ESVersion,
        timeField:          ds.TimeField,
        indices:            indices,
        timeRange:          timeRange,
@@ -85,17 +83,12 @@ type baseClientImpl struct {
    ctx                context.Context
    httpClientProvider httpclient.Provider
    ds                 *DatasourceInfo
-   version            *semver.Version
    timeField          string
    indices            []string
    timeRange          backend.TimeRange
    debugEnabled       bool
 }
 
-func (c *baseClientImpl) GetVersion() *semver.Version {
-    return c.version
-}
-
 func (c *baseClientImpl) GetTimeField() string {
    return c.timeField
 }
@@ -281,20 +274,6 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
            interval: searchReq.Interval,
        }
 
-       if c.version.Major() < 5 {
-           mr.header["search_type"] = "count"
-       } else {
-           allowedVersionRange, _ := semver.NewConstraint(">=5.6.0, <7.0.0")
-
-           if allowedVersionRange.Check(c.version) {
-               maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
-               if maxConcurrentShardRequests == 0 {
-                   maxConcurrentShardRequests = 256
-               }
-               mr.header["max_concurrent_shard_requests"] = maxConcurrentShardRequests
-           }
-       }
-
        multiRequests = append(multiRequests, &mr)
    }
 
@@ -304,17 +283,13 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
 func (c *baseClientImpl) getMultiSearchQueryParameters() string {
    var qs []string
 
-   if c.version.Major() >= 7 {
    maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
    if maxConcurrentShardRequests == 0 {
        maxConcurrentShardRequests = 5
    }
    qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", maxConcurrentShardRequests))
-   }
 
-   allowedFrozenIndicesVersionRange, _ := semver.NewConstraint(">=6.6.0")
-
-   if (allowedFrozenIndicesVersionRange.Check(c.version)) && c.ds.IncludeFrozen && c.ds.XPack {
+   if c.ds.IncludeFrozen && c.ds.XPack {
        qs = append(qs, "ignore_throttled=false")
    }
 
@@ -322,7 +297,7 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
 }
 
 func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder {
-   return NewMultiSearchRequestBuilder(c.GetVersion())
+   return NewMultiSearchRequestBuilder()
 }
 
 func (c *baseClientImpl) EnableDebug() {
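
The net behavior of the query-string logic above is easier to see outside the diff. A self-contained sketch (hypothetical standalone names, not Grafana code) of what getMultiSearchQueryParameters computes once the ">= 7" and ">=6.6.0" semver gates are removed:

    package main

    import (
        "fmt"
        "strings"
    )

    // dsSettings stands in for the DatasourceInfo fields used above.
    type dsSettings struct {
        MaxConcurrentShardRequests int64
        IncludeFrozen              bool
        XPack                      bool
    }

    func multiSearchQueryParams(ds dsSettings) string {
        var qs []string

        // Unconditional now; 5 is the default kept in the diff once the
        // "version >= 7" gate is deleted.
        max := ds.MaxConcurrentShardRequests
        if max == 0 {
            max = 5
        }
        qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", max))

        // Frozen indices depend only on data-source flags, not on the
        // old ">=6.6.0" version constraint.
        if ds.IncludeFrozen && ds.XPack {
            qs = append(qs, "ignore_throttled=false")
        }

        return strings.Join(qs, "&")
    }

    func main() {
        fmt.Println(multiSearchQueryParams(dsSettings{IncludeFrozen: true, XPack: true}))
        // prints: max_concurrent_shard_requests=5&ignore_throttled=false
    }
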
@@ -18,247 +18,8 @@ import (
    "github.com/stretchr/testify/require"
 )
 
-func TestNewClient(t *testing.T) {
-   t.Run("When using legacy version numbers", func(t *testing.T) {
-       t.Run("When version 2 should return v2 client", func(t *testing.T) {
-           version, err := semver.NewVersion("2.0.0")
-           require.NoError(t, err)
-           ds := &DatasourceInfo{
-               ESVersion: version,
-               TimeField: "@timestamp",
-           }
-
-           c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-           require.NoError(t, err)
-           assert.Equal(t, "2.0.0", c.GetVersion().String())
-       })
-
-       t.Run("When version 5 should return v5 client", func(t *testing.T) {
-           version, err := semver.NewVersion("5.0.0")
-           require.NoError(t, err)
-           ds := &DatasourceInfo{
-               ESVersion: version,
-               TimeField: "@timestamp",
-           }
-
-           c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-           require.NoError(t, err)
-           assert.Equal(t, "5.0.0", c.GetVersion().String())
-       })
-
-       t.Run("When version 56 should return v5.6 client", func(t *testing.T) {
-           version, err := semver.NewVersion("5.6.0")
-           require.NoError(t, err)
-           ds := &DatasourceInfo{
-               ESVersion: version,
-               TimeField: "@timestamp",
-           }
-
-           c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-           require.NoError(t, err)
-           assert.Equal(t, "5.6.0", c.GetVersion().String())
-       })
-
-       t.Run("When version 60 should return v6.0 client", func(t *testing.T) {
-           version, err := semver.NewVersion("6.0.0")
-           require.NoError(t, err)
-           ds := &DatasourceInfo{
-               ESVersion: version,
-               TimeField: "@timestamp",
-           }
-
-           c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-           require.NoError(t, err)
-           assert.Equal(t, "6.0.0", c.GetVersion().String())
-       })
-
-       t.Run("When version 70 should return v7.0 client", func(t *testing.T) {
-           version, err := semver.NewVersion("7.0.0")
-           require.NoError(t, err)
-           ds := &DatasourceInfo{
-               ESVersion: version,
-               TimeField: "@timestamp",
-           }
-
-           c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-           require.NoError(t, err)
-           assert.Equal(t, "7.0.0", c.GetVersion().String())
-       })
-   })
-
-   t.Run("When version is a valid semver string should create a client", func(t *testing.T) {
-       version, err := semver.NewVersion("7.2.4")
-       require.NoError(t, err)
-       ds := &DatasourceInfo{
-           ESVersion: version,
-           TimeField: "@timestamp",
-       }
-
-       c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
-       require.NoError(t, err)
-       assert.Equal(t, version.String(), c.GetVersion().String())
-   })
-}
-
 func TestClient_ExecuteMultisearch(t *testing.T) {
-   version, err := semver.NewVersion("2.0.0")
-   require.NoError(t, err)
-   httpClientScenario(t, "Given a fake http client and a v2.x client with response", &DatasourceInfo{
-       Database:  "[metrics-]YYYY.MM.DD",
-       ESVersion: version,
-       TimeField: "@timestamp",
-       Interval:  "Daily",
-   }, func(sc *scenarioContext) {
-       sc.responseBody = `{
-           "responses": [
-               {
-                   "hits": { "hits": [], "max_score": 0, "total": 4656 },
-                   "status": 200
-               }
-           ]
-       }`
-
-       ms, err := createMultisearchForTest(t, sc.client)
-       require.NoError(t, err)
-       res, err := sc.client.ExecuteMultisearch(ms)
-       require.NoError(t, err)
-
-       require.NotNil(t, sc.request)
-       assert.Equal(t, http.MethodPost, sc.request.Method)
-       assert.Equal(t, "/_msearch", sc.request.URL.Path)
-
-       require.NotNil(t, sc.requestBody)
-       headerBytes, err := sc.requestBody.ReadBytes('\n')
-       require.NoError(t, err)
-       bodyBytes := sc.requestBody.Bytes()
-
-       jHeader, err := simplejson.NewJson(headerBytes)
-       require.NoError(t, err)
-
-       jBody, err := simplejson.NewJson(bodyBytes)
-       require.NoError(t, err)
-
-       assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
-       assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
-       assert.Equal(t, "count", jHeader.Get("search_type").MustString())
-       assert.Empty(t, jHeader.Get("max_concurrent_shard_requests"))
-
-       assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
-
-       assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
-
-       assert.Equal(t, 200, res.Status)
-       require.Len(t, res.Responses, 1)
-   })
-
-   version, err = semver.NewVersion("5.0.0")
-   require.NoError(t, err)
-   httpClientScenario(t, "Given a fake http client and a v5.x client with response", &DatasourceInfo{
-       Database:                   "[metrics-]YYYY.MM.DD",
-       ESVersion:                  version,
-       TimeField:                  "@timestamp",
-       Interval:                   "Daily",
-       MaxConcurrentShardRequests: 100,
-   }, func(sc *scenarioContext) {
-       sc.responseBody = `{
-           "responses": [
-               {
-                   "hits": { "hits": [], "max_score": 0, "total": 4656 },
-                   "status": 200
-               }
-           ]
-       }`
-
-       ms, err := createMultisearchForTest(t, sc.client)
-       require.NoError(t, err)
-       res, err := sc.client.ExecuteMultisearch(ms)
-       require.NoError(t, err)
-
-       require.NotNil(t, sc.request)
-       assert.Equal(t, http.MethodPost, sc.request.Method)
-       assert.Equal(t, "/_msearch", sc.request.URL.Path)
-
-       require.NotNil(t, sc.requestBody)
-
-       headerBytes, err := sc.requestBody.ReadBytes('\n')
-       require.NoError(t, err)
-       bodyBytes := sc.requestBody.Bytes()
-
-       jHeader, err := simplejson.NewJson(headerBytes)
-       require.NoError(t, err)
-
-       jBody, err := simplejson.NewJson(bodyBytes)
-       require.NoError(t, err)
-
-       assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
-       assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
-       assert.Equal(t, "query_then_fetch", jHeader.Get("search_type").MustString())
-       assert.Empty(t, jHeader.Get("max_concurrent_shard_requests"))
-
-       assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
-
-       assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
-
-       assert.Equal(t, 200, res.Status)
-       require.Len(t, res.Responses, 1)
-   })
-
-   version, err = semver.NewVersion("5.6.0")
-   require.NoError(t, err)
-   httpClientScenario(t, "Given a fake http client and a v5.6 client with response", &DatasourceInfo{
-       Database:                   "[metrics-]YYYY.MM.DD",
-       ESVersion:                  version,
-       TimeField:                  "@timestamp",
-       Interval:                   "Daily",
-       MaxConcurrentShardRequests: 100,
-       IncludeFrozen:              true,
-       XPack:                      true,
-   }, func(sc *scenarioContext) {
-       sc.responseBody = `{
-           "responses": [
-               {
-                   "hits": { "hits": [], "max_score": 0, "total": 4656 },
-                   "status": 200
-               }
-           ]
-       }`
-
-       ms, err := createMultisearchForTest(t, sc.client)
-       require.NoError(t, err)
-       res, err := sc.client.ExecuteMultisearch(ms)
-       require.NoError(t, err)
-
-       require.NotNil(t, sc.request)
-       assert.Equal(t, http.MethodPost, sc.request.Method)
-       assert.Equal(t, "/_msearch", sc.request.URL.Path)
-       assert.NotContains(t, sc.request.URL.RawQuery, "ignore_throttled=")
-
-       require.NotNil(t, sc.requestBody)
-
-       headerBytes, err := sc.requestBody.ReadBytes('\n')
-       require.NoError(t, err)
-       bodyBytes := sc.requestBody.Bytes()
-
-       jHeader, err := simplejson.NewJson(headerBytes)
-       require.NoError(t, err)
-
-       jBody, err := simplejson.NewJson(bodyBytes)
-       require.NoError(t, err)
-
-       assert.Equal(t, "metrics-2018.05.15", jHeader.Get("index").MustString())
-       assert.True(t, jHeader.Get("ignore_unavailable").MustBool(false))
-       assert.Equal(t, "query_then_fetch", jHeader.Get("search_type").MustString())
-       assert.Equal(t, 100, jHeader.Get("max_concurrent_shard_requests").MustInt())
-
-       assert.Equal(t, "15000*@hostname", jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString())
-
-       assert.Equal(t, "15s", jBody.GetPath("aggs", "2", "date_histogram", "fixed_interval").MustString())
-
-       assert.Equal(t, 200, res.Status)
-       require.Len(t, res.Responses, 1)
-   })
-
-   version, err = semver.NewVersion("7.0.0")
+   version, err := semver.NewVersion("8.0.0")
    require.NoError(t, err)
    httpClientScenario(t, "Given a fake http client and a v7.0 client with response", &DatasourceInfo{
        Database: "[metrics-]YYYY.MM.DD",
@@ -3,13 +3,11 @@ package es
 import (
    "strings"
 
-   "github.com/Masterminds/semver"
    "github.com/grafana/grafana/pkg/tsdb/intervalv2"
 )
 
 // SearchRequestBuilder represents a builder which can build a search request
 type SearchRequestBuilder struct {
-   version  *semver.Version
    interval intervalv2.Interval
    index    string
    size     int
@@ -20,9 +18,8 @@ type SearchRequestBuilder struct {
 }
 
 // NewSearchRequestBuilder create a new search request builder
-func NewSearchRequestBuilder(version *semver.Version, interval intervalv2.Interval) *SearchRequestBuilder {
+func NewSearchRequestBuilder(interval intervalv2.Interval) *SearchRequestBuilder {
    builder := &SearchRequestBuilder{
-       version:     version,
        interval:    interval,
        sort:        make(map[string]interface{}),
        customProps: make(map[string]interface{}),
@@ -87,13 +84,7 @@ func (b *SearchRequestBuilder) SortDesc(field, unmappedType string) *SearchReque
 
 // AddDocValueField adds a doc value field to the search request
 func (b *SearchRequestBuilder) AddDocValueField(field string) *SearchRequestBuilder {
-   // fields field not supported on version >= 5
-   if b.version.Major() < 5 {
-       b.customProps["fields"] = []string{"*", "_source"}
-       b.customProps["fielddata_fields"] = []string{field}
-   } else {
    b.customProps["docvalue_fields"] = []string{field}
-   }
 
    b.customProps["script_fields"] = make(map[string]interface{})
 
@@ -110,27 +101,24 @@ func (b *SearchRequestBuilder) Query() *QueryBuilder {
 
 // Agg initiate and returns a new aggregation builder
 func (b *SearchRequestBuilder) Agg() AggBuilder {
-   aggBuilder := newAggBuilder(b.version)
+   aggBuilder := newAggBuilder()
    b.aggBuilders = append(b.aggBuilders, aggBuilder)
    return aggBuilder
 }
 
 // MultiSearchRequestBuilder represents a builder which can build a multi search request
 type MultiSearchRequestBuilder struct {
-   version         *semver.Version
    requestBuilders []*SearchRequestBuilder
 }
 
 // NewMultiSearchRequestBuilder creates a new multi search request builder
-func NewMultiSearchRequestBuilder(version *semver.Version) *MultiSearchRequestBuilder {
-   return &MultiSearchRequestBuilder{
-       version: version,
-   }
+func NewMultiSearchRequestBuilder() *MultiSearchRequestBuilder {
+   return &MultiSearchRequestBuilder{}
 }
 
 // Search initiates and returns a new search request builder
 func (m *MultiSearchRequestBuilder) Search(interval intervalv2.Interval) *SearchRequestBuilder {
-   b := NewSearchRequestBuilder(m.version, interval)
+   b := NewSearchRequestBuilder(interval)
    m.requestBuilders = append(m.requestBuilders, b)
    return b
 }
@@ -273,13 +261,11 @@ type AggBuilder interface {
 type aggBuilderImpl struct {
    AggBuilder
    aggDefs []*aggDef
-   version *semver.Version
 }
 
-func newAggBuilder(version *semver.Version) *aggBuilderImpl {
+func newAggBuilder() *aggBuilderImpl {
    return &aggBuilderImpl{
        aggDefs: make([]*aggDef, 0),
-       version: version,
    }
 }
 
@@ -317,7 +303,7 @@ func (b *aggBuilderImpl) Histogram(key, field string, fn func(a *HistogramAgg, b
    })
 
    if fn != nil {
-       builder := newAggBuilder(b.version)
+       builder := newAggBuilder()
        aggDef.builders = append(aggDef.builders, builder)
        fn(innerAgg, builder)
    }
@@ -337,7 +323,7 @@ func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogr
    })
 
    if fn != nil {
-       builder := newAggBuilder(b.version)
+       builder := newAggBuilder()
        aggDef.builders = append(aggDef.builders, builder)
        fn(innerAgg, builder)
    }
@@ -360,12 +346,12 @@ func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b
    })
 
    if fn != nil {
-       builder := newAggBuilder(b.version)
+       builder := newAggBuilder()
        aggDef.builders = append(aggDef.builders, builder)
        fn(innerAgg, builder)
    }
 
-   if b.version.Major() >= 6 && len(innerAgg.Order) > 0 {
+   if len(innerAgg.Order) > 0 {
        if orderBy, exists := innerAgg.Order[termsOrderTerm]; exists {
            innerAgg.Order["_key"] = orderBy
            delete(innerAgg.Order, termsOrderTerm)
@@ -386,7 +372,7 @@ func (b *aggBuilderImpl) Filters(key string, fn func(a *FiltersAggregation, b Ag
        Aggregation: innerAgg,
    })
    if fn != nil {
-       builder := newAggBuilder(b.version)
+       builder := newAggBuilder()
        aggDef.builders = append(aggDef.builders, builder)
        fn(innerAgg, builder)
    }
@@ -407,7 +393,7 @@ func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAg
    })
 
    if fn != nil {
-       builder := newAggBuilder(b.version)
+       builder := newAggBuilder()
        aggDef.builders = append(aggDef.builders, builder)
        fn(innerAgg, builder)
    }
@@ -5,7 +5,6 @@ import (
    "testing"
    "time"
 
-   "github.com/Masterminds/semver"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/tsdb/intervalv2"
 
@@ -16,8 +15,7 @@ func TestSearchRequest(t *testing.T) {
    timeField := "@timestamp"
 
    setup := func() *SearchRequestBuilder {
-       version5, _ := semver.NewVersion("5.0.0")
-       return NewSearchRequestBuilder(version5, intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
+       return NewSearchRequestBuilder(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
    }
 
    t.Run("When building search request", func(t *testing.T) {
@@ -398,65 +396,11 @@ func TestSearchRequest(t *testing.T) {
            })
        })
    })
-
-   t.Run("Given new search request builder for es version 2", func(t *testing.T) {
-       version2, _ := semver.NewVersion("2.0.0")
-       b := NewSearchRequestBuilder(version2, intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
-
-       t.Run("When adding doc value field", func(t *testing.T) {
-           b.AddDocValueField(timeField)
-
-           t.Run("should set correct props", func(t *testing.T) {
-               fields, ok := b.customProps["fields"].([]string)
-               require.True(t, ok)
-               require.Equal(t, 2, len(fields))
-               require.Equal(t, "*", fields[0])
-               require.Equal(t, "_source", fields[1])
-
-               scriptFields, ok := b.customProps["script_fields"].(map[string]interface{})
-               require.True(t, ok)
-               require.Equal(t, 0, len(scriptFields))
-
-               fieldDataFields, ok := b.customProps["fielddata_fields"].([]string)
-               require.True(t, ok)
-               require.Equal(t, 1, len(fieldDataFields))
-               require.Equal(t, timeField, fieldDataFields[0])
-           })
-
-           t.Run("When building search request", func(t *testing.T) {
-               sr, err := b.Build()
-               require.Nil(t, err)
-
-               t.Run("When marshal to JSON should generate correct json", func(t *testing.T) {
-                   body, err := json.Marshal(sr)
-                   require.Nil(t, err)
-                   json, err := simplejson.NewJson(body)
-                   require.Nil(t, err)
-
-                   scriptFields, err := json.Get("script_fields").Map()
-                   require.Nil(t, err)
-                   require.Equal(t, 0, len(scriptFields))
-
-                   fields, err := json.Get("fields").StringArray()
-                   require.Nil(t, err)
-                   require.Equal(t, 2, len(fields))
-                   require.Equal(t, "*", fields[0])
-                   require.Equal(t, "_source", fields[1])
-
-                   fieldDataFields, err := json.Get("fielddata_fields").StringArray()
-                   require.Nil(t, err)
-                   require.Equal(t, 1, len(fieldDataFields))
-                   require.Equal(t, timeField, fieldDataFields[0])
-               })
-           })
-       })
-   })
 }
 
 func TestMultiSearchRequest(t *testing.T) {
    t.Run("When adding one search request", func(t *testing.T) {
-       version2, _ := semver.NewVersion("2.0.0")
-       b := NewMultiSearchRequestBuilder(version2)
+       b := NewMultiSearchRequestBuilder()
        b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
 
        t.Run("When building search request should contain one search request", func(t *testing.T) {
@@ -467,8 +411,7 @@ func TestMultiSearchRequest(t *testing.T) {
    })
 
    t.Run("When adding two search requests", func(t *testing.T) {
-       version2, _ := semver.NewVersion("2.0.0")
-       b := NewMultiSearchRequestBuilder(version2)
+       b := NewMultiSearchRequestBuilder()
        b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
        b.Search(intervalv2.Interval{Value: 15 * time.Second, Text: "15s"})
 
@@ -6,7 +6,6 @@ import (
    "strconv"
    "time"
 
-   "github.com/Masterminds/semver"
    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana/pkg/components/simplejson"
    es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
@@ -144,7 +143,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
                }
 
                aggBuilder.Pipeline(m.ID, m.Type, bucketPaths, func(a *es.PipelineAggregation) {
-                   a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
+                   a.Settings = m.generateSettingsForDSL()
                })
            } else {
                continue
@@ -165,7 +164,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
                    }
 
                    aggBuilder.Pipeline(m.ID, m.Type, bucketPath, func(a *es.PipelineAggregation) {
-                       a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
+                       a.Settings = m.generateSettingsForDSL()
                    })
                }
            } else {
@@ -174,7 +173,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
            }
        } else {
            aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
-               a.Settings = m.generateSettingsForDSL(e.client.GetVersion())
+               a.Settings = m.generateSettingsForDSL()
            })
        }
    }
@@ -199,7 +198,7 @@ func setIntPath(settings *simplejson.Json, path ...string) {
 }
 
 // Casts values to float when required by Elastic's query DSL
-func (metricAggregation MetricAgg) generateSettingsForDSL(version *semver.Version) map[string]interface{} {
+func (metricAggregation MetricAgg) generateSettingsForDSL() map[string]interface{} {
    switch metricAggregation.Type {
    case "moving_avg":
        setFloatPath(metricAggregation.Settings, "window")
@@ -219,14 +218,8 @@ func (metricAggregation MetricAgg) generateSettingsForDSL(version *semver.Versio
            scriptValue, err = metricAggregation.Settings.GetPath("script", "inline").String()
        }
 
-       constraint, _ := semver.NewConstraint(">=5.6.0")
-
        if err == nil {
-           if constraint.Check(version) {
            metricAggregation.Settings.SetPath([]string{"script"}, scriptValue)
-           } else {
-               metricAggregation.Settings.SetPath([]string{"script"}, map[string]interface{}{"inline": scriptValue})
-           }
        }
    }
 
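
One detail worth calling out from the generateSettingsForDSL hunk above: only the >=5.6 script format survives, so both accepted input shapes now normalize to a plain string. A small sketch (hypothetical standalone helper, plain maps instead of simplejson) of that surviving behavior:

    package main

    import "fmt"

    // normalizeScript mirrors the kept branch above: whether the metric
    // settings carry "script": "x" or "script": {"inline": "x"}, the value
    // stored back is the plain string form.
    func normalizeScript(settings map[string]interface{}) {
        switch v := settings["script"].(type) {
        case string:
            settings["script"] = v
        case map[string]interface{}:
            if inline, ok := v["inline"].(string); ok {
                settings["script"] = inline
            }
        }
    }

    func main() {
        s := map[string]interface{}{"script": map[string]interface{}{"inline": "my_script"}}
        normalizeScript(s)
        fmt.Println(s["script"]) // my_script
    }
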
@ -5,7 +5,6 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/Masterminds/semver"
|
|
||||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||||
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||||
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
|
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
|
||||||
@ -20,8 +19,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
toMs := to.UnixNano() / int64(time.Millisecond)
|
toMs := to.UnixNano() / int64(time.Millisecond)
|
||||||
|
|
||||||
t.Run("Test execute time series query", func(t *testing.T) {
|
t.Run("Test execute time series query", func(t *testing.T) {
|
||||||
t.Run("With defaults on es 2", func(t *testing.T) {
|
t.Run("With defaults", func(t *testing.T) {
|
||||||
c := newFakeClient("2.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
|
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
|
||||||
@ -41,23 +40,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
require.Equal(t, dateHistogramAgg.ExtendedBounds.Max, toMs)
|
require.Equal(t, dateHistogramAgg.ExtendedBounds.Max, toMs)
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With defaults on es 5", func(t *testing.T) {
|
|
||||||
c := newFakeClient("5.0.0")
|
|
||||||
_, err := executeTsdbQuery(c, `{
|
|
||||||
"timeField": "@timestamp",
|
|
||||||
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
|
|
||||||
"metrics": [{"type": "count", "id": "0" }]
|
|
||||||
}`, from, to, 15*time.Second)
|
|
||||||
require.NoError(t, err)
|
|
||||||
sr := c.multisearchRequests[0].Requests[0]
|
|
||||||
require.Equal(t, sr.Query.Bool.Filters[0].(*es.RangeFilter).Key, c.timeField)
|
|
||||||
require.Equal(t, sr.Aggs[0].Key, "2")
|
|
||||||
require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Min, fromMs)
|
|
||||||
require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Max, toMs)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("With multiple bucket aggs", func(t *testing.T) {
|
t.Run("With multiple bucket aggs", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -79,7 +63,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With select field", func(t *testing.T) {
|
t.Run("With select field", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -99,7 +83,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With term agg and order by metric agg", func(t *testing.T) {
|
t.Run("With term agg and order by metric agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -129,7 +113,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With term agg and order by count metric agg", func(t *testing.T) {
|
t.Run("With term agg and order by count metric agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -153,7 +137,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
|
t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -178,7 +162,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
|
t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -203,34 +187,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With term agg and order by term", func(t *testing.T) {
|
t.Run("With term agg and order by term", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
|
||||||
"timeField": "@timestamp",
|
|
||||||
"bucketAggs": [
|
|
||||||
{
|
|
||||||
"type": "terms",
|
|
||||||
"field": "@host",
|
|
||||||
"id": "2",
|
|
||||||
"settings": { "size": "5", "order": "asc", "orderBy": "_term" }
|
|
||||||
},
|
|
||||||
{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
|
|
||||||
],
|
|
||||||
"metrics": [
|
|
||||||
{"type": "count", "id": "1" },
|
|
||||||
{"type": "avg", "field": "@value", "id": "5" }
|
|
||||||
]
|
|
||||||
}`, from, to, 15*time.Second)
|
|
||||||
require.NoError(t, err)
|
|
||||||
sr := c.multisearchRequests[0].Requests[0]
|
|
||||||
|
|
||||||
firstLevel := sr.Aggs[0]
|
|
||||||
require.Equal(t, firstLevel.Key, "2")
|
|
||||||
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
|
|
||||||
require.Equal(t, termsAgg.Order["_term"], "asc")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("With term agg and order by term with es6.x", func(t *testing.T) {
|
|
||||||
c := newFakeClient("6.0.0")
|
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -257,7 +214,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With metric percentiles", func(t *testing.T) {
|
t.Run("With metric percentiles", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -289,39 +246,8 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
require.Equal(t, percents[3], "4")
|
require.Equal(t, percents[3], "4")
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With filters aggs on es 2", func(t *testing.T) {
|
t.Run("With filters aggs", func(t *testing.T) {
|
||||||
c := newFakeClient("2.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
|
||||||
"timeField": "@timestamp",
|
|
||||||
"bucketAggs": [
|
|
||||||
{
|
|
||||||
"id": "2",
|
|
||||||
"type": "filters",
|
|
||||||
"settings": {
|
|
||||||
"filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{ "type": "date_histogram", "field": "@timestamp", "id": "4" }
|
|
||||||
],
|
|
||||||
"metrics": [{"type": "count", "id": "1" }]
|
|
||||||
}`, from, to, 15*time.Second)
|
|
||||||
require.NoError(t, err)
|
|
||||||
sr := c.multisearchRequests[0].Requests[0]
|
|
||||||
|
|
||||||
filtersAgg := sr.Aggs[0]
|
|
||||||
require.Equal(t, filtersAgg.Key, "2")
|
|
||||||
require.Equal(t, filtersAgg.Aggregation.Type, "filters")
|
|
||||||
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
|
|
||||||
require.Equal(t, fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, "@metric:cpu")
|
|
||||||
require.Equal(t, fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, "@metric:logins.count")
|
|
||||||
|
|
||||||
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
|
|
||||||
require.Equal(t, dateHistogramAgg.Key, "4")
|
|
||||||
require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("With filters aggs on es 5", func(t *testing.T) {
|
|
||||||
c := newFakeClient("5.0.0")
|
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -352,7 +278,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With raw document metric", func(t *testing.T) {
|
t.Run("With raw document metric", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [],
|
"bucketAggs": [],
|
||||||
@ -365,7 +291,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With raw document metric size set", func(t *testing.T) {
|
t.Run("With raw document metric size set", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [],
|
"bucketAggs": [],
|
||||||
@ -378,7 +304,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With date histogram agg", func(t *testing.T) {
|
t.Run("With date histogram agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -403,7 +329,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
require.Equal(t, hAgg.MinDocCount, 2)
|
require.Equal(t, hAgg.MinDocCount, 2)
|
||||||
|
|
||||||
t.Run("Should not include time_zone when timeZone is utc", func(t *testing.T) {
|
t.Run("Should not include time_zone when timeZone is utc", func(t *testing.T) {
|
||||||
c := newFakeClient("7.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -426,7 +352,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Should include time_zone when timeZone is not utc", func(t *testing.T) {
|
t.Run("Should include time_zone when timeZone is not utc", func(t *testing.T) {
|
||||||
c := newFakeClient("7.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -450,7 +376,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With histogram agg", func(t *testing.T) {
|
t.Run("With histogram agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -477,7 +403,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With geo hash grid agg", func(t *testing.T) {
|
t.Run("With geo hash grid agg", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -502,7 +428,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With moving average", func(t *testing.T) {
|
t.Run("With moving average", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -540,7 +466,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With moving average doc count", func(t *testing.T) {
|
t.Run("With moving average doc count", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -572,7 +498,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With broken moving average", func(t *testing.T) {
|
t.Run("With broken moving average", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -608,7 +534,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With cumulative sum", func(t *testing.T) {
|
t.Run("With cumulative sum", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -646,7 +572,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With cumulative sum doc count", func(t *testing.T) {
|
t.Run("With cumulative sum doc count", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -678,7 +604,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With broken cumulative sum", func(t *testing.T) {
|
t.Run("With broken cumulative sum", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -714,7 +640,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With derivative", func(t *testing.T) {
|
t.Run("With derivative", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -743,7 +669,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With derivative doc count", func(t *testing.T) {
|
t.Run("With derivative doc count", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -772,7 +698,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With serial_diff", func(t *testing.T) {
|
t.Run("With serial_diff", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -801,7 +727,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With serial_diff doc count", func(t *testing.T) {
|
t.Run("With serial_diff doc count", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -830,7 +756,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With bucket_script", func(t *testing.T) {
|
t.Run("With bucket_script", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -867,7 +793,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("With bucket_script doc count", func(t *testing.T) {
|
t.Run("With bucket_script doc count", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -907,7 +833,7 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
|
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
|
||||||
|
|
||||||
t.Run("Correctly transforms moving_average settings", func(t *testing.T) {
|
t.Run("Correctly transforms moving_average settings", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -951,7 +877,7 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Correctly transforms serial_diff settings", func(t *testing.T) {
|
t.Run("Correctly transforms serial_diff settings", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -980,7 +906,7 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
|
|
||||||
t.Run("Date Histogram Settings", func(t *testing.T) {
|
t.Run("Date Histogram Settings", func(t *testing.T) {
|
||||||
t.Run("Correctly transforms date_histogram settings", func(t *testing.T) {
|
t.Run("Correctly transforms date_histogram settings", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -1015,7 +941,7 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Correctly uses already int min_doc_count", func(t *testing.T) {
|
t.Run("Correctly uses already int min_doc_count", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -1051,7 +977,7 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
|
|
||||||
t.Run("interval parameter", func(t *testing.T) {
|
t.Run("interval parameter", func(t *testing.T) {
|
||||||
t.Run("Uses fixed_interval", func(t *testing.T) {
|
t.Run("Uses fixed_interval", func(t *testing.T) {
|
||||||
c := newFakeClient("7.10.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -1079,45 +1005,8 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
t.Run("Inline Script", func(t *testing.T) {
|
t.Run("Inline Script", func(t *testing.T) {
|
||||||
t.Run("Correctly handles scripts for ES < 5.6", func(t *testing.T) {
|
t.Run("Correctly handles scripts", func(t *testing.T) {
|
||||||
c := newFakeClient("5.0.0")
|
c := newFakeClient()
|
||||||
_, err := executeTsdbQuery(c, `{
|
|
||||||
"timeField": "@timestamp",
|
|
||||||
"bucketAggs": [
|
|
||||||
{ "type": "date_histogram", "field": "@timestamp", "id": "2" }
|
|
||||||
],
|
|
||||||
"metrics": [
|
|
||||||
{
|
|
||||||
"id": "1",
|
|
||||||
"type": "avg",
|
|
||||||
"settings": {
|
|
||||||
"script": "my_script"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "3",
|
|
||||||
"type": "avg",
|
|
||||||
"settings": {
|
|
||||||
"script": {
|
|
||||||
"inline": "my_script"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`, from, to, 15*time.Second)
|
|
||||||
|
|
||||||
assert.Nil(t, err)
|
|
||||||
sr := c.multisearchRequests[0].Requests[0]
|
|
||||||
|
|
||||||
newFormatAggSettings := sr.Aggs[0].Aggregation.Aggs[0].Aggregation.Aggregation.(*es.MetricAggregation).Settings
|
|
||||||
oldFormatAggSettings := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggregation.(*es.MetricAggregation).Settings
|
|
||||||
|
|
||||||
assert.Equal(t, map[string]interface{}{"inline": "my_script"}, newFormatAggSettings["script"])
|
|
||||||
assert.Equal(t, map[string]interface{}{"inline": "my_script"}, oldFormatAggSettings["script"])
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Correctly handles scripts for ES >= 5.6", func(t *testing.T) {
|
|
||||||
c := newFakeClient("5.6.0")
|
|
||||||
_, err := executeTsdbQuery(c, `{
|
_, err := executeTsdbQuery(c, `{
|
||||||
"timeField": "@timestamp",
|
"timeField": "@timestamp",
|
||||||
"bucketAggs": [
|
"bucketAggs": [
|
||||||
@ -1156,7 +1045,6 @@ func TestSettingsCasting(t *testing.T) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type fakeClient struct {
|
type fakeClient struct {
|
||||||
version *semver.Version
|
|
||||||
timeField string
|
timeField string
|
||||||
multiSearchResponse *es.MultiSearchResponse
|
multiSearchResponse *es.MultiSearchResponse
|
||||||
multiSearchError error
|
multiSearchError error
|
||||||
@ -1164,10 +1052,8 @@ type fakeClient struct {
|
|||||||
multisearchRequests []*es.MultiSearchRequest
|
multisearchRequests []*es.MultiSearchRequest
|
||||||
}
|
}
|
||||||
|
|
||||||
func newFakeClient(versionString string) *fakeClient {
|
func newFakeClient() *fakeClient {
|
||||||
version, _ := semver.NewVersion(versionString)
|
|
||||||
return &fakeClient{
|
return &fakeClient{
|
||||||
version: version,
|
|
||||||
timeField: "@timestamp",
|
timeField: "@timestamp",
|
||||||
multisearchRequests: make([]*es.MultiSearchRequest, 0),
|
multisearchRequests: make([]*es.MultiSearchRequest, 0),
|
||||||
multiSearchResponse: &es.MultiSearchResponse{},
|
multiSearchResponse: &es.MultiSearchResponse{},
|
||||||
@ -1176,10 +1062,6 @@ func newFakeClient(versionString string) *fakeClient {
|
|||||||
|
|
||||||
func (c *fakeClient) EnableDebug() {}
|
func (c *fakeClient) EnableDebug() {}
|
||||||
|
|
||||||
func (c *fakeClient) GetVersion() *semver.Version {
|
|
||||||
return c.version
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *fakeClient) GetTimeField() string {
|
func (c *fakeClient) GetTimeField() string {
|
||||||
return c.timeField
|
return c.timeField
|
||||||
}
|
}
|
||||||
@ -1194,7 +1076,7 @@ func (c *fakeClient) ExecuteMultisearch(r *es.MultiSearchRequest) (*es.MultiSear
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder {
|
func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder {
|
||||||
c.builder = es.NewMultiSearchRequestBuilder(c.version)
|
c.builder = es.NewMultiSearchRequestBuilder()
|
||||||
return c.builder
|
return c.builder
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -87,7 +87,7 @@ describe('Metric Editor', () => {
   });

   describe('Top Metrics Aggregation', () => {
-    const setupTopMetricsScreen = (esVersion: string, xpack: boolean) => {
+    const setupTopMetricsScreen = (xpack: boolean) => {
       const query: ElasticsearchQuery = {
         refId: 'A',
         query: '',
@ -97,6 +97,8 @@ describe('Metric Editor', () => {

       const getFields: ElasticDatasource['getFields'] = jest.fn(() => from([[]]));

+      const esVersion = '7.7.0';
+
       const wrapper = ({ children }: { children?: ReactNode }) => (
         <ElasticsearchProvider
           datasource={{ getFields, esVersion, xpack } as ElasticDatasource}
@ -116,18 +118,13 @@ describe('Metric Editor', () => {
       });
     };

-    it('Should include top metrics aggregation when esVersion is 77 and X-Pack is enabled', () => {
-      setupTopMetricsScreen('7.7.0', true);
+    it('Should include top metrics aggregation when X-Pack is enabled', () => {
+      setupTopMetricsScreen(true);
       expect(screen.getByText('Top Metrics')).toBeInTheDocument();
     });

-    it('Should NOT include top metrics aggregation where esVersion is 77 and X-Pack is disabled', () => {
-      setupTopMetricsScreen('7.7.0', false);
+    it('Should NOT include top metrics aggregation where X-Pack is disabled', () => {
+      setupTopMetricsScreen(false);
-      expect(screen.queryByText('Top Metrics')).toBe(null);
-    });
-
-    it('Should NOT include top metrics aggregation when esVersion is 70 and X-Pack is enabled', () => {
-      setupTopMetricsScreen('7.0.0', true);
       expect(screen.queryByText('Top Metrics')).toBe(null);
     });
   });
@ -113,7 +113,7 @@ export const metricAggregationConfig: MetricsConfiguration = {
     label: 'Moving Average',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0 <8.0.0',
+    versionRange: '<8.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@ -136,14 +136,12 @@ export const metricAggregationConfig: MetricsConfiguration = {
     supportsMissing: false,
     hasMeta: false,
     hasSettings: true,
-    versionRange: '>=7.0.0',
     defaults: {},
   },
   derivative: {
     label: 'Derivative',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@ -155,7 +153,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     label: 'Serial Difference',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@ -171,7 +168,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     label: 'Cumulative Sum',
     requiresField: true,
     isPipelineAgg: true,
-    versionRange: '>=2.0.0',
     supportsMissing: false,
     supportsMultipleBucketPaths: false,
     hasSettings: true,
@ -185,7 +181,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     isPipelineAgg: true,
     supportsMissing: false,
     supportsMultipleBucketPaths: true,
-    versionRange: '>=2.0.0',
     hasSettings: true,
     supportsInlineScript: false,
     hasMeta: false,
@ -250,7 +245,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
     supportsMultipleBucketPaths: false,
     hasSettings: true,
     supportsInlineScript: false,
-    versionRange: '>=7.7.0',
     hasMeta: false,
     defaults: {
       settings: {
@ -261,7 +255,6 @@ export const metricAggregationConfig: MetricsConfiguration = {
   rate: {
     label: 'Rate',
     xpack: true,
-    versionRange: '>=7.10.0',
     requiresField: true,
     isPipelineAgg: false,
     supportsMissing: false,
@ -35,7 +35,7 @@ describe('ConfigEditor', () => {
         onOptionsChange={(options) => {
           expect(options.jsonData.esVersion).toBe('5.0.0');
           expect(options.jsonData.timeField).toBe('@timestamp');
-          expect(options.jsonData.maxConcurrentShardRequests).toBe(256);
+          expect(options.jsonData.maxConcurrentShardRequests).toBe(5);
         }}
         options={options}
       />
@ -7,15 +7,10 @@ import { createDefaultConfigOptions } from './mocks';

 describe('ElasticDetails', () => {
   describe('Max concurrent Shard Requests', () => {
-    it('should render "Max concurrent Shard Requests" if version >= 5.6.0', () => {
-      render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '5.6.0' })} />);
+    it('should render "Max concurrent Shard Requests" ', () => {
+      render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '8.2.0' })} />);
       expect(screen.getByLabelText('Max concurrent Shard Requests')).toBeInTheDocument();
     });
-
-    it('should not render "Max concurrent Shard Requests" if version < 5.6.0', () => {
-      render(<ElasticDetails onChange={() => {}} value={createDefaultConfigOptions({ esVersion: '5.0.0' })} />);
-      expect(screen.queryByLabelText('Max concurrent Shard Requests')).not.toBeInTheDocument();
-    });
   });

   it('should change database on interval change when not set explicitly', async () => {
@ -51,9 +46,8 @@ describe('ElasticDetails', () => {
   });

   describe('version change', () => {
-    const testCases = [{ version: '7.10+', maxConcurrentShardRequests: 6, expectedMaxConcurrentShardRequests: 6 }];
+    const tc = { version: '7.10+', maxConcurrentShardRequests: 6, expectedMaxConcurrentShardRequests: 6 };

-    testCases.forEach((tc) => {
      const onChangeMock = jest.fn();
      it(`sets maxConcurrentShardRequests=${tc.expectedMaxConcurrentShardRequests} if version=${tc.version},`, async () => {
        render(
@ -78,4 +72,3 @@ describe('ElasticDetails', () => {
       });
     });
   });
-  });
 });
@ -1,5 +1,5 @@
 import React from 'react';
-import { gte, lt, valid } from 'semver';
+import { valid } from 'semver';

 import { DataSourceSettings, SelectableValue } from '@grafana/data';
 import { FieldSet, InlineField, Input, Select, InlineSwitch } from '@grafana/ui';
@ -82,8 +82,7 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
           options={[customOption, ...esVersions].filter(isTruthy)}
           onChange={(option) => {
             const maxConcurrentShardRequests = getMaxConcurrenShardRequestOrDefault(
-              value.jsonData.maxConcurrentShardRequests,
-              option.value!
+              value.jsonData.maxConcurrentShardRequests
             );
             onChange({
               ...value,
@ -99,7 +98,6 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
         />
       </InlineField>

-      {gte(value.jsonData.esVersion, '5.6.0') && (
       <InlineField label="Max concurrent Shard Requests" labelWidth={26}>
         <Input
           id="es_config_shardRequests"
@ -108,7 +106,6 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
           width={24}
         />
       </InlineField>
-      )}

       <InlineField
         label="Min time interval"
@ -139,7 +136,7 @@ export const ElasticDetails = ({ value, onChange }: Props) => {
         />
       </InlineField>

-      {gte(value.jsonData.esVersion, '6.6.0') && value.jsonData.xpack && (
+      {value.jsonData.xpack && (
         <InlineField label="Include Frozen Indices" labelWidth={26}>
           <InlineSwitch
             id="es_config_frozenIndices"
@ -224,18 +221,14 @@ const intervalHandler =
   }
 };

-function getMaxConcurrenShardRequestOrDefault(maxConcurrentShardRequests: number | undefined, version: string): number {
-  if (maxConcurrentShardRequests === 5 && lt(version, '7.0.0')) {
-    return 256;
-  }
-
-  if (maxConcurrentShardRequests === 256 && gte(version, '7.0.0')) {
+function getMaxConcurrenShardRequestOrDefault(maxConcurrentShardRequests: number | undefined): number {
+  if (maxConcurrentShardRequests === 256) {
     return 5;
   }

-  return maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(version);
+  return maxConcurrentShardRequests || defaultMaxConcurrentShardRequests();
 }

-export function defaultMaxConcurrentShardRequests(version: string) {
-  return gte(version, '7.0.0') ? 5 : 256;
+export function defaultMaxConcurrentShardRequests() {
+  return 5;
 }
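With the version argument gone, the shard-request default becomes a constant. A quick usage sketch of how the simplified helpers from the hunk above now resolve the setting (the call sites and input values are illustrative, not part of the commit):

// Illustrative calls; 256 was the old pre-7.0 default and is migrated to 5.
getMaxConcurrenShardRequestOrDefault(undefined); // -> 5, via defaultMaxConcurrentShardRequests()
getMaxConcurrenShardRequestOrDefault(256);       // -> 5, stored legacy default is swapped
getMaxConcurrenShardRequestOrDefault(64);        // -> 64, explicit values are kept as-is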
@ -18,8 +18,7 @@ export const coerceOptions = (
     ...options.jsonData,
     timeField: options.jsonData.timeField || '@timestamp',
     esVersion,
-    maxConcurrentShardRequests:
-      options.jsonData.maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(esVersion),
+    maxConcurrentShardRequests: options.jsonData.maxConcurrentShardRequests || defaultMaxConcurrentShardRequests(),
     logMessageField: options.jsonData.logMessageField || '',
     logLevelField: options.jsonData.logLevelField || '',
     includeFrozen: options.jsonData.includeFrozen ?? false,
@ -1,7 +1,6 @@
 import { cloneDeep, find, first as _first, isNumber, isObject, isString, map as _map } from 'lodash';
 import { generate, lastValueFrom, Observable, of, throwError } from 'rxjs';
 import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty } from 'rxjs/operators';
-import { gte, lt, satisfies } from 'semver';

 import {
   DataFrame,
@ -113,7 +112,6 @@ export class ElasticDatasource
     this.maxConcurrentShardRequests = settingsData.maxConcurrentShardRequests;
     this.queryBuilder = new ElasticQueryBuilder({
       timeField: this.timeField,
-      esVersion: this.esVersion,
     });
     this.logMessageField = settingsData.logMessageField || '';
     this.logLevelField = settingsData.logLevelField || '';
@ -297,11 +295,6 @@ export class ElasticDatasource
       size: 10000,
     };

-    // fields field not supported on ES 5.x
-    if (lt(this.esVersion, '5.0.0')) {
-      data['fields'] = [timeField, '_source'];
-    }
-
     const header: any = {
       search_type: 'query_then_fetch',
       ignore_unavailable: true,
@ -461,10 +454,6 @@ export class ElasticDatasource
       index: this.indexPattern.getIndexList(timeFrom, timeTo),
     };

-    if (satisfies(this.esVersion, '>=5.6.0 <7.0.0')) {
-      queryHeader['max_concurrent_shard_requests'] = this.maxConcurrentShardRequests;
-    }
-
     return JSON.stringify(queryHeader);
   }

@ -519,13 +508,8 @@ export class ElasticDatasource
     return text;
   }

-  /**
-   * This method checks to ensure the user is running a 5.0+ cluster. This is
-   * necessary bacause the query being used for the getLogRowContext relies on the
-   * search_after feature.
-   */
   showContextToggle(): boolean {
-    return gte(this.esVersion, '5.0.0');
+    return true;
   }

   getLogRowContext = async (row: LogRowModel, options?: RowContextOptions): Promise<{ data: DataFrame[] }> => {
@ -687,7 +671,7 @@ export class ElasticDatasource

     const esQuery = JSON.stringify(queryObj);

-    const searchType = queryObj.size === 0 && lt(this.esVersion, '5.0.0') ? 'count' : 'query_then_fetch';
+    const searchType = 'query_then_fetch';
     const header = this.getQueryHeader(searchType, options.range.from, options.range.to);
     payload += header + '\n';

@ -804,17 +788,10 @@ export class ElasticDatasource
     if (index && index.mappings) {
       const mappings = index.mappings;

-      if (lt(this.esVersion, '7.0.0')) {
-        for (const typeName in mappings) {
-          const properties = mappings[typeName].properties;
-          getFieldsRecursively(properties);
-        }
-      } else {
       const properties = mappings.properties;
       getFieldsRecursively(properties);
     }
   }
-      }

     // transform to array
     return _map(fields, (value) => {
@ -825,7 +802,7 @@ export class ElasticDatasource
   }

   getTerms(queryDef: TermsQuery, range = getDefaultTimeRange()): Observable<MetricFindValue[]> {
-    const searchType = gte(this.esVersion, '5.0.0') ? 'query_then_fetch' : 'count';
+    const searchType = 'query_then_fetch';
     const header = this.getQueryHeader(searchType, range.from, range.to);
     let esQuery = JSON.stringify(this.queryBuilder.getTermsQuery(queryDef));

@ -855,11 +832,11 @@ export class ElasticDatasource
   getMultiSearchUrl() {
     const searchParams = new URLSearchParams();

-    if (gte(this.esVersion, '7.0.0') && this.maxConcurrentShardRequests) {
+    if (this.maxConcurrentShardRequests) {
       searchParams.append('max_concurrent_shard_requests', `${this.maxConcurrentShardRequests}`);
     }

-    if (gte(this.esVersion, '6.6.0') && this.xpack && this.includeFrozen) {
+    if (this.xpack && this.includeFrozen) {
       searchParams.append('ignore_throttled', 'false');
     }

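After this hunk, getMultiSearchUrl builds its query string unconditionally, with no version checks. A minimal standalone sketch of the resulting logic, assuming a configured shard-request value of 5 and frozen indices enabled (the values are illustrative):

// Standalone sketch of the now version-independent query-string logic.
const searchParams = new URLSearchParams();
const maxConcurrentShardRequests = 5; // assumed datasource setting
const xpack = true;                   // assumed datasource setting
const includeFrozen = true;           // assumed datasource setting
if (maxConcurrentShardRequests) {
  searchParams.append('max_concurrent_shard_requests', `${maxConcurrentShardRequests}`);
}
if (xpack && includeFrozen) {
  searchParams.append('ignore_throttled', 'false');
}
searchParams.toString(); // 'max_concurrent_shard_requests=5&ignore_throttled=false'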
@ -1,5 +1,3 @@
-import { gte, lt } from 'semver';
-
 import { InternalTimeZones } from '@grafana/data';

 import {
@ -23,11 +21,9 @@ import { convertOrderByToMetricId, getScriptValue } from './utils';

 export class ElasticQueryBuilder {
   timeField: string;
-  esVersion: string;

-  constructor(options: { timeField: string; esVersion: string }) {
+  constructor(options: { timeField: string }) {
     this.timeField = options.timeField;
-    this.esVersion = options.esVersion;
   }

   getRangeFilter() {
@ -54,7 +50,7 @@ export class ElasticQueryBuilder {

     if (aggDef.settings.orderBy !== void 0) {
       queryNode.terms.order = {};
-      if (aggDef.settings.orderBy === '_term' && gte(this.esVersion, '6.0.0')) {
+      if (aggDef.settings.orderBy === '_term') {
         queryNode.terms.order['_key'] = aggDef.settings.order;
       } else {
         queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;
@ -153,11 +149,6 @@ export class ElasticQueryBuilder {
       },
     ];

-    // fields field not supported on ES 5.x
-    if (lt(this.esVersion, '5.0.0')) {
-      query.fields = ['*', '_source'];
-    }
-
     query.script_fields = {};
     return query;
   }
@ -415,15 +406,9 @@ export class ElasticQueryBuilder {
   }

   private buildScript(script: string) {
-    if (gte(this.esVersion, '5.6.0')) {
     return script;
   }
-
-    return {
-      inline: script,
-    };
-  }

   private toNumber(stringValue: unknown): unknown | number {
     const parsedValue = parseFloat(`${stringValue}`);
     if (isNaN(parsedValue)) {
@ -480,7 +465,7 @@ export class ElasticQueryBuilder {
     switch (orderBy) {
       case 'key':
       case 'term':
-        const keyname = gte(this.esVersion, '6.0.0') ? '_key' : '_term';
+        const keyname = '_key';
         query.aggs['1'].terms.order[keyname] = order;
         break;
       case 'doc_count':
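Two behavioral simplifications in the builder are easy to miss among the deletions: term ordering now always uses the ES 6+ `_key` name, and scripts are passed through verbatim instead of being wrapped for pre-5.6 clusters. A condensed sketch of both paths (inputs are illustrative):

// Illustrative: what the builder now emits for a term-ordered aggregation.
const order: Record<string, string> = {};
const orderBy: string = '_term'; // assumed aggregation setting
if (orderBy === '_term') {
  order['_key'] = 'asc'; // older builders emitted { _term: 'asc' } for ES < 6
}
// And buildScript is now an identity; it previously returned { inline: script } for ES < 5.6.
const buildScript = (script: string) => script;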
@ -1,21 +1,9 @@
-import { gte, lt } from 'semver';
-
 import { ElasticQueryBuilder } from '../query_builder';
 import { ElasticsearchQuery } from '../types';

 describe('ElasticQueryBuilder', () => {
-  const builder = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '2.0.0' });
-  const builder5x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '5.0.0' });
-  const builder56 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '5.6.0' });
-  const builder6x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '6.0.0' });
-  const builder7x = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '7.0.0' });
-  const builder77 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '7.7.0' });
-  const builder8 = new ElasticQueryBuilder({ timeField: '@timestamp', esVersion: '8.0.0' });
-
-  const allBuilders = [builder, builder5x, builder56, builder6x, builder7x, builder77, builder8];
-
-  allBuilders.forEach((builder) => {
-    describe(`version ${builder.esVersion}`, () => {
+  const builder = new ElasticQueryBuilder({ timeField: '@timestamp' }); // es2
   it('should return query with defaults', () => {
     const query = builder.build({
       refId: 'A',
@ -94,11 +82,7 @@ describe('ElasticQueryBuilder', () => {
       const query = builder.build(target, 100);
       const firstLevel = query.aggs['2'];

-      if (gte(builder.esVersion, '6.0.0')) {
       expect(firstLevel.terms.order._key).toBe('asc');
-      } else {
-        expect(firstLevel.terms.order._term).toBe('asc');
-      }
     });

     it('with term agg and order by metric agg', () => {
@ -662,14 +646,9 @@ describe('ElasticQueryBuilder', () => {
     }

     function checkSort(order: any, expected: string) {
-      if (lt(builder.esVersion, '6.0.0')) {
-        expect(order._term).toBe(expected);
-        expect(order._key).toBeUndefined();
-      } else {
       expect(order._term).toBeUndefined();
       expect(order._key).toBe(expected);
     }
-      }

     it('should set correct default sorting', () => {
       const order = testGetTermsQuery({});
@ -710,9 +689,7 @@ describe('ElasticQueryBuilder', () => {
     it('should not add query_string filter when query is empty', () => {
       const query = builder.getTermsQuery({ orderBy: 'doc_count', order: 'asc' });

-      expect(
-        query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))
-      ).toBeFalsy();
+      expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
     });
   });
 });
@ -730,9 +707,7 @@ describe('ElasticQueryBuilder', () => {
     it('should not add query_string filter when query is empty', () => {
       const query = builder.build({ refId: 'A' });

-      expect(
-        query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))
-      ).toBeFalsy();
+      expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
     });
   });

@ -785,9 +760,7 @@ describe('ElasticQueryBuilder', () => {
     it('should not add query_string filter when query is empty', () => {
       const query = builder.getLogsQuery({ refId: 'A' }, 500, null);

-      expect(
-        query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))
-      ).toBeFalsy();
+      expect(query.query.bool.filter.find((filter: any) => Object.keys(filter).includes('query_string'))).toBeFalsy();
     });
   });

@ -811,12 +784,10 @@ describe('ElasticQueryBuilder', () => {
       expect(query.query.bool.filter[4].bool.must_not.regexp['key6']).toBe('value6');
     });
   });
-  });
-});

   describe('Value casting for settings', () => {
     it('correctly casts values in moving_avg ', () => {
-      const query = builder7x.build({
+      const query = builder.build({
         refId: 'A',
         metrics: [
           { type: 'avg', id: '2' },
@ -852,7 +823,7 @@ describe('ElasticQueryBuilder', () => {
     });

     it('correctly casts values in serial_diff ', () => {
-      const query = builder7x.build({
+      const query = builder.build({
         refId: 'A',
         metrics: [
           { type: 'avg', id: '2' },
@ -954,28 +925,8 @@ describe('ElasticQueryBuilder', () => {
       expect(query.aggs['2'].date_histogram.field).toBe('@time');
     });

-    describe('interval parameter', () => {
     it('should use fixed_interval', () => {
-      const query = builder77.build({
+      const query = builder.build({
-        refId: 'A',
-        metrics: [{ type: 'count', id: '1' }],
-        timeField: '@timestamp',
-        bucketAggs: [
-          {
-            type: 'date_histogram',
-            id: '2',
-            field: '@time',
-            settings: { min_doc_count: '1', interval: '1d' },
-          },
-        ],
-      });
-
-      expect(query.aggs['2'].date_histogram.fixed_interval).toBe('1d');
-    });
-    });
-
-    it('should use fixed_interval if Elasticsearch version >=8.0.0', () => {
-      const query = builder8.build({
       refId: 'A',
       metrics: [{ type: 'count', id: '1' }],
       timeField: '@timestamp',
@ -1,4 +1,4 @@
-import { removeEmpty } from './utils';
+import { removeEmpty, coerceESVersion } from './utils';

 describe('removeEmpty', () => {
   it('Should remove all empty', () => {
@ -33,4 +33,25 @@ describe('removeEmpty', () => {

     expect(removeEmpty(original)).toStrictEqual(expectedResult);
   });
+
+  it('should correctly coerce the version info', () => {
+    // valid string
+    expect(coerceESVersion('8.1.3')).toBe('8.1.3');
+
+    // invalid string
+    expect(coerceESVersion('haha')).toBe('5.0.0');
+
+    // known number
+    expect(coerceESVersion(2)).toBe('2.0.0');
+    expect(coerceESVersion(5)).toBe('5.0.0');
+    expect(coerceESVersion(56)).toBe('5.6.0');
+    expect(coerceESVersion(60)).toBe('6.0.0');
+    expect(coerceESVersion(70)).toBe('7.0.0');
+
+    // unknown number
+    expect(coerceESVersion(42)).toBe('5.0.0');
+
+    // undefined
+    expect(coerceESVersion(undefined)).toBe('5.0.0');
+  });
 });
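These new test cases pin down the whole coercion table. A sketch of logic consistent with them, for reference: the string branch appears in the hunk below, while the numeric mapping is inferred from the expectations above and is not shown in this diff.

import { valid } from 'semver';

// Sketch only: consistent with the test expectations, not the literal source.
const coerceESVersionSketch = (version: string | number | undefined): string => {
  if (typeof version === 'string') {
    return valid(version) || '5.0.0'; // invalid strings fall back to 5.0.0
  }
  switch (version) {
    case 2:  return '2.0.0';
    case 5:  return '5.0.0';
    case 56: return '5.6.0';
    case 60: return '6.0.0';
    case 70: return '7.0.0';
    default: return '5.0.0'; // unknown numbers and undefined fall back
  }
};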
@ -95,11 +95,11 @@ export const getScriptValue = (metric: MetricAggregationWithInlineScript) =>
   (typeof metric.settings?.script === 'object' ? metric.settings?.script?.inline : metric.settings?.script) || '';

 /**
- * Coerces the a version string/number to a valid semver string.
+ * Coerces the version to a valid semver string.
  * It takes care of also converting from the legacy format (numeric) to the new one.
  * @param version
  */
-export const coerceESVersion = (version: string | number): string => {
+export const coerceESVersion = (version: string | number | undefined): string => {
   if (typeof version === 'string') {
     return valid(version) || '5.0.0';
   }