Elasticsearch: Use constants for default values and types (#60673)

Ivana Huckova 2022-12-22 15:08:15 +01:00 committed by GitHub
parent 658414a025
commit a1ef3d77f2
4 changed files with 29 additions and 14 deletions

View File

@@ -6,6 +6,12 @@ import (
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
+ const (
+ highlightPreTagsString = "@HIGHLIGHT@"
+ highlightPostTagsString = "@/HIGHLIGHT@"
+ highlightFragmentSize = 2147483647
+ )
// SearchRequestBuilder represents a builder which can build a search request
type SearchRequestBuilder struct {
interval intervalv2.Interval
@@ -98,9 +104,9 @@ func (b *SearchRequestBuilder) AddHighlight() *SearchRequestBuilder {
"fields": map[string]interface{}{
"*": map[string]interface{}{},
},
"pre_tags": []string{"@HIGHLIGHT@"},
"post_tags": []string{"@/HIGHLIGHT@"},
"fragment_size": 2147483647,
"pre_tags": []string{highlightPreTagsString},
"post_tags": []string{highlightPostTagsString},
"fragment_size": highlightFragmentSize,
}
return b
}
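For reference, a minimal sketch of what AddHighlight now assembles once the constants are substituted; the helper name below is illustrative and the builder plumbing is omitted.

// buildHighlight shows the resolved highlight clause; 2147483647 is
// math.MaxInt32, which effectively highlights whole field values.
func buildHighlight() map[string]interface{} {
	return map[string]interface{}{
		"fields": map[string]interface{}{
			"*": map[string]interface{}{},
		},
		"pre_tags":      []string{highlightPreTagsString},  // "@HIGHLIGHT@"
		"post_tags":     []string{highlightPostTagsString}, // "@/HIGHLIGHT@"
		"fragment_size": highlightFragmentSize,             // 2147483647
	}
}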

View File

@@ -26,6 +26,11 @@ const (
filtersType = "filters"
termsType = "terms"
geohashGridType = "geohash_grid"
+ // Document types
+ rawDocumentType = "raw_document"
+ rawDataType = "raw_data"
+ // Logs type
+ logsType = "logs"
)
func parseResponse(responses []*es.SearchResponse, targets []*Query) (*backend.QueryDataResponse, error) {
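The new type constants give the response parser the same names the query side uses. A purely illustrative sketch of branching on them (parseResponse's body is not part of this hunk, and the helper below is hypothetical):

// metricTypeKind is a hypothetical helper, not part of this commit.
func metricTypeKind(t string) string {
	switch t {
	case rawDocumentType, rawDataType:
		return "document"
	case logsType:
		return "logs"
	default:
		return "metric aggregation"
	}
}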

View File

@@ -12,6 +12,10 @@ import (
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
+ const (
+ defaultSize = 500
+ )
type timeSeriesQuery struct {
client es.Client
dataQueries []backend.DataQuery
@@ -76,7 +80,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
if len(q.BucketAggs) == 0 {
// If no aggregations, only document and logs queries are valid
- if len(q.Metrics) == 0 || !(q.Metrics[0].Type == "raw_data" || q.Metrics[0].Type == "raw_document" || q.Metrics[0].Type == "logs") {
+ if len(q.Metrics) == 0 || !(q.Metrics[0].Type == rawDataType || q.Metrics[0].Type == rawDocumentType || q.Metrics[0].Type == logsType) {
result.Responses[q.RefID] = backend.DataResponse{
Error: fmt.Errorf("invalid query, missing metrics and aggregations"),
}
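Read as a predicate, the guard above says: without bucket aggregations a query is only valid when its first metric is a raw_data, raw_document, or logs metric. A hypothetical helper expressing the same rule:

// isValidWithoutBucketAggs condenses the inline check above; processQuery
// itself does not define this helper.
func isValidWithoutBucketAggs(q *Query) bool {
	if len(q.Metrics) == 0 {
		return false
	}
	t := q.Metrics[0].Type
	return t == rawDataType || t == rawDocumentType || t == logsType
}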
@@ -88,11 +92,11 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
b.SortDesc(e.client.GetTimeField(), "boolean")
b.SortDesc("_doc", "")
b.AddDocValueField(e.client.GetTimeField())
- b.Size(metric.Settings.Get("size").MustInt(500))
+ b.Size(metric.Settings.Get("size").MustInt(defaultSize))
if metric.Type == "logs" {
// Add additional defaults for log query
- b.Size(metric.Settings.Get("limit").MustInt(500))
+ b.Size(metric.Settings.Get("limit").MustInt(defaultSize))
b.AddHighlight()
// For log query, we add a date histogram aggregation
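Both MustInt calls above fall back to defaultSize, so 500 only applies when the panel's query JSON sets no explicit size or limit; spelled out for the logs branch (a fragment of the surrounding loop, not new code in the commit):

b.Size(metric.Settings.Get("limit").MustInt(defaultSize)) // 500 unless "limit" is set in the query JSON
b.AddHighlight()                                          // wraps matches in the highlight tags introduced in this commit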
@@ -321,13 +325,13 @@ func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*Metr
} else if size, err := bucketAgg.Settings.Get("size").String(); err == nil {
a.Size, err = strconv.Atoi(size)
if err != nil {
- a.Size = 500
+ a.Size = defaultSize
}
} else {
- a.Size = 500
+ a.Size = defaultSize
}
if a.Size == 0 {
- a.Size = 500
+ a.Size = defaultSize
}
if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil {
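The three fallbacks above collapse into a single rule: a missing, unparsable, or zero size resolves to defaultSize. A hypothetical condensation (not part of the change):

// resolveTermsSize mirrors the branches above; the real code assigns a.Size inline.
func resolveTermsSize(raw string, present bool) int {
	if !present {
		return defaultSize // setting absent
	}
	n, err := strconv.Atoi(raw)
	if err != nil || n == 0 {
		return defaultSize // unparsable or explicit 0
	}
	return n
}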

View File

@@ -70,7 +70,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, firstLevel.Key, "2")
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Field, "@host")
- require.Equal(t, termsAgg.Size, 500)
+ require.Equal(t, termsAgg.Size, defaultSize)
secondLevel := firstLevel.Aggregation.Aggs[0]
require.Equal(t, secondLevel.Key, "3")
require.Equal(t, secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
@@ -421,7 +421,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
- require.Equal(t, sr.Size, 500)
+ require.Equal(t, sr.Size, defaultSize)
})
t.Run("With raw document metric query (from frontend tests)", func(t *testing.T) {
@@ -440,7 +440,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, rangeFilter.Gte, fromMs)
require.Equal(t, rangeFilter.Format, es.DateFormatEpochMS)
- require.Equal(t, sr.Size, 500)
+ require.Equal(t, sr.Size, defaultSize)
require.Equal(t, sr.Sort["@timestamp"], map[string]string{"order": "desc", "unmapped_type": "boolean"})
require.Equal(t, sr.Sort["_doc"], map[string]string{"order": "desc"})
require.Equal(t, sr.CustomProps["script_fields"], map[string]interface{}{})
@@ -462,7 +462,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
require.Equal(t, rangeFilter.Gte, fromMs)
require.Equal(t, rangeFilter.Format, es.DateFormatEpochMS)
- require.Equal(t, sr.Size, 500)
+ require.Equal(t, sr.Size, defaultSize)
require.Equal(t, sr.Sort["@timestamp"], map[string]string{"order": "desc", "unmapped_type": "boolean"})
require.Equal(t, sr.Sort["_doc"], map[string]string{"order": "desc"})
require.Equal(t, sr.CustomProps["script_fields"], map[string]interface{}{})
@@ -1347,7 +1347,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
- require.Equal(t, sr.Size, 500)
+ require.Equal(t, sr.Size, defaultSize)
rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter)
require.Equal(t, rangeFilter.Key, c.timeField)