Mirror of https://github.com/grafana/grafana.git
Elasticsearch: Fix calculation of trimEdges (#56148)

* Elasticsearch: Fix calculation of trimEdges

  When a trimEdges value greater than 1 is set, both the first and the last
  samples must be dropped from the response data.

* Elasticsearch: Fix reading trimEdges from the query settings

  The trimEdges property in the panel JSON is currently stored as a string,
  not directly as a number. This made reading the value fail in the Go
  backend, because the simplejson.Int() method doesn't handle that case.
  The decoding failure went unnoticed because of an early return, so the
  trimEdges configuration was silently ignored.

* Refactor castToInt to also return an error

  Add a new test case that sets the `trimEdges` property as a quoted number.
This commit is contained in:
parent 4ecc9f42ac
commit e4f2006cce
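As a quick illustration of the trimming rule described in the first bullet, here is a standalone sketch on a plain Go slice. The trim helper is hypothetical, written only for this note; the commit's actual code walks data-frame fields and calls field.Delete(i), as the hunks below show.

package main

import "fmt"

// trim drops the first and last trimEdges samples from a series, matching
// the rule in the commit message. Hypothetical helper on a plain slice;
// the real code operates on data-frame fields via field.Delete(i).
func trim(samples []float64, trimEdges int) []float64 {
	if len(samples) <= trimEdges*2 {
		return samples // too few samples to trim safely
	}
	return samples[trimEdges : len(samples)-trimEdges]
}

func main() {
	// Three buckets, trimEdges = 1: only the middle sample survives.
	fmt.Println(trim([]float64{1000, 2000, 2000}, 1)) // [2000]
}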
@@ -527,7 +527,7 @@ func (rp *responseParser) trimDatapoints(queryResult backend.DataResponse, targe
 		return
 	}

-	trimEdges, err := histogram.Settings.Get("trimEdges").Int()
+	trimEdges, err := castToInt(histogram.Settings.Get("trimEdges"))
 	if err != nil {
 		return
 	}
@@ -538,7 +538,7 @@ func (rp *responseParser) trimDatapoints(queryResult backend.DataResponse, targe
 	for _, field := range frame.Fields {
 		if field.Len() > trimEdges*2 {
 			for i := 0; i < field.Len(); i++ {
-				if i < trimEdges || i > field.Len()-trimEdges {
+				if i < trimEdges || i >= field.Len()-trimEdges {
 					field.Delete(i)
 				}
 			}
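The hunk above fixes an off-by-one: the last sample sits at index field.Len()-1, so with trimEdges = 1 the old strict comparison i > field.Len()-trimEdges never fires for it. A standalone sketch of the boundary check (plain ints, not the frame API):

package main

import "fmt"

// Standalone check of the boundary fix: with trimEdges = 1 and a
// three-sample field, the last sample sits at index 2, and
// length-trimEdges is also 2, so the old strict ">" never matches it.
func main() {
	trimEdges, length := 1, 3
	for i := 0; i < length; i++ {
		oldCond := i < trimEdges || i > length-trimEdges  // misses the last index
		newCond := i < trimEdges || i >= length-trimEdges // drops it
		fmt.Printf("i=%d old=%v new=%v\n", i, oldCond, newCond)
	}
	// i=0 old=true  new=true
	// i=1 old=false new=false
	// i=2 old=false new=true
}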
@@ -674,6 +674,25 @@ func (rp *responseParser) getMetricName(metric string) string {
 	return metric
 }

+func castToInt(j *simplejson.Json) (int, error) {
+	i, err := j.Int()
+	if err == nil {
+		return i, nil
+	}
+
+	s, err := j.String()
+	if err != nil {
+		return 0, err
+	}
+
+	v, err := strconv.Atoi(s)
+	if err != nil {
+		return 0, err
+	}
+
+	return v, nil
+}
+
 func castToFloat(j *simplejson.Json) *float64 {
 	f, err := j.Float64()
 	if err == nil {
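Why the fallback is needed, as a minimal standalone sketch. It assumes the open-source bitly/go-simplejson package, whose Get/Int/String API matches the simplejson used here; the failing input mirrors the "trimEdges": "1" setting from the new test below.

package main

import (
	"fmt"
	"strconv"

	simplejson "github.com/bitly/go-simplejson"
)

func main() {
	settings, err := simplejson.NewJson([]byte(`{"trimEdges": "1"}`))
	if err != nil {
		panic(err)
	}

	// Int() fails because the value is a JSON string, not a number.
	if _, err := settings.Get("trimEdges").Int(); err != nil {
		fmt.Println("Int() on a quoted number fails:", err)
	}

	// The fallback path castToInt adds: read as a string, then strconv.Atoi.
	s, _ := settings.Get("trimEdges").String()
	v, _ := strconv.Atoi(s)
	fmt.Println("string fallback yields:", v) // 1
}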
@@ -634,7 +634,7 @@ func TestResponseParser(t *testing.T) {
 		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "@metric:logins.count")
 	})

-	t.Run("With dropfirst and last aggregation", func(t *testing.T) {
+	t.Run("With drop first and last aggregation (numeric)", func(t *testing.T) {
 		targets := map[string]string{
 			"A": `{
 				"timeField": "@timestamp",
@@ -691,17 +691,88 @@ func TestResponseParser(t *testing.T) {
 		frame := dataframes[0]
 		require.Len(t, frame.Fields, 2)
 		require.Equal(t, frame.Fields[0].Name, "time")
-		require.Equal(t, frame.Fields[0].Len(), 2)
+		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, "value")
-		require.Equal(t, frame.Fields[1].Len(), 2)
+		require.Equal(t, frame.Fields[1].Len(), 1)
 		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Average")

 		frame = dataframes[1]
 		require.Len(t, frame.Fields, 2)
 		require.Equal(t, frame.Fields[0].Name, "time")
-		require.Equal(t, frame.Fields[0].Len(), 2)
+		require.Equal(t, frame.Fields[0].Len(), 1)
 		require.Equal(t, frame.Fields[1].Name, "value")
-		require.Equal(t, frame.Fields[1].Len(), 2)
+		require.Equal(t, frame.Fields[1].Len(), 1)
 		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
 	})

+	t.Run("With drop first and last aggregation (string)", func(t *testing.T) {
+		targets := map[string]string{
+			"A": `{
+				"timeField": "@timestamp",
+				"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
+				"bucketAggs": [
+					{
+						"type": "date_histogram",
+						"field": "@timestamp",
+						"id": "2",
+						"settings": { "trimEdges": "1" }
+					}
+				]
+			}`,
+		}
+		response := `{
+			"responses": [
+				{
+					"aggregations": {
+						"2": {
+							"buckets": [
+								{
+									"1": { "value": 1000 },
+									"key": 1,
+									"doc_count": 369
+								},
+								{
+									"1": { "value": 2000 },
+									"key": 2,
+									"doc_count": 200
+								},
+								{
+									"1": { "value": 2000 },
+									"key": 3,
+									"doc_count": 200
+								}
+							]
+						}
+					}
+				}
+			]
+		}`
+		rp, err := newResponseParserForTest(targets, response)
+		require.NoError(t, err)
+		result, err := rp.getTimeSeries()
+		require.NoError(t, err)
+		require.Len(t, result.Responses, 1)
+
+		queryRes := result.Responses["A"]
+		require.NotNil(t, queryRes)
+		dataframes := queryRes.Frames
+		require.NoError(t, err)
+		require.Len(t, dataframes, 2)
+
+		frame := dataframes[0]
+		require.Len(t, frame.Fields, 2)
+		require.Equal(t, frame.Fields[0].Name, "time")
+		require.Equal(t, frame.Fields[0].Len(), 1)
+		require.Equal(t, frame.Fields[1].Name, "value")
+		require.Equal(t, frame.Fields[1].Len(), 1)
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Average")
+
+		frame = dataframes[1]
+		require.Len(t, frame.Fields, 2)
+		require.Equal(t, frame.Fields[0].Name, "time")
+		require.Equal(t, frame.Fields[0].Len(), 1)
+		require.Equal(t, frame.Fields[1].Name, "value")
+		require.Equal(t, frame.Fields[1].Len(), 1)
+		assert.Equal(t, frame.Fields[1].Config.DisplayNameFromDS, "Count")
+	})
+
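The arithmetic behind the assertions: each response carries three buckets and trimEdges is 1, so the fixed code drops the first and last bucket and each frame keeps a single point, hence the Len() == 1 expectations. The numeric test's expectations change from 2 to 1 for the same reason: with the old strict comparison only the first sample was dropped. Passing "trimEdges" as the quoted string "1" in the new test is what exercises the castToInt fallback; before the fix that value made simplejson's Int() fail and the trim step was silently skipped.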